# === TransportLayer/mc-id2name | id2name.py | license: gpl-3.0 ===
###############################################################################
# Minecraft ID to Friendly Name #
# Copyright (C) 2016 TransportLayer #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
###############################################################################
items = {
'minecraft': {
'__VERSION__': 1.10,
'__LANGUAGE__': 'en_US',
'stone': {
'id': 1,
'category': 'Building Blocks',
0: 'Stone',
1: 'Granite',
2: 'Polished Granite',
3: 'Diorite',
4: 'Polished Diorite',
5: 'Andesite',
6: 'Polished Andesite'
},
'grass': {
'id': 2,
'category': 'Building Blocks',
0: 'Grass Block'
},
'dirt': {
'id': 3,
'category': 'Building Blocks',
0: 'Dirt',
1: 'Coarse Dirt',
2: 'Podzol'
},
'cobblestone': {
'id': 4,
'category': 'Building Blocks',
0: 'Cobblestone'
},
'planks': {
'id': 5,
'category': 'Building Blocks',
0: 'Oak Wood Planks',
1: 'Spruce Wood Planks',
2: 'Birch Wood Planks',
3: 'Jungle Wood Planks',
4: 'Acacia Wood Planks',
5: 'Dark Oak Wood Planks'
},
'sapling': {
'id': 6,
'category': 'Decoration Blocks',
0: 'Oak Sapling',
1: 'Spruce Sapling',
2: 'Birch Sapling',
3: 'Jungle Sapling',
4: 'Acacia Sapling',
5: 'Dark Oak Sapling'
},
'bedrock': {
'id': 7,
'category': 'Building Blocks',
0: 'Bedrock'
},
# No item 8?
# No item 9?
# No item 10?
# No item 11?
'sand': {
'id': 12,
'category': 'Building Blocks',
0: 'Sand',
1: 'Red Sand'
},
'gravel': {
'id': 13,
'category': 'Building Blocks',
0: 'Gravel'
},
'gold_ore': {
'id': 14,
'category': 'Building Blocks',
0: 'Gold Ore'
},
'iron_ore': {
'id': 15,
'category': 'Building Blocks',
0: 'Iron Ore'
},
'coal_ore': {
'id': 16,
'category': 'Building Blocks',
0: 'Coal Ore'
},
'log': {
'id': 17,
'category': 'Building Blocks',
0: 'Oak Wood',
1: 'Spruce Wood',
2: 'Birch Wood',
3: 'Jungle Wood'
},
'leaves': {
'id': 18,
'category': 'Decoration Blocks',
0: 'Oak Leaves',
1: 'Spruce Leaves',
2: 'Birch Leaves',
3: 'Jungle Leaves'
},
'sponge': {
'id': 19,
'category': 'Building Blocks',
0: 'Sponge',
1: 'Wet Sponge'
},
'glass': {
'id': 20,
'category': 'Building Blocks',
0: 'Glass'
},
'lapis_ore': {
'id': 21,
'category': 'Building Blocks',
0: 'Lapis Lazuli Ore'
},
'lapis_block': {
'id': 22,
'category': 'Building Blocks',
0: 'Lapis Lazuli Block'
},
'dispenser': {
'id': 23,
'category': 'Redstone',
0: 'Dispenser'
},
'sandstone': {
'id': 24,
'category': 'Building Blocks',
0: 'Sandstone',
1: 'Chiseled Sandstone',
2: 'Smooth Sandstone'
},
'noteblock': {
'id': 25,
'category': 'Redstone',
0: 'Note Block'
},
# No item 26?
'golden_rail': {
'id': 27,
'category': 'Transportation',
0: 'Powered Rail'
},
'detector_rail': {
'id': 28,
'category': 'Transportation',
0: 'Detector Rail'
},
'sticky_piston': {
'id': 29,
'category': 'Redstone',
0: 'Sticky Piston'
},
'web': {
'id': 30,
'category': 'Decoration Blocks',
0: 'Cobweb'
},
'tallgrass': {
'id': 31,
'category': 'Decoration Blocks',
# Missing DV 0?
1: 'Grass',
2: 'Fern'
},
'deadbush': {
'id': 32,
'category': 'Decoration Blocks',
0: 'Dead Bush'
},
'piston': {
'id': 33,
'category': 'Redstone',
0: 'Piston'
},
# No item 34?
'wool': {
'id': 35,
'category': 'Building Blocks',
0: 'Wool',
1: 'Orange Wool',
2: 'Magenta Wool',
3: 'Light Blue Wool',
4: 'Yellow Wool',
5: 'Lime Wool',
6: 'Pink Wool',
7: 'Gray Wool',
8: 'Light Gray Wool',
9: 'Cyan Wool',
10: 'Purple Wool',
11: 'Blue Wool',
12: 'Brown Wool',
13: 'Green Wool',
14: 'Red Wool',
15: 'Black Wool'
},
# No item 36?
'yellow_flower': {
'id': 37,
'category': 'Decoration Blocks',
0: 'Dandelion'
# Marked for more DVs.
},
'red_flower': {
'id': 38,
'category': 'Decoration Blocks',
0: 'Poppy',
1: 'Blue Orchid', # Not red.
2: 'Allium', # Also not red.
3: 'Azure Bluet', # Still not red.
4: 'Red Tulip', # Wow, good job, this one's red.
5: 'Orange Tulip', # Closer to red...?
6: 'White Tulip', # Farther from red.
7: 'Pink Tulip', # Ah, there we go, back on track.
8: 'Oxeye Daisy' # I give up at this point.
},
'brown_mushroom': {
'id': 39,
'category': 'Decoration Blocks',
0: 'Mushroom'
},
'red_mushroom': {
'id': 40,
'category': 'Decoration Blocks',
0: 'Mushroom'
},
'gold_block': {
'id': 41,
'category': 'Building Blocks',
0: 'Block of Gold'
},
'iron_block': {
'id': 42,
'category': 'Building Blocks',
0: 'Block of Iron'
},
# No item 43?
'stone_slab': {
'id': 44,
'category': 'Building Blocks',
0: 'Stone Slab',
1: 'Sandstone Slab',
# No DV 2?
3: 'Cobblestone Slab',
4: 'Bricks Slab',
5: 'Stone Bricks Slab',
6: 'Nether Brick Slab',
7: 'Quartz Slab'
},
'brick_block': {
'id': 45,
'category': 'Building Blocks',
0: 'Bricks'
},
'tnt': {
'id': 46,
'category': 'Redstone',
0: 'TNT'
},
'bookshelf': {
'id': 47,
'category': 'Building Blocks',
0: 'Bookshelf'
},
'mossy_cobblestone': {
'id': 48,
'category': 'Building Blocks',
0: 'Moss Stone'
},
'obsidian': {
'id': 49,
'category': 'Building Blocks',
0: 'Obsidian'
},
'torch': {
'id': 50,
'category': 'Decoration Blocks',
0: 'Torch'
},
# No item 51?
# No item 52?
'oak_stairs': {
'id': 53,
'category': 'Building Blocks',
0: 'Oak Wood Stairs'
},
'chest': {
'id': 54,
'category': 'Decoration Blocks',
0: 'Chest'
},
# No item 55?
'diamond_ore': {
'id': 56,
'category': 'Building Blocks',
0: 'Diamond Ore'
},
'diamond_block': {
'id': 57,
'category': 'Building Blocks',
0: 'Block of Diamond'
},
'crafting_table': {
'id': 58,
'category': 'Decoration Blocks',
0: 'Crafting Table'
},
# No item 59?
# No item 60?
'furnace': {
'id': 61,
'category': 'Decoration Blocks',
0: 'Furnace'
},
# No item 62?
# No item 63?
# No item 64?
'ladder': {
'id': 65,
'category': 'Decoration Blocks',
0: 'Ladder'
},
'rail': {
'id': 66,
'category': 'Transportation',
0: 'Rail'
},
'stone_stairs': {
'id': 67,
'category': 'Building Blocks',
0: 'Cobblestone Stairs'
},
# No item 68?
'lever': {
'id': 69,
'category': 'Redstone',
0: 'Lever'
},
'stone_pressure_plate': {
'id': 70,
'category': 'Redstone',
0: 'Stone Pressure Plate'
},
# No item 71?
'wooden_pressure_plate': {
'id': 72,
'category': 'Redstone',
0: 'Wooden Pressure Plate'
},
'redstone_ore': {
'id': 73,
'category': 'Building Blocks',
0: 'Redstone Ore'
},
# No item 74?
# No item 75?
'redstone_torch': {
'id': 76,
'category': 'Redstone',
0: 'Redstone Torch'
},
'stone_button': {
'id': 77,
'category': 'Redstone',
0: 'Button'
},
'snow_layer': {
'id': 78,
'category': 'Decoration Blocks',
0: 'Snow'
# Marked for more DVs.
},
'ice': {
'id': 79,
'category': 'Building Blocks',
0: 'Ice'
},
'snow': {
'id': 80,
'category': 'Building Blocks',
0: 'Snow'
},
'cactus': {
'id': 81,
'category': 'Decoration Blocks',
0: 'Cactus'
},
'clay': {
'id': 82,
'category': 'Building Blocks',
0: 'Clay'
},
# No item 83?
'jukebox': {
'id': 84,
'category': 'Decoration Blocks',
0: 'Jukebox'
},
'fence': {
'id': 85,
'category': 'Decoration Blocks',
0: 'Oak Fence'
},
'pumpkin': {
'id': 86,
'category': 'Building Blocks',
0: 'Pumpkin'
},
'netherrack': {
'id': 87,
'category': 'Building Blocks',
0: 'Netherrack'
},
'soul_sand': {
'id': 88,
'category': 'Building Blocks',
0: 'Soul Sand'
},
'glowstone': {
'id': 89,
'category': 'Building Blocks',
0: 'Glowstone'
},
# No item 90?
'lit_pumpkin': {
'id': 91,
'category': 'Building Blocks',
0: 'Jack o\'Lantern'
},
# No item 92?
# No item 93?
# No item 94?
'stained_glass': {
'id': 95,
'category': 'Building Blocks',
0: 'White Stained Glass',
1: 'Orange Stained Glass',
2: 'Magenta Stained Glass',
3: 'Light Blue Stained Glass',
4: 'Yellow Stained Glass',
5: 'Lime Stained Glass',
6: 'Pink Stained Glass',
7: 'Gray Stained Glass',
8: 'Light Gray Stained Glass',
9: 'Cyan Stained Glass',
10: 'Purple Stained Glass',
11: 'Blue Stained Glass',
12: 'Brown Stained Glass',
13: 'Green Stained Glass',
14: 'Red Stained Glass',
15: 'Black Stained Glass'
},
'trapdoor': {
'id': 96,
'category': 'Redstone',
0: 'Wooden Trapdoor'
},
'monster_egg': {
'id': 97,
'category': 'Decoration Blocks',
0: 'Stone Monster Egg',
1: 'Cobblestone Monster Egg',
2: 'Stone Brick Monster Egg',
3: 'Mossy Stone Brick Monster Egg',
4: 'Cracked Stone Brick Monster Egg',
5: 'Chiseled Stone Brick Monster Egg'
},
'stonebrick': {
'id': 98,
'category': 'Building Blocks',
0: 'Stone Bricks',
1: 'Mossy Stone Bricks',
2: 'Cracked Stone Bricks',
3: 'Chiseled Stone Bricks'
},
# No item 99?
# No item 100?
'iron_bars': {
'id': 101,
'category': 'Decoration Blocks',
0: 'Iron Bars'
},
'glass_pane': {
'id': 102,
'category': 'Decoration Blocks',
0: 'Glass Pane'
},
'melon_block': {
'id': 103,
'category': 'Building Blocks',
0: 'Melon'
},
# No item 104?
# No item 105?
'vine': {
'id': 106,
'category': 'Decoration Blocks',
0: 'Vines'
},
'fence_gate': {
'id': 107,
'category': 'Redstone',
0: 'Oak Fence Gate'
},
'brick_stairs': {
'id': 108,
'category': 'Building Blocks',
0: 'Brick Stairs'
},
'stone_brick_stairs': {
'id': 109,
'category': 'Building Blocks',
0: 'Stone Brick Stairs'
},
'mycelium': {
'id': 110,
'category': 'Building Blocks',
0: 'Mycelium'
},
'waterlily': {
'id': 111,
'category': 'Decoration Blocks',
0: 'Lily Pad'
},
'nether_brick': {
'id': 112,
'category': 'Building Blocks',
0: 'Nether Brick'
},
'nether_brick_fence': {
'id': 113,
'category': 'Decoration Blocks',
0: 'Nether Brick Fence'
},
'nether_brick_stairs': {
'id': 114,
'category': 'Building Blocks',
0: 'Nether Brick Stairs'
},
# No item 115?
'enchanting_table': {
'id': 116,
'category': 'Decoration Blocks',
0: 'Enchantment Table'
},
# No item 117?
# No item 118?
# No item 119?
'end_portal_frame': {
'id': 120,
'category': 'Decoration Blocks',
            0: 'End Portal Frame'
},
'end_stone': {
'id': 121,
'category': 'Building Blocks',
0: 'End Stone'
},
'redstone_lamp': {
'id': 123,
'category': 'Redstone',
0: 'Redstone Lamp'
},
# No item 124?
# No item 125?
'wooden_slab': {
'id': 126,
'category': 'Building Blocks',
0: 'Oak Wood Slab',
1: 'Spruce Wood Slab',
2: 'Birch Wood Slab',
3: 'Jungle Wood Slab',
4: 'Acacia Wood Slab',
5: 'Dark Oak Wood Slab'
},
# No item 127?
'sandstone_stairs': {
'id': 128,
'category': 'Building Blocks',
0: 'Sandstone Stairs'
},
'emerald_ore': {
'id': 129,
'category': 'Building Blocks',
0: 'Emerald Ore'
},
'ender_chest': {
'id': 130,
'category': 'Decoration Blocks',
0: 'Ender Chest'
},
'tripwire_hook': {
'id': 131,
'category': 'Redstone',
0: 'Tripwire Hook'
},
# No item 132?
'emerald_block': {
'id': 133,
'category': 'Building Blocks',
0: 'Block of Emerald'
},
'spruce_stairs': {
'id': 134,
'category': 'Building Blocks',
0: 'Spruce Wood Stairs'
},
'birch_stairs': {
'id': 135,
'category': 'Building Blocks',
0: 'Birch Wood Stairs'
},
'jungle_stairs': {
'id': 136,
'category': 'Building Blocks',
0: 'Jungle Wood Stairs'
},
# No item 137?
'beacon': {
'id': 138,
'category': 'Miscellaneous',
0: 'Beacon'
},
'cobblestone_wall': {
'id': 139,
'category': 'Building Blocks',
0: 'Cobblestone Wall',
1: 'Mossy Cobblestone Wall'
},
# No item 140?
# No item 141?
# No item 142?
'wooden_button': {
'id': 143,
'category': 'Redstone',
0: 'Button'
},
# No item 144?
'anvil': {
'id': 145,
'category': 'Decoration Blocks',
0: 'Anvil',
1: 'Slightly Damaged Anvil',
2: 'Very Damaged Anvil'
},
'trapped_chest': {
'id': 146,
'category': 'Redstone',
0: 'Trapped Chest'
},
'light_weighted_pressure_plate': {
'id': 147,
'category': 'Redstone',
0: 'Weighted Pressure Plate (Light)'
},
'heavy_weighted_pressure_plate': {
'id': 148,
'category': 'Redstone',
0: 'Weighted Pressure Plate (Heavy)'
},
# No item 149?
# No item 150?
'daylight_detector': {
'id': 151,
'category': 'Redstone',
0: 'Daylight Sensor'
},
'redstone_block': {
'id': 152,
'category': 'Redstone',
0: 'Block of Redstone'
},
'quartz_ore': {
'id': 153,
'category': 'Building Blocks',
0: 'Nether Quartz Ore'
},
'hopper': {
'id': 154,
'category': 'Redstone',
0: 'Hopper'
},
'quartz_block': {
'id': 155,
'category': 'Building Blocks',
0: 'Block of Quartz',
1: 'Chiseled Quartz Block',
2: 'Pillar Quartz Block'
},
'quartz_stairs': {
'id': 156,
'category': 'Building Blocks',
0: 'Quartz Stairs'
},
'activator_rail': {
'id': 157,
'category': 'Transportation',
0: 'Activator Rail'
},
'dropper': {
'id': 158,
'category': 'Redstone',
0: 'Dropper'
},
'stained_hardened_clay': {
'id': 159,
'category': 'Building Blocks',
0: 'White Hardened Clay',
1: 'Orange Hardened Clay',
2: 'Magenta Hardened Clay',
3: 'Light Blue Hardened Clay',
4: 'Yellow Hardened Clay',
5: 'Lime Hardened Clay',
6: 'Pink Hardened Clay',
7: 'Gray Hardened Clay',
8: 'Light Gray Hardened Clay',
9: 'Cyan Hardened Clay',
10: 'Purple Hardened Clay',
11: 'Blue Hardened Clay',
12: 'Brown Hardened Clay',
13: 'Green Hardened Clay',
14: 'Red Hardened Clay',
15: 'Black Hardened Clay'
},
'stained_glass_pane': {
'id': 160,
'category': 'Decoration Blocks',
0: 'White Stained Glass Pane',
1: 'Orange Stained Glass Pane',
2: 'Magenta Stained Glass Pane',
3: 'Light Blue Stained Glass Pane',
4: 'Yellow Stained Glass Pane',
5: 'Lime Stained Glass Pane',
6: 'Pink Stained Glass Pane',
7: 'Gray Stained Glass Pane',
8: 'Light Gray Stained Glass Pane',
9: 'Cyan Stained Glass Pane',
10: 'Purple Stained Glass Pane',
11: 'Blue Stained Glass Pane',
12: 'Brown Stained Glass Pane',
13: 'Green Stained Glass Pane',
14: 'Red Stained Glass Pane',
15: 'Black Stained Glass Pane'
},
'leaves2': {
'id': 161,
'category': 'Decoration Blocks',
0: 'Acacia Leaves',
1: 'Dark Oak Leaves'
},
'log2': {
'id': 162,
'category': 'Building Blocks',
0: 'Acacia Wood',
1: 'Dark Oak Wood'
},
'acacia_stairs': {
'id': 163,
'category': 'Building Blocks',
0: 'Acacia Wood Stairs'
},
'dark_oak_stairs': {
'id': 164,
'category': 'Building Blocks',
0: 'Dark Oak Wood Stairs'
},
'slime': {
'id': 165,
'category': 'Decoration Blocks',
0: 'Slime Block'
},
'iron_trapdoor': {
'id': 167,
'category': 'Redstone',
0: 'Iron Trapdoor'
},
'prismarine': {
'id': 168,
'category': 'Building Blocks',
0: 'Prismarine',
1: 'Prismarine Bricks',
2: 'Dark Prismarine'
},
'sea_lantern': {
'id': 169,
'category': 'Building Blocks',
0: 'Sea Lantern'
},
'hay_block': {
'id': 170,
'category': 'Building Blocks',
0: 'Hay Bale'
},
'carpet': {
'id': 171,
'category': 'Decoration Blocks',
0: 'Carpet',
1: 'Orange Carpet',
2: 'Magenta Carpet',
3: 'Light Blue Carpet',
4: 'Yellow Carpet',
5: 'Lime Carpet',
6: 'Pink Carpet',
7: 'Gray Carpet',
8: 'Light Gray Carpet',
9: 'Cyan Carpet',
10: 'Purple Carpet',
11: 'Blue Carpet',
12: 'Brown Carpet',
13: 'Green Carpet',
14: 'Red Carpet',
15: 'Black Carpet'
},
'hardened_clay': {
'id': 172,
'category': 'Building Blocks',
0: 'Hardened Clay'
},
'coal_block': {
'id': 173,
'category': 'Building Blocks',
0: 'Block of Coal'
},
'packed_ice': {
'id': 174,
'category': 'Building Blocks',
0: 'Packed Ice'
},
'double_plant': {
'id': 175,
'category': 'Decoration Blocks',
0: 'Sunflower',
1: 'Lilac',
2: 'Double Tallgrass',
3: 'Large Fern',
4: 'Rose Bush',
5: 'Peony'
},
# No item 176?
# No item 177?
# No item 178?
'red_sandstone': {
'id': 179,
'category': 'Building Blocks',
0: 'Red Sandstone',
1: 'Chiseled Red Sandstone',
2: 'Smooth Red Sandstone'
},
'red_sandstone_stairs': {
'id': 180,
'category': 'Building Blocks',
0: 'Red Sandstone Stairs'
},
# No item 181?
'stone_slab2': {
'id': 182,
'category': 'Building Blocks',
0: 'Red Sandstone Slab'
# Marked for more DVs.
},
'spruce_fence_gate': {
'id': 183,
'category': 'Redstone',
0: 'Spruce Fence Gate'
},
'birch_fence_gate': {
'id': 184,
'category': 'Redstone',
0: 'Birch Fence Gate'
},
'jungle_fence_gate': {
'id': 185,
'category': 'Redstone',
0: 'Jungle Fence Gate'
},
'dark_oak_fence_gate': {
'id': 186,
'category': 'Redstone',
0: 'Dark Oak Fence Gate'
},
'acacia_fence_gate': {
'id': 187,
'category': 'Redstone',
0: 'Acacia Fence Gate'
},
'spruce_fence': {
'id': 188,
'category': 'Decoration Blocks',
0: 'Spruce Fence'
},
'birch_fence': {
'id': 189,
'category': 'Decoration Blocks',
0: 'Birch Fence'
},
'jungle_fence': {
'id': 190,
'category': 'Decoration Blocks',
0: 'Jungle Fence'
},
'dark_oak_fence': {
'id': 191,
'category': 'Decoration Blocks',
0: 'Dark Oak Fence'
},
'acacia_fence': {
'id': 192,
'category': 'Decoration Blocks',
0: 'Acacia Fence'
},
# No item 193?
# No item 194?
# No item 195?
# No item 196?
# No item 197?
'end_rod': {
'id': 198,
'category': 'Decoration Blocks',
0: 'End Rod'
},
'chorus_plant': {
'id': 199,
'category': 'Decoration Blocks',
0: 'Chorus Plant'
},
'chorus_flower': {
'id': 200,
'category': 'Decoration Blocks',
0: 'Chorus Flower'
},
'purpur_block': {
'id': 201,
'category': 'Building Blocks',
0: 'Purpur Block'
},
'purpur_pillar': {
'id': 202,
'category': 'Building Blocks',
0: 'Purpur Pillar'
},
'purpur_stairs': {
'id': 203,
'category': 'Building Blocks',
0: 'Purpur Stairs'
},
# No item 204?
'purpur_slab': {
'id': 205,
'category': 'Building Blocks',
0: 'Purpur Slab'
# Marked for more DVs.
},
'end_bricks': {
'id': 206,
'category': 'Building Blocks',
0: 'End Stone Bricks'
},
# No item 207?
# No item 208?
# No item 209?
# No item 210?
# No item 211?
# No item 212?
'magma': {
'id': 213,
'category': 'Building Blocks',
0: 'Magma Block'
},
'nether_wart_block': {
'id': 214,
'category': 'Building Blocks',
0: 'Nether Wart Block'
},
'red_nether_brick': {
'id': 215,
'category': 'Building Blocks',
0: 'Red Nether Brick'
},
'bone_block': {
'id': 216,
'category': 'Building Blocks',
0: 'Bone Block'
},
# No item...
# ...
# Start of 256 block.
'iron_shovel': {
'id': 256,
'category': 'Tools',
'name': 'Iron Shovel',
'uses': 251
},
'iron_pickaxe': {
'id': 257,
'category': 'Tools',
'name': 'Iron Pickaxe',
'uses': 251
},
'iron_axe': {
'id': 258,
'category': 'Tools',
'name': 'Iron Axe',
'uses': 251
},
'flint_and_steel': {
'id': 259,
'category': 'Tools',
'name': 'Flint and Steel',
'uses': 65
},
'apple': {
'id': 260,
'category': 'Foodstuffs',
0: 'Apple'
},
'bow': {
'id': 261,
'category': 'Combat',
'name': 'Bow',
'uses': 385
},
'arrow': {
'id': 262,
'category': 'Combat',
0: 'Arrow'
},
'coal': {
'id': 263,
'category': 'Materials',
0: 'Coal',
1: 'Charcoal'
},
'diamond': {
'id': 264,
'category': 'Materials',
0: 'Diamond'
},
'iron_ingot': {
'id': 265,
'category': 'Materials',
0: 'Iron Ingot'
},
'gold_ingot': {
'id': 266,
'category': 'Materials',
0: 'Gold Ingot'
},
'iron_sword': {
'id': 267,
'category': 'Combat',
'name': 'Iron Sword',
'uses': 251
},
'wooden_sword': {
'id': 268,
'category': 'Combat',
'name': 'Wooden Sword',
'uses': 60
},
'wooden_shovel': {
'id': 269,
'category': 'Tools',
'name': 'Wooden Shovel',
'uses': 60
},
'wooden_pickaxe': {
'id': 270,
'category': 'Tools',
'name': 'Wooden Pickaxe',
'uses': 60
},
'wooden_axe': {
'id': 271,
'category': 'Tools',
'name': 'Wooden Axe',
'uses': 60
},
'stone_sword': {
'id': 272,
'category': 'Combat',
'name': 'Stone Sword',
'uses': 132
},
'stone_shovel': {
'id': 273,
'category': 'Tools',
'name': 'Stone Shovel',
'uses': 132
},
'stone_pickaxe': {
'id': 274,
'category': 'Tools',
'name': 'Stone Pickaxe',
'uses': 132
},
'stone_axe': {
'id': 275,
'category': 'Tools',
'name': 'Stone Axe',
'uses': 132
},
'diamond_sword': {
'id': 276,
'category': 'Combat',
'name': 'Diamond Sword',
'uses': 1562
},
'diamond_shovel': {
'id': 277,
'category': 'Tools',
'name': 'Diamond Shovel',
'uses': 1562
},
'diamond_pickaxe': {
'id': 278,
'category': 'Tools',
'name': 'Diamond Pickaxe',
'uses': 1562
},
'diamond_axe': {
'id': 279,
'category': 'Tools',
'name': 'Diamond Axe',
'uses': 1562
},
'stick': {
'id': 280,
'category': 'Materials',
0: 'Stick'
},
'bowl': {
'id': 281,
'category': 'Materials',
0: 'Bowl'
},
'mushroom_stew': {
'id': 282,
'category': 'Foodstuffs',
0: 'Mushroom Stew'
},
'golden_sword': {
'id': 283,
'category': 'Combat',
'name': 'Golden Sword',
'uses': 33
},
'golden_shovel': {
'id': 284,
'category': 'Tools',
'name': 'Golden Shovel',
'uses': 33
},
'golden_pickaxe': {
'id': 285,
'category': 'Tools',
'name': 'Golden Pickaxe',
'uses': 33
},
'golden_axe': {
'id': 286,
'category': 'Tools',
'name': 'Golden Axe',
'uses': 33
},
'string': {
'id': 287,
'category': 'Materials',
0: 'String'
},
'feather': {
'id': 288,
'category': 'Materials',
0: 'Feather'
},
'gunpowder': {
'id': 289,
'category': 'Materials',
0: 'Gunpowder'
},
'wooden_hoe': {
'id': 290,
'category': 'Tools',
'name': 'Wooden Hoe',
'uses': 60
},
'stone_hoe': {
'id': 291,
'category': 'Tools',
'name': 'Stone Hoe',
'uses': 132
},
'iron_hoe': {
'id': 292,
'category': 'Tools',
'name': 'Iron Hoe',
'uses': 251
},
'diamond_hoe': {
'id': 293,
'category': 'Tools',
'name': 'Diamond Hoe',
'uses': 1562
},
'golden_hoe': {
'id': 294,
'category': 'Tools',
            'name': 'Golden Hoe',
'uses': 33
},
'wheat_seeds': {
'id': 295,
'category': 'Materials',
0: 'Seeds'
},
'wheat': {
'id': 296,
'category': 'Materials',
0: 'Wheat'
},
'bread': {
'id': 297,
'category': 'Foodstuffs',
0: 'Bread'
},
'leather_helmet': {
'id': 298,
'category': 'Combat',
'name': 'Leather Cap',
'uses': 56,
'armor': 1,
'toughness': 0
},
'leather_chestplate': {
'id': 299,
'category': 'Combat',
'name': 'Leather Tunic',
'uses': 81,
'armor': 3,
'toughness': 0
},
'leather_leggings': {
'id': 300,
'category': 'Combat',
'name': 'Leather Pants',
'uses': 76,
'armor': 2,
'toughness': 0
},
'leather_boots': {
'id': 301,
'category': 'Combat',
'name': 'Leather Boots',
'uses': 66,
'armor': 1,
'toughness': 0
},
'chainmail_helmet': {
'id': 302,
'category': 'Combat',
'name': 'Chain Helmet',
'uses': 166,
'armor': 2,
'toughness': 0
},
'chainmail_chestplate': {
'id': 303,
'category': 'Combat',
'name': 'Chain Chestplate',
'uses': 241,
'armor': 5,
'toughness': 0
},
'chainmail_leggings': {
'id': 304,
'category': 'Combat',
'name': 'Chain Leggings',
'uses': 226,
'armor': 4,
'toughness': 0
},
'chainmail_boots': {
'id': 305,
'category': 'Combat',
'name': 'Chain Boots',
'uses': 196,
'armor': 1,
'toughness': 0
},
'iron_helmet': {
'id': 306,
'category': 'Combat',
'name': 'Iron Helmet',
'uses': 166,
'armor': 2,
'toughness': 0
},
'iron_chestplate': {
'id': 307,
'category': 'Combat',
'name': 'Iron Chestplate',
'uses': 241,
'armor': 6,
'toughness': 0
},
'iron_leggings': {
'id': 308,
'category': 'Combat',
'name': 'Iron Leggings',
'uses': 226,
'armor': 5,
'toughness': 0
},
'iron_boots': {
'id': 309,
'category': 'Combat',
'name': 'Iron Boots',
'uses': 196,
'armor': 2,
'toughness': 0
},
'diamond_helmet': {
'id': 310,
'category': 'Combat',
'name': 'Diamond Helmet',
'uses': 364,
'armor': 3,
'toughness': 2
},
'diamond_chestplate': {
'id': 311,
'category': 'Combat',
'name': 'Diamond Chestplate',
'uses': 529,
'armor': 8,
'toughness': 2
},
'diamond_leggings': {
'id': 312,
'category': 'Combat',
'name': 'Diamond Leggings',
'uses': 496,
'armor': 6,
'toughness': 2
},
'diamond_boots': {
'id': 313,
'category': 'Combat',
'name': 'Diamond Boots',
'uses': 430,
'armor': 3,
'toughness': 2
},
'golden_helmet': {
'id': 314,
'category': 'Combat',
'name': 'Golden Helmet',
'uses': 78,
'armor': 2,
'toughness': 0
},
'golden_chestplate': {
'id': 315,
'category': 'Combat',
'name': 'Golden Chestplate',
'uses': 113,
'armor': 5,
'toughness': 0
},
'golden_leggings': {
'id': 316,
'category': 'Combat',
'name': 'Golden Leggings',
'uses': 106,
'armor': 3,
'toughness': 0
},
'golden_boots': {
'id': 317,
'category': 'Combat',
'name': 'Golden Boots',
'uses': 92,
'armor': 1,
'toughness': 0
},
'flint': {
'id': 318,
'category': 'Materials',
0: 'Flint'
},
'porkchop': {
'id': 319,
'category': 'Foodstuffs',
0: 'Raw Porkchop'
},
'cooked_porkchop': {
'id': 320,
'category': 'Foodstuffs',
0: 'Cooked Porkchop'
},
'painting': {
'id': 321,
'category': 'Decoration Blocks',
0: 'Painting'
},
'golden_apple': {
'id': 322,
'category': 'Foodstuffs',
0: 'Golden Apple', # Regular.
1: 'Golden Apple' # Notch Apple.
},
'sign': {
'id': 323,
'category': 'Decoration Blocks',
0: 'Sign'
},
'wooden_door': {
'id': 324,
'category': 'Redstone',
0: 'Oak Door'
},
'bucket': {
'id': 325,
'category': 'Miscellaneous',
0: 'Bucket'
},
'water_bucket': {
'id': 326,
'category': 'Miscellaneous',
0: 'Water Bucket'
},
'lava_bucket': {
'id': 327,
'category': 'Miscellaneous',
0: 'Lava Bucket'
},
'minecart': {
'id': 328,
'category': 'Transportation',
0: 'Minecart'
},
'saddle': {
'id': 329,
'category': 'Transportation',
0: 'Saddle'
},
'iron_door': {
'id': 330,
'category': 'Redstone',
0: 'Iron Door'
},
'redstone': {
'id': 331,
'category': 'Redstone',
0: 'Redstone'
},
'snowball': {
'id': 332,
'category': 'Miscellaneous',
0: 'Snowball'
},
'boat': {
'id': 333,
'category': 'Transportation',
0: 'Oak Boat'
},
'leather': {
'id': 334,
'category': 'Materials',
0: 'Leather'
},
'milk_bucket': {
'id': 335,
'category': 'Miscellaneous',
0: 'Milk'
},
'brick': {
'id': 336,
'category': 'Materials',
0: 'Brick'
},
'clay_ball': {
'id': 337,
'category': 'Materials',
0: 'Clay'
},
'reeds': {
'id': 338,
'category': 'Materials',
0: 'Sugar Canes'
},
'paper': {
'id': 339,
'category': 'Miscellaneous',
0: 'Paper'
},
'book': {
'id': 340,
'category': 'Miscellaneous',
0: 'Book'
},
'slime_ball': {
'id': 341,
'category': 'Miscellaneous',
0: 'Slimeball'
},
'chest_minecart': {
'id': 342,
'category': 'Transportation',
0: 'Minecart with Chest'
},
'furnace_minecart': {
'id': 343,
'category': 'Transportation',
0: 'Minecart with Furnace'
},
'egg': {
            'id': 344,
'category': 'Materials',
0: 'Egg'
},
'compass': {
'id': 345,
'category': 'Tools',
0: 'Compass'
},
'fishing_rod': {
'id': 346,
'category': 'Tools',
'name': 'Fishing Rod',
'uses': 65
},
'clock': {
'id': 347,
'category': 'Tools',
0: 'Clock'
},
'glowstone_dust': {
'id': 348,
'category': 'Materials',
0: 'Glowstone Dust'
},
'fish': {
'id': 349,
'category': 'Foodstuffs',
0: 'Raw Fish',
1: 'Raw Salmon',
2: 'Clownfish',
3: 'Pufferfish'
},
'cooked_fish': {
'id': 350,
'category': 'Foodstuffs',
0: 'Cooked Fish',
1: 'Cooked Salmon'
},
'dye': {
'id': 351,
'category': 'Materials',
0: 'Ink Sac',
1: 'Rose Red',
2: 'Cactus Green',
3: 'Cocoa Beans',
4: 'Lapis Lazuli',
5: 'Purple Dye',
6: 'Cyan Dye',
7: 'Light Gray Dye',
8: 'Gray Dye',
9: 'Pink Dye',
10: 'Lime Dye',
11: 'Dandelion Yellow',
12: 'Light Blue Dye',
13: 'Magenta Dye',
14: 'Orange Dye',
15: 'Bone Meal'
},
'bone': {
'id': 352,
'category': 'Miscellaneous',
0: 'Bone'
},
'sugar': {
'id': 353,
'category': 'Materials',
0: 'Sugar'
},
'cake': {
'id': 354,
'category': 'Foodstuffs',
0: 'Cake'
},
'bed': {
'id': 355,
'category': 'Decoration Blocks',
0: 'Bed'
},
'repeater': {
'id': 356,
'category': 'Redstone',
0: 'Redstone Repeater'
},
'cookie': {
'id': 357,
'category': 'Foodstuffs',
0: 'Cookie'
},
# No item 358?
'shears': {
'id': 359,
'category': 'Tools',
'name': 'Shears',
'uses': 238
},
'melon': {
'id': 360,
'category': 'Foodstuffs',
0: 'Melon'
},
'pumpkin_seeds': {
'id': 361,
'category': 'Materials',
0: 'Pumpkin Seeds'
},
'melon_seeds': {
'id': 362,
'category': 'Materials',
0: 'Melon Seeds'
},
'beef': {
'id': 363,
'category': 'Foodstuffs',
0: 'Raw Beef'
},
'cooked_beef': {
'id': 364,
'category': 'Foodstuffs',
0: 'Steak'
},
'chicken': {
'id': 365,
'category': 'Foodstuffs',
0: 'Raw Chicken'
},
'cooked_chicken': {
'id': 366,
'category': 'Foodstuffs',
0: 'Cooked Chicken'
},
'rotten_flesh': {
'id': 367,
'category': 'Foodstuffs',
0: 'Rotten Flesh'
},
'ender_pearl': {
'id': 368,
'category': 'Miscellaneous',
0: 'Ender Pearl'
},
'blaze_rod': {
'id': 369,
'category': 'Materials',
0: 'Blaze Rod'
},
'ghast_tear': {
'id': 370,
'category': 'Brewing',
0: 'Ghast Tear'
},
'gold_nugget': {
'id': 371,
'category': 'Materials',
0: 'Gold Nugget'
},
'nether_wart': {
'id': 372,
'category': 'Materials',
0: 'Nether Wart'
},
'potion': {
'id': 373,
'category': 'Brewing',
0: 'Potion' # Potions are stored as NBT data.
},
'glass_bottle': {
'id': 374,
'category': 'Brewing',
0: 'Glass Bottle'
},
'spider_eye': {
'id': 375,
'category': 'Foodstuffs',
0: 'Spider Eye'
},
'fermented_spider_eye': {
'id': 376,
'category': 'Brewing',
0: 'Fermented Spider Eye'
},
'blaze_powder': {
'id': 377,
'category': 'Brewing',
0: 'Blaze Powder'
},
'magma_cream': {
'id': 378,
'category': 'Brewing',
0: 'Magma Cream'
},
'brewing_stand': {
'id': 379,
'category': 'Brewing',
0: 'Brewing Stand'
},
'cauldron': {
'id': 380,
'category': 'Brewing',
0: 'Cauldron'
},
'ender_eye': {
'id': 381,
'category': 'Miscellaneous',
0: 'Eye of Ender'
},
'speckled_melon': {
'id': 382,
'category': 'Brewing',
0: 'Glistering Melon'
},
'spawn_egg': {
'id': 383,
'category': 'Miscellaneous',
0: 'Spawn Egg' # Entity data is stored as NBT data.
},
'experience_bottle': {
'id': 384,
'category': 'Miscellaneous',
0: 'Bottle o\' Enchanting'
},
'fire_charge': {
'id': 385,
'category': 'Miscellaneous',
0: 'Fire Charge'
},
'writable_book': {
'id': 386,
'category': 'Miscellaneous',
0: 'Book and Quill'
},
# No item 387?
'emerald': {
'id': 388,
'category': 'Materials',
0: 'Emerald'
},
'item_frame': {
'id': 389,
'category': 'Decoration Blocks',
0: 'Item Frame'
},
'flower_pot': {
'id': 390,
'category': 'Decoration Blocks',
0: 'Flower Pot'
},
'carrot': {
'id': 391,
            'category': 'Foodstuffs',
0: 'Carrot'
},
'potato': {
'id': 392,
            'category': 'Foodstuffs',
0: 'Potato'
},
'baked_potato': {
'id': 393,
'category': 'Foodstuffs',
0: 'Baked Potato'
},
'poisonous_potato': {
'id': 394,
'category': 'Foodstuffs',
0: 'Poisonous Potato'
},
'map': {
'id': 395,
'category': 'Miscellaneous',
0: 'Empty Map'
},
'golden_carrot': {
'id': 396,
'category': 'Brewing',
0: 'Golden Carrot'
},
'skull': {
'id': 397,
'category': 'Decoration Blocks',
0: 'Skeleton Skull',
1: 'Wither Skeleton Skull',
2: 'Zombie Head',
3: 'Head',
4: 'Creeper Head',
5: 'Dragon Head'
},
'carrot_on_a_stick': {
'id': 398,
'category': 'Transportation',
'name': 'Carrot on a Stick',
'uses': 26
},
'nether_star': {
'id': 399,
'category': 'Materials',
0: 'Nether Star'
},
'pumpkin_pie': {
'id': 400,
'category': 'Foodstuffs',
0: 'Pumpkin Pie'
},
# No item 401?
'firework_charge': {
'id': 402,
'category': 'Miscellaneous',
0: 'Firework Star'
},
'enchanted_book': {
'id': 403,
'category': 'Miscellaneous', # Category changes based on enchant.
0: 'Enchanted Book' # Enchant is stored as NBT data.
},
'comparator': {
'id': 404, # If you make a HTTP joke you will be slapped.
'category': 'Redstone',
0: 'Redstone Comparator'
},
'netherbrick': {
'id': 405,
'category': 'Materials',
0: 'Nether Brick'
},
'quartz': {
'id': 406,
'category': 'Materials',
0: 'Nether Quartz'
},
'tnt_minecart': {
'id': 407,
'category': 'Transportation',
0: 'Minecart with TNT'
},
'hopper_minecart': {
'id': 408,
'category': 'Transportation',
0: 'Minecart with Hopper'
},
'prismarine_shard': {
'id': 409,
'category': 'Materials',
0: 'Prismarine Shard'
},
'prismarine_crystals': {
'id': 410,
'category': 'Materials',
0: 'Prismarine Crystals'
},
'rabbit': {
'id': 411,
'category': 'Foodstuffs',
0: 'Raw Rabbit'
},
'cooked_rabbit': {
'id': 412,
'category': 'Foodstuffs',
0: 'Cooked Rabbit'
},
'rabbit_stew': {
'id': 413,
'category': 'Foodstuffs',
0: 'Rabbit Stew'
},
'rabbit_foot': {
'id': 414,
'category': 'Brewing',
0: 'Rabbit\'s Foot'
},
'rabbit_hide': {
'id': 415,
'category': 'Materials',
0: 'Rabbit Hide'
},
'armor_stand': {
'id': 416,
'category': 'Decoration Blocks',
0: 'Armor Stand'
},
'iron_horse_armor': {
'id': 417,
'category': 'Miscellaneous',
0: 'Iron Horse Armor'
},
'golden_horse_armor': {
'id': 418,
'category': 'Miscellaneous',
0: 'Gold Horse Armor'
},
'diamond_horse_armor': {
'id': 419,
'category': 'Miscellaneous',
0: 'Diamond Horse Armor'
},
'lead': {
'id': 420,
'category': 'Tools',
0: 'Lead'
},
'name_tag': {
'id': 421,
'category': 'Tools',
0: 'Name Tag'
},
# No item 422?
'mutton': {
'id': 423,
'category': 'Foodstuffs',
0: 'Raw Mutton'
},
'cooked_mutton': {
'id': 424,
'category': 'Foodstuffs',
0: 'Cooked Mutton'
},
'banner': {
'id': 425,
'category': 'Decoration Blocks',
0: 'Black Banner', # Colours are in reverse order...?
1: 'Red Banner',
2: 'Green Banner',
3: 'Brown Banner',
4: 'Blue Banner',
5: 'Purple Banner',
6: 'Cyan Banner',
7: 'Light Gray Banner',
8: 'Gray Banner',
9: 'Pink Banner',
10: 'Lime Banner',
11: 'Yellow Banner',
12: 'Light Blue Banner',
13: 'Magenta Banner',
14: 'Orange Banner',
15: 'White Banner'
},
'end_crystal': {
'id': 426,
'category': 'Decoration Blocks',
0: 'End Crystal'
},
'spruce_door': {
'id': 427,
'category': 'Redstone',
0: 'Spruce Door'
},
'birch_door': {
'id': 428,
'category': 'Redstone',
0: 'Birch Door'
},
'jungle_door': {
'id': 429,
'category': 'Redstone',
0: 'Jungle Door'
},
'acacia_door': {
'id': 430,
'category': 'Redstone',
0: 'Acacia Door'
},
'dark_oak_door': {
'id': 431,
'category': 'Redstone',
0: 'Dark Oak Door'
},
'chorus_fruit': {
'id': 432,
'category': 'Materials',
0: 'Chorus Fruit'
},
'chorus_fruit_popped': {
'id': 433,
'category': 'Materials',
0: 'Popped Chorus Fruit'
},
'beetroot': {
'id': 434,
'category': 'Foodstuffs',
0: 'Beetroot'
},
'beetroot_seeds': {
'id': 435,
'category': 'Materials',
0: 'Beetroot Seeds'
},
'beetroot_soup': {
'id': 436,
'category': 'Foodstuffs',
0: 'Beetroot Soup'
},
'dragon_breath': {
'id': 437,
'category': 'Brewing',
0: 'Dragon\'s Breath'
},
'splash_potion': {
'id': 438,
'category': 'Brewing',
0: 'Splash Potion' # Potion is stored as NBT data.
},
'spectral_arrow': {
'id': 439,
'category': 'Combat',
0: 'Spectral Arrow'
},
'tipped_arrow': {
'id': 440,
'category': 'Combat',
0: 'Tipped Arrow' # Arrow type is stored as NBT data.
},
'lingering_potion': {
'id': 441,
'category': 'Brewing',
0: 'Lingering Potion' # Potion is stored as NBT data.
},
'shield': {
'id': 442,
'category': 'Combat',
'name': 'Shield',
'uses': 337
},
'elytra': {
'id': 443,
'category': 'Transportation',
'name': 'Elytra',
'uses': 431
},
'spruce_boat': {
'id': 444,
'category': 'Transportation',
0: 'Spruce Boat'
},
'birch_boat': {
'id': 445,
'category': 'Transportation',
0: 'Birch Boat'
},
'jungle_boat': {
'id': 446,
'category': 'Transportation',
0: 'Jungle Boat'
},
'acacia_boat': {
'id': 447,
'category': 'Transportation',
0: 'Acacia Boat'
},
'dark_oak_boat': {
'id': 448,
'category': 'Transportation',
0: 'Dark Oak Boat'
},
# Missing item...
# ...
# Start of 2256 block.
'record_13': {
'id': 2256,
'category': 'Miscellaneous',
0: 'Music Disc'
},
'record_cat': {
'id': 2257,
'category': 'Miscellaneous',
0: 'Music Disc'
},
'record_blocks': {
'id': 2258,
'category': 'Miscellaneous',
0: 'Music Disc'
},
'record_chirp': {
'id': 2259,
'category': 'Miscellaneous',
0: 'Music Disc'
},
'record_far': {
'id': 2260,
'category': 'Miscellaneous',
0: 'Music Disc'
},
'record_mall': {
'id': 2261,
'category': 'Miscellaneous',
0: 'Music Disc'
},
'record_mellohi': {
'id': 2262,
'category': 'Miscellaneous',
0: 'Music Disc'
},
'record_stal': {
'id': 2263,
'category': 'Miscellaneous',
0: 'Music Disc'
},
'record_strad': {
'id': 2264,
'category': 'Miscellaneous',
0: 'Music Disc'
},
'record_ward': {
'id': 2265,
'category': 'Miscellaneous',
0: 'Music Disc'
},
'record_11': {
'id': 2266,
'category': 'Miscellaneous',
0: 'Music Disc'
},
'record_wait': {
'id': 2267,
'category': 'Miscellaneous',
0: 'Music Disc'
}
}
}
enchantments = {
'minecraft': {
'__VERSION__': 1.10,
'__LANGUAGE__': 'en_US',
# Begin Armour Block.
'protection': {
'id': 0,
'name': 'Protection'
},
'fire_protection': {
'id': 1,
'name': 'Fire Protection'
},
'feather_falling': {
'id': 2,
'name': 'Feather Falling'
},
'blast_protection': {
'id': 3,
'name': 'Blast Protection'
},
'projectile_protection': {
'id': 4,
'name': 'Projectile Protection'
},
'respiration': {
'id': 5,
'name': 'Respiration'
},
'aqua_affinity': {
'id': 6,
'name': 'Aqua Affinity'
},
'thorns': {
'id': 7,
'name': 'Thorns'
},
'depth_strider': {
'id': 8,
'name': 'Depth Strider'
},
'frost_walker': {
'id': 9,
'name': 'Frost Walker'
},
# End Armour Block.
# Begin Sword Block.
'sharpness': {
'id': 16,
'name': 'Sharpness'
},
'smite': {
'id': 17,
'name': 'Smite'
},
'bane_of_arthropods': {
'id': 18,
'name': 'Bane of Arthropods'
},
'knockback': {
'id': 19,
'name': 'Knockback'
},
'fire_aspect': {
'id': 20,
'name': 'Fire Aspect'
},
'looting': {
'id': 21,
'name': 'Looting'
},
# End Sword Block.
# Begin Tools Block.
'efficiency': {
'id': 32,
'name': 'Efficiency'
},
'silk_touch': {
'id': 33,
'name': 'Silk Touch'
},
'unbreaking': {
'id': 34,
'name': 'Unbreaking'
},
'fortune': {
'id': 35,
'name': 'Fortune'
},
# End Tools Block.
# Begin Bows Block.
'power': {
'id': 48,
'name': 'Power'
},
'punch': {
'id': 49,
'name': 'Punch'
},
'flame': {
'id': 50,
'name': 'Flame'
},
'infinity': {
'id': 51,
            'name': 'Infinity'
},
# End Bows Block.
# Begin Fishing Rods Block.
'luck_of_the_sea': {
'id': 61,
'name': 'Luck of the Sea'
},
'lure': {
'id': 62,
'name': 'Lure'
},
# End Fishing Rods Block.
# Begin Misc Block.
'mending': {
'id': 70,
'name': 'Mending'
}
# End Misc Block.
}
}
# Roman Numeral Conversion
# Inspired by: https://stackoverflow.com/a/28777781
romanNumerals = (
(1000, 'M'),
(900, 'CM'),
(500, 'D'),
(400, 'CD'),
(100, 'C'),
(90, 'XC'),
(50, 'L'),
(40, 'XL'),
(10, 'X'),
(9, 'IX'),
(5, 'V'),
(4, 'IV'),
(1, 'I')
)
def intToRoman(number):
romanString = ''
for romanTuple in romanNumerals:
div, number = divmod(number, romanTuple[0])
romanString += romanTuple[1] * div
return romanString
def lookupItem(item, damage=0):
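    # Returns [friendly_name, durability_percent, armor, toughness]; fields
    # that do not apply to the looked-up item are left as None.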
mod, item = item.split(':')
result = [None, None, None, None]
if mod in items and item in items[mod]:
if damage in items[mod][item]:
result[0] = items[mod][item][damage]
elif 'name' in items[mod][item]:
result[0] = items[mod][item]['name']
else:
result[0] = '[Unknown Name]'
if 'uses' in items[mod][item]:
result[1] = '{:.1%}'.format((items[mod][item]['uses'] - damage) / float(items[mod][item]['uses']))
if 'armor' in items[mod][item]:
result[2] = items[mod][item]['armor']
if 'toughness' in items[mod][item]:
result[3] = items[mod][item]['toughness']
else:
result[0] = '[Item Not Found]'
return result
def lookupNumericItem(itemNumeric, damage=0):
print('WARNING: Item numeric IDs are deprecated. Please use text IDs.')
result = [None, None, None, None]
for mod in items.values():
for item in mod.values():
if type(item) is dict and item['id'] == itemNumeric:
if damage in item:
result[0] = item[damage]
elif 'name' in item:
result[0] = item['name']
else:
result[0] = '[Unknown Name]'
if 'uses' in item:
result[1] = '{:.1%}'.format((item['uses'] - damage) / float(item['uses']))
if 'armor' in item:
result[2] = item['armor']
if 'toughness' in item:
result[3] = item['toughness']
break
if not result[0]:
result[0] = '[Item Not Found]'
return result
def lookupEnchant(enchant, level=None):
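    # Returns [enchant_name, roman_level]; roman_level stays None unless a
    # level is given.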
mod, enchant = enchant.split(':')
result = [None, None]
if mod in enchantments and enchant in enchantments[mod]:
if 'name' in enchantments[mod][enchant]:
result[0] = enchantments[mod][enchant]['name']
else:
result[0] = '[Unknown Name]'
else:
result[0] = '[Enchantment Not Found]'
if level:
result[1] = intToRoman(level)
return result
def lookupNumericEnchant(enchantNumeric, level=None):
result = [None, None]
for mod in enchantments.values():
for enchant in mod.values():
if type(enchant) is dict and enchant['id'] == enchantNumeric:
if 'name' in enchant:
result[0] = enchant['name']
else:
result[0] = '[Unknown Name]'
break
if not result[0]:
result[0] = '[Enchantment Not Found]'
if level:
result[1] = intToRoman(level)
return result
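
# Minimal usage sketch (added for illustration; the expected outputs in the
# comments follow directly from the tables above).
if __name__ == '__main__':
    # Tools resolve their display name via 'name' and report remaining
    # durability from 'uses'.
    print(lookupItem('minecraft:diamond_pickaxe', damage=781))
    # -> ['Diamond Pickaxe', '50.0%', None, None]
    # Blocks resolve their damage value (DV) to a friendly name.
    print(lookupItem('minecraft:stone', damage=2))
    # -> ['Polished Granite', None, None, None]
    # Enchantment levels are rendered as Roman numerals.
    print(lookupEnchant('minecraft:sharpness', level=4))
    # -> ['Sharpness', 'IV']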

# === freedombox/debian-bts-to-github-sync | setup.py | license: agpl-3.0 ===
#!/usr/bin/env python
from setuptools import setup
__version__ = '0.0.1'
CLASSIFIERS = map(str.strip,
"""Environment :: Console
License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)
Natural Language :: English
Operating System :: POSIX :: Linux
Programming Language :: Python
Programming Language :: Python :: 2.7
""".splitlines())
entry_points = {
'console_scripts': [
'bts_to_github_sync = bts_to_github.main:main',
]
}
setup(
name="debian_bts_to_github_sync",
version=__version__,
author="Federico Ceratto",
author_email="[email protected]",
description="Debian BTS to GitHub Issue sync",
license="AGPLv3+",
url="https://github.com/FedericoCeratto/desktop-security-assistant",
long_description="",
classifiers=CLASSIFIERS,
keywords="",
install_requires=[
'setproctitle>=1.0.1',
],
packages=['bts_to_github'],
package_dir={'bts_to_github_sync': 'bts_to_github_sync'},
platforms=['Linux'],
zip_safe=False,
entry_points=entry_points,
)

# === guglielmo/mosic2-db-delibere | project/delibere/wsgi.py | license: bsd-3-clause ===
"""
WSGI config for DB delle Delibere CIPE project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from os.path import abspath, dirname
from sys import path
SITE_ROOT = dirname(dirname(abspath(__file__)))
path.append(SITE_ROOT)
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "cipe-db-delibere.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "delibere.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)

# === IZSVenezie/VetEpiGIS-Tool | plugin/xitem_dialog.py | license: gpl-2.0 ===
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'xitem_dialog_base.ui'
#
# Created by: PyQt5 UI code generator 5.5.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName("Dialog")
Dialog.resize(400, 78)
self.gridLayout = QtWidgets.QGridLayout(Dialog)
self.gridLayout.setObjectName("gridLayout")
self.label = QtWidgets.QLabel(Dialog)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.lineEdit = QtWidgets.QLineEdit(Dialog)
self.lineEdit.setObjectName("lineEdit")
self.gridLayout.addWidget(self.lineEdit, 0, 1, 1, 1)
self.buttonBox = QtWidgets.QDialogButtonBox(Dialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Save)
self.buttonBox.setObjectName("buttonBox")
self.gridLayout.addWidget(self.buttonBox, 1, 0, 1, 2)
self.retranslateUi(Dialog)
self.buttonBox.accepted.connect(Dialog.accept)
self.buttonBox.rejected.connect(Dialog.reject)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
self.label.setText(_translate("Dialog", "Item:"))

# === ToruNiina/ecell4 | setup.py | license: gpl-3.0 ===
import os
import re
import sys
import glob
import platform
import sysconfig
import subprocess
from setuptools import setup, Extension
from setuptools.command.build_ext import build_ext
from setuptools.command.test import test
from distutils.version import LooseVersion
class CMakeExtension(Extension):
def __init__(self, name, sourcedir=''):
super(CMakeExtension, self).__init__(name, sources=[])
self.sourcedir = os.path.abspath(sourcedir)
class CMakeBuild(build_ext):
user_options = [
('jobs=', 'j', 'Specify the number of build jobs at once'),
]
def initialize_options(self):
super().initialize_options()
self.jobs = None
def finalize_options(self):
super().finalize_options()
assert self.jobs is None or self.jobs.isdecimal(), 'Invalid argument for --jobs or -j'
def run(self):
try:
out = subprocess.check_output(['cmake', '--version'])
except OSError:
raise RuntimeError("CMake must be installed to build the following extensions: " +
", ".join(e.name for e in self.extensions))
cmake_minimum_version_for_windows = '3.1.0'
if platform.system() == "Windows":
cmake_version = LooseVersion(re.search(r'version\s*([\d.]+)', out.decode()).group(1))
if cmake_version < cmake_minimum_version_for_windows:
raise RuntimeError("CMake >= {} is required on Windows".format(
cmake_minimum_version_for_windows))
for ext in self.extensions:
self.build_extension(ext)
def build_extension(self, ext):
extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name)))
cmake_args = ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY=' + extdir,
'-DPYTHON_EXECUTABLE=' + sys.executable]
cfg = 'Debug' if self.debug else 'Release'
build_args = ['--config', cfg]
if self.jobs:
build_args += ['-j', self.jobs]
if platform.system() == "Windows":
cmake_args += ['-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}'.format(cfg.upper(), extdir)]
if sys.maxsize > 2**32:
cmake_args += ['-A', 'x64']
build_args += ['--', '/m']
cmake_args += ['-DCMAKE_BUILD_TYPE=' + cfg]
if platform.system() == "Windows":
env = os.environ.copy()
env['CXXFLAGS'] = '{} -DVERSION_INFO=\\"{}\\" -I {}'.format(
env.get('CXXFLAGS', ''),
self.distribution.get_version(),
sysconfig.get_path('include'))
else:
env = os.environ.copy()
env['CXXFLAGS'] = '{} -DVERSION_INFO=\\"{}\\" -isystem {}'.format(
env.get('CXXFLAGS', ''),
self.distribution.get_version(),
sysconfig.get_path('include'))
if not os.path.exists(self.build_temp):
os.makedirs(self.build_temp)
subprocess.check_call(['cmake', ext.sourcedir] + cmake_args, cwd=self.build_temp, env=env)
subprocess.check_call(['cmake', '--build', '.'] + build_args, cwd=self.build_temp)
class CustomTestCommand(test):
def run(self):
super().run()
build_py = self.get_finalized_command('build_ext')
if platform.system() == "Windows":
subprocess.check_call(['ctest', '--output-on-failure', '--build-config', 'Release'], cwd=build_py.build_temp)
else:
subprocess.check_call(['ctest', '--output-on-failure'], cwd=build_py.build_temp)
DESCRIPTION = (
"A software platform for modeling, simulation and analysis of complex, "
"heterogeneous and multi-scale systems like the cell. E-Cell has "
"multi-algorithm, multi-timescale and multi-spatial-representation as "
"its central feature."
)
LONG_DESCRIPTION = open("README.md").read()
setup(
name='ecell4_base',
version = '2.1.0b1',
license = "the GNU General Public License v2",
author = "Kazunari Kaizu",
author_email = "[email protected]",
url = "https://github.com/ecell/ecell4_base",
description = DESCRIPTION,
long_description = LONG_DESCRIPTION,
long_description_content_type='text/markdown',
data_files = [('ecell4-licenses', glob.glob('licenses/*'))],
ext_modules=[CMakeExtension('ecell4_base')],
cmdclass=dict(build_ext=CMakeBuild, test=CustomTestCommand),
zip_safe=False,
)
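
# Typical invocations (for reference; the heavy lifting is done by CMake via
# the custom commands above):
#   pip install .                    # configure and build the extension
#   python setup.py build_ext -j 8   # parallel build using the --jobs option
#   python setup.py test             # build, then run the CTest suite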

# === digitalfox/yokadi | yokadi/tests/tests.py | license: gpl-3.0 ===
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
Yokadi unit tests
@author: Aurélien Gâteau <[email protected]>
@author: Sébastien Renard <[email protected]>
@license: GPL v3 or later
"""
import unittest
import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
from yokadi.core import db
try:
import icalendar
hasIcalendar = True
except ImportError:
hasIcalendar = False
print "icalendar is not installed, some tests won't be run"
from parseutilstestcase import ParseUtilsTestCase
from yokadioptionparsertestcase import YokadiOptionParserTestCase
from ydateutilstestcase import YDateUtilsTestCase
from dbutilstestcase import DbUtilsTestCase
from projecttestcase import ProjectTestCase
from completerstestcase import CompletersTestCase
from tasktestcase import TaskTestCase
from bugtestcase import BugTestCase
from aliastestcase import AliasTestCase
from textlistrenderertestcase import TextListRendererTestCase
if hasIcalendar:
from icaltestcase import IcalTestCase
from keywordtestcase import KeywordTestCase
from cryptotestcase import CryptoTestCase
from tuitestcase import TuiTestCase
def main():
db.connectDatabase("", memoryDatabase=True)
db.setDefaultConfig()
unittest.main()
if __name__ == "__main__":
main()
# vi: ts=4 sw=4 et

# === sillygod/my-travel-in-learning-python | ufile_transfer/updater.py | license: gpl-2.0 ===
import re
import io
import shutil
import os
import typing
import MySQLdb
from source_map import SourceMap
class FileUpdater:
"""read origin fie and update it inplace.
we give it the content and target we want to replace
"""
def __init__(self, filename, source_map:SourceMap=None, gcs_host=None, commit=True):
self._source_map = source_map
self._file = io.StringIO() if commit else None
self._filename = filename
# write file to the back file
self._write_fname = self._filename + ".myback"
self._gcs_host = gcs_host or "http://google.host/"
self._commit = commit
        # flag recording whether the file content has changed
self._changed = False
def update_line(self, content:str, match:typing.re.Match, pos):
write_buffer = content
if match is not None:
target = match[0]
subpath = target.split("/")[-1]
new_target = self._gcs_host + subpath
new_content = content.replace(target, new_target)
self._changed = True
if self._source_map is not None:
self._source_map.file_insert(content, new_content, target, subpath, pos)
write_buffer = new_content
if self._commit:
self._file.write(write_buffer)
def _swap(self, target_one, target_two):
temp_file_name = target_one + ".tmp"
os.rename(target_one, temp_file_name)
os.rename(target_two, target_one)
os.rename(temp_file_name, target_two)
def _copy_mod(self, src, dst):
mode = os.stat(src).st_mode
os.chmod(dst, mode)
def swap_origin_file(self, target_name):
        # Swap the original file name and the write file name
        # (target <--> write_fname).
        #
        # Remember to preserve the file mode: copy it from
        # self._write_fname onto target_name (the file we created).
if self._changed:
self._swap(target_name, self._write_fname)
self._copy_mod(self._write_fname, target_name)
def revert(self):
if os.path.exists(self._write_fname):
self._swap(self._filename, self._write_fname)
def close(self):
if self._changed and self._commit:
with open(self._write_fname, 'w') as f:
self._file.seek(0)
shutil.copyfileobj(self._file, f)
self.swap_origin_file(self._filename)
if self._file is not None:
self._file.close()
class SQLUpdater:
def __init__(self, config, source_map:SourceMap=None, conn=None, gcs_host=None, commit=True):
self._source_map = source_map
self._config = config
self._conn = conn or MySQLdb.connect(
user=self._config["username"],
password=self._config["password"],
host=self._config["host"],
port=self._config["port"],
database=self._config["db_name"])
self._cursor = self._conn.cursor()
self._gcs_host = gcs_host or "http://google.host/"
self._commit = commit
self._update_sqls = []
def generate_update_sql(self, table_name, column_name, target, conds:typing.Tuple[str, str]):
# target is the goal we want to replace
subpath = target.split("/")[-1]
new_target = self._gcs_host + subpath
sql = f"UPDATE {table_name} SET {column_name}='{new_target}' WHERE {conds[0]}={conds[1]}"
if self._source_map is not None:
revert_sql = f"UPDATE {table_name} SET {column_name}='{target}' WHERE {conds[0]}={conds[1]}"
pos = f"table: {table_name} column: {column_name} {conds[0]}:{conds[1]}"
self._source_map.db_insert(revert_sql, sql, target, subpath, pos)
self._update_sqls.append(sql)
def revert(self):
sqls = [sql for sql in self._source_map.get_revert_sqls()]
self._update_sqls = sqls
self.update()
def update(self):
# only mysql-connector-python support multi..
# NOTE: https://dev.mysql.com/downloads/connector/python/
if self._commit:
for sql in self._update_sqls:
print(sql)
self._cursor.execute(sql)
self._conn.commit()
def close(self):
self._cursor.close()
        self._conn.close()
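
# Minimal usage sketch (illustration only): rewrite matching URLs in a file so
# they point at the GCS host. The file name and URL pattern below are assumed
# for the example; they are not values taken from this project.
if __name__ == '__main__':
    url_pattern = re.compile(r'https?://\S+/uploads/\S+')  # assumed pattern
    updater = FileUpdater('example.html', gcs_host='http://google.host/')
    with open('example.html') as f:
        for lineno, line in enumerate(f, start=1):
            updater.update_line(line, url_pattern.search(line), lineno)
    # Rewrites the file in place; the original survives as example.html.myback.
    updater.close()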

# === hfiguiere/abiword | tools/build_osx_release.py ===
#!/usr/bin/env python
# Copyright (C) 2011 Fabiano Fidencio
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA.
from os import chdir, environ, getcwd, listdir, mkdir, path
from shutil import copy2, rmtree
from subprocess import PIPE, Popen
from sys import argv, exit
from argparse import ArgumentParser
contents_path = "abiword/AbiWord.app/Contents"
def environment_prepare():
abisource_path="/tmp/abisource"
mkdir(abisource_path, 0755)
path = getcwd()
chdir(abisource_path)
return path
def environment_clean(path):
chdir(path)
abisource_path="/tmp/abisource"
rmtree(abisource_path)
def _macports_source_get():
source = "https://distfiles.macports.org/MacPorts/MacPorts-2.0.0.tar.gz"
cmd = "curl -O %s" % source
p = Popen(cmd, shell=True)
p.wait()
def _macports_source_extract():
cmd = "tar xzvpf MacPorts-2.0.0.tar.gz"
p = Popen(cmd, shell=True)
p.wait()
def _macports_install():
current_dir = getcwd()
chdir("MacPorts-2.0.0")
cmd = "./configure --prefix=/tmp/abisource/macports \
&& make \
&& sudo make install"
p = Popen(cmd, shell=True)
p.wait()
chdir(current_dir)
def _macports_env():
macports_path = "/tmp/abisource/macports/"
envs = environ
env = "%s/bin:%s/sbin:%s" % (macports_path, macports_path, envs["PATH"])
return env
def _macports_sync():
envs = _macports_env()
cmd = "sudo port -v selfupdate"
p = Popen(cmd, shell=True, env={"PATH":envs})
p.wait()
def macports_install():
_macports_source_get()
_macports_source_extract()
_macports_install()
_macports_sync()
def dependencies_install():
envs = _macports_env()
pkgs = "cairo +quartz+no_x11 \
pango +quartz+no_x11 \
fribidi \
libgsf +no_gnome \
redland \
wv +no_x11 \
enchant \
boost"
cmd = "sudo port install %s" % pkgs
p = Popen(cmd, shell=True, env={"PATH":envs})
p.wait()
def _abiword_source_get():
cmd = "svn co http://svn.abisource.com/abiword/trunk abiword"
p = Popen(cmd, shell=True)
p.wait()
def _abiword_fix_macports_path():
cmd = "sed -i -e \
's/\\/opt\\/local/\\/tmp\\/abisource\\/macports/g' \
configure.in"
p = Popen(cmd, shell=True)
p.wait()
def _abiword_install():
envs = _macports_env()
current_dir = getcwd()
chdir("abiword")
_abiword_fix_macports_path()
cmd = "./autogen.sh \
--with-darwinports \
--enable-maintainer-mode \
--disable-static \
--enable-shared \
--enable-plugins=\"docbook epub latex openwriter openxml opml\" \
&& make && DESTDIR=`pwd` make install"
p = Popen(cmd, shell=True, env={"PATH":envs})
p.wait()
chdir(current_dir)
def abiword_install():
_abiword_source_get()
_abiword_install()
def _dep_list_get(lib):
#otool -L path
cmd = "otool -L %s " %lib
#get all .dylib from otool -L
cmd += "| grep macports | sed -e 's/.dylib.*$/.dylib/'"
#remove white spaces before and after the lib path/name
cmd += "| sed 's/^[ \t]*//;s/[ \t]*$//'"
p = Popen(cmd, shell=True, stdout=PIPE)
p.wait()
stdout = p.communicate()
return stdout[0].split('\n')[:-1]
def _rdeps_get():
libabiword = ""
libabiword_deps = []
for content in listdir(contents_path + "/Frameworks"):
if content.endswith(".dylib"):
libabiword = contents_path + "/Frameworks/" + content
libabiword_deps = _dep_list_get(libabiword)
break
plugins = []
plugins_deps = []
for content in listdir(contents_path + "/PlugIns"):
if content.endswith(".so"):
plugin = contents_path + "/PlugIns/" + content
plugins.append(plugin)
            plugins_deps += _dep_list_get(plugin)
abiword = contents_path + "/MacOS/AbiWord"
abiword_deps = _dep_list_get(abiword)
rdeps = []
for lib in libabiword_deps:
rdeps.append(lib)
for lib in plugins_deps:
if lib not in rdeps:
rdeps.append(lib)
for lib in abiword_deps:
if lib not in rdeps:
rdeps.append(lib)
rdeps_deps = []
for lib in rdeps:
rdeps_deps += _dep_list_get(lib)
    for lib in rdeps_deps:
        if lib not in rdeps:
            rdeps.append(lib)
return rdeps, libabiword, abiword, plugins
def _rdeps_copy(rdeps):
    rdeps_path = contents_path + "/Frameworks/rdeps"
    mkdir(rdeps_path, 0755)
n_rdeps = []
for dep in rdeps:
dep_path, dep_name = path.split(dep)
copy2(dep, rdeps_path)
d = "%s/%s" % (rdeps_path, dep_name)
cmd = "chmod 755 " + d
n_rdeps.append(d)
p = Popen(cmd, shell=True)
p.wait()
return n_rdeps
def _fix(lib, new):
dep_list = _dep_list_get(lib)
for d in dep_list:
d_path, d_name = path.split(d)
n = "@executable_path/../Frameworks/rdeps/" + d_name
cmd = "install_name_tool -change %s %s %s" % (d, n, lib)
p = Popen(cmd, shell=True)
p.wait()
lib_path, lib_name = path.split(lib)
cmd = "install_name_tool -id %s %s" % (new, lib)
p = Popen(cmd, shell=True)
p.wait()
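# For clarity, the rewrites above amount to commands of this shape (library
# names are illustrative placeholders, not real output):
#   install_name_tool -change /tmp/abisource/macports/lib/libfoo.dylib \
#       @executable_path/../Frameworks/rdeps/libfoo.dylib target.dylib
#   install_name_tool -id <new> target.dylib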
def _rdeps_fix(rdeps):
for r in rdeps:
file_path, file_name = path.split(r)
new = "@executable_path/../Frameworks/rdeps/" + file_name
_fix(r, new)
def _libabiword_fix(libabiword):
file_path, file_name = path.split(libabiword)
new = "@executable_path/../Frameworks/" + file_name
_fix(libabiword, new)
def _abiword_fix(abiword):
file_path, file_name = path.split(abiword)
new = "@executable_path/" + file_name
_fix(abiword, new)
def _plugins_fix(plugins):
for p in plugins:
file_path, file_name = path.split(p)
new = "@executable_path/../PlugIns/" + file_name
_fix(p, new)
def do_app():
rdeps, libabiword, abiword, plugins = _rdeps_get()
n_rdeps = _rdeps_copy(rdeps)
_rdeps_fix(n_rdeps)
_libabiword_fix(libabiword)
_abiword_fix(abiword)
_plugins_fix(plugins)
def do_dmg():
mkdir("dmg", 0755)
cmd = "cp -a abiword/AbiWord.app dmg/"
p = Popen(cmd, shell = True)
p.wait()
cmd = "ln -s /Applications dmg/"
p = Popen(cmd, shell=True)
p.wait()
cmd = "hdiutil create \
-srcfolder \"dmg\" \
-volname \"AbiWord\" \
-fs HFS+ \
-fsargs \"-c c=64,a=16,e=16\" \
-format UDRW \"AbiWord.dmg\""
p = Popen(cmd, shell=True)
p.wait()
rmtree("dmg")
copy2("AbiWord.dmg", environ["HOME"] + "/Desktop/")
if __name__ == "__main__":
parser = ArgumentParser(description="Automated dmg generator")
parser.add_argument("--macports_path",
action="store",
dest="macports_path",
help="This option will use your current macports' \
installation from MACPORTS_PATH.\n\
ATTENTION: Without this option, macports will \
be downloaded and installed in: \
/tmp/abisource/macports")
parser.add_argument("--abisource_path",
action="store",
dest="abi_path",
default=False,
help="This option will consider that you have \
AbiWord's sources in your computer, located at \
ABISOURCE_PATH and want to build it and NOT a \
specific version from our SVN.")
parser.add_argument("--abisource_revision",
action="store",
dest="abi_rev",
help="This option will get a specific revision from \
AbiWord's SVN. \
                              ATTENTION: If this option isn't passed, SVN's \
trunk will be used.")
parser.add_argument("--abiword_version",
action="store",
dest="abi_version",
help="This option will get a specific version from \
AbiWord's SVN. \
                              ATTENTION: If this option isn't passed, SVN's \
trunk will be used.")
parser.add_argument("--no_deps",
action="store_true",
dest="no_deps",
default=False,
help="This option won't install AbiWord's \
                              dependencies on your computer. So it is YOUR \
                              JOB to install all needed dependencies. Of \
course, you'll need to install macports before.")
parser.add_argument("--start_from_build",
action="store_true",
dest="start_from_build",
default=False,
help="This option will consider that you have \
macports and all AbiWord's dependencies \
installed. \
                              ATTENTION: This option will build AbiWord and \
                              create a dmg file. So it is REALLY NECESSARY \
                              that you pass the --abisource_path option.")
parser.add_argument("--start_from_app",
action="store",
dest="start_from_app",
help="This option will use a generated .app file \
                              to fix all linkage and put all needed libs \
                              into the .app in a specific folder. After that a \
                              dmg file will be created (don't put '/' at the end of the .app package path). \
                              ATTENTION: It is REALLY NECESSARY that you pass \
                              the --macports_path option. E.g.: python build_script.py --start_from_app /Users/abi/Abiword.app")
parser.add_argument("--start_from_linkage_fixed",
action="store",
dest="start_from_linkage_fixed",
help="This option will use a generated .app file \
with linkage working properly to create a \
.dmg file.\
                              ATTENTION: It is REALLY NECESSARY that you pass \
                              the --macports_path option.")
if len(argv) < 2:
parser.print_help()
exit()
else:
args = parser.parse_args()
# print args
current_dir = getcwd()
        def printSuccess():
            print "*****************************************************"
            print "* AbiWord.dmg was created in your ~/Desktop. Enjoy! *"
            print "*****************************************************"
        def cleanAndPrint():
            environment_clean(current_dir)
            printSuccess()
dict_args=vars(args)
print dict_args
        if dict_args['start_from_app'] is not None:
contents_path = dict_args['start_from_app'] + "/Contents"
do_app()
do_dmg()
            printSuccess()
exit()
else:
environment_prepare()
macports_install()
dependencies_install()
abiword_install()
do_app()
do_dmg()
cleanAndPrint()
| gpl-2.0 | 9,047,993,009,526,091,000 | 31.193029 | 122 | 0.541889 | false |
Valchris/IEEEXtreme_WorkingAsIntended | 2012/AA_Alex.py | 1 | 1107 | __author__ = 'alexander'
import sys
initial_bunnies = long(sys.stdin.readline())
bunnies = dict()
bunnies['adults'] = initial_bunnies
bunnies['babies'] = long(0)
bunnies['juveniles'] = long(0)
bunnies['juveniles2'] = long(0)
for i in range(0,365,15):
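    # i advances in 15-day steps, so i % 2 == 0 exactly when i is a multiple
    # of 30: the flu culls the population every second step.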
if i % 2 == 0:
        bunnies['babies'] = long(bunnies['babies']*0.75) # Death to flu
bunnies['juveniles'] = long(bunnies['juveniles']*0.75) # Death to flu
bunnies['juveniles2'] = long(bunnies['juveniles2']*0.75) # Death to flu
bunnies['adults'] = long(bunnies['adults']*0.75) # Death to flu
bunnies['adults'] += long(bunnies['juveniles2']*0.70) # Forest migration
if i == 0:
continue
bunnies['juveniles2'] = bunnies['juveniles'] # Juveniles growing
bunnies['juveniles'] = long(bunnies['babies']) # Babies growing
bunnies['babies'] = long(bunnies['adults']*0.90) # Babies being born / 10% of babies die at birth
if bunnies['adults'] == 0 and bunnies['babies'] == 0 and bunnies['juveniles'] == 0:
break
print long(bunnies['adults'] + bunnies['babies'] + bunnies['juveniles'])
| mit | -221,296,799,781,316,100 | 33.625 | 101 | 0.633243 | false |
lesina/Hack70 | env/bin/painter.py | 1 | 2129 | #!/home/oleg/Web/Hack70/env/bin/python3
#
# The Python Imaging Library
# $Id$
#
# this demo script illustrates pasting into an already displayed
# photoimage. note that the current version of Tk updates the whole
# image every time we paste, so to get decent performance, we split
# the image into a set of tiles.
#
try:
from tkinter import Tk, Canvas, NW
except ImportError:
from Tkinter import Tk, Canvas, NW
from PIL import Image, ImageTk
import sys
#
# painter widget
class PaintCanvas(Canvas):
def __init__(self, master, image):
Canvas.__init__(self, master, width=image.size[0], height=image.size[1])
# fill the canvas
self.tile = {}
self.tilesize = tilesize = 32
xsize, ysize = image.size
for x in range(0, xsize, tilesize):
for y in range(0, ysize, tilesize):
box = x, y, min(xsize, x+tilesize), min(ysize, y+tilesize)
tile = ImageTk.PhotoImage(image.crop(box))
self.create_image(x, y, image=tile, anchor=NW)
self.tile[(x, y)] = box, tile
self.image = image
self.bind("<B1-Motion>", self.paint)
def paint(self, event):
xy = event.x - 10, event.y - 10, event.x + 10, event.y + 10
im = self.image.crop(xy)
# process the image in some fashion
im = im.convert("L")
self.image.paste(im, xy)
self.repair(xy)
def repair(self, box):
# update canvas
dx = box[0] % self.tilesize
dy = box[1] % self.tilesize
for x in range(box[0]-dx, box[2]+1, self.tilesize):
for y in range(box[1]-dy, box[3]+1, self.tilesize):
try:
xy, tile = self.tile[(x, y)]
tile.paste(self.image.crop(xy))
except KeyError:
pass # outside the image
self.update_idletasks()
#
# main
if len(sys.argv) != 2:
print("Usage: painter file")
sys.exit(1)
root = Tk()
im = Image.open(sys.argv[1])
if im.mode != "RGB":
im = im.convert("RGB")
PaintCanvas(root, im).pack()
root.mainloop()
| gpl-3.0 | -5,233,558,321,454,570,000 | 24.963415 | 80 | 0.573978 | false |
vabs22/zulip | zerver/tests/test_settings.py | 1 | 10444 | from __future__ import absolute_import
from __future__ import print_function
import ujson
from django.http import HttpResponse
from mock import patch
from typing import Any, Dict
from zerver.lib.initial_password import initial_password
from zerver.lib.sessions import get_session_dict_user
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import get_realm, get_user, UserProfile
class ChangeSettingsTest(ZulipTestCase):
def check_well_formed_change_settings_response(self, result):
# type: (Dict[str, Any]) -> None
self.assertIn("full_name", result)
# DEPRECATED, to be deleted after all uses of check_for_toggle_param
# are converted into check_for_toggle_param_patch.
def check_for_toggle_param(self, pattern, param):
# type: (str, str) -> None
self.login(self.example_email("hamlet"))
user_profile = self.example_user('hamlet')
json_result = self.client_post(pattern,
{param: ujson.dumps(True)})
self.assert_json_success(json_result)
# refetch user_profile object to correctly handle caching
user_profile = self.example_user('hamlet')
self.assertEqual(getattr(user_profile, param), True)
json_result = self.client_post(pattern,
{param: ujson.dumps(False)})
self.assert_json_success(json_result)
# refetch user_profile object to correctly handle caching
user_profile = self.example_user('hamlet')
self.assertEqual(getattr(user_profile, param), False)
# TODO: requires method consolidation, right now, there's no alternative
# for check_for_toggle_param for PATCH.
def check_for_toggle_param_patch(self, pattern, param):
# type: (str, str) -> None
self.login(self.example_email("hamlet"))
user_profile = self.example_user('hamlet')
json_result = self.client_patch(pattern,
{param: ujson.dumps(True)})
self.assert_json_success(json_result)
# refetch user_profile object to correctly handle caching
user_profile = self.example_user('hamlet')
self.assertEqual(getattr(user_profile, param), True)
json_result = self.client_patch(pattern,
{param: ujson.dumps(False)})
self.assert_json_success(json_result)
# refetch user_profile object to correctly handle caching
user_profile = self.example_user('hamlet')
self.assertEqual(getattr(user_profile, param), False)
def test_successful_change_settings(self):
# type: () -> None
"""
A call to /json/settings/change with valid parameters changes the user's
settings correctly and returns correct values.
"""
self.login(self.example_email("hamlet"))
json_result = self.client_post(
"/json/settings/change",
dict(
full_name='Foo Bar',
old_password=initial_password(self.example_email("hamlet")),
new_password='foobar1',
confirm_password='foobar1',
))
self.assert_json_success(json_result)
result = ujson.loads(json_result.content)
self.check_well_formed_change_settings_response(result)
        self.assertEqual(self.example_user('hamlet').full_name, "Foo Bar")
self.logout()
self.login(self.example_email("hamlet"), "foobar1")
user_profile = self.example_user('hamlet')
self.assertEqual(get_session_dict_user(self.client.session), user_profile.id)
def test_illegal_name_changes(self):
# type: () -> None
user = self.example_user('hamlet')
email = user.email
self.login(email)
full_name = user.full_name
with self.settings(NAME_CHANGES_DISABLED=True):
json_result = self.client_post("/json/settings/change",
dict(full_name='Foo Bar'))
# We actually fail silently here, since this only happens if
# somebody is trying to game our API, and there's no reason to
# give them the courtesy of an error reason.
self.assert_json_success(json_result)
user = self.example_user('hamlet')
self.assertEqual(user.full_name, full_name)
# Now try a too-long name
json_result = self.client_post("/json/settings/change",
dict(full_name='x' * 1000))
self.assert_json_error(json_result, 'Name too long!')
# Now try a too-short name
json_result = self.client_post("/json/settings/change",
dict(full_name='x'))
self.assert_json_error(json_result, 'Name too short!')
def test_illegal_characters_in_name_changes(self):
# type: () -> None
email = self.example_email("hamlet")
self.login(email)
# Now try a name with invalid characters
json_result = self.client_post("/json/settings/change",
dict(full_name='Opheli*'))
self.assert_json_error(json_result, 'Invalid characters in name!')
# This is basically a don't-explode test.
def test_notify_settings(self):
# type: () -> None
for notification_setting in UserProfile.notification_setting_types:
self.check_for_toggle_param_patch("/json/settings/notifications",
notification_setting)
def test_ui_settings(self):
# type: () -> None
self.check_for_toggle_param_patch("/json/settings/ui", "autoscroll_forever")
self.check_for_toggle_param_patch("/json/settings/ui", "default_desktop_notifications")
def test_toggling_boolean_user_display_settings(self):
# type: () -> None
"""Test updating each boolean setting in UserProfile property_types"""
boolean_settings = (s for s in UserProfile.property_types if UserProfile.property_types[s] is bool)
for display_setting in boolean_settings:
self.check_for_toggle_param_patch("/json/settings/display", display_setting)
def test_enter_sends_setting(self):
# type: () -> None
self.check_for_toggle_param('/json/users/me/enter-sends', "enter_sends")
def test_mismatching_passwords(self):
# type: () -> None
"""
new_password and confirm_password must match
"""
self.login(self.example_email("hamlet"))
result = self.client_post(
"/json/settings/change",
dict(
new_password="mismatched_password",
confirm_password="not_the_same",
))
self.assert_json_error(result,
"New password must match confirmation password!")
def test_wrong_old_password(self):
# type: () -> None
"""
new_password and confirm_password must match
"""
self.login(self.example_email("hamlet"))
result = self.client_post(
"/json/settings/change",
dict(
old_password='bad_password',
new_password="ignored",
confirm_password="ignored",
))
self.assert_json_error(result, "Wrong password!")
def test_changing_nothing_returns_error(self):
# type: () -> None
"""
We need to supply at least one non-empty parameter
to this API, or it should fail. (Eventually, we should
probably use a patch interface for these changes.)
"""
self.login(self.example_email("hamlet"))
result = self.client_post("/json/settings/change",
dict(old_password='ignored',))
self.assert_json_error(result, "No new data supplied")
def do_test_change_user_display_setting(self, setting_name):
# type: (str) -> None
test_changes = dict(
default_language = 'de',
emojiset = 'apple',
timezone = 'US/Mountain',
) # type: Dict[str, Any]
email = self.example_email('hamlet')
self.login(email)
test_value = test_changes.get(setting_name)
# Error if a setting in UserProfile.property_types does not have test values
if test_value is None:
raise AssertionError('No test created for %s' % (setting_name))
invalid_value = 'invalid_' + setting_name
data = {setting_name: ujson.dumps(test_value)}
result = self.client_patch("/json/settings/display", data)
self.assert_json_success(result)
user_profile = self.example_user('hamlet')
self.assertEqual(getattr(user_profile, setting_name), test_value)
# Test to make sure invalid settings are not accepted
# and saved in the db.
data = {setting_name: ujson.dumps(invalid_value)}
result = self.client_patch("/json/settings/display", data)
# the json error for multiple word setting names (ex: default_language)
# displays as 'Invalid language'. Using setting_name.split('_') to format.
self.assert_json_error(result, "Invalid %s '%s'" % (setting_name.split('_')[-1],
invalid_value))
user_profile = self.example_user('hamlet')
self.assertNotEqual(getattr(user_profile, setting_name), invalid_value)
def test_change_user_display_setting(self):
# type: () -> None
"""Test updating each non-boolean setting in UserProfile property_types"""
user_settings = (s for s in UserProfile.property_types if UserProfile.property_types[s] is not bool)
for setting in user_settings:
self.do_test_change_user_display_setting(setting)
class UserChangesTest(ZulipTestCase):
def test_update_api_key(self):
# type: () -> None
user = self.example_user('hamlet')
email = user.email
self.login(email)
old_api_key = user.api_key
result = self.client_post('/json/users/me/api_key/regenerate')
self.assert_json_success(result)
new_api_key = ujson.loads(result.content)['api_key']
self.assertNotEqual(old_api_key, new_api_key)
user = self.example_user('hamlet')
self.assertEqual(new_api_key, user.api_key)
| apache-2.0 | 5,690,486,425,606,591,000 | 41.803279 | 108 | 0.603696 | false |
RRSCDS/douban-mining | src/Python/api-oauth/oauthx.py | 1 | 2742 | # -*- coding: utf8 -*-
import urllib, urllib2
import json
# key and secret通过创建豆瓣app获得(无审核)
# http://developers.douban.com/apikey/
APIKEY = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
SECRET = 'xxxxxxxxxxxxxxxx'
CALLBACK_URL = 'http://www.douban.com'
GETTOKEN_URL = 'https://www.douban.com/service/auth2/token'
def getToken(code):
postParams = {
'client_id': APIKEY,
'client_secret': SECRET,
'redirect_uri': CALLBACK_URL,
'grant_type': 'authorization_code',
'code': code
}
    # headers are probably not necessary
headers = {
'Host': 'www.douban.com',
'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:34.0) Gecko/20100101 Firefox/34.0',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Connection': 'keep-alive'
}
req = urllib2.Request(
url = GETTOKEN_URL,
data = urllib.urlencode(postParams),
headers = headers
)
# Get the access token
try:
r = urllib2.urlopen(req).read()
print r
return json.loads(r)['access_token']
# Get detailed error msg if 400 bad request occurs:
except urllib2.HTTPError as e:
print 'Error: ' + e.read()
raise SystemExit(e)
# Authorization code can be obtained manually using browser,
# see http://developers.douban.com/wiki/?title=oauth2 (the "obtaining the authorization_code" section)
# Each code can only be used once to get an access token (?)
# Tokens are relatively long-lived - no need to get a code every time
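# Sketch of that manual step (the /auth endpoint name follows the wiki page
# linked above and is illustrative here; substitute the real APIKEY and
# CALLBACK_URL values):
#   https://www.douban.com/service/auth2/auth?client_id=APIKEY
#       &redirect_uri=CALLBACK_URL&response_type=code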
def apiTest(user, count=1, until_id=''):
# Use old token from file if there is one, otherwise get new token
f = open('token.txt', 'a+')
tok = f.read()
if len(tok) == 0:
tok = getToken(raw_input('input code here:')) # input code manually
f.write(tok)
f.close()
print 'Current token:', tok
# Reuqest url and headers
url = 'https://api.douban.com/shuo/v2/statuses/user_timeline/'
url = url + user + '?count=%s&until_id=%s'%(count, until_id)
headers = {'Authorization': 'Bearer '+tok}
# Get data
try:
req2 = urllib2.Request(url=url, headers=headers)
resp2 = urllib2.urlopen(req2)
        rj = resp2.read() # JSON-formatted data
print rj
        r = json.loads(rj) # convert to a Python list; each status is a dict object
print '%s statuses loaded' % len(r)
except urllib2.HTTPError as e:
print 'Error: ' + e.read()
raise SystemExit(e)
if __name__ == "__main__":
apiTest('homeland', 5, '1605326442')
# Note that contrary to what douban api help says, until_id is NOT inclusive,
# i.e. only statuses with id < until_id will be loaded.
| mit | 1,631,758,914,176,409,900 | 29.193182 | 101 | 0.615587 | false |
klmitch/appathy | tests/unit/test_actions.py | 1 | 26457 | # Copyright (C) 2012 by Kevin L. Mitchell <[email protected]>
#
# This program is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see
# <http://www.gnu.org/licenses/>.
import mock
import webob
import webob.exc
from appathy import actions
from appathy import exceptions
from appathy import response
import tests
class ActionMethodTest(tests.TestCase):
@mock.patch('appathy.types.Translators',
side_effect=['serializers', 'deserializers'])
def test_init_function(self, mock_Translators):
def function(a, b, c, d=4, e=5, f=6, *args, **kwargs):
pass
action = actions.ActionMethod(function)
self.assertEqual(action.method, function)
self.assertEqual(action.serializers, 'serializers')
self.assertEqual(action.deserializers, 'deserializers')
self.assertEqual(action.isgenerator, False)
self.assertEqual(action.argspec,
(['a', 'b', 'c', 'd', 'e', 'f'], 'args', 'kwargs',
(4, 5, 6)))
self.assertEqual(action.argidx, 0)
mock_Translators.assert_has_calls([
mock.call(function, '_wsgi_serializers'),
mock.call(function, '_wsgi_deserializers'),
])
@mock.patch('appathy.types.Translators',
side_effect=['serializers', 'deserializers'])
def test_init_generator(self, mock_Translators):
def function(a, b, c, d=4, e=5, f=6, *args, **kwargs):
yield
action = actions.ActionMethod(function)
self.assertEqual(action.method, function)
self.assertEqual(action.serializers, 'serializers')
self.assertEqual(action.deserializers, 'deserializers')
self.assertEqual(action.isgenerator, True)
self.assertEqual(action.argspec,
(['a', 'b', 'c', 'd', 'e', 'f'], 'args', 'kwargs',
(4, 5, 6)))
self.assertEqual(action.argidx, 0)
mock_Translators.assert_has_calls([
mock.call(function, '_wsgi_serializers'),
mock.call(function, '_wsgi_deserializers'),
])
@mock.patch('appathy.types.Translators',
side_effect=['serializers', 'deserializers'])
def test_init_method(self, mock_Translators):
class TestClass(object):
def method(self, a, b, c, d=4, e=5, f=6, *args, **kwargs):
pass
test = TestClass()
action = actions.ActionMethod(test.method)
self.assertEqual(action.method, test.method)
self.assertEqual(action.serializers, 'serializers')
self.assertEqual(action.deserializers, 'deserializers')
self.assertEqual(action.isgenerator, False)
self.assertEqual(action.argspec,
(['self', 'a', 'b', 'c', 'd', 'e', 'f'], 'args',
'kwargs', (4, 5, 6)))
self.assertEqual(action.argidx, 1)
mock_Translators.assert_has_calls([
mock.call(test.method, '_wsgi_serializers'),
mock.call(test.method, '_wsgi_deserializers'),
])
@mock.patch('appathy.types.Translators')
def test_getattr(self, _mock_Translators):
def function():
pass
function.test_attr = 'a test'
action = actions.ActionMethod(function)
self.assertEqual(action.test_attr, 'a test')
with self.assertRaises(AttributeError):
dummy = action.nonexistant
@mock.patch('appathy.types.Translators')
def test_call_function_nokwarg(self, _mock_Translators):
check = dict(called=False)
def function(a, b, c):
self.assertEqual(a, 1)
self.assertEqual(b, 2)
self.assertEqual(c, 3)
check['called'] = True
action = actions.ActionMethod(function)
# Verifying that this doesn't raise a TypeError, either...
action(1, b=2, c=3, d=4, e=5, f=6)
self.assertTrue(check['called'])
@mock.patch('appathy.types.Translators')
def test_call_function_withkwarg(self, _mock_Translators):
check = dict(called=False)
def function(a, b, c, **kwargs):
self.assertEqual(a, 1)
self.assertEqual(b, 2)
self.assertEqual(c, 3)
self.assertEqual(kwargs, dict(d=4, e=5, f=6))
check['called'] = True
action = actions.ActionMethod(function)
# Verifying that this doesn't raise a TypeError, either...
action(1, b=2, c=3, d=4, e=5, f=6)
self.assertTrue(check['called'])
@mock.patch('appathy.types.Translators')
def test_call_method_nokwarg(self, _mock_Translators):
check = dict(called=False)
class Test(object):
def method(inst, a, b, c):
self.assertIsInstance(inst, Test)
self.assertEqual(a, 1)
self.assertEqual(b, 2)
self.assertEqual(c, 3)
check['called'] = True
test = Test()
action = actions.ActionMethod(test.method)
# Verifying that this doesn't raise a TypeError, either...
action(1, b=2, c=3, d=4, e=5, f=6)
self.assertTrue(check['called'])
@mock.patch('appathy.types.Translators')
    def test_call_method_withkwarg(self, _mock_Translators):
check = dict(called=False)
class Test(object):
def method(inst, a, b, c, **kwargs):
self.assertIsInstance(inst, Test)
self.assertEqual(a, 1)
self.assertEqual(b, 2)
self.assertEqual(c, 3)
self.assertEqual(kwargs, dict(d=4, e=5, f=6))
check['called'] = True
test = Test()
action = actions.ActionMethod(test.method)
# Verifying that this doesn't raise a TypeError, either...
action(1, b=2, c=3, d=4, e=5, f=6)
self.assertTrue(check['called'])
class ActionDescriptorTest(tests.TestCase):
@mock.patch.object(actions, 'ActionMethod',
side_effect=['meth', 'ext1', 'ext2', 'ext3'])
def test_init(self, mock_ActionMethod):
desc = actions.ActionDescriptor('method', ['extension1', 'extension2',
'extension3'],
'response_type')
self.assertEqual(desc.method, 'meth')
self.assertEqual(desc.extensions, ['ext1', 'ext2', 'ext3'])
self.assertEqual(desc.resp_type, 'response_type')
mock_ActionMethod.assert_has_calls([
mock.call('method'),
mock.call('extension1'),
mock.call('extension2'),
mock.call('extension3'),
])
@mock.patch.object(actions, 'ActionMethod',
return_value=mock.Mock(return_value='response'))
@mock.patch.object(actions.ActionDescriptor, 'wrap', return_value='resp')
def test_call(self, mock_wrap, mock_ActionMethod):
desc = actions.ActionDescriptor('method', [], 'resp_type')
result = desc('req', dict(a=1, b=2, c=3))
mock_ActionMethod.return_value.assert_called_once_with('req', a=1,
b=2, c=3)
mock_wrap.assert_called_once_with('req', 'response')
self.assertEqual(result, 'resp')
    @mock.patch.object(actions, 'ActionMethod', return_value=mock.Mock())
    def test_deserialize_request_nobody(self, mock_ActionMethod):
desc = actions.ActionDescriptor('method', [], 'resp_type')
result = desc.deserialize_request(mock.Mock(content_length=0))
self.assertEqual(result, None)
self.assertFalse(mock_ActionMethod.return_value.deserializers.called)
@mock.patch.object(actions, 'ActionMethod', return_value=mock.Mock(**{
'deserializers.side_effect': KeyError,
}))
def test_deserialize_request_nodeserializer(self, mock_ActionMethod):
request = mock.Mock(content_length=50, content_type='text/plain')
desc = actions.ActionDescriptor('method', [], 'resp_type')
with self.assertRaises(webob.exc.HTTPUnsupportedMediaType):
result = desc.deserialize_request(request)
mock_ActionMethod.return_value.deserializers.assert_called_once_with(
'text/plain')
@mock.patch.object(actions, 'ActionMethod')
def test_deserialize_request_noattacher(self, mock_ActionMethod):
deserializer = mock.Mock(spec=[], return_value='body')
method = mock.Mock(**{'deserializers.return_value': deserializer})
extensions = [mock.Mock(), mock.Mock()]
mock_ActionMethod.side_effect = [method] + extensions
request = mock.Mock(content_length=50,
content_type='text/plain',
body='this is the body')
desc = actions.ActionDescriptor('method', ['ext1', 'ext2'], 'resp')
result = desc.deserialize_request(request)
self.assertFalse(hasattr(deserializer, 'attach'))
method.deserializers.assert_called_once_with('text/plain')
self.assertFalse(extensions[0].deserializers.called)
self.assertFalse(extensions[1].deserializers.called)
deserializer.assert_called_once_with('this is the body')
self.assertEqual(result, 'body')
@mock.patch.object(actions, 'ActionMethod')
def test_deserialize_request_withattacher(self, mock_ActionMethod):
deserializer = mock.Mock(return_value='body')
method = mock.Mock(**{'deserializers.return_value': deserializer})
extensions = [
mock.Mock(**{'deserializers.return_value': 'extension1'}),
mock.Mock(**{'deserializers.side_effect': KeyError}),
mock.Mock(**{'deserializers.return_value': 'extension3'}),
]
mock_ActionMethod.side_effect = [method] + extensions
request = mock.Mock(content_length=50,
content_type='text/plain',
body='this is the body')
desc = actions.ActionDescriptor('method', ['ext1', 'ext2', 'ext3'],
'resp')
result = desc.deserialize_request(request)
method.deserializers.assert_called_once_with('text/plain')
extensions[0].deserializers.assert_called_once_with('text/plain')
extensions[1].deserializers.assert_called_once_with('text/plain')
extensions[2].deserializers.assert_called_once_with('text/plain')
deserializer.assert_has_calls([
mock.call.attach('extension1'),
mock.call.attach('extension3'),
mock.call('this is the body'),
])
self.assertEqual(result, 'body')
@mock.patch.object(actions, 'ActionMethod')
def test_serializer_nomatch(self, mock_ActionMethod):
serializers = mock.Mock(**{
'get_types.return_value': ['type1', 'type2'],
})
method = mock.Mock(serializers=serializers)
mock_ActionMethod.side_effect = [method]
request = mock.Mock(**{'accept.best_match.return_value': None})
desc = actions.ActionDescriptor('method', [], 'resp_type')
with self.assertRaises(webob.exc.HTTPNotAcceptable):
dummy = desc.serializer(request)
serializers.get_types.assert_called_once_with()
request.accept.best_match.assert_called_once_with(['type1', 'type2'])
self.assertFalse(serializers.called)
@mock.patch.object(actions, 'ActionMethod')
def test_serializer_noserializer(self, mock_ActionMethod):
serializers = mock.Mock(**{
'get_types.return_value': ['type1', 'type2'],
'side_effect': KeyError,
})
method = mock.Mock(serializers=serializers)
mock_ActionMethod.side_effect = [method]
request = mock.Mock(**{'accept.best_match.return_value': 'text/plain'})
desc = actions.ActionDescriptor('method', [], 'resp_type')
with self.assertRaises(webob.exc.HTTPNotAcceptable):
dummy = desc.serializer(request)
serializers.get_types.assert_called_once_with()
request.accept.best_match.assert_called_once_with(['type1', 'type2'])
serializers.assert_called_once_with('text/plain')
@mock.patch.object(actions, 'ActionMethod')
def test_serializer_noattacher(self, mock_ActionMethod):
serializer = mock.Mock(spec=[])
serializers = mock.Mock(**{
'get_types.return_value': ['type1', 'type2'],
'return_value': serializer,
})
method = mock.Mock(serializers=serializers)
extensions = [mock.Mock(), mock.Mock()]
mock_ActionMethod.side_effect = [method] + extensions
request = mock.Mock(**{'accept.best_match.return_value': 'text/plain'})
desc = actions.ActionDescriptor('method', ['ext1', 'ext2'], 'resp')
result = desc.serializer(request)
serializers.get_types.assert_called_once_with()
request.accept.best_match.assert_called_once_with(['type1', 'type2'])
serializers.assert_called_once_with('text/plain')
self.assertFalse(extensions[0].serializers.called)
self.assertFalse(extensions[1].serializers.called)
self.assertFalse(serializer.called)
self.assertEqual(result[0], 'text/plain')
self.assertEqual(id(result[1]), id(serializer))
@mock.patch.object(actions, 'ActionMethod')
def test_serializer_withattacher(self, mock_ActionMethod):
serializer = mock.Mock()
serializers = mock.Mock(**{
'get_types.return_value': ['type1', 'type2'],
'return_value': serializer,
})
method = mock.Mock(serializers=serializers)
extensions = [
mock.Mock(**{'serializers.return_value': 'extension1'}),
mock.Mock(**{'serializers.side_effect': KeyError}),
mock.Mock(**{'serializers.return_value': 'extension3'}),
]
mock_ActionMethod.side_effect = [method] + extensions
request = mock.Mock(**{'accept.best_match.return_value': 'text/plain'})
desc = actions.ActionDescriptor('method', ['ext1', 'ext2', 'ext3'],
'resp')
result = desc.serializer(request)
serializers.get_types.assert_called_once_with()
request.accept.best_match.assert_called_once_with(['type1', 'type2'])
serializers.assert_called_once_with('text/plain')
extensions[0].serializers.assert_called_once_with('text/plain')
extensions[1].serializers.assert_called_once_with('text/plain')
extensions[2].serializers.assert_called_once_with('text/plain')
serializer.assert_has_calls([
mock.call.attach('extension3'),
mock.call.attach('extension1'),
])
self.assertFalse(serializer.called)
self.assertEqual(result[0], 'text/plain')
self.assertEqual(id(result[1]), id(serializer))
@mock.patch.object(actions, 'ActionMethod')
@mock.patch.object(actions.ActionDescriptor, 'wrap', return_value='resp')
def test_pre_process_functions(self, mock_wrap, mock_ActionMethod):
meth = mock.Mock()
exts = [
mock.Mock(isgenerator=False),
mock.Mock(isgenerator=False),
mock.Mock(isgenerator=False),
]
mock_ActionMethod.side_effect = [meth] + exts
desc = actions.ActionDescriptor('method', ['ext1', 'ext2', 'ext3'],
'resp')
result = desc.pre_process('req', 'params')
self.assertEqual(result, (None, list(reversed(exts))))
self.assertFalse(mock_wrap.called)
@mock.patch.object(actions, 'ActionMethod')
@mock.patch.object(actions.ActionDescriptor, 'wrap', return_value='resp')
def test_pre_process_generators_noyield(self, mock_wrap,
mock_ActionMethod):
meth = mock.Mock()
ext_gens = [
mock.Mock(**{'next.return_value': None}),
mock.Mock(**{'next.return_value': None}),
mock.Mock(**{'next.return_value': None}),
]
exts = [
mock.Mock(isgenerator=True, return_value=ext_gens[0]),
mock.Mock(isgenerator=True, return_value=ext_gens[1]),
mock.Mock(isgenerator=True, return_value=ext_gens[2]),
]
mock_ActionMethod.side_effect = [meth] + exts
desc = actions.ActionDescriptor('method', ['ext1', 'ext2', 'ext3'],
'resp')
result = desc.pre_process('req', dict(a=1, b=2, c=3))
exts[0].assert_called_once_with('req', a=1, b=2, c=3)
exts[1].assert_called_once_with('req', a=1, b=2, c=3)
exts[2].assert_called_once_with('req', a=1, b=2, c=3)
ext_gens[0].next.assert_called_once_with()
ext_gens[1].next.assert_called_once_with()
ext_gens[2].next.assert_called_once_with()
self.assertEqual(result, (None, list(reversed(ext_gens))))
self.assertFalse(mock_wrap.called)
@mock.patch.object(actions, 'ActionMethod')
@mock.patch.object(actions.ActionDescriptor, 'wrap', return_value='resp')
def test_pre_process_generators_noyield_stop(self, mock_wrap,
mock_ActionMethod):
meth = mock.Mock()
ext_gens = [
mock.Mock(**{'next.side_effect': StopIteration}),
mock.Mock(**{'next.return_value': None}),
mock.Mock(**{'next.side_effect': StopIteration}),
]
exts = [
mock.Mock(isgenerator=True, return_value=ext_gens[0]),
mock.Mock(isgenerator=True, return_value=ext_gens[1]),
mock.Mock(isgenerator=True, return_value=ext_gens[2]),
]
mock_ActionMethod.side_effect = [meth] + exts
desc = actions.ActionDescriptor('method', ['ext1', 'ext2', 'ext3'],
'resp')
result = desc.pre_process('req', dict(a=1, b=2, c=3))
exts[0].assert_called_once_with('req', a=1, b=2, c=3)
exts[1].assert_called_once_with('req', a=1, b=2, c=3)
exts[2].assert_called_once_with('req', a=1, b=2, c=3)
ext_gens[0].next.assert_called_once_with()
ext_gens[1].next.assert_called_once_with()
ext_gens[2].next.assert_called_once_with()
self.assertEqual(result, (None, [ext_gens[1]]))
self.assertFalse(mock_wrap.called)
@mock.patch.object(actions, 'ActionMethod')
@mock.patch.object(actions.ActionDescriptor, 'wrap', return_value='resp')
def test_pre_process_generators_yield(self, mock_wrap, mock_ActionMethod):
meth = mock.Mock()
ext_gens = [
mock.Mock(**{'next.return_value': None}),
mock.Mock(**{'next.return_value': 'generated'}),
mock.Mock(**{'next.return_value': None}),
]
exts = [
mock.Mock(isgenerator=True, return_value=ext_gens[0]),
mock.Mock(isgenerator=True, return_value=ext_gens[1]),
mock.Mock(isgenerator=True, return_value=ext_gens[2]),
]
mock_ActionMethod.side_effect = [meth] + exts
desc = actions.ActionDescriptor('method', ['ext1', 'ext2', 'ext3'],
'resp')
result = desc.pre_process('req', dict(a=1, b=2, c=3))
exts[0].assert_called_once_with('req', a=1, b=2, c=3)
exts[1].assert_called_once_with('req', a=1, b=2, c=3)
self.assertFalse(exts[2].called)
ext_gens[0].next.assert_called_once_with()
ext_gens[1].next.assert_called_once_with()
self.assertFalse(ext_gens[2].next.called)
self.assertEqual(result, ('resp', [ext_gens[0]]))
mock_wrap.assert_called_once_with('req', 'generated')
@mock.patch('inspect.isgenerator', return_value=False)
@mock.patch.object(actions, 'ActionMethod')
@mock.patch.object(actions.ActionDescriptor, 'wrap', return_value='resp')
def test_post_process_functions_noreplace(self, mock_wrap,
_mock_ActionMethod,
_mock_is_generator):
ext_list = [
mock.Mock(return_value=None),
mock.Mock(return_value=None),
mock.Mock(return_value=None),
]
desc = actions.ActionDescriptor('method', [], 'resp_type')
result = desc.post_process(ext_list, 'req', 'in response',
dict(a=1, b=2, c=3))
ext_list[0].assert_called_once_with('req', 'in response',
a=1, b=2, c=3)
ext_list[1].assert_called_once_with('req', 'in response',
a=1, b=2, c=3)
ext_list[2].assert_called_once_with('req', 'in response',
a=1, b=2, c=3)
self.assertEqual(result, 'in response')
self.assertFalse(mock_wrap.called)
@mock.patch('inspect.isgenerator', return_value=False)
@mock.patch.object(actions, 'ActionMethod')
@mock.patch.object(actions.ActionDescriptor, 'wrap', return_value='resp')
def test_post_process_functions_withreplace(self, mock_wrap,
_mock_ActionMethod,
_mock_is_generator):
ext_list = [
mock.Mock(return_value=None),
mock.Mock(return_value='replacement'),
mock.Mock(return_value=None),
]
desc = actions.ActionDescriptor('method', [], 'resp_type')
result = desc.post_process(ext_list, 'req', 'in response',
dict(a=1, b=2, c=3))
ext_list[0].assert_called_once_with('req', 'in response',
a=1, b=2, c=3)
ext_list[1].assert_called_once_with('req', 'in response',
a=1, b=2, c=3)
ext_list[2].assert_called_once_with('req', 'resp',
a=1, b=2, c=3)
self.assertEqual(result, 'resp')
mock_wrap.assert_called_once_with('req', 'replacement')
@mock.patch('inspect.isgenerator', return_value=True)
@mock.patch.object(actions, 'ActionMethod')
@mock.patch.object(actions.ActionDescriptor, 'wrap', return_value='resp')
def test_post_process_generators_noreplace(self, mock_wrap,
_mock_ActionMethod,
_mock_is_generator):
ext_list = [
mock.Mock(**{'send.return_value': None}),
mock.Mock(**{'send.side_effect': StopIteration}),
mock.Mock(**{'send.return_value': None}),
]
desc = actions.ActionDescriptor('method', [], 'resp_type')
result = desc.post_process(ext_list, 'req', 'in response',
dict(a=1, b=2, c=3))
ext_list[0].send.assert_called_once_with('in response')
ext_list[1].send.assert_called_once_with('in response')
ext_list[2].send.assert_called_once_with('in response')
self.assertEqual(result, 'in response')
self.assertFalse(mock_wrap.called)
@mock.patch('inspect.isgenerator', return_value=True)
@mock.patch.object(actions, 'ActionMethod')
@mock.patch.object(actions.ActionDescriptor, 'wrap', return_value='resp')
def test_post_process_generators_withreplace(self, mock_wrap,
_mock_ActionMethod,
_mock_is_generator):
ext_list = [
mock.Mock(**{'send.return_value': None}),
mock.Mock(**{'send.return_value': 'replacement'}),
mock.Mock(**{'send.return_value': None}),
]
desc = actions.ActionDescriptor('method', [], 'resp_type')
result = desc.post_process(ext_list, 'req', 'in response',
dict(a=1, b=2, c=3))
ext_list[0].send.assert_called_once_with('in response')
ext_list[1].send.assert_called_once_with('in response')
ext_list[2].send.assert_called_once_with('resp')
self.assertEqual(result, 'resp')
mock_wrap.assert_called_once_with('req', 'replacement')
@mock.patch.object(actions, 'ActionMethod')
def test_wrap_httpexception(self, _mock_ActionMethod):
response = webob.exc.HTTPNotFound()
desc = actions.ActionDescriptor('method', [], 'resp_type')
with self.assertRaises(webob.exc.HTTPException) as ctx:
result = desc.wrap('req', response)
self.assertEqual(id(ctx.exception), id(response))
@mock.patch.object(actions, 'ActionMethod')
def test_wrap_webob(self, _mock_ActionMethod):
response = webob.Response()
desc = actions.ActionDescriptor('method', [], 'resp_type')
with self.assertRaises(exceptions.AppathyResponse) as ctx:
result = desc.wrap('req', response)
self.assertEqual(id(ctx.exception.response), id(response))
@mock.patch.object(actions, 'ActionMethod')
def test_wrap_responseobject(self, _mock_ActionMethod):
resp = mock.Mock(spec=response.ResponseObject)
desc = actions.ActionDescriptor('method', [], 'resp_type')
result = desc.wrap('req', resp)
self.assertEqual(id(result), id(resp))
resp._bind.assert_called_once_with(desc)
@mock.patch.object(actions, 'ActionMethod')
def test_wrap_other(self, _mock_ActionMethod):
resp_type = mock.Mock(return_value='wrapped')
desc = actions.ActionDescriptor('method', [], resp_type)
result = desc.wrap('req', 'response')
self.assertEqual(result, 'wrapped')
resp_type.assert_called_once_with('req', 'response', _descriptor=desc)
| gpl-3.0 | 3,172,380,119,785,183,700 | 40.274571 | 79 | 0.584571 | false |
CLVsol/clvsol_odoo_addons | clv_lab_test_history/wizard/lab_test_result_mass_edit.py | 1 | 1108 | # -*- coding: utf-8 -*-
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
import logging
from odoo import api, fields, models
_logger = logging.getLogger(__name__)
class LabTestResultMassEdit(models.TransientModel):
_inherit = 'clv.lab_test.result.mass_edit'
phase_id = fields.Many2one(
comodel_name='clv.phase',
string='Phase'
)
phase_id_selection = fields.Selection(
[('set', 'Set'),
('remove', 'Remove'),
], string='Phase:', default=False, readonly=False, required=False
)
@api.multi
def do_lab_test_result_mass_edit(self):
self.ensure_one()
super().do_lab_test_result_mass_edit()
for lab_test_result in self.lab_test_result_ids:
_logger.info(u'%s %s', '>>>>>', lab_test_result.code)
if self.phase_id_selection == 'set':
lab_test_result.phase_id = self.phase_id
if self.phase_id_selection == 'remove':
lab_test_result.phase_id = False
return True
| agpl-3.0 | 8,628,638,866,095,895,000 | 26.7 | 74 | 0.597473 | false |
christoph2/k-os | kosek/ImplementationDefinition/AttributeDefinitions.py | 1 | 15627 | #!/usr/bin/python
# -*- coding: utf-8 -*-
__version__ = '0.9.2rc-1'
__copyright__ = """
k_os (Konnex Operating-System based on the OSEK/VDX-Standard).
(C) 2007-2013 by Christoph Schueler <github.com/Christoph2,
[email protected]>
All Rights Reserved
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
s. FLOSS-EXCEPTION.txt
"""
import unittest
import sys
from kosek.Logger import Logger
from kosek.ImplementationDefinition.NestableDefinition import NestableDefinition
from kosek.ImplementationDefinition.Extras import Extras
from kosek.ImplementationDefinition.Enumeration import Enumeration, Enumerator
logger = Logger()
signedIntegerRange = lambda numberOfBits: (-(2 ** (numberOfBits - 1)), (2 ** (numberOfBits - 1)) - 1)
unsignedIntegerRange = lambda numberOfBits: (0, (2 ** numberOfBits) - 1)
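# e.g. signedIntegerRange(32) == (-2147483648, 2147483647)
#      unsignedIntegerRange(32) == (0, 4294967295)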
class ImplAttrType(object):
UINT32 = 0
INT32 = 1
UINT64 = 2
INT64 = 3
FLOAT = 4
ENUM = 5
STRING = 6
BOOLEAN = 7
_types = {
'UINT32' : UINT32,
'INT32' : INT32,
'UINT64' : UINT64,
'INT64' : INT64,
'FLOAT' : FLOAT,
'ENUM' : ENUM,
'STRING' : STRING,
'BOOLEAN' : BOOLEAN
}
_types_rev = dict([(v,k) for k,v in _types.items()])
_instance = None
def __new__(cls):
if not cls._instance:
cls._instance = super(ImplAttrType, cls).__new__(cls)
return cls._instance
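    # Classic singleton: every ImplAttrType() call returns the same shared
    # instance; toString/fromString are classmethods, so the instance is
    # mostly a convenience handle.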
@classmethod
def toString(cls, t):
return cls._types_rev[t]
@classmethod
def fromString(cls, t):
return cls._types[t]
class AttributeDefinition(NestableDefinition):
def __init__(self, attrName, autoSpec, mult, default, description, extra):
super(AttributeDefinition, self).__init__(attrName = attrName, autoSpec = autoSpec,
mult = mult, default = default, desc = description, extra = extra
)
self.attrName = attrName
self.autoSpec = autoSpec
self.mult = mult
self.default = default
self.description = description
self._extra = extra
if mult == True:
pass
#self._dict = {}
self._setupInstance()
delattr(self, '_extra')
def _setupInstance(self):
raise NotImplementedError()
def __contains__(self, value):
raise NotImplementedError()
def _validate(self, parameter, path):
raise NotImplementedError()
def _getDataType(self):
return self._dataType
dataType = property(_getDataType)
class BooleanAttribute(AttributeDefinition):
def _setupInstance(self):
self._dataType = ImplAttrType.BOOLEAN
self.falseParameters = None
self.trueParameters = None
extra = self._extra
if extra.type_ == Extras.BOOL_VALUES:
if extra.boolValues:
if extra.boolValues.trueParameterList:
self.trueParameters = extra.boolValues.trueParameterList.values.items()
if extra.boolValues.falseParameterList:
self.falseParameters = extra.boolValues.falseParameterList.values.items()
self.boolValues = extra.boolValues
def __contains__(self, value):
return True # Guaranteed by grammar.
def getParameters(self, value):
if value == True:
return self.trueParameters
elif value == False:
return self.falseParameters
else:
raise ValueError(value)
def _validate(self, parameter, path):
parameterName = parameter.parameterName
if parameterName in ('ORTI_DEBUG', 'TYPE'):
pass
value = parameter.parameterValue.value
p2 = self.getParameters(value)
for xy in parameter.parameterValue.values:
if xy.parameterName == 'ACTION':
pass
try:
implDef2 = self.getParameters(value)[xy.parameterName]
implDef2.validate(xy, path)
except TypeError as e:
pass
        if value not in self:
            print "BOOLEAN parameter %s %s not in %s" % (path, value, self)
class EnumAttribute(AttributeDefinition):
def _setupInstance(self):
self._dataType = ImplAttrType.ENUM
self._enumeration = self._extra.enumeration
if self.default:
if self.default == u'AUTO':
pass # TODO: autosToBeResolved
else:
#enum = [e.name for e in self.extra.enumeration.values()]
if not self.default in self: # ._enumeration:
logger.error("Default-Value '%s' for Attribute '%s' out of range." % (self.default, self.attrName))
else:
pass
def __contains__(self, value):
# TODO: AUTO handling!!!
res = value in self._enumeration
return res
def _getEnumeration (self):
return self._enumeration
def getParameters(self, value):
enumerator = self._getEnumeration()[value]
parameters = enumerator.parameters # TODO: getParamaters!?
return parameters
def _validate(self, parameter, path):
parameterName = parameter.parameterName
value = parameter.parameterValue.value
        implDefinitions = {}
        try:
            implDefinitions = self.getParameters(value) or {}
        except KeyError as e:
            pass ## TODO: Errorhandling!!!
for xy in parameter.parameterValue.values:
implDefinition = implDefinitions.get(xy.parameterName)
implDefinition.validate(xy, path)
if not value in self:
pass
enumeration = property(_getEnumeration)
class IntegerAttribute(AttributeDefinition):
def __init__(self, dataType, attrName, autoSpec, mult, default, description, extra):
self._dataType = dataType
self.signedIntegerRange = signedIntegerRange
self.unsignedIntegerRange = unsignedIntegerRange
super(IntegerAttribute, self).__init__(attrName = attrName, autoSpec = autoSpec,
mult = mult, default = default, description = description, extra = extra
)
def _setupInstance(self):
self._range = self._extra.range
self._actualNumberFrom = self._extra.numberFrom
self._actualNumberTo = self._extra.numberTo
signed, bits = {
ImplAttrType.UINT32: (False, 32),
ImplAttrType.UINT64: (False, 64),
ImplAttrType.INT32: (True, 32),
ImplAttrType.INT64: (True, 64),
}.get(self._dataType)
if signed:
result = self.signedIntegerRange(bits)
else:
result = self.unsignedIntegerRange(bits)
self.naturalRangeFrom, self.naturalRangeTo = result
def __contains__(self, value):
result = False
if isinstance(self._actualNumberFrom, list):
result = value in self._actualNumberFrom # Check against enumerated list.
elif self._actualNumberFrom and self._actualNumberTo:
result = self._actualNumberFrom <= value <= self._actualNumberTo
else:
result = self.naturalRangeFrom <= value <= self.naturalRangeTo
return result
def _validate(self, parameter, path):
parameterName = parameter.parameterName
value = parameter.parameterValue.value
if not value in self:
print "Invalid VALUE!!!"
class FloatAttribute(AttributeDefinition):
def _setupInstance(self):
#self._range = self._extra.range
self._dataType = ImplAttrType.FLOAT
self._actualNumberFrom = self._extra.numberFrom
self._actualNumberTo = self._extra.numberTo
if (self._extra.numberFrom and self._extra.numberTo) and ((self.default < self._extra.numberFrom) or (self.default > self._extra.numberTo)):
logger.error("Default-Value '%s' for Attribute '%s' out of range."
% (self.default, self.attrName))
def __contains__(self,value):
if self._actualNumberFrom and self._actualNumberTo:
return self._actualNumberFrom <= value <= self._actualNumberTo
else:
return True
def _validate(self, parameter, path):
parameterName = parameter.parameterName
value = parameter.parameterValue.value
if not value in self:
print "Invalid FLOAT VALUE!!!"
class StringAttribute(AttributeDefinition):
def _setupInstance(self):
self._dataType = ImplAttrType.STRING
def __contains__(self, value):
return True
def _validate(self, parameter, path):
pass
def AttributeDefinitionFactory(dataType, attrName, autoSpec, mult, default, desc, extra):
if dataType == ImplAttrType.BOOLEAN:
return BooleanAttribute(attrName, autoSpec, mult, default, desc, extra)
elif dataType == ImplAttrType.ENUM:
return EnumAttribute(attrName, autoSpec, mult, default, desc, extra)
elif dataType in (ImplAttrType.UINT32, ImplAttrType.UINT64, ImplAttrType.INT32, ImplAttrType.INT64):
return IntegerAttribute(dataType, attrName, autoSpec, mult, default, desc, extra)
elif dataType == ImplAttrType.FLOAT:
return FloatAttribute(attrName, autoSpec, mult, default, desc, extra)
elif dataType == ImplAttrType.STRING:
return StringAttribute(attrName, autoSpec, mult, default, desc, extra)
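# Usage, mirroring the factory tests below (Extras and ImplAttrType as
# defined in this module):
#   AttributeDefinitionFactory(ImplAttrType.STRING, 'KOIL_VERSION', False,
#                              False, '2.2', None, Extras(Extras.DUMMY))
#   -> StringAttribute instance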
### ### ## ### ##### ####### ####### ####### ##### ####### #####
# # # # # # # # # # # # # # # # # # # #
# # ## # # # # # # # #
# # ## # # # # # # # # #
# # # # # # # # #### ##### # #####
# # # ## # # ####### # # # # # #
# # # ## # # # # # # #
# # # # # # # # # # # # # #
### ### # ##### ### ### ####### ##### ### #####
class TestImplAttrType_toString(unittest.TestCase):
def setUp(self):
self.c = ImplAttrType()
def test_toUI32(self):
self.assertEquals(self.c.toString(ImplAttrType.UINT32), 'UINT32')
def test_toI32(self):
self.assertEquals(self.c.toString(ImplAttrType.INT32), 'INT32')
def test_toUI64(self):
self.assertEquals(self.c.toString(ImplAttrType.UINT64), 'UINT64')
def test_toI64(self):
self.assertEquals(self.c.toString(ImplAttrType.INT64), 'INT64')
def test_toFloat(self):
self.assertEquals(self.c.toString(ImplAttrType.FLOAT), 'FLOAT')
def test_toEnum(self):
self.assertEquals(self.c.toString(ImplAttrType.ENUM), 'ENUM')
def test_toString(self):
self.assertEquals(self.c.toString(ImplAttrType.STRING), 'STRING')
def test_toBoolean(self):
self.assertEquals(self.c.toString(ImplAttrType.BOOLEAN), 'BOOLEAN')
def test_Exc(self):
self.assertRaises(KeyError, self.c.toString,0x4223)
## @unittest.skipIf(sys.platform.startswith("win"), "Windows ???")
## @unittest.skipUnless(sys.platform.startswith("win"), "requires Windows")
def skipUnlessHasattr(obj, attr):
""" This is an example for an user defined skip decorator.
"""
if hasattr(obj, attr):
return lambda func: func
return unittest.skip("{0!r} doesn't have {1!r}".format(obj, attr))
class TestImplAttrType_fromString(unittest.TestCase):
def setUp(self):
self.c = ImplAttrType()
def test_fromUI32(self):
self.assertEquals(self.c.fromString('UINT32'), ImplAttrType.UINT32)
def test_fromI32(self):
self.assertEquals(self.c.fromString('INT32'), ImplAttrType.INT32)
def test_fromUI64(self):
self.assertEquals(self.c.fromString('UINT64'), ImplAttrType.UINT64)
def test_fromI64(self):
self.assertEquals(self.c.fromString('INT64'), ImplAttrType.INT64)
def test_fromFloat(self):
self.assertEquals(self.c.fromString('FLOAT'), ImplAttrType.FLOAT)
def test_fromEnum(self):
self.assertEquals(self.c.fromString('ENUM'), ImplAttrType.ENUM)
def test_fromString(self):
self.assertEquals(self.c.fromString('STRING'), ImplAttrType.STRING)
def test_fromBoolean(self):
self.assertEquals(self.c.fromString('BOOLEAN'), ImplAttrType.BOOLEAN)
def test_Exc(self):
self.assertRaises(KeyError, self.c.fromString,"INVALID")
class BaseAttributeDefinition(unittest.TestCase):
def setUp(self):
self.extras = Extras(Extras.BOOL_VALUES, {'boolValues': None})
self.values = (ImplAttrType.BOOLEAN, 'STARTUPHOOK', False, False, None, '', self.extras )
def tearDown(self):
del self.extras
del self.values
class TestAttributeDefinitiom(BaseAttributeDefinition):
def testSetupInstanceMethodMustBeOverwritten(self):
self.assertRaises(NotImplementedError, AttributeDefinition, None, None, None, None, None, None)
class TestAttributeDefinitionFactory(BaseAttributeDefinition):
def testCreateBooleanAttributeDefinition(self):
self.assertIsInstance(AttributeDefinitionFactory(*self.values), BooleanAttribute )
def testCreateEnumAttributeDefinition(self):
        enumeration = Enumeration((Enumerator(u'ECC1', None, ''), Enumerator(u'BCC1', None, ''),
            Enumerator(u'BCC2', None, ''), Enumerator(u'ECC2', None, ''))
)
extras = Extras(Extras.ENUMERATION, {'enumeration': enumeration})
values = (ImplAttrType.ENUM, 'CC', False, False, None, '', extras )
self.assertIsInstance(AttributeDefinitionFactory(*values), EnumAttribute )
def testCreateUint32AttributeDefinition(self):
extra = Extras(Extras.NUMBER_RANGE, {'range': True, 'numberFrom': 32, 'numberTo': 128})
values = (ImplAttrType.UINT32, 'IDLE_TASK_STACK_SIZE', False, False, 48, None, extra)
self.assertIsInstance(AttributeDefinitionFactory(*values), IntegerAttribute )
## TODO:Other integer types!!!
def testCreateFloatAttributeDefinition(self):
extra = Extras(Extras.FLOAT_RANGE, {'range': True, 'numberFrom': 32, 'numberTo': 128})
values = (ImplAttrType.FLOAT, 'IDLE_TASK_STACK_SIZE', False, False, 48.0, None, extra)
self.assertIsInstance(AttributeDefinitionFactory(*values), FloatAttribute )
# TODO: enum Tests: default == AUTO, default != AUTO ( CALLSCHEDULE = UNKNOWN).
def testCreateStringAttributeDefinition(self):
extra = Extras(Extras.DUMMY)
values = (ImplAttrType.STRING, ' KOIL_VERSION', False, False, '2.2', None, extra)
self.assertIsInstance(AttributeDefinitionFactory(*values), StringAttribute )
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | -8,186,240,278,514,551,000 | 34.355204 | 148 | 0.617585 | false |
thesealion/writelightly | writelightly/main.py | 1 | 5266 | import curses
import datetime
import sys
from writelightly.calendar import Calendar
from writelightly.conf import Config
from writelightly.edit import edit_date, get_edits, clean_tmp, show_edits
from writelightly.metadata import Metadata
from writelightly.screen import ScreenManager, TextArea
from writelightly.tags import show_tags, show_tag
from writelightly.utils import entry_exists, parse_date, WLError, WLQuit
import locale
locale.setlocale(locale.LC_ALL, ('en_US', 'UTF-8'))
def show_calendar():
"""Show an interactive calendar.
Show the calendar on the left side of the screen and some metadata about
the selected date on the right. Any entry can be edited in external editor.
"""
today = datetime.date.today()
year, month = today.year, today.month
cal = Calendar(year, month, today.day, entry_exists)
metadata = Metadata.get(year, month)
text_area = TextArea()
ScreenManager.draw_all()
d = cal.get_current_date()
text_area.show_text(metadata.text(d.day))
keys = Config.calendar_keys
while 1:
try:
kn = curses.keyname(cal.window.getch())
except KeyboardInterrupt:
break
except ValueError:
continue
if kn in Config.general_keys['quit']:
raise WLQuit
if kn in Config.general_keys['quit_mode']:
break
if kn == 'KEY_RESIZE':
ScreenManager.resize()
if cal.hidden:
continue
if kn in keys['left']:
moved = cal.move_left()
if not moved:
cal = cal.get_previous_calendar()
cal.draw()
metadata = Metadata.get(cal.year, cal.month)
text_area.show_text(metadata.text(cal.get_current_day()))
elif kn in keys['right']:
moved = cal.move_right()
if not moved:
cal = cal.get_next_calendar()
cal.draw()
metadata = Metadata.get(cal.year, cal.month)
text_area.show_text(metadata.text(cal.get_current_day()))
elif kn in keys['down']:
cal.move_down()
text_area.show_text(metadata.text(cal.get_current_day()))
elif kn in keys['up']:
cal.move_up()
text_area.show_text(metadata.text(cal.get_current_day()))
elif kn in keys['edit']:
date = cal.get_current_date()
edit_date(date)
metadata.load_day(date.day)
cal.set_active(entry_exists(date))
text_area.show_text(metadata.text(date.day))
elif kn in keys['tags']:
show_tags(cal.area_id, text_area)
ScreenManager.restore_area(cal.area_id)
cal.reinit()
text_area.set_title()
text_area.show_text(metadata.text(cal.get_current_day()))
elif kn in keys['edits']:
date = cal.get_current_date()
edits = get_edits(date)
if edits:
show_edits(date, edits, text_area.area_id)
ScreenManager.restore_area(text_area.area_id)
text_area.show_text(metadata.text(date.day))
elif kn in keys['prev_month']:
cal = cal.get_previous_calendar(cal.get_current_day())
cal.draw()
metadata = Metadata.get(cal.year, cal.month)
text_area.show_text(metadata.text(cal.get_current_day()))
elif kn in keys['next_month']:
cal = cal.get_next_calendar(cal.get_current_day())
cal.draw()
metadata = Metadata.get(cal.year, cal.month)
text_area.show_text(metadata.text(cal.get_current_day()))
Metadata.write_all()
clean_tmp()
def edit_single_date(date):
"""Edit a single entry in external editor without initializing screen."""
date = parse_date(date)
if not date:
raise WLError('Unrecognised date format\n')
edit_date(date)
metadata = Metadata(date.year, date.month)
metadata.load_day(date.day)
metadata.write()
usage = '''Usage:
%(name)s
%(name)s ( <date> | today | yesterday )
%(name)s -t [<tag>]
''' % {'name': sys.argv[0]}
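# Hypothetical invocations (the exact date formats accepted depend on
# parse_date in writelightly.utils):
#   ./main.py 2012-03-01   -> edit that day's entry without the curses UI
#   ./main.py -t work      -> browse entries tagged "work"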
def wrapper(func, with_screen=False):
if with_screen:
ScreenManager.init()
error = None
try:
func()
except WLQuit:
pass
except WLError as exc:
error = exc
finally:
if with_screen:
ScreenManager.quit()
if error is not None:
sys.stderr.write('%s\n' % error)
def main():
from getopt import getopt, GetoptError
from functools import partial
try:
options, args = getopt(sys.argv[1:], 'th', ['help'])
except GetoptError as exc:
sys.stderr.write('%s\nTry `%s -h` for help\n' % (exc, sys.argv[0]))
sys.exit(1)
init_screen = True
option_names = [o[0] for o in options]
if '-h' in option_names or '--help' in option_names:
print usage
sys.exit()
if options:
if args:
func = partial(show_tag, args[0])
else:
func = show_tags
else:
if args:
func = partial(edit_single_date, args[0])
init_screen = False
else:
func = show_calendar
wrapper(func, init_screen)
| mit | -884,814,466,088,044,500 | 32.75641 | 79 | 0.583175 | false |
allofhercats/whiskey | proto/literal_print.py | 1 | 1544 | def get_n_digits(value, base):
n = 0
if value == 0:
return 1
else:
while value > 0:
value //= base
n += 1
return n
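# Quick illustrative checks (values chosen here, not part of the original):
# get_n_digits(255, 16) == 2, get_n_digits(0, 10) == 1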
def literal_int_to_string(value, base, width = 0, pad = '0', prefix = True):
rtn = ""
if base == 2 and prefix:
rtn = "0b"
elif base == 8 and prefix:
rtn = "0"
elif base == 16 and prefix:
rtn = "0x"
n_digits = get_n_digits(value, base)
if width > 0 and width > n_digits:
i = 0
while i < width - n_digits:
rtn += pad
i += 1
fac = base ** (n_digits - 1)
i = 0
while i < n_digits:
digit = (value // fac) % base
if base == 2:
rtn += "01"[digit]
elif base == 8:
rtn += "01234567"[digit]
elif base == 10:
rtn += "0123456789"[digit]
elif base == 16:
rtn += "0123456789abcdef"[digit]
else:
raise NotImplementedError()
fac //= base
i += 1
return rtn
def literal_float_to_string(value, precision = 5, strip = True):
if value < 0.0:
return "-" + literal_float_to_string(-value, precision, strip)
rtn = literal_int_to_string(int(value), 10)
rtn += "."
value1 = value - int(value)
n_stripped = 0
i = 0
while i < precision:
value1 *= 10.0
digit = int(value1) % 10
if digit == 0:
n_stripped += 1
else:
n_stripped = 0
if not strip:
rtn += "0123456789"[digit]
i += 1
if strip:
value1 = value - int(value)
i = 0
while i < precision - n_stripped:
value1 *= 10.0
digit = int(value1) % 10
rtn += "0123456789"[digit]
i += 1
return rtn
if __name__ == "__main__":
print(literal_float_to_string(3.1400000000001, 20))
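    # Extra illustrative checks for the integer formatter (values chosen here):
    print(literal_int_to_string(255, 16))        # -> 0xff
    print(literal_int_to_string(5, 2, width=8))  # -> 0b00000101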
| mit | 2,214,180,999,662,640,600 | 17.60241 | 76 | 0.577073 | false |
ngageoint/scale | scale/scale/settings.py | 1 | 14209 | """
Django settings for scale_test project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
import os
import scale
import sys
import dj_database_url
def get_env_boolean(variable_name, default=False):
return os.getenv(variable_name, str(default)).lower() in ('yes', 'true', 't', '1')
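# e.g. SESSION_COOKIE_SECURE=yes / true / t / 1 (any case) all parse to True;
# any other value, or an unset variable with default=False, yields False.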
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Project version
VERSION = scale.__version__
DOCKER_VERSION = scale.__docker_version__
# Mesos connection information. Default for -m
# This can be something like "127.0.0.1:5050"
# or a zookeeper url like 'zk://host1:port1,host2:port2,.../path`
MESOS_MASTER = os.getenv('MESOS_MASTER', 'zk://leader.mesos:2181/mesos')
# By default, use the '*' role, meaning only unreserved resource offers are received
MESOS_ROLE = os.getenv('MESOS_ROLE', '*')
# Sets the user that Docker runs Mesos tasks as. This should NEVER be set to
# root, and it must be a user name, NOT a Linux UID; Mesos chokes on UIDs.
CONTAINER_PROCESS_OWNER = os.getenv('CONTAINER_PROCESS_OWNER', 'nobody')
# By default, the accepted resources match reservations to the MESOS_ROLE
ACCEPTED_RESOURCE_ROLE = os.getenv('ACCEPTED_RESOURCE_ROLE', MESOS_ROLE)
# By default, all API calls require authentication.
PUBLIC_READ_API = get_env_boolean('PUBLIC_READ_API')
# Placeholder for service secret that will be overridden in local_settings_docker
SERVICE_SECRET = None
# Zookeeper URL for scheduler leader election. If this is None, only a single scheduler is used.
SCHEDULER_ZK = None
# The full name for the Scale Docker image (without version tag)
SCALE_DOCKER_IMAGE = 'geoint/scale'
# The location of the config file containing Docker credentials
# The URI value should point to an externally hosted location such as a webserver or hosted S3 bucket.
# The value will be an http URL such as 'http://static.mysite.com/foo/.dockercfg'
CONFIG_URI = None
# Directory for rotating metrics storage
METRICS_DIR = None
# fluentd warning levels, or -1 to disable warnings
FLUENTD_BUFFER_WARN = int(os.environ.get('FLUENTD_BUFFER_WARN', -1))
FLUENTD_BUFFER_SIZE_WARN = int(os.environ.get('FLUENTD_BUFFER_SIZE_WARN', -1))
# URL for fluentd, or None to disable fluentd
LOGGING_ADDRESS = os.environ.get('LOGGING_ADDRESS')
LOGGING_HEALTH_ADDRESS = os.environ.get('LOGGING_HEALTH_ADDRESS')
# Base URL of elasticsearch nodes
ELASTICSEARCH_URL = os.environ.get('ELASTICSEARCH_URL')
# Placeholder for elasticsearch version. Supplied in production by local_settings_docker.py
ELASTICSEARCH_VERSION = None
# Placeholder for Elasticsearch object. Needed for unit tests.
ELASTICSEARCH = None
DATABASE_URL = os.getenv('DATABASE_URL')
# Root URL for the Scale installation
SCALE_VHOST = os.getenv('SCALE_VHOST', 'localhost:8000')
# Broker URL for connection to messaging backend
BROKER_URL = 'amqp://guest:guest@localhost:5672//'
QUEUE_NAME = 'scale-command-messages'
MESSSAGE_QUEUE_DEPTH_WARN = int(os.environ.get('MESSSAGE_QUEUE_DEPTH_WARN', -1))
# Queue limit
SCHEDULER_QUEUE_LIMIT = int(os.environ.get('SCHEDULER_QUEUE_LIMIT', 500))
# The max number of times the scheduler will try to reconnect to
# mesos if disconnected.
SCHEDULER_MAX_RECONNECT = int(os.environ.get('SCHEDULER_MAX_RECONNECT', 3))
# Base URL of vault or DCOS secrets store, or None to disable secrets
SECRETS_URL = None
# Public token if DCOS secrets store, or privileged token for vault
SECRETS_TOKEN = None
# DCOS service account name, or None if not DCOS secrets store
DCOS_SERVICE_ACCOUNT = None
# Flag for raising SSL warnings associated with secrets transactions.
SECRETS_SSL_WARNINGS = True
# SECURITY WARNING: keep the secret key used in production secret!
INSECURE_DEFAULT_KEY = 'this-key-is-insecure-and-should-never-be-used-in-production'
SECRET_KEY = INSECURE_DEFAULT_KEY
# Used to write the superuser password
MESOS_SANDBOX = os.getenv('MESOS_SANDBOX')
# Security settings for production
SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SESSION_COOKIE_SECURE = get_env_boolean('SESSION_COOKIE_SECURE', True)
X_FRAME_OPTIONS = 'DENY'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
# authentication toggle, to be used for testing
AUTHENTICATION_ENABLED = get_env_boolean('AUTHENTICATION_ENABLED', True)
ALLOWED_HOSTS = ['localhost', '127.0.0.1']
# used primarily by debug-toolbar to dictate what client url has access
if os.environ.get('INTERNAL_IP'):
INTERNAL_IPS = [os.environ.get('INTERNAL_IP')]
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.gis',
'rest_framework',
'rest_framework.authtoken',
'debug_toolbar',
###############
# Social Auth #
###############
'oauth2_provider',
'social_django',
'rest_framework_social_oauth2',
# Scale apps
'accounts',
'batch',
'cli',
'data',
'diagnostic',
'error',
'ingest',
'job',
'mesos_api',
'messaging',
'metrics',
'node',
'product',
'queue',
'recipe',
'scheduler',
'shared_resource',
'source',
'storage',
'trigger',
'util',
'vault'
)
MIDDLEWARE = [
'debug_toolbar.middleware.DebugToolbarMiddleware',
'util.middleware.MultipleProxyMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'util.middleware.ExceptionLoggingMiddleware',
]
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'debug': False,
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
###############
# Social Auth #
###############
'social_django.context_processors.backends',
'social_django.context_processors.login_redirect',
],
},
},
]
AUTHENTICATION_BACKENDS = [
'django.contrib.auth.backends.ModelBackend',
]
REST_FRAMEWORK = {
'DEFAULT_FILTER_BACKENDS': (
'django_filters.rest_framework.DjangoFilterBackend',
),
'DEFAULT_PAGINATION_CLASS': 'util.rest.DefaultPagination',
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer',
'rest_framework.renderers.AdminRenderer',
),
'ALLOWED_VERSIONS': ('v6', 'v7'),
'DEFAULT_VERSION': 'v6',
'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.NamespaceVersioning',
}
if AUTHENTICATION_ENABLED:
REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES'] = (
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.TokenAuthentication',
###############
# Social Auth #
###############
'oauth2_provider.contrib.rest_framework.OAuth2Authentication',
'rest_framework_social_oauth2.authentication.SocialAuthentication',
)
REST_FRAMEWORK['DEFAULT_PERMISSION_CLASSES'] = (
'util.rest.ScaleAPIPermissions',
)
else:
REST_FRAMEWORK['DEFAULT_AUTHENTICATION_CLASSES'] = ()
REST_FRAMEWORK['DEFAULT_PERMISSION_CLASSES'] = ()
REST_FRAMEWORK['UNAUTHENTICATED_USER'] = None
ROOT_URLCONF = 'scale.urls'
WSGI_APPLICATION = 'scale.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': dj_database_url.config(default='sqlite://%s' % os.path.join(BASE_DIR, 'db.sqlite3'))
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
LOGIN_REDIRECT_URL = '/'
#############################
# GEOAxIS specific settings #
#############################
SOCIAL_AUTH_NEW_USER_REDIRECT_URL = '/'
# Redirect after directly hitting login endpoint
SOCIAL_AUTH_LOGIN_REDIRECT_URL = '/'
DEFAULT_AUTH_PIPELINE = (
'social_core.pipeline.social_auth.social_details',
'social_core.pipeline.social_auth.social_uid',
'social_core.pipeline.social_auth.auth_allowed',
'social_core.pipeline.social_auth.social_user',
'social_core.pipeline.user.get_username',
'social_core.pipeline.mail.mail_validation',
'social_core.pipeline.social_auth.associate_by_email',
'social_core.pipeline.user.create_user',
'social_core.pipeline.social_auth.associate_user',
'social_core.pipeline.social_auth.load_extra_data',
'social_core.pipeline.user.user_details'
)
SOCIAL_AUTH_GEOAXIS_KEY = os.getenv('GEOAXIS_KEY')
SOCIAL_AUTH_GEOAXIS_SECRET = os.getenv('GEOAXIS_SECRET')
SOCIAL_AUTH_GEOAXIS_HOST = os.getenv('GEOAXIS_HOST', 'geoaxis.gxaccess.com')
OAUTH_GEOAXIS_USER_FIELDS = os.getenv(
'GEOAXIS_USER_FIELDS', 'username, email, last_name, first_name')
SOCIAL_AUTH_GEOAXIS_USER_FIELDS = map(
str.strip, OAUTH_GEOAXIS_USER_FIELDS.split(','))
OAUTH_GEOAXIS_SCOPES = os.getenv('GEOAXIS_SCOPES', 'UserProfile.me')
SOCIAL_AUTH_GEOAXIS_SCOPE = map(str.strip, OAUTH_GEOAXIS_SCOPES.split(','))
# GeoAxisOAuth2 will cause all login attempt to fail if
# SOCIAL_AUTH_GEOAXIS_HOST is None
GEOAXIS_ENABLED = False
if SOCIAL_AUTH_GEOAXIS_KEY and len(SOCIAL_AUTH_GEOAXIS_KEY) > 0:
GEOAXIS_ENABLED = True
AUTHENTICATION_BACKENDS += (
'django_geoaxis.backends.geoaxis.GeoAxisOAuth2',
)
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = 'static/'
STATICFILES_DIRS = ()
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Logging configuration
LOG_DIR = os.path.join(BASE_DIR, 'logs')
if not os.path.exists(LOG_DIR):
os.makedirs(LOG_DIR)
LOG_NAME = 'scale'
LOG_FORMATTERS = {
'standard': {
'format': ('%(asctime)s %(levelname)s ' +
'[%(name)s(%(lineno)s)] %(message)s'),
'datefmt': '%Y-%m-%d %H:%M:%S',
},
'db-standard': {
'format': ('[%(name)s(%(lineno)s)] %(message)s'),
}
}
LOG_FILTERS = {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
},
'debug_info_only':{
'()':'scale.custom_logging.UserFilter',
}
}
LOG_HANDLERS = {
'null': {
'level': 'DEBUG',
'class': 'logging.NullHandler',
},
'mesoshttp' : {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'standard',
'stream': sys.stdout
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'standard',
'stream': sys.stdout
},
'console-stderr': {
'level': 'WARNING',
'class': 'logging.StreamHandler',
'formatter': 'standard',
'stream': sys.stderr
},
'console-stdout': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'standard',
'stream': sys.stdout,
'filters':['debug_info_only']
},
'log-db': {
'level': 'WARNING',
'class': 'error.handlers.DatabaseLogHandler',
'formatter': 'db-standard',
'model': 'error.models.LogEntry',
},
}
LOG_CONSOLE_DEBUG = {
'version': 1,
'formatters': LOG_FORMATTERS,
'filters': LOG_FILTERS,
'handlers': LOG_HANDLERS,
'loggers': {
'': {
'handlers': ['console-stdout','console-stderr'],
'level': 'DEBUG',
},
},
}
LOG_CONSOLE_INFO = {
'version': 1,
'formatters': LOG_FORMATTERS,
'filters': LOG_FILTERS,
'handlers': LOG_HANDLERS,
'loggers': {
'': {
'handlers': ['console-stdout','console-stderr'],
'level': 'INFO',
},
},
}
LOG_CONSOLE_ERROR = {
'version': 1,
'formatters': LOG_FORMATTERS,
'filters': LOG_FILTERS,
'handlers': LOG_HANDLERS,
'loggers': {
'': {
'handlers': ['console-stderr'],
'level': 'ERROR',
},
},
}
LOG_CONSOLE_WARNING = {
'version': 1,
'formatters': LOG_FORMATTERS,
'filters': LOG_FILTERS,
'handlers': LOG_HANDLERS,
'loggers': {
'': {
'handlers': ['console-stderr'],
'level': 'WARNING',
},
},
}
LOG_CONSOLE_CRITICAL = {
'version': 1,
'formatters': LOG_FORMATTERS,
'filters': LOG_FILTERS,
'handlers': LOG_HANDLERS,
'loggers': {
'': {
'handlers': ['console-stderr'],
'level': 'CRITICAL',
},
},
}
LOGGING = LOG_CONSOLE_DEBUG
# Hack to fix ISO8601 for datetime filters.
# This should be taken care of by a future django fix. And might even be handled
# by a newer version of django-rest-framework. Unfortunately, both of these solutions
# will accept datetimes without timezone information which we do not want to allow
# see https://code.djangoproject.com/tickets/23448
# Solution modified from http://akinfold.blogspot.com/2012/12/datetimefield-doesnt-accept-iso-8601.html
from django.forms import fields
from util.parse import parse_datetime
fields.DateTimeField.strptime = lambda _self, datetime_string, _format: parse_datetime(datetime_string)
| apache-2.0 | -3,697,003,481,016,563,700 | 29.622845 | 103 | 0.661553 | false |
TeamADEA/Hunger_Games | HG_Code/Model.py | 1 | 7064 | import numpy as np
import copy
import time
from Kat import Kat
from Visualize import Visualizer
from SimManager import sim_manager
from hg_settings import *
from Hunger_Grid import hunger_grid
import sys
import os
STEP_SIZE = 10 # 0 = only last frame,
# 1 = every frame,
# N = every N frames
# -1 = don't show
tki_breakdown = np.zeros(NUM_OF_GENERATIONS*6).reshape(NUM_OF_GENERATIONS, 6)
full_graph = np.zeros(NUM_OF_SPECIES*NUM_OF_GENERATIONS).reshape(NUM_OF_SPECIES, NUM_OF_GENERATIONS)
full_graph_bk = np.zeros(NUM_OF_SPECIES*2).reshape(NUM_OF_SPECIES, 2)
def run_model(from_lava = .02, to_lava = .02, from_berry = .05, to_berry = .05\
, from_mut=10, to_mut=10, from_gen = 33, to_gen = 33, \
t_name = 'Default', frames = -1):
global STEP_SIZE
STEP_SIZE = frames
progenitor = Kat(0,0)
grid = hunger_grid()
vis = Visualizer(grid)
start_time = time.time()
    # Calculate the seed settings for each species. This is used to run multiple
    # species in a row without needing to set them manually.
def calc_steps(from_num, to_num):
array = np.arange(1, NUM_OF_SPECIES+1, dtype='float')
        if(from_num == to_num):  # If the values match, fill with a single value
            array[:] = from_num
        else:
            # Fill with incremental steps
            inc = (float(to_num) - from_num) / float(NUM_OF_SPECIES)
            array = np.arange(from_num, to_num, inc, dtype='float')
return copy.deepcopy(array)
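    # Illustrative behaviour (ignoring float rounding), with NUM_OF_SPECIES == 4:
    #   calc_steps(0.02, 0.02) -> [0.02, 0.02, 0.02, 0.02]
    #   calc_steps(0.02, 0.06) -> [0.02, 0.03, 0.04, 0.05]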
    # Fill the arrays for each species; they determine the % of land per species
lava_chance_array = calc_steps(from_lava, to_lava)
berry_chance_array = calc_steps(from_berry, to_berry)
mutate_chance_array = calc_steps(from_mut, to_mut)
generate_chance_array = calc_steps(from_gen, to_gen)
    # Open the output file; it is named after the given test name.
file_name = t_name + '.txt'
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
out_file = open(os.path.join(__location__,file_name), 'a')
print "\n", generate_chance_array
print mutate_chance_array
for i in range(NUM_OF_SPECIES): # MAIN LOOP OF SIMULATION RUNNING
mutation_var = [mutate_chance_array[i]]
mutation_var.append(generate_chance_array[i])
grid = hunger_grid(lava_chance_array[i], berry_chance_array[i])
full_graph[i] = model(progenitor, vis, grid, i, mutation_var,t_name, out_file)
full_graph_bk[i] = [grid.lava_chance, grid.berry_chance]
    # Close the output file
out_file.close()
# DISPLAY VARIOUS GRAPHS AND PLOTS
tki_breakdown[:] /= NUM_OF_SPECIES
vis.graph(full_graph, full_graph_bk, t_name)
vis.ins_graph(tki_breakdown, t_name)
vis.chance_vs_fitness(full_graph, full_graph_bk, mutate_chance_array, generate_chance_array,t_name)
print("--- %s MODEL COMPLETE ---" % (t_name))
print("--- TIME TO COMPLETE MODEL: %s seconds ---" % (time.time() - start_time))
vis.show_plots()
def one_sim(seed_kat, grid, mut, gen, out_file, multi_cat=False):
"""Run one simulation of number of time steps (default: 300)
First initialize a sim_manager with first Kat agent.
Then update at each time steps, finally taking the top
Kat and top fitness score, returns it.
"""
if not multi_cat:
sim_temp = sim_manager(seed_kat, grid, mut)
top_kat = seed_kat
else:
sim_temp = sim_manager(seed_kat, grid, mut, multi_cat=True)
top_kat = seed_kat[0]
for i in range(NUM_OF_INDIVIDUALS):
sim_temp.clear_grid(grid)
sim_temp.start_kat(i)
for j in range(STEPS_PER_SIM):
if(sim_temp.kats[i].dead == False):
sim_temp.update(i, j)
else:
break
avg_fitness = sim_temp.average_fitness()
top_kats = sim_temp.top_kats() # ARRAY FOR DISPLAYING FITNESS
tki_breakdown[gen] += sim_temp.tk_breakdown() # FOR BREAKDOWN OF INSTRUCTIONS
#file output
for k in top_kats:
out_file.write("\nFitness: ")
out_file.write(str(k.calculate_fitness()))
out_file.write(k.print_ins_1(False))
for kat in top_kats:
kat.reset()
kat_temp, score_temp = sim_temp.top_kat()
return copy.deepcopy(kat_temp), score_temp, sim_temp.return_playback(),\
avg_fitness, copy.deepcopy(top_kats)
def playback(vis, pb, best_kats, gen, specie, t_name):
if (STEP_SIZE == -1):
return
if (STEP_SIZE == 0):
vis.show(pb[-1], best_kats, gen, specie, t_name)
else:
for i in np.arange(0,len(pb), STEP_SIZE):
vis.show(pb[i], copy.deepcopy(best_kats), gen, specie, t_name)
def model(seed_kat, vis, grid, specie, mut,t_name, out_file):
"""Run multiple simulation of number of time steps each,
(default: 300 simulations).
In a loop, keep running each simulation of 300
number of time steps, append the top fitness score,
and after loops ended, graph the fitness score over
generations (simulations).
"""
top_kats = []
avg_kats = []
print "Species:",specie,"| Gen: 1"
seed_kat, fit_score, play, avg_fitness, seed_kats = one_sim(seed_kat, grid, mut, 0,out_file)
top_kats.append(fit_score)
avg_kats.append(avg_fitness)
playback(vis, play, seed_kat, 1, specie+1, t_name)
    # File output
out_file.write("Species:")
out_file.write(str(specie))
out_file.write(" | Gen: 1\n")
if (NUM_OF_SPECIES > 1):
for i in np.arange(2, (NUM_OF_GENERATIONS+1)):
#file output
out_file.write("\nMODEL NAME: %s" % (t_name))
out_file.write("\n######### START: Species:")
out_file.write(str(specie+1))
out_file.write(" | Gen:")
out_file.write(str(i))
out_file.write("###########")
print "\nMODEL NAME: %s" % (t_name)
print "\n############### START: Species:",specie+1," OF ", NUM_OF_SPECIES ," | Gen:",i, "#######################\n"
temp_top = seed_kats
seed_kat, fit_score, play, avg_fitness, seed_kats = one_sim(seed_kats, grid, mut, (i-1),out_file, multi_cat=True)
if fit_score < top_kats[-1]:
seed_kats = temp_top
top_kats.append(top_kats[-1])
else:
top_kats.append(fit_score)
avg_kats.append(avg_fitness)
playback(vis, play,copy.deepcopy(seed_kats),i, specie+1, t_name)
print "\n############### END: Species:",specie+1," OF ", NUM_OF_SPECIES ," | Gen:",i, "#######################\n"
#file output
out_file.write("######### END: Species:")
out_file.write(str(specie+1))
out_file.write(" OF ")
out_file.write(str(NUM_OF_SPECIES))
out_file.write(" | Gen:")
out_file.write(str(i))
out_file.write("###########\n")
return copy.deepcopy(list(top_kats))
| mit | 4,120,856,940,087,549,000 | 37.601093 | 127 | 0.587061 | false |
chapware/aircrack | scripts/airdrop-ng/install.py | 1 | 3800 | #!/usr/bin/env python
__version__ = "1.13.2010.21:00"
__author__ = "Bryan Chapman <[email protected]>"
'''
This is the installer file for airdrop-ng. It first checks for
different dependencies, such as make, svn, etc.
'''
import os, sys
from shutil import rmtree
if os.geteuid() != 0:
print "Installer must be root to run. \nPlease 'su' or 'sudo -i' and try again. \nExiting..."
sys.exit(1)
class checkDepend:
def __init__ (self):
clear = "\n" *100
print clear
print "Checking for dependancies used by the installer..."
self.a = 0
self.deps = ["make", "svn", "tar", "gcc"]
        paths = ("/usr/bin/", "/usr/sbin/", "/usr/local/bin/", "/usr/local/sbin/", "/bin/")
        for depends in self.deps:
            if not any(os.path.isfile(path + depends) for path in paths):
                self.a = 1
                print depends + " not installed."
if self.a == 0:
print "All dependancies installed! Continuing...\n"
print "#### NOTE: For Ubuntu based distro's, \npython2.6-dev must be installed. Please \nmake sure it is installed before continuing!\n"
else:
print "Please install dependancies. Exiting...\n\n"
exit()
class installAirdrop:
def __init__(self):
print "Welcome to the airdrop-ng installer!\nYou will be prompted for installing\nAirdrop-ng, lorcon, and pylorcon.\n"
yno = raw_input ("Continue with installer? (y/n): ")
if yno == "y":
pass
else:
print "Fine, be that way. Exiting..."
exit()
yno = raw_input ("Install airdrop-ng? (y/n): ")
if yno == "y":
self.install()
else:
print "airdrop-ng not installed. Continuing..."
pass
def install(self):
print "Build exist? "
if os.path.isdir("build"):
rmtree("build") # imported from shutil, or shutil.rmtree()
print "File exists. Cleaning it..."
os.mkdir ("build")
else:
os.mkdir ("build")
print "Didn't exist. Creating..."
# moves everything to build/. This is to keep everything clean,
# and not clutter up the directory.
os.system ("cp airdrop-ng.py build/ && cp -r lib build/ && cp docs/airdrop-ng.1 build/")
print "Files copied. Now, moving to directory..."
os.chdir ("build")
if os.path.isdir("/usr/lib/airdrop-ng") == True:
rmtree ("/usr/lib/airdrop-ng")
print "Moving airdrop-ng to /usr/bin, lib to \n/usr/lib/airdrop-ng, and installing man pages..."
os.system ("cp airdrop-ng.py /usr/bin/airdrop-ng && cp -r lib /usr/lib/airdrop-ng && cp airdrop-ng.1 /usr/share/man/man1/")
#os.chdir ("..")
print "airdrop-ng installed! =)"
class installLorcon:
def __init__(self):
yno = raw_input ("Would you like to install lorcon? (y/n): ")
if yno == "y":
print "Running svn co http://802.11ninja.net/svn/lorcon/branch/lorcon-old. This may take a while..."
os.system ("svn co http://802.11ninja.net/svn/lorcon/branch/lorcon-old")
os.chdir("lorcon-old")
os.system ("./configure && make && make install")
print "Creating symlinks..."
os.system ("ln -s /usr/local/lib/liborcon-1.0.0.so /usr/lib")
os.chdir("..")
else:
print "Lorcon wasn't installed. "
class installPylorcon:
def __init__(self):
yno = raw_input ("Would you like to install pylorcon? (y/n): ")
if yno == "y":
import urllib
urllib.urlretrieve("http://pylorcon.googlecode.com/files/pylorcon-3.tar.bz2", "pylorcon-3.tar.bz2")
os.system ("tar -xvf pylorcon-3.tar.bz2")
os.chdir ("pylorcon")
os.system ("python setup.py install")
os.chdir("..")
# What actually runs the classes
checkDepend()
installAirdrop()
installLorcon()
installPylorcon()
yno = raw_input ("Clean up? (y/n): ")
if yno == "y":
os.chdir("..")
if os.path.isdir("build") == True:
rmtree("build")
print "Operation(s) complete! May the source be with you. =) "
sys.exit()
| gpl-2.0 | 3,112,824,450,673,922,600 | 30.147541 | 230 | 0.649211 | false |
ppries/tensorflow | tensorflow/contrib/framework/python/ops/variables.py | 1 | 25287 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Variable functions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import re
from tensorflow.contrib.framework.python.ops import add_arg_scope as contrib_add_arg_scope
from tensorflow.contrib.framework.python.ops import gen_variable_ops
from tensorflow.contrib.util import loader
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.framework import device as tf_device
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.ops import gen_state_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.platform import resource_loader
from tensorflow.python.training import saver as tf_saver
from tensorflow.python.training import training_util
__all__ = ['add_model_variable',
'assert_global_step',
'assert_or_get_global_step',
'assign_from_checkpoint',
'assign_from_checkpoint_fn',
'assign_from_values',
'assign_from_values_fn',
'create_global_step',
'filter_variables',
'get_global_step',
'get_or_create_global_step',
'get_local_variables',
'get_model_variables',
'get_unique_variable',
'get_variables_by_name',
'get_variables_by_suffix',
'get_variables_to_restore',
'get_variables',
'local_variable',
'model_variable',
'variable',
'VariableDeviceChooser',
'zero_initializer']
def zero_initializer(ref, use_locking=True, name="zero_initializer"):
"""Initialize 'ref' with all zeros, ref tensor should be uninitialized.
If already initialized, you will get ValueError. This op is intended to
save memory during initialization.
Args:
    ref: ref of the tensor that needs to be zero initialized.
    name: optional name for this operation.
  Returns:
    ref that is initialized.
Raises:
ValueError: If ref tensor is initialized.
"""
loader.load_op_library(
resource_loader.get_path_to_datafile("_variable_ops.so"))
return gen_variable_ops.zero_initializer(ref, name=name)
def assert_global_step(global_step_tensor):
training_util.assert_global_step(global_step_tensor)
def assert_or_get_global_step(graph=None, global_step_tensor=None):
"""Verifies that a global step tensor is valid or gets one if None is given.
If `global_step_tensor` is not None, check that it is a valid global step
tensor (using `assert_global_step`). Otherwise find a global step tensor using
`get_global_step` and return it.
Args:
graph: The graph to find the global step tensor for.
global_step_tensor: The tensor to check for suitability as a global step.
If None is given (the default), find a global step tensor.
Returns:
A tensor suitable as a global step, or `None` if none was provided and none
was found.
"""
if global_step_tensor is None:
# Get the global step tensor the same way the supervisor would.
global_step_tensor = get_global_step(graph)
else:
assert_global_step(global_step_tensor)
return global_step_tensor
def get_global_step(graph=None):
return training_util.get_global_step(graph)
def create_global_step(graph=None):
"""Create global step tensor in graph.
Args:
graph: The graph in which to create the global step. If missing, use default
graph.
Returns:
Global step tensor.
Raises:
ValueError: if global step key is already defined.
"""
graph = ops.get_default_graph() if graph is None else graph
if get_global_step(graph) is not None:
raise ValueError('"global_step" already exists.')
# Create in proper graph and base name_scope.
with graph.as_default() as g, g.name_scope(None):
collections = [ops.GraphKeys.GLOBAL_VARIABLES, ops.GraphKeys.GLOBAL_STEP]
return variable(ops.GraphKeys.GLOBAL_STEP, shape=[], dtype=dtypes.int64,
initializer=init_ops.zeros_initializer, trainable=False,
collections=collections)
def get_or_create_global_step(graph=None):
"""Returns and create (if necessary) the global step variable.
Args:
graph: The graph in which to create the global step. If missing, use default
graph.
Returns:
the tensor representing the global step variable.
"""
graph = ops.get_default_graph() if graph is None else graph
globalstep = get_global_step(graph)
if globalstep is None:
globalstep = create_global_step(graph)
return globalstep
def local_variable(initial_value, validate_shape=True, name=None):
"""Create variable and add it to `GraphKeys.LOCAL_VARIABLES` collection.
Args:
initial_value: See variables.Variable.__init__.
validate_shape: See variables.Variable.__init__.
name: See variables.Variable.__init__.
Returns:
New variable.
"""
return variables.Variable(
initial_value, trainable=False,
collections=[ops.GraphKeys.LOCAL_VARIABLES],
validate_shape=validate_shape, name=name)
@contrib_add_arg_scope
def variable(name, shape=None, dtype=None, initializer=None,
regularizer=None, trainable=True, collections=None,
caching_device=None, device=None,
partitioner=None, custom_getter=None):
"""Gets an existing variable with these parameters or creates a new one.
Args:
name: the name of the new or existing variable.
shape: shape of the new or existing variable.
dtype: type of the new or existing variable (defaults to `DT_FLOAT`).
initializer: initializer for the variable if one is created.
regularizer: a (Tensor -> Tensor or None) function; the result of
applying it on a newly created variable will be added to the collection
GraphKeys.REGULARIZATION_LOSSES and can be used for regularization.
trainable: If `True` also add the variable to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
collections: A list of collection names to which the Variable will be added.
If None it would default to `tf.GraphKeys.GLOBAL_VARIABLES`.
caching_device: Optional device string or function describing where the
Variable should be cached for reading. Defaults to the Variable's
device.
device: Optional device to place the variable. It can be an string or a
function that is called to get the device for the variable.
partitioner: Optional callable that accepts a fully defined `TensorShape`
and dtype of the `Variable` to be created, and returns a list of
partitions for each axis (currently only one axis can be partitioned).
custom_getter: Callable that allows overwriting the internal
get_variable method and has to have the same signature.
Returns:
The created or existing variable.
"""
collections = list(collections or [ops.GraphKeys.GLOBAL_VARIABLES])
# Remove duplicates
collections = set(collections)
getter = variable_scope.get_variable
if custom_getter is not None:
getter = custom_getter
with ops.device(device or ''):
return getter(name, shape=shape, dtype=dtype,
initializer=initializer,
regularizer=regularizer,
trainable=trainable,
collections=collections,
caching_device=caching_device,
partitioner=partitioner)
@contrib_add_arg_scope
def model_variable(name, shape=None, dtype=dtypes.float32, initializer=None,
regularizer=None, trainable=True, collections=None,
caching_device=None, device=None, partitioner=None,
custom_getter=None):
"""Gets an existing model variable with these parameters or creates a new one.
Args:
name: the name of the new or existing variable.
shape: shape of the new or existing variable.
dtype: type of the new or existing variable (defaults to `DT_FLOAT`).
initializer: initializer for the variable if one is created.
regularizer: a (Tensor -> Tensor or None) function; the result of
applying it on a newly created variable will be added to the collection
GraphKeys.REGULARIZATION_LOSSES and can be used for regularization.
trainable: If `True` also add the variable to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
collections: A list of collection names to which the Variable will be added.
Note that the variable is always also added to the
`GraphKeys.GLOBAL_VARIABLES` and `GraphKeys.MODEL_VARIABLES` collections.
caching_device: Optional device string or function describing where the
Variable should be cached for reading. Defaults to the Variable's
device.
device: Optional device to place the variable. It can be an string or a
function that is called to get the device for the variable.
partitioner: Optional callable that accepts a fully defined `TensorShape`
and dtype of the `Variable` to be created, and returns a list of
partitions for each axis (currently only one axis can be partitioned).
custom_getter: Callable that allows overwriting the internal
get_variable method and has to have the same signature.
Returns:
The created or existing variable.
"""
collections = list(collections or [])
collections += [ops.GraphKeys.GLOBAL_VARIABLES, ops.GraphKeys.MODEL_VARIABLES]
var = variable(name, shape=shape, dtype=dtype,
initializer=initializer, regularizer=regularizer,
trainable=trainable, collections=collections,
caching_device=caching_device, device=device,
partitioner=partitioner, custom_getter=custom_getter)
return var
def add_model_variable(var):
"""Adds a variable to the `GraphKeys.MODEL_VARIABLES` collection.
Args:
var: a variable.
"""
if var not in ops.get_collection(ops.GraphKeys.MODEL_VARIABLES):
ops.add_to_collection(ops.GraphKeys.MODEL_VARIABLES, var)
def get_variables(scope=None, suffix=None,
collection=ops.GraphKeys.GLOBAL_VARIABLES):
"""Gets the list of variables, filtered by scope and/or suffix.
Args:
scope: an optional scope for filtering the variables to return. Can be a
variable scope or a string.
suffix: an optional suffix for filtering the variables to return.
collection: in which collection search for. Defaults to
`GraphKeys.GLOBAL_VARIABLES`.
Returns:
a list of variables in collection with scope and suffix.
"""
if isinstance(scope, variable_scope.VariableScope):
scope = scope.name
if suffix is not None:
if ':' not in suffix:
suffix += ':'
scope = (scope or '') + '.*' + suffix
return ops.get_collection(collection, scope)
def get_model_variables(scope=None, suffix=None):
"""Gets the list of model variables, filtered by scope and/or suffix.
Args:
scope: an optional scope for filtering the variables to return.
suffix: an optional suffix for filtering the variables to return.
Returns:
a list of variables in collection with scope and suffix.
"""
return get_variables(scope, suffix, ops.GraphKeys.MODEL_VARIABLES)
def get_local_variables(scope=None, suffix=None):
"""Gets the list of local variables, filtered by scope and/or suffix.
Args:
scope: an optional scope for filtering the variables to return.
suffix: an optional suffix for filtering the variables to return.
Returns:
a list of variables in collection with scope and suffix.
"""
return get_variables(scope, suffix, ops.GraphKeys.LOCAL_VARIABLES)
def get_variables_to_restore(include=None, exclude=None):
"""Gets the list of the variables to restore.
Args:
include: an optional list/tuple of scope strings for filtering which
variables from the VARIABLES collection to include. None would include all
the variables.
exclude: an optional list/tuple of scope strings for filtering which
variables from the VARIABLES collection to exclude. None it would not
exclude any.
Returns:
a list of variables to restore.
Raises:
TypeError: include or exclude is provided but is not a list or a tuple.
"""
if include is None:
# Include all variables.
vars_to_include = get_variables()
else:
if not isinstance(include, (list, tuple)):
raise TypeError('include is provided but is not a list or a tuple.')
vars_to_include = []
for scope in include:
vars_to_include += get_variables(scope)
vars_to_exclude = set()
if exclude is not None:
if not isinstance(exclude, (list, tuple)):
raise TypeError('exclude is provided but is not a list or a tuple.')
for scope in exclude:
vars_to_exclude |= set(get_variables(scope))
# Exclude the variables in vars_to_exclude
return [v for v in vars_to_include if v not in vars_to_exclude]
def get_variables_by_suffix(suffix, scope=None):
"""Gets the list of variables that end with the given suffix.
Args:
suffix: suffix for filtering the variables to return.
scope: an optional scope for filtering the variables to return.
Returns:
a copied list of variables with the given name and prefix.
"""
return get_variables(scope=scope, suffix=suffix)
def get_variables_by_name(given_name, scope=None):
"""Gets the list of variables that were given that name.
Args:
given_name: name given to the variable without any scope.
scope: an optional scope for filtering the variables to return.
Returns:
a copied list of variables with the given name and scope.
"""
suffix = '/' + given_name + ':|^' + given_name + ':'
return get_variables(scope=scope, suffix=suffix)
def get_unique_variable(var_op_name):
"""Gets the variable uniquely identified by that var_op_name.
Args:
var_op_name: the full name of the variable op, including the scope.
Returns:
a tensorflow variable.
Raises:
ValueError: if no variable uniquely identified by the name exists.
"""
candidates = get_variables(scope=var_op_name)
if not candidates:
    raise ValueError("Couldn't find variable %s" % var_op_name)
for candidate in candidates:
if candidate.op.name == var_op_name:
return candidate
  raise ValueError('Variable %s does not uniquely identify a variable' % var_op_name)
def assign_from_values(var_names_to_values):
"""Creates an assignment operation from a given mapping.
This function provides a mechanism for performing assignment of variables
to values in a way that does not fill the graph with large assignment values.
Args:
var_names_to_values: A map from variable names to values.
Returns:
assign_op: An `Operation` that assigns each of the given variables to the
requested values.
feed_dict: The feed dictionary to use when evaluating `assign_op`.
Raises:
ValueError: if any of the given variable names were not found.
"""
feed_dict = {}
assign_ops = []
for var_name in var_names_to_values:
var_value = var_names_to_values[var_name]
var = ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES, var_name)
if not var:
      raise ValueError("Variable %s wasn't found" % var_name)
elif len(var) > 1:
# tf.get_collection is just a filter on the prefix: find the exact match:
found = False
for v in var:
if v.op.name == var_name:
var = v
found = True
break
if not found:
        raise ValueError("Variable %s doesn't uniquely identify a variable" % var_name)
else:
var = var[0]
# TODO(nsilberman): ensure placeholder and assign are on the same device.
# Assign a placeholder to the value that will be filled later.
placeholder_name = 'placeholder/' + var.op.name
placeholder_value = array_ops.placeholder(
dtype=var.dtype.base_dtype,
shape=var.get_shape(),
name=placeholder_name)
assign_ops.append(var.assign(placeholder_value))
feed_dict[placeholder_value] = var_value.reshape(var.get_shape())
assign_op = control_flow_ops.group(*assign_ops)
return assign_op, feed_dict
def assign_from_values_fn(var_names_to_values):
"""Returns a function that assigns specific variables from the given values.
This function provides a mechanism for performing assignment of variables
to values in a way that does not fill the graph with large assignment values.
Args:
var_names_to_values: A map from variable names to values.
Returns:
A function that takes a single argument, a `tf.Session`, that applies the
assignment operation.
Raises:
ValueError: if any of the given variable names were not found.
"""
assign_op, feed_dict = assign_from_values(var_names_to_values)
def callback(session):
return session.run(assign_op, feed_dict)
return callback
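# Usage sketch (the variable name and shape below are illustrative only):
#
#   init_fn = assign_from_values_fn({'weights': np.zeros((4, 4))})
#   with tf.Session() as sess:
#     init_fn(sess)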
# TODO(nsilberman): add flag to load exponential moving averages instead
def assign_from_checkpoint(model_path, var_list):
"""Creates an operation to assign specific variables from a checkpoint.
Args:
model_path: The full path to the model checkpoint. To get latest checkpoint
use `model_path = tf.train.latest_checkpoint(checkpoint_dir)`
var_list: A list of `Variable` objects or a dictionary mapping names in the
checkpoint to the corresponding variables to initialize. If empty or
None, it would return no_op(), None.
Returns:
the restore_op and the feed_dict that need to be run to restore var_list.
Raises:
ValueError: If the checkpoint specified at `model_path` is missing one of
the variables in `var_list`.
"""
reader = pywrap_tensorflow.NewCheckpointReader(model_path)
if isinstance(var_list, (tuple, list)):
var_list = {var.op.name: var for var in var_list}
feed_dict = {}
assign_ops = []
for checkpoint_var_name in var_list:
var = var_list[checkpoint_var_name]
if not reader.has_tensor(checkpoint_var_name):
raise ValueError(
'Checkpoint is missing variable [%s]' % checkpoint_var_name)
var_value = reader.get_tensor(checkpoint_var_name)
placeholder_name = 'placeholder/' + var.op.name
placeholder_value = array_ops.placeholder(
dtype=var.dtype.base_dtype,
shape=var.get_shape(),
name=placeholder_name)
assign_ops.append(var.assign(placeholder_value))
if var.get_shape() != var_value.shape:
raise ValueError(
'Total size of new array must be unchanged for %s '
'lh_shape: [%s], rh_shape: [%s]'
% (checkpoint_var_name, str(var_value.shape), str(var.get_shape())))
feed_dict[placeholder_value] = var_value.reshape(var.get_shape())
assign_op = control_flow_ops.group(*assign_ops)
return assign_op, feed_dict
def assign_from_checkpoint_fn(model_path, var_list, ignore_missing_vars=False,
reshape_variables=False):
"""Returns a function that assigns specific variables from a checkpoint.
Args:
model_path: The full path to the model checkpoint. To get latest checkpoint
use `model_path = tf.train.latest_checkpoint(checkpoint_dir)`
var_list: A list of `Variable` objects or a dictionary mapping names in the
      checkpoint to the corresponding variables to initialize. If empty or None,
it would return no_op(), None.
ignore_missing_vars: Boolean, if True it would ignore variables missing in
the checkpoint with a warning instead of failing.
reshape_variables: Boolean, if True it would automatically reshape variables
which are of different shape then the ones stored in the checkpoint but
which have the same number of elements.
Returns:
A function that takes a single argument, a `tf.Session`, that applies the
assignment operation.
Raises:
ValueError: If the checkpoint specified at `model_path` is missing one of
the variables in `var_list`.
"""
if ignore_missing_vars:
reader = pywrap_tensorflow.NewCheckpointReader(model_path)
if isinstance(var_list, dict):
var_dict = var_list
else:
var_dict = {var.op.name: var for var in var_list}
available_vars = {}
for var in var_dict:
if reader.has_tensor(var):
available_vars[var] = var_dict[var]
else:
logging.warning(
'Variable %s missing in checkpoint %s', var, model_path)
var_list = available_vars
saver = tf_saver.Saver(var_list, reshape=reshape_variables)
def callback(session):
saver.restore(session, model_path)
return callback
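# Usage sketch (the checkpoint path below is illustrative only):
#
#   init_fn = assign_from_checkpoint_fn('/tmp/model.ckpt', get_model_variables())
#   with tf.Session() as sess:
#     init_fn(sess)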
class VariableDeviceChooser(object):
"""Device chooser for variables.
When using a parameter server it will assign them in a round-robin fashion.
When not using a parameter server it allows GPU or CPU placement.
"""
def __init__(self,
num_tasks=0,
job_name='ps',
device_type='CPU',
device_index=0):
"""Initialize VariableDeviceChooser.
Usage:
To use with 2 parameter servers:
VariableDeviceChooser(2)
To use without parameter servers:
VariableDeviceChooser()
VariableDeviceChooser(device_type='GPU') # For GPU placement
Args:
num_tasks: number of tasks.
job_name: String, a name for the parameter server job.
device_type: Optional device type string (e.g. "CPU" or "GPU")
device_index: int. Optional device index. If left
unspecified, device represents 'any' device_index.
"""
self._job_name = job_name
self._device_type = device_type
self._device_index = device_index
self._num_tasks = num_tasks
self._next_task_id = 0
def __call__(self, op):
device_spec = tf_device.DeviceSpec(device_type=self._device_type,
device_index=self._device_index)
if self._num_tasks > 0:
task_id = self._next_task_id
self._next_task_id = (self._next_task_id + 1) % self._num_tasks
device_spec.job = self._job_name
device_spec.task = task_id
return device_spec.to_string()
def filter_variables(var_list, include_patterns=None, exclude_patterns=None,
reg_search=True):
"""Filter a list of variables using regular expressions.
First includes variables according to the list of include_patterns.
Afterwards, eliminates variables according to the list of exclude_patterns.
For example, one can obtain a list of variables with the weights of all
convolutional layers (depending on the network definition) by:
```python
variables = tf.contrib.framework.get_model_variables()
conv_weight_variables = tf.contrib.framework.filter_variables(
variables,
include_patterns=['Conv'],
exclude_patterns=['biases', 'Logits'])
```
Args:
var_list: list of variables.
include_patterns: list of regular expressions to include. Defaults to None,
which means all variables are selected according to the include rules.
A variable is included if it matches any of the include_patterns.
exclude_patterns: list of regular expressions to exclude. Defaults to None,
which means all variables are selected according to the exclude rules.
A variable is excluded if it matches any of the exclude_patterns.
reg_search: boolean. If True (default), performs re.search to find matches
(i.e. pattern can match any substring of the variable name). If False,
performs re.match (i.e. regexp should match from the beginning of the
variable name).
Returns:
filtered list of variables.
"""
if reg_search:
reg_exp_func = re.search
else:
reg_exp_func = re.match
# First include variables.
if include_patterns is None:
included_variables = list(var_list)
else:
included_variables = []
for var in var_list:
if any(reg_exp_func(ptrn, var.name) for ptrn in include_patterns):
included_variables.append(var)
# Afterwards, exclude variables.
if exclude_patterns is None:
filtered_variables = included_variables
else:
filtered_variables = []
for var in included_variables:
if not any(reg_exp_func(ptrn, var.name) for ptrn in exclude_patterns):
filtered_variables.append(var)
return filtered_variables
| apache-2.0 | -135,211,928,952,083,820 | 35.701016 | 90 | 0.693835 | false |
Daniel-Brosnan-Blazquez/DIT-100 | debugging/trajectory_planning_profiles/trapezoidal-profile.py | 1 | 7690 | import numpy
import time
from matplotlib import pyplot
def main (params):
angle = params['p0']
vel = params['v0']
sign = params['sign']
# Plan the trajectory if it is not planned
T = 0
Ta = 0
Td = 0
dt = params['dt']
if not params['trajectory']:
# Maximum acceleration and velocity values in degrees/s^2 and
# degrees/s respectively
amax = params['acc_limit_d']*sign*(-1)
vmax = params['vel_limit']*sign*(-1)
v0 = vel
h = angle
vlim = vmax
# Check if the trajectory is feasible
print "abs (amax*h) >= v0**2/2.0 = %s" % (abs (amax*h) >= v0**2/2.0)
if abs (amax*h) >= v0**2/2.0:
# The trajectory is feasible
# Check if the maximum value of velocity can be reached
if abs (h*amax) > vmax**2 - v0**2/2.0:
# The maximum value of velocity can be reached
Ta = (vmax - v0)/amax
Td = vmax/amax
term1 = abs (h/vmax)
term2 = (vmax/(2*amax)) * (1 - (v0/vmax))**2
term3 = (vmax/(2*amax))
T = term1 + term2 + term3
else:
# The maximum value of velocity can't be reached
vlim = ((abs (h * amax) + v0**2/2.0)**(1/2.0))*sign*(-1)
Ta = abs ((vlim - v0)/amax)
Td = abs (vlim/amax)
T = Ta + Td
# end if
# The time has to be positive
Ta = abs (Ta)
Td = abs (Td)
T = abs (T)
print "Ta = %s, Td = %s" % (Ta, Td)
params['trajectory'] = True
params['T'] = T
params['Ta'] = Ta
params['Td'] = Td
params['T_sign'] = sign*(-1)
params['vv'] = vlim
# if Ta > dt and Td > dt:
# params['trajectory'] = True
# params['T'] = T
# params['Ta'] = Ta
# params['Td'] = Td
# params['T_sign'] = sign*(-1)
# params['vv'] = vlim
# else:
# Ta = 0
# Td = 0
# T = 0
# end if
# end if
return
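# The three phases evaluated in plot() below follow the standard trapezoidal
# profile equations (h = displacement, v0 = initial velocity, vv = cruise
# velocity, Ta/Td = acceleration/deceleration times, T = total time):
#   accelerate:  p(t) = p0 + v0*t + ((vv - v0)/(2*Ta))*t^2      for 0 <= t < Ta
#   cruise:      p(t) = p0 + v0*Ta/2 + vv*(t - Ta/2)            for Ta <= t < T - Td
#   decelerate:  p(t) = 0 - (vv/(2*Td))*(T - t)^2               for T - Td <= t <= T
# The final position is 0 because the planner drives the angle error to zero.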
def plot (params):
t = 0
interval = params['dt']
# Sign
sign = params['T_sign']
# Maximum values
amax = params['acc_limit_d']*sign
vmax = params['vel_limit']*sign
# Buffers to store the motion
positions = []
vels = []
accs = []
# Initial values of the motion
v0 = params['v0']
p0 = params['p0']
vv = params['vv']
T = params['T']
Ta = params['Ta']
Td = params['Td']
# Acceleration phase
while t < Ta:
# Position
pos = p0 + v0*t + ((vv - v0)/(2*Ta))*t**2
positions.append (pos)
# Velocity
vel = v0 + ((vv - v0)/(Ta))*t
vels.append (vel)
# Acceleration
acc = (vv - v0)/Ta
accs.append (acc)
t += interval
# end while
# Constant velocity phase
while t < (T - Td):
# Position
pos = p0 + v0*(Ta/2.0) + vv*(t-(Ta/2.0))
positions.append (pos)
# Velocity
vel = vv
vels.append (vel)
# Acceleration
acc = 0
accs.append (acc)
t += interval
# end while
# Deceleration phase
while t < T:
# Position
pos = 0 - (vv/(2*Td))*(T-t)**2
positions.append (pos)
# Velocity
vel = (vv/Td)*(T-t)
vels.append (vel)
# Acceleration
acc = -(vv/Td)
accs.append (acc)
t += interval
# end while
fig = pyplot.figure (1, figsize = (20,10))
s = fig.add_subplot (311)
p, = s.plot(positions)
s.grid (True)
s.set_title ("position")
s = fig.add_subplot (312)
p, = s.plot(vels)
s.grid (True)
s.set_title ("velocity")
s = fig.add_subplot (313)
p, = s.plot(accs)
s.grid (True)
s.set_title ("acceleration")
pyplot.show ()
pyplot.close (1)
return
if __name__ == "__main__":
params = {}
# Period
params['dt'] = 0.015
# Flag to indicate if it is necessary to compute the trajectory
# (not needed here)
params['trajectory'] = False
# Velocity, acceleration and jerk limits in degrees/s^2
params['vel_limit'] = 150.0
rad_to_degrees = 180.0/numpy.pi
radius = 0.3
# m/s^2
params['acc_limit'] = 7.5
# degrees/s^2
params['acc_limit_d'] = (params['acc_limit']*rad_to_degrees)/radius
# # p0 = 0. Checked, trajectory unfeasible
# # p0
# params['p0'] = 0.0
# # v0
# params['v0'] = 100.0
# p0 > 50 v0 = 0. Checked, trajectory feasible
# p0
params['p0'] = 80.0
# v0
params['v0'] = 0.0
# # p0 > 50 v0 < limit. Checked, trajectory feasible
# # p0
# params['p0'] = 80.0
# # v0
# params['v0'] = 50.0
# # p0 > 50 v0 = limit. Checked, trajectory feasible
# # p0
# params['p0'] = 80.0
# # v0
# params['v0'] = 100.0
# # p0 > 50 v0 > limit. Checked, trajectory feasible
# # p0
# params['p0'] = 80.0
# # v0
# params['v0'] = -150.0
# # p0 < 50 p0 > 0 v0 = 0. Checked, trajectory feasible
# # p0
# params['p0'] = 20.0
# # v0
# params['v0'] = 0.0
# # p0 < 50 p0 > 0 v0 < limit. REVIEW IT!!!!!!!!!
# # p0
# params['p0'] = 20.0
# # v0
# params['v0'] = 50.0
# # p0 < 50 p0 > 0 v0 = limit. Checked, trajectory feasible
# # p0
# params['p0'] = 20.0
# # v0
# params['v0'] = 100.0
# # p0 < 50 p0 > 0 v0 > limit. Checked, trajectory feasible
# # p0
# params['p0'] = 20.0
# # v0
# params['v0'] = 150.0
# # p0 < -50 v0 = 0. Checked, trajectory feasible
# # p0
# params['p0'] = -80.0
# # v0
# params['v0'] = 0.0
# # p0 < -50 v0 < limit. Checked, trajectory feasible
# # p0
# params['p0'] = -80.0
# # v0
# params['v0'] = 50.0
# # p0 < -50 v0 = limit. Checked, trajectory feasible
# # p0
# params['p0'] = -80.0
# # v0
# params['v0'] = 100.0
# # p0 < -50 v0 > limit. Checked, trajectory feasible
# # p0
# params['p0'] = -80.0
# # v0
# params['v0'] = 150.0
# # p0 > -50 p0 < 0 v0 = 0. Checked, trajectory feasible
# # p0
# params['p0'] = -20.0
# # v0
# params['v0'] = 0.0
# # p0 > -50 p0 < 0 v0 < limit. Checked, trajectory feasible
# # p0
# params['p0'] = -20.0
# # v0
# params['v0'] = -50.0
# # p0 > -50 p0 < 0 v0 = limit. Checked, trajectory feasible
# # p0
# params['p0'] = -20.0
# # v0
# params['v0'] = 100.0
# # p0 > -50 p0 < 0 v0 > limit. Checked, trajectory feasible
# # p0
# params['p0'] = -20.0
# # v0
# params['v0'] = 150.0
# # p0 > -50 p0 < 0 v0 > limit. Checked, trajectory feasible
# # p0
# params['p0'] = -20.0
# # v0
# params['v0'] = 200.0
# sign
params['sign'] = 1
# params['sign'] = -1
# # p0
# params['p0'] = 11.0962258945
# # params['p0'] = 22.0
# # v0
# params['v0'] = 71.19
# # params['v0'] = 0.0
main(params)
print "Trajectory performed: %s" % params['trajectory']
if params['trajectory']:
T = params['T']
Ta = params['Ta']
Td = params['Td']
print "T = %s, Ta = %s, Td = %s" %(T, Ta, Td)
plot (params)
| gpl-3.0 | -1,022,844,989,781,801,000 | 23.258675 | 76 | 0.447854 | false |
Oreder/PythonSelfStudy | Exe_18.py | 1 | 8002 | # Creating class
#
# +++ Syntax +++
# class ClassName:
# 'Optional class documentation string'
# class_suite
#
class Employee:
'common base class for all employees'
empCount = 0
def __init__(self, name, salary):
self.name = name
self.salary = salary
Employee.empCount += 1
def displayCount(self):
print("Total Employee:", empCount)
def display(self):
print("Name:", self.name, "with Salary:", self.salary)
# Here,
# The variable empCount is a class variable whose value is shared among all
# instances of a this class. This can be accessed as Employee.empCount from
# inside the class or outside the class.
# The first method __init__() is a special method, which is called class
# constructor or initialization method that Python calls when you create a new
# instance of this class.
# You declare other class methods like normal functions with the exception that
# the first argument to each method is self. Python adds the self argument to
# the list for you; you do not need to include it when you call the methods.
#
# Creating Instance Objects
"This would create first object of Employee class"
emp1 = Employee("Zara", 2000)
"This would create second object of Employee class"
emp2 = Employee("Manni", 5000)
# Accessing Attributes
emp1.display()
emp2.display()
print("Total Employee:", Employee.empCount)
# We can add, remove, or modify attributes of classes and objects at any time
emp1.age = 7 # Add an 'age' attribute
print(emp1.age)
emp1.age = 8 # Modify 'age' attribute
print(emp1.age)
del emp1.age # Delete 'age' attribute
# Instead of using the normal statements to access attributes, we can use the
# following functions:
#
# The getattr(obj, name[, default]) : to access the attribute of object.
# The hasattr(obj,name) : to check if an attribute exists or not.
# The setattr(obj,name,value) : to set an attribute.
# If attribute does not exist, then it would be created.
# The delattr(obj, name) : to delete an attribute.
print(hasattr(emp1, 'age'))    # Returns True if 'age' attribute exists
setattr(emp1, 'age', 8)        # Set attribute 'age' to 8
print(getattr(emp1, 'age'))    # Returns value of 'age' attribute
delattr(emp1, 'age')           # Delete attribute 'age'
# +++ Built-In Class Attributes
# Every Python class keeps following built-in attributes and they can be accessed using
# dot operator like any other attribute:
# __dict__: Dictionary containing the class's namespace.
# __doc__: Class documentation string or none, if undefined.
# __name__: Class name.
# __module__: Module name in which the class is defined. This attribute is
# "__main__" in interactive mode.
# __bases__: A possibly empty tuple containing the base classes, in the order
# of their occurrence in the base class list.
print("Employee.__doc__:", Employee.__doc__)
print("Employee.__name__:", Employee.__name__)
print("Employee.__module__:", Employee.__module__)
print("Employee.__bases__:", Employee.__bases__)
print("Employee.__dict__:", Employee.__dict__)
# +++ Destroying Objects (Garbage Collection)
# The __del__() destructor prints the class name of an instance that is about to be destroyed.
class Point:
def __init__(self, x = 0, y = 0):
self.x = x
self.y = y
def __del__(self):
class_name = self.__class__.__name__
print(class_name, "is destroyed!")
p1 = Point()
p2 = p1
p3 = p1
print("Id(P1):", id(p1))
print("Id(P2):", id(p2))
print("Id(P3):", id(p3))
del p1
del p2
del p3
# +++ Class Inheritance +++
# ---------------------------------------------------------
# Syntax
# class SubClassName (ParentClass1[, ParentClass2, ...]):
# 'Optional class documentation string'
# class_suite
# ---------------------------------------------------------
class Parent: # define parent class
parentAttr = 100
def __init__(self):
print("Calling parent constructor")
def parentMethod(self):
print('Calling parent method')
def setAttr(self, attr):
Parent.parentAttr = attr
def getAttr(self):
print("Parent attribute:", Parent.parentAttr)
class Child(Parent): # define child class
def __init__(self):
print("Calling child constructor")
def childMethod(self):
print('Calling child method')
c = Child() # instance of child
c.childMethod() # child calls its method
c.parentMethod() # calls parent's method
c.setAttr(200) # again call parent's method
c.getAttr() # again call parent's method
# In a similar way, we can derive a class from multiple parent classes as follows:
# -----------------------------------------------
# class A: # define class A |
# ..... |
# class B: # define class B |
# ..... |
# class C(A, B): # subclass of A and B |
# ..... |
# -----------------------------------------------
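# A runnable sketch of the syntax above (classes A, B and C below are
# illustrative, not part of the original tutorial):
class A:
    def methodA(self):
        print("Calling A's method")

class B:
    def methodB(self):
        print("Calling B's method")

class C(A, B):          # C inherits from both A and B
    pass

c_multi = C()
c_multi.methodA()       # resolved on parent A
c_multi.methodB()       # resolved on parent B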
# +++ Overriding Methods +++
class Parent:
def myMethod(self):
print("Calling parent method")
class Child(Parent):
def myMethod(self):
print("Calling child method")
c = Child()
c.myMethod()
# +++ Base Overloading Methods
# ===========================================================
# Sr. No. # Method, Description and Sample Call #
# ===========================================================
# # __init__ ( self [,args...] ) #
# 1 # Constructor (with any optional arguments) #
# # Sample Call : obj = className(args) #
# -----------------------------------------------------------
# # __del__( self ) #
# 2 # Destructor, deletes an object #
# # Sample Call : del obj #
# -----------------------------------------------------------
# # __repr__( self ) #
# 3 # Evaluatable string representation #
# # Sample Call : repr(obj) #
# -----------------------------------------------------------
# # __str__( self ) #
# 4 # Printable string representation #
# # Sample Call : str(obj) #
# -----------------------------------------------------------
# # __cmp__ ( self, x ) #
# 5 # Object comparison #
# # Sample Call : cmp(obj, x) #
# ===========================================================
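# Note: __cmp__ and cmp() exist only in Python 2; in the Python 3 style used
# throughout this file, comparisons are written as rich comparison methods.
# A minimal sketch (the Amount class below is illustrative, not from the table):
class Amount:
    def __init__(self, value):
        self.value = value
    def __eq__(self, other):
        return self.value == other.value
    def __lt__(self, other):
        return self.value < other.value

print(Amount(3) == Amount(3))   # True
print(Amount(2) < Amount(3))    # True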
# +++ Overloading Operators: using __add__ method
class Vector:
def __init__(self, a, b):
self.a = a
self.b = b
def __str__(self):
return 'Vector (%d, %d)' % (self.a, self.b)
def __add__(self, other):
return Vector(self.a + other.a, self.b + other.b)
v1 = Vector(2, 10)
v2 = Vector(5, -2)
print(v1 + v2)
# Data Hiding
class JustCounter:
__secretCount = 0
def count(self):
self.__secretCount += 1
print(self.__secretCount)
counter = JustCounter()
counter.count()
counter.count()
print(counter.__secretCount) # Error!
# When the above code is executed, it produces the following result:
# 1
# 2
# Traceback (most recent call last):
# File "Exe_18.py", line 225, in <module>
# print counter.__secretCount
# AttributeError: JustCounter instance has no attribute '__secretCount'
#
# Python protects those members by internally changing the name to include the class
# name. We can access such attributes as
# object._className__attrName
# If we would replace our last line as following, then it works for us:
print(counter._JustCounter__secretCount) # Worked!
| mit | 4,672,089,774,815,598,000 | 32.241667 | 94 | 0.543369 | false |
ess-dmsc/do-ess-data-simulator | DonkiPlayer/DonkiOrchestraLib.py | 1 | 7360 | import zmq
import traceback
import socket
class CommunicationClass:
def __init__(self, name='director'):
self.context = zmq.Context()
self.poller = zmq.Poller()
self.pub_sock = None
self.sub_socks = {}
self.pub_tag = name
#
self.create_pub_socket()
#-----------------------------------------------------------------------------------
# create_pub_socket:
#
#-----------------------------------------------------------------------------------
def create_pub_socket(self):
try:
self.pub_sock = self.context.socket(zmq.PUB)
self.pub_port = self.pub_sock.bind_to_random_port("tcp://0.0.0.0")
print "PUB " + "tcp://" + str(self.pub_port)
except:
traceback.print_exc()
self.pub_sock = None
#-----------------------------------------------------------------------------------
# create_sub_socket:
#
#-----------------------------------------------------------------------------------
def create_sub_socket(self, name, url):
try:
if name in self.sub_socks:
self.poller.unregister(self.sub_socks[name])
self.sub_socks[name].close()
self.sub_socks[name] = self.context.socket(zmq.SUB)
self.sub_socks[name].setsockopt(zmq.SUBSCRIBE, '')
self.sub_socks[name].connect("tcp://"+str(url))
self.poller.register(self.sub_socks[name], zmq.POLLIN)
#print "SUB TO " + "tcp://" + str(url),self.sub_socks[name]
except:
traceback.print_exc()
print "tcp://"+str(url)
del self.sub_socks[name]
return False
return True
#-----------------------------------------------------------------------------------
# my_pub_socket_info :
#
#-----------------------------------------------------------------------------------
def my_pub_socket_info(self):
return socket.gethostname()+":"+str(self.pub_port)
#-----------------------------------------------------------------------------------
# publish_ack :
#
#-----------------------------------------------------------------------------------
def publish_ack(self, ack_tag, trg_start, trg_stop):
# At the moment just use send_pyobj
self.pub_sock.send_pyobj([ack_tag, trg_start,trg_stop])
#-----------------------------------------------------------------------------------
# publish_data :
#
#-----------------------------------------------------------------------------------
def publish_data(self, tag, trg_start, trg_stop, data_value):
# At the moment just use send_pyobj
self.pub_sock.send_pyobj(['data',tag.lower(), trg_start,trg_stop,data_value])
#-----------------------------------------------------------------------------------
# publish_info :
#
#-----------------------------------------------------------------------------------
def publish_info( self, priority = -1, data_names=[]):
# At the moment just use send_pyobj
self.pub_sock.send_pyobj(['info',{'prio':priority,'data':data_names}])
#-----------------------------------------------------------------------------------
# ask_for_info :
#
#-----------------------------------------------------------------------------------
def ask_for_info(self, srv_name, timeout_sec=1):
# At the moment just use send_pyobj
self.pub_sock.send_pyobj(["info", srv_name])
msg = []
sub_socket = self.sub_socks[srv_name]
max_retries = 5
retry = 0
while retry < max_retries and msg == []:
socks = dict(self.poller.poll((1000./max_retries)*timeout_sec))
#if len(socks) == 0:
# return msg
if sub_socket in socks and socks[sub_socket] == zmq.POLLIN:
try:
reply = sub_socket.recv_pyobj()
if reply[0] == 'info':
msg = reply[1]
except:
traceback.print_exc()
msg = []
retry += 1
return msg
#-----------------------------------------------------------------------------------
# ask_for_log :
#
#-----------------------------------------------------------------------------------
def ask_for_log(self, srv_name, timeout_sec=1):
# At the moment just use send_pyobj
self.pub_sock.send_pyobj(["playerlog", srv_name])
msg = []
sub_socket = self.sub_socks[srv_name]
max_retries = 5
retry = 0
while retry < max_retries and msg == []:
socks = dict(self.poller.poll((1000./max_retries)*timeout_sec))
#if len(socks) == 0:
# return msg
if sub_socket in socks and socks[sub_socket] == zmq.POLLIN:
try:
reply = sub_socket.recv_pyobj()
if reply[0] == 'data' and reply[1] == 'playerlog':
msg = reply[4]
except:
traceback.print_exc()
msg = []
retry += 1
return msg
#-----------------------------------------------------------------------------------
# wait_message :
#
#-----------------------------------------------------------------------------------
def wait_message(self, srv_names, timeout_sec=1):
try:
msg = {}
socks = dict(self.poller.poll(1000*timeout_sec))
if len(socks) == 0:
return msg
for sn in srv_names:
s = self.sub_socks[sn]
if s in socks and socks[s] == zmq.POLLIN:
recv_msg = s.recv_pyobj()
msg[sn] = recv_msg
except:
traceback.print_exc()
msg = None
return msg
#-----------------------------------------------------------------------------------
# publish_command :
#
#-----------------------------------------------------------------------------------
def publish_command(self, command, srv_name, argin=None, timeout_sec=1):
# At the moment just use send_pyobj
self.pub_sock.send_pyobj([command, srv_name, argin])
print "Sent command:", command, srv_name, argin
msg = []
sub_socket = self.sub_socks[srv_name]
max_retries = 5
retry = 0
while retry < max_retries and msg == []:
socks = dict(self.poller.poll((1000./max_retries)*timeout_sec))
if sub_socket in socks and socks[sub_socket] == zmq.POLLIN:
try:
reply = sub_socket.recv_pyobj()
if reply[0] == command and reply[1] == reply[2] == -1:
return True
except:
traceback.print_exc()
return False
retry += 1
return False
#-----------------------------------------------------------------------------------
# publish_trigger :
#
#-----------------------------------------------------------------------------------
def publish_trigger(self, trigger_value, priority):
# At the moment just use send_pyobj
self.pub_sock.send_pyobj(["trigger", trigger_value, priority])
| bsd-2-clause | -3,240,108,281,225,997,000 | 37.134715 | 85 | 0.38125 | false |
ragupta-git/ImcSdk | imcsdk/mometa/storage/StorageFlexFlashVirtualDrive.py | 1 | 7941 | """This module contains the general information for StorageFlexFlashVirtualDrive ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class StorageFlexFlashVirtualDriveConsts:
ADMIN_ACTION_DISABLE_VD = "disable-vd"
ADMIN_ACTION_ENABLE_VD = "enable-vd"
ADMIN_ACTION_ERASE_VD = "erase-vd"
ADMIN_ACTION_SYNC_VD = "sync-vd"
ADMIN_ACTION_UPDATE_VD = "update-vd"
class StorageFlexFlashVirtualDrive(ManagedObject):
"""This is StorageFlexFlashVirtualDrive class."""
consts = StorageFlexFlashVirtualDriveConsts()
naming_props = set([u'partitionId'])
mo_meta = {
"classic": MoMeta("StorageFlexFlashVirtualDrive", "storageFlexFlashVirtualDrive", "vd-[partition_id]", VersionMeta.Version202c, "InputOutput", 0x1f, [], ["admin", "read-only", "user"], [u'storageFlexFlashController'], [u'faultInst'], ["Get", "Set"]),
"modular": MoMeta("StorageFlexFlashVirtualDrive", "storageFlexFlashVirtualDrive", "vd-[partition_id]", VersionMeta.Version2013e, "InputOutput", 0x1f, [], ["admin", "read-only", "user"], [u'storageFlexFlashController'], [u'faultInst'], ["Get", "Set"])
}
prop_meta = {
"classic": {
"admin_action": MoPropertyMeta("admin_action", "adminAction", "string", VersionMeta.Version202c, MoPropertyMeta.READ_WRITE, 0x2, 0, 510, None, ["disable-vd", "enable-vd", "erase-vd", "sync-vd", "update-vd"], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version202c, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version202c, MoPropertyMeta.READ_WRITE, 0x8, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version202c, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version202c, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"drive_scope": MoPropertyMeta("drive_scope", "driveScope", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"drive_status": MoPropertyMeta("drive_status", "driveStatus", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"drive_type": MoPropertyMeta("drive_type", "driveType", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"host_accessible": MoPropertyMeta("host_accessible", "hostAccessible", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"last_operation_status": MoPropertyMeta("last_operation_status", "lastOperationStatus", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"operation_in_progress": MoPropertyMeta("operation_in_progress", "operationInProgress", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"partition_id": MoPropertyMeta("partition_id", "partitionId", "string", VersionMeta.Version202c, MoPropertyMeta.NAMING, None, 0, 510, None, [], []),
"size": MoPropertyMeta("size", "size", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"virtual_drive": MoPropertyMeta("virtual_drive", "virtualDrive", "string", VersionMeta.Version202c, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
},
"modular": {
"admin_action": MoPropertyMeta("admin_action", "adminAction", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x2, 0, 510, None, ["disable-vd", "enable-vd", "erase-vd", "sync-vd", "update-vd"], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x8, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version2013e, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"drive_scope": MoPropertyMeta("drive_scope", "driveScope", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"drive_status": MoPropertyMeta("drive_status", "driveStatus", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"drive_type": MoPropertyMeta("drive_type", "driveType", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"host_accessible": MoPropertyMeta("host_accessible", "hostAccessible", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"last_operation_status": MoPropertyMeta("last_operation_status", "lastOperationStatus", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"operation_in_progress": MoPropertyMeta("operation_in_progress", "operationInProgress", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"partition_id": MoPropertyMeta("partition_id", "partitionId", "string", VersionMeta.Version2013e, MoPropertyMeta.NAMING, None, 0, 510, None, [], []),
"size": MoPropertyMeta("size", "size", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
"virtual_drive": MoPropertyMeta("virtual_drive", "virtualDrive", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
},
}
prop_map = {
"classic": {
"adminAction": "admin_action",
"dn": "dn",
"rn": "rn",
"status": "status",
"childAction": "child_action",
"driveScope": "drive_scope",
"driveStatus": "drive_status",
"driveType": "drive_type",
"hostAccessible": "host_accessible",
"lastOperationStatus": "last_operation_status",
"operationInProgress": "operation_in_progress",
"partitionId": "partition_id",
"size": "size",
"virtualDrive": "virtual_drive",
},
"modular": {
"adminAction": "admin_action",
"dn": "dn",
"rn": "rn",
"status": "status",
"childAction": "child_action",
"driveScope": "drive_scope",
"driveStatus": "drive_status",
"driveType": "drive_type",
"hostAccessible": "host_accessible",
"lastOperationStatus": "last_operation_status",
"operationInProgress": "operation_in_progress",
"partitionId": "partition_id",
"size": "size",
"virtualDrive": "virtual_drive",
},
}
def __init__(self, parent_mo_or_dn, partition_id, **kwargs):
self._dirty_mask = 0
self.partition_id = partition_id
self.admin_action = None
self.status = None
self.child_action = None
self.drive_scope = None
self.drive_status = None
self.drive_type = None
self.host_accessible = None
self.last_operation_status = None
self.operation_in_progress = None
self.size = None
self.virtual_drive = None
ManagedObject.__init__(self, "StorageFlexFlashVirtualDrive", parent_mo_or_dn, **kwargs)
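# Minimal usage sketch (the parent dn and partition id below are illustrative):
# vd = StorageFlexFlashVirtualDrive(
#     parent_mo_or_dn="sys/chassis-1/board/storage-flexflash-FlexFlash-0",
#     partition_id="1")
# vd.admin_action = StorageFlexFlashVirtualDriveConsts.ADMIN_ACTION_ENABLE_VD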
| apache-2.0 | -984,964,799,914,068,200 | 65.175 | 258 | 0.625866 | false |
ErnieAllen/qpid-dispatch | tests/system_tests_protocol_settings.py | 1 | 16207 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import unittest2 as unittest
from system_test import TestCase, Qdrouterd, main_module
from proton.utils import BlockingConnection
import subprocess
X86_64_ARCH = "x86_64"
skip_test = True
# Dont skip tests on 64 bit architectures.
p = subprocess.Popen("uname -m", shell=True, stdout=subprocess.PIPE,
universal_newlines=True)
if X86_64_ARCH in p.communicate()[0]:
skip_test = False
class MaxFrameMaxSessionFramesTest(TestCase):
"""System tests setting proton negotiated size max-frame-size and incoming-window"""
@classmethod
def setUpClass(cls):
'''Start a router'''
super(MaxFrameMaxSessionFramesTest, cls).setUpClass()
name = "MaxFrameMaxSessionFrames"
config = Qdrouterd.Config([
('router', {'mode': 'standalone', 'id': 'QDR'}),
('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port(), 'maxFrameSize': '2048', 'maxSessionFrames': '10'}),
])
cls.router = cls.tester.qdrouterd(name, config)
cls.router.wait_ready()
cls.address = cls.router.addresses[0]
def test_max_frame_max_session_frames__max_sessions_default(self):
# Set up a connection to get the Open and a receiver to get a Begin frame in the log
bc = BlockingConnection(self.router.addresses[0])
bc.create_receiver("xxx")
bc.close()
with open('../setUpClass/MaxFrameMaxSessionFrames.log', 'r') as router_log:
log_lines = router_log.read().split("\n")
open_lines = [s for s in log_lines if "-> @open" in s]
# max-frame is from the config
self.assertTrue(' max-frame-size=2048,' in open_lines[0])
# channel-max is default
self.assertTrue(" channel-max=32767" in open_lines[0])
begin_lines = [s for s in log_lines if "-> @begin" in s]
# incoming-window is from the config
self.assertTrue(" incoming-window=10," in begin_lines[0] )
class MaxSessionsTest(TestCase):
"""System tests setting proton channel-max"""
@classmethod
def setUpClass(cls):
"""Start a router and a messenger"""
super(MaxSessionsTest, cls).setUpClass()
name = "MaxSessions"
config = Qdrouterd.Config([
('router', {'mode': 'standalone', 'id': 'QDR'}),
('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port(), 'maxSessions': '10'}),
])
cls.router = cls.tester.qdrouterd(name, config)
cls.router.wait_ready()
cls.address = cls.router.addresses[0]
def test_max_sessions(self):
# Set up a connection to get the Open and a receiver to get a Begin frame in the log
bc = BlockingConnection(self.router.addresses[0])
bc.create_receiver("xxx")
bc.close()
with open('../setUpClass/MaxSessions.log', 'r') as router_log:
log_lines = router_log.read().split("\n")
open_lines = [s for s in log_lines if "-> @open" in s]
# channel-max is 10
self.assertTrue(" channel-max=9" in open_lines[0])
class MaxSessionsZeroTest(TestCase):
"""System tests setting proton channel-max"""
@classmethod
def setUpClass(cls):
"""Start a router and a messenger"""
super(MaxSessionsZeroTest, cls).setUpClass()
name = "MaxSessionsZero"
config = Qdrouterd.Config([
('router', {'mode': 'standalone', 'id': 'QDR'}),
('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port(), 'maxSessions': '0'}),
])
cls.router = cls.tester.qdrouterd(name, config)
cls.router.wait_ready()
cls.address = cls.router.addresses[0]
def test_max_sessions_zero(self):
# Set up a connection to get the Open and a receiver to get a Begin frame in the log
bc = BlockingConnection(self.router.addresses[0])
bc.create_receiver("xxx")
bc.close()
with open('../setUpClass/MaxSessionsZero.log', 'r') as router_log:
log_lines = router_log.read().split("\n")
open_lines = [s for s in log_lines if "-> @open" in s]
# channel-max is 0. Should get proton default 32767
self.assertTrue(" channel-max=32767" in open_lines[0])
class MaxSessionsLargeTest(TestCase):
"""System tests setting proton channel-max"""
@classmethod
def setUpClass(cls):
"""Start a router and a messenger"""
super(MaxSessionsLargeTest, cls).setUpClass()
name = "MaxSessionsLarge"
config = Qdrouterd.Config([
('router', {'mode': 'standalone', 'id': 'QDR'}),
('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port(), 'maxSessions': '500000'}),
])
cls.router = cls.tester.qdrouterd(name, config)
cls.router.wait_ready()
cls.address = cls.router.addresses[0]
def test_max_sessions_large(self):
# Set up a connection to get the Open and a receiver to get a Begin frame in the log
bc = BlockingConnection(self.router.addresses[0])
bc.create_receiver("xxx")
bc.close()
with open('../setUpClass/MaxSessionsLarge.log', 'r') as router_log:
log_lines = router_log.read().split("\n")
open_lines = [s for s in log_lines if "-> @open" in s]
# channel-max is 0. Should get proton default 32767
self.assertTrue(" channel-max=32767" in open_lines[0])
class MaxFrameSmallTest(TestCase):
"""System tests setting proton max-frame-size"""
@classmethod
def setUpClass(cls):
"""Start a router and a messenger"""
super(MaxFrameSmallTest, cls).setUpClass()
name = "MaxFrameSmall"
config = Qdrouterd.Config([
('router', {'mode': 'standalone', 'id': 'QDR'}),
('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port(), 'maxFrameSize': '2'}),
])
cls.router = cls.tester.qdrouterd(name, config)
cls.router.wait_ready()
cls.address = cls.router.addresses[0]
def test_max_frame_small(self):
# Set up a connection to get the Open and a receiver to get a Begin frame in the log
bc = BlockingConnection(self.router.addresses[0])
bc.create_receiver("xxx")
bc.close()
with open('../setUpClass/MaxFrameSmall.log', 'r') as router_log:
log_lines = router_log.read().split("\n")
open_lines = [s for s in log_lines if "-> @open" in s]
# if frame size <= 512 proton set min of 512
self.assertTrue(" max-frame-size=512" in open_lines[0])
class MaxFrameDefaultTest(TestCase):
"""System tests setting proton max-frame-size"""
@classmethod
def setUpClass(cls):
"""Start a router and a messenger"""
super(MaxFrameDefaultTest, cls).setUpClass()
name = "MaxFrameDefault"
config = Qdrouterd.Config([
('router', {'mode': 'standalone', 'id': 'QDR'}),
('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port()}),
])
cls.router = cls.tester.qdrouterd(name, config)
cls.router.wait_ready()
cls.address = cls.router.addresses[0]
def test_max_frame_default(self):
# Set up a connection to get the Open and a receiver to get a Begin frame in the log
bc = BlockingConnection(self.router.addresses[0])
bc.create_receiver("xxx")
bc.close()
with open('../setUpClass/MaxFrameDefault.log', 'r') as router_log:
log_lines = router_log.read().split("\n")
open_lines = [s for s in log_lines if "-> @open" in s]
# if frame size not set then a default is used
self.assertTrue(" max-frame-size=16384" in open_lines[0])
class MaxSessionFramesDefaultTest(TestCase):
"""System tests setting proton max-frame-size"""
@classmethod
def setUpClass(cls):
"""Start a router and a messenger"""
super(MaxSessionFramesDefaultTest, cls).setUpClass()
name = "MaxSessionFramesDefault"
config = Qdrouterd.Config([
('router', {'mode': 'standalone', 'id': 'QDR'}),
('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port()}),
])
cls.router = cls.tester.qdrouterd(name, config)
cls.router.wait_ready()
cls.address = cls.router.addresses[0]
def test_max_session_frames_default(self):
# Set up a connection to get the Open and a receiver to get a Begin frame in the log
if skip_test:
return self.skipTest("Test skipped on non-64 bit architectures")
bc = BlockingConnection(self.router.addresses[0])
bc.create_receiver("xxx")
bc.close()
with open('../setUpClass/MaxSessionFramesDefault.log', 'r') as router_log:
log_lines = router_log.read().split("\n")
open_lines = [s for s in log_lines if "-> @open" in s]
# if frame size not set then a default is used
self.assertTrue(" max-frame-size=16384" in open_lines[0])
begin_lines = [s for s in log_lines if "-> @begin" in s]
# incoming-window is from the config
self.assertTrue(" incoming-window=2147483647," in begin_lines[0])
class MaxFrameMaxSessionFramesZeroTest(TestCase):
"""
    System tests setting proton-negotiated max-frame-size and incoming-window
    when they are both zero. The frame size is bumped up to the protocol minimum
    and the incoming window falls back to the proton default of 2^31-1.
"""
@classmethod
def setUpClass(cls):
'''Start a router'''
super(MaxFrameMaxSessionFramesZeroTest, cls).setUpClass()
name = "MaxFrameMaxSessionFramesZero"
config = Qdrouterd.Config([
('router', {'mode': 'standalone', 'id': 'QDR'}),
('listener', {'host': '0.0.0.0', 'port': cls.tester.get_port(), 'maxFrameSize': '0', 'maxSessionFrames': '0'}),
])
cls.router = cls.tester.qdrouterd(name, config)
cls.router.wait_ready()
cls.address = cls.router.addresses[0]
def test_max_frame_max_session_zero(self):
# Set up a connection to get the Open and a receiver to get a Begin frame in the log
if skip_test:
return self.skipTest("Test disabled on non-64 bit architectures")
bc = BlockingConnection(self.router.addresses[0])
bc.create_receiver("xxx")
bc.close()
with open('../setUpClass/MaxFrameMaxSessionFramesZero.log', 'r') as router_log:
log_lines = router_log.read().split("\n")
open_lines = [s for s in log_lines if "-> @open" in s]
# max-frame gets set to protocol min
self.assertTrue(' max-frame-size=512,' in open_lines[0])
begin_lines = [s for s in log_lines if "-> @begin" in s]
# incoming-window is defaulted to 2^31-1
self.assertTrue(" incoming-window=2147483647," in begin_lines[0])
class ConnectorSettingsDefaultTest(TestCase):
"""
The internal logic for protocol settings in listener and connector
is common code. This test makes sure that defaults in the connector
config make it to the wire.
"""
inter_router_port = None
@staticmethod
def ssl_config(client_server, connection):
return [] # Over-ridden by RouterTestSsl
@classmethod
def setUpClass(cls):
"""Start two routers"""
super(ConnectorSettingsDefaultTest, cls).setUpClass()
def router(name, client_server, connection):
config = cls.ssl_config(client_server, connection) + [
('router', {'mode': 'interior', 'id': 'QDR.%s' % name}),
('listener', {'port': cls.tester.get_port()}),
connection
]
config = Qdrouterd.Config(config)
cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))
cls.routers = []
inter_router_port = cls.tester.get_port()
router('A', 'server',
('listener', {'role': 'inter-router', 'port': inter_router_port}))
router('B', 'client',
('connector', {'name': 'connectorToA', 'role': 'inter-router', 'port': inter_router_port,
'verifyHostname': 'no'}))
cls.routers[0].wait_router_connected('QDR.B')
cls.routers[1].wait_router_connected('QDR.A')
def test_connector_default(self):
if skip_test:
return self.skipTest("Test disabled on non-64 bit architectures")
with open('../setUpClass/A.log', 'r') as router_log:
log_lines = router_log.read().split("\n")
open_lines = [s for s in log_lines if "<- @open" in s]
# defaults
self.assertTrue(' max-frame-size=16384,' in open_lines[0])
self.assertTrue(' channel-max=32767,' in open_lines[0])
begin_lines = [s for s in log_lines if "<- @begin" in s]
# defaults
self.assertTrue(" incoming-window=2147483647," in begin_lines[0])
class ConnectorSettingsNondefaultTest(TestCase):
"""
The internal logic for protocol settings in listener and connector
is common code. This test makes sure that settings in the connector
    config make it to the wire. The listener tests cover the setting logic.
"""
inter_router_port = None
@staticmethod
def ssl_config(client_server, connection):
return [] # Over-ridden by RouterTestSsl
@classmethod
def setUpClass(cls):
"""Start two routers"""
super(ConnectorSettingsNondefaultTest, cls).setUpClass()
def router(name, client_server, connection):
config = cls.ssl_config(client_server, connection) + [
('router', {'mode': 'interior', 'id': 'QDR.%s' % name}),
('listener', {'port': cls.tester.get_port()}),
connection
]
config = Qdrouterd.Config(config)
cls.routers.append(cls.tester.qdrouterd(name, config, wait=True))
cls.routers = []
inter_router_port = cls.tester.get_port()
router('A', 'server',
('listener', {'role': 'inter-router', 'port': inter_router_port}))
router('B', 'client',
('connector', {'name': 'connectorToA', 'role': 'inter-router', 'port': inter_router_port,
'maxFrameSize': '2048', 'maxSessionFrames': '10', 'maxSessions': '20',
'verifyHostname': 'no'}))
cls.routers[0].wait_router_connected('QDR.B')
cls.routers[1].wait_router_connected('QDR.A')
    def test_connector_nondefault(self):
with open('../setUpClass/A.log', 'r') as router_log:
log_lines = router_log.read().split("\n")
open_lines = [s for s in log_lines if "<- @open" in s]
# nondefaults
self.assertTrue(' max-frame-size=2048,' in open_lines[0])
self.assertTrue(' channel-max=19,' in open_lines[0])
begin_lines = [s for s in log_lines if "<- @begin" in s]
# nondefaults
self.assertTrue(" incoming-window=10," in begin_lines[0])
if __name__ == '__main__':
unittest.main(main_module())
| apache-2.0 | -6,041,756,895,692,285,000 | 39.31592 | 127 | 0.604554 | false |
acressity/acressity | narratives/forms.py | 1 | 2019 | from datetime import date
from django import forms
from narratives.models import Narrative
from django.forms.extras.widgets import SelectDateWidget
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from django.http import HttpResponse
from django.core.exceptions import PermissionDenied
TRANSFER_ACTION_CHOICES = (
('', '-----'),
(1, _('Transfer')),
(2, _('Copy')),
)
class NarrativeForm(forms.ModelForm):
date_created = forms.DateField(widget=SelectDateWidget(years=range(timezone.now().year, timezone.now().year - 110, -1)), required=False)
title = forms.CharField(widget=forms.TextInput(attrs={'class': 'larger', 'onfocus': 'if($(this).val()==this.defaultValue){$(this).val("")};', 'onblur': 'if($(this).val()==""){$(this).val(this.defaultValue)};'})) # default value moved to views.py
class Meta:
model = Narrative
exclude = ('gallery', 'author')
def __init__(self, *args, **kwargs):
self.author = kwargs.pop('author', None)
super(NarrativeForm, self).__init__(*args, **kwargs)
self.fields['experience'].queryset = self.author.experiences.all()
def save(self, commit=True):
instance = super(NarrativeForm, self).save(commit=False)
if self.author:
instance.author = self.author
if commit:
instance.save()
return instance
def clean_date_created(self):
date_created = self.cleaned_data.get('date_created')
if not date_created:
date_created = timezone.now()
return date_created
def clean_body(self):
body = self.cleaned_data.get('body')
if len(body) < 3:
raise forms.ValidationError('The narrative body needs a little more extrapolation')
return body
class NarrativeTransferForm(forms.ModelForm):
potential_actions = forms.ChoiceField(choices=TRANSFER_ACTION_CHOICES, required=False)
class Meta:
model = Narrative
fields = ('title',)
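# Minimal usage sketch (the view code below is illustrative, not part of this
# module):
# form = NarrativeForm(request.POST or None, author=request.user)
# if form.is_valid():
#     narrative = form.save()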
| gpl-3.0 | -5,569,418,809,155,219,000 | 34.421053 | 250 | 0.65577 | false |
rigetticomputing/grove | grove/tomography/state_tomography.py | 1 | 11664 | ##############################################################################
# Copyright 2017-2018 Rigetti Computing
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
import logging
import numpy as np
import matplotlib.pyplot as plt
from pyquil.quilbase import Pragma
from scipy.sparse import csr_matrix, coo_matrix
from pyquil.quil import Program
import grove.tomography.operator_utils
from grove.tomography.tomography import TomographyBase, TomographySettings, DEFAULT_SOLVER_KWARGS
from grove.tomography import tomography
import grove.tomography.utils as ut
import grove.tomography.operator_utils as o_ut
_log = logging.getLogger(__name__)
qt = ut.import_qutip()
cvxpy = ut.import_cvxpy()
UNIT_TRACE = 'unit_trace'
POSITIVE = 'positive'
DEFAULT_STATE_TOMO_SETTINGS = TomographySettings(
constraints={UNIT_TRACE},
solver_kwargs=DEFAULT_SOLVER_KWARGS
)
def _prepare_c_jk_m(readout_povm, pauli_basis, channel_ops):
"""
Prepare the coefficient matrix for state tomography. This function uses sparse matrices
for much greater efficiency.
The coefficient matrix is defined as:
.. math::
C_{(jk)m} = \tr{\Pi_{s_j} \Lambda_k(P_m)} = \sum_{r}\pi_{jr}(\mathcal{R}_{k})_{rm}
where :math:`\Lambda_k(\cdot)` is the quantum map corresponding to the k-th pre-measurement
channel, i.e., :math:`\Lambda_k(\rho) = E_k \rho E_k^\dagger` where :math:`E_k` is the k-th
channel operator. This map can also be represented via its transfer matrix
:math:`\mathcal{R}_{k}`. In that case one also requires the overlap between the (generalized)
Pauli basis ops and the projection operators
:math:`\pi_{jl}:=\sbraket{\Pi_j}{P_l} = \tr{\Pi_j P_l}`.
See the grove documentation on tomography for detailed information.
:param DiagonalPOVM readout_povm: The POVM corresponding to the readout plus classifier.
:param OperatorBasis pauli_basis: The (generalized) Pauli basis employed in the estimation.
:param list channel_ops: The pre-measurement channel operators as `qutip.Qobj`
:return: The coefficient matrix necessary to set up the binomial state tomography problem.
:rtype: scipy.sparse.csr_matrix
"""
channel_transfer_matrices = [pauli_basis.transfer_matrix(qt.to_super(ek)) for ek in channel_ops]
# This bit could be more efficient but does not run super long and is thus preserved for
# readability.
pi_jr = csr_matrix(
[pauli_basis.project_op(n_j).toarray().ravel()
for n_j in readout_povm.ops])
# Dict used for constructing our sparse matrix, keys are tuples (row_index, col_index), values
# are the non-zero elements of the final matrix.
c_jk_m_elms = {}
# This explicitly exploits the sparsity of all operators involved
for k in range(len(channel_ops)):
pi_jr__rk_rm = (pi_jr * channel_transfer_matrices[k]).tocoo()
for (j, m, val) in ut.izip(pi_jr__rk_rm.row, pi_jr__rk_rm.col, pi_jr__rk_rm.data):
# The multi-index (j,k) is enumerated in column-major ordering (like Fortran arrays)
c_jk_m_elms[(j + k * readout_povm.pi_basis.dim, m)] = val.real
# create sparse matrix from COO-format (see scipy.sparse docs)
_keys, _values = ut.izip(*c_jk_m_elms.items())
_rows, _cols = ut.izip(*_keys)
c_jk_m = coo_matrix((list(_values), (list(_rows), list(_cols))),
shape=(readout_povm.pi_basis.dim * len(channel_ops),
pauli_basis.dim)).tocsr()
return c_jk_m
class StateTomography(TomographyBase):
"""
A StateTomography object encapsulates the result of quantum state estimation from tomographic
data. It provides convenience functions for visualization and computing state fidelities.
"""
__tomography_type__ = "STATE"
@staticmethod
def estimate_from_ssr(histograms, readout_povm, channel_ops, settings):
"""
Estimate a density matrix from single shot histograms obtained by measuring bitstrings in
the Z-eigenbasis after application of given channel operators.
:param numpy.ndarray histograms: The single shot histograms, `shape=(n_channels, dim)`.
    :param DiagonalPOVM readout_povm: The POVM corresponding to the readout plus classifier.
:param list channel_ops: The tomography measurement channels as `qutip.Qobj`'s.
:param TomographySettings settings: The solver and estimation settings.
:return: The generated StateTomography object.
:rtype: StateTomography
"""
nqc = len(channel_ops[0].dims[0])
pauli_basis = grove.tomography.operator_utils.PAULI_BASIS ** nqc
pi_basis = readout_povm.pi_basis
if not histograms.shape[1] == pi_basis.dim: # pragma no coverage
raise ValueError("Currently tomography is only implemented for two-level systems.")
# prepare the log-likelihood function parameters, see documentation
n_kj = np.asarray(histograms)
c_jk_m = _prepare_c_jk_m(readout_povm, pauli_basis, channel_ops)
rho_m = cvxpy.Variable(pauli_basis.dim)
p_jk = c_jk_m * rho_m
obj = -n_kj.ravel() * cvxpy.log(p_jk)
p_jk_mat = cvxpy.reshape(p_jk, pi_basis.dim, len(channel_ops)) # cvxpy has col-major order
# Default constraints:
# MLE must describe valid probability distribution
# i.e., for each k, p_jk must sum to one and be element-wise non-negative:
# 1. \sum_j p_jk == 1 for all k
# 2. p_jk >= 0 for all j, k
# where p_jk = \sum_m c_jk_m rho_m
constraints = [
p_jk >= 0,
np.matrix(np.ones((1, pi_basis.dim))) * p_jk_mat == 1,
]
rho_m_real_imag = sum((rm * o_ut.to_realimag(Pm)
for (rm, Pm) in ut.izip(rho_m, pauli_basis.ops)), 0)
if POSITIVE in settings.constraints:
if tomography._SDP_SOLVER.is_functional():
constraints.append(rho_m_real_imag >> 0)
else: # pragma no coverage
_log.warning("No convex solver capable of semi-definite problems installed.\n"
"Dropping the positivity constraint on the density matrix.")
if UNIT_TRACE in settings.constraints:
# this assumes that the first element of the Pauli basis is always proportional to
# the identity
constraints.append(rho_m[0, 0] == 1. / pauli_basis.ops[0].tr().real)
prob = cvxpy.Problem(cvxpy.Minimize(obj), constraints)
_log.info("Starting convex solver")
prob.solve(solver=tomography.SOLVER, **settings.solver_kwargs)
if prob.status != cvxpy.OPTIMAL: # pragma no coverage
_log.warning("Problem did not converge to optimal solution. "
"Solver settings: {}".format(settings.solver_kwargs))
return StateTomography(np.array(rho_m.value).ravel(), pauli_basis, settings)
def __init__(self, rho_coeffs, pauli_basis, settings):
"""
Construct a StateTomography to encapsulate the result of estimating the quantum state from
a quantum tomography measurement.
        :param numpy.ndarray rho_coeffs: The estimated quantum state represented in a given (generalized)
Pauli basis.
:param OperatorBasis pauli_basis: The employed (generalized) Pauli basis.
:param TomographySettings settings: The settings used to estimate the state.
"""
self.rho_coeffs = rho_coeffs
self.pauli_basis = pauli_basis
self.rho_est = sum((r_m * p_m for r_m, p_m in ut.izip(rho_coeffs, pauli_basis.ops)))
self.settings = settings
def fidelity(self, other):
"""
Compute the quantum state fidelity of the estimated state with another state.
:param qutip.Qobj other: The other quantum state.
:return: The fidelity, a real number between 0 and 1.
:rtype: float
"""
return qt.fidelity(self.rho_est, other)
def plot_state_histogram(self, ax):
"""
Visualize the complex matrix elements of the estimated state.
:param matplotlib.Axes ax: A matplotlib Axes object to plot into.
"""
title = "Estimated state"
return ut.state_histogram(self.rho_est, ax, title)
def plot(self):
"""
Visualize the state.
:return: The generated figure.
:rtype: matplotlib.Figure
"""
width = 10
# The pleasing golden ratio.
height = width / 1.618
f = plt.figure(figsize=(width, height))
ax = f.add_subplot(111, projection="3d")
self.plot_state_histogram(ax)
return f
def state_tomography_programs(state_prep, qubits=None,
rotation_generator=tomography.default_rotations):
"""
Yield tomographic sequences that prepare a state with Quil program `state_prep` and then append
tomographic rotations on the specified `qubits`. If `qubits is None`, it assumes all qubits in
the program should be tomographically rotated.
:param Program state_prep: The program to prepare the state to be tomographed.
:param list|NoneType qubits: A list of Qubits or Numbers, to perform the tomography on. If
`None`, performs it on all in state_prep.
:param generator rotation_generator: A generator that yields tomography rotations to perform.
:return: Program for state tomography.
:rtype: Program
"""
if qubits is None:
qubits = state_prep.get_qubits()
for tomography_program in rotation_generator(*qubits):
state_tomography_program = Program(Pragma("PRESERVE_BLOCK"))
state_tomography_program.inst(state_prep)
state_tomography_program.inst(tomography_program)
state_tomography_program.inst(Pragma("END_PRESERVE_BLOCK"))
yield state_tomography_program
def do_state_tomography(preparation_program, nsamples, cxn, qubits=None, use_run=False):
"""
Method to perform both a QPU and QVM state tomography, and use the latter as
as reference to calculate the fidelity of the former.
:param Program preparation_program: Program to execute.
:param int nsamples: Number of samples to take for the program.
:param QVMConnection|QPUConnection cxn: Connection on which to run the program.
:param list qubits: List of qubits for the program.
to use in the tomography analysis.
:param bool use_run: If ``True``, use append measurements on all qubits and use ``cxn.run``
instead of ``cxn.run_and_measure``.
:return: The state tomogram.
:rtype: StateTomography
"""
return tomography._do_tomography(preparation_program, nsamples, cxn, qubits,
tomography.MAX_QUBITS_STATE_TOMO,
StateTomography, state_tomography_programs,
DEFAULT_STATE_TOMO_SETTINGS, use_run=use_run)
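# Minimal usage sketch (the preparation program below is illustrative and
# requires a running QVM; the Bell-state fidelity check assumes qutip):
# from pyquil.api import QVMConnection
# from pyquil.gates import H, CNOT
# prep = Program(H(0), CNOT(0, 1))
# tomo = do_state_tomography(prep, nsamples=1000, cxn=QVMConnection(),
#                            qubits=[0, 1])
# bell = qt.ket2dm((qt.basis(4, 0) + qt.basis(4, 3)).unit())
# print(tomo.fidelity(bell))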
| apache-2.0 | -8,882,595,617,667,638,000 | 43.015094 | 100 | 0.650034 | false |
ecohealthalliance/eidr-connect | .scripts/utils.py | 1 | 1603 | import re
import requests
import os
import functools
import json
try:
from functools import lru_cache
except ImportError:
from backports.functools_lru_cache import lru_cache
GRITS_URL = os.environ.get("GRITS_URL", "https://grits.eha.io")
def clean(s):
return re.sub(r"\s+", " ", s).strip()
def clean_disease_name(s):
# Modifiers that make case counts more specific need to be treated
# specially because constraining counts for the general disease cannot be
# created from them.
# s = re.sub(r"^(Highly Pathogenic|Virulent|Suspected)", "", s, re.I)
# s = re.sub(" Serotype .+$", "", s, re.I)
# Remove hyphens
s = re.sub(r"\-", "", s)
s = re.sub(r"\(.*\)", "", s)
s = re.sub(r"\[.*\]", "", s)
return clean(s)
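# Example (the input string below is illustrative):
# clean_disease_name("H5N1 - Influenza (avian)") -> "H5N1 Influenza"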
@lru_cache()
def lookup_geoname(name):
resp = requests.get(GRITS_URL + "/api/geoname_lookup/api/lookup", params={
"q": name
})
result = json.loads(resp.text)["hits"][0]["_source"]
del result["alternateNames"]
del result["rawNames"]
del result["asciiName"]
del result["cc2"]
del result["elevation"]
del result["dem"]
del result["timezone"]
del result["modificationDate"]
return result
@lru_cache()
def lookup_disease(name):
if len(name) == 0:
return None
resp = requests.get(GRITS_URL + "/api/v1/disease_ontology/lookup", params={
"q": name
})
result = resp.json()
first_result = next(iter(result["result"]), None)
if first_result:
return {
"id": first_result["id"],
"text": first_result["label"]
} | apache-2.0 | -169,896,742,788,247,500 | 24.870968 | 79 | 0.601996 | false |
kevin-intel/scikit-learn | sklearn/datasets/_openml.py | 2 | 34451 | import gzip
import json
import os
import shutil
import hashlib
from os.path import join
from warnings import warn
from contextlib import closing
from functools import wraps
from typing import Callable, Optional, Dict, Tuple, List, Any, Union
import itertools
from collections.abc import Generator
from collections import OrderedDict
from functools import partial
from urllib.request import urlopen, Request
import numpy as np
import scipy.sparse
from ..externals import _arff
from ..externals._arff import ArffSparseDataType, ArffContainerType
from . import get_data_home
from urllib.error import HTTPError
from ..utils import Bunch
from ..utils import is_scalar_nan
from ..utils import get_chunk_n_rows
from ..utils import _chunk_generator
from ..utils import check_pandas_support # noqa
__all__ = ['fetch_openml']
_OPENML_PREFIX = "https://openml.org/"
_SEARCH_NAME = "api/v1/json/data/list/data_name/{}/limit/2"
_DATA_INFO = "api/v1/json/data/{}"
_DATA_FEATURES = "api/v1/json/data/features/{}"
_DATA_QUALITIES = "api/v1/json/data/qualities/{}"
_DATA_FILE = "data/v1/download/{}"
OpenmlQualitiesType = List[Dict[str, str]]
OpenmlFeaturesType = List[Dict[str, str]]
def _get_local_path(openml_path: str, data_home: str) -> str:
return os.path.join(data_home, 'openml.org', openml_path + ".gz")
def _retry_with_clean_cache(
openml_path: str, data_home: Optional[str]
) -> Callable:
"""If the first call to the decorated function fails, the local cached
file is removed, and the function is called again. If ``data_home`` is
``None``, then the function is called once.
"""
def decorator(f):
@wraps(f)
def wrapper(*args, **kw):
if data_home is None:
return f(*args, **kw)
try:
return f(*args, **kw)
except HTTPError:
raise
except Exception:
warn("Invalid cache, redownloading file", RuntimeWarning)
local_path = _get_local_path(openml_path, data_home)
if os.path.exists(local_path):
os.unlink(local_path)
return f(*args, **kw)
return wrapper
return decorator
def _open_openml_url(openml_path: str, data_home: Optional[str]):
"""
Returns a resource from OpenML.org. Caches it to data_home if required.
Parameters
----------
openml_path : str
        OpenML URL that will be accessed. This will be prefixed with
_OPENML_PREFIX
data_home : str
Directory to which the files will be cached. If None, no caching will
be applied.
Returns
-------
result : stream
A stream to the OpenML resource
"""
def is_gzip_encoded(_fsrc):
return _fsrc.info().get('Content-Encoding', '') == 'gzip'
req = Request(_OPENML_PREFIX + openml_path)
req.add_header('Accept-encoding', 'gzip')
if data_home is None:
fsrc = urlopen(req)
if is_gzip_encoded(fsrc):
return gzip.GzipFile(fileobj=fsrc, mode='rb')
return fsrc
local_path = _get_local_path(openml_path, data_home)
if not os.path.exists(local_path):
try:
os.makedirs(os.path.dirname(local_path))
except OSError:
# potentially, the directory has been created already
pass
try:
with closing(urlopen(req)) as fsrc:
opener: Callable
if is_gzip_encoded(fsrc):
opener = open
else:
opener = gzip.GzipFile
with opener(local_path, 'wb') as fdst:
shutil.copyfileobj(fsrc, fdst)
except Exception:
if os.path.exists(local_path):
os.unlink(local_path)
raise
    # XXX: On the first download the response stream (fsrc) is already
    # decompressed, but we re-open the cached gzip file and decompress it again.
return gzip.GzipFile(local_path, 'rb')
class OpenMLError(ValueError):
"""HTTP 412 is a specific OpenML error code, indicating a generic error"""
pass
def _get_json_content_from_openml_api(
url: str,
error_message: Optional[str],
data_home: Optional[str]
) -> Dict:
"""
Loads json data from the openml api
Parameters
----------
url : str
The URL to load from. Should be an official OpenML endpoint
error_message : str or None
The error message to raise if an acceptable OpenML error is thrown
(acceptable error is, e.g., data id not found. Other errors, like 404's
will throw the native error message)
data_home : str or None
Location to cache the response. None if no cache is required.
Returns
-------
json_data : json
the json result from the OpenML server if the call was successful.
An exception otherwise.
"""
@_retry_with_clean_cache(url, data_home)
def _load_json():
with closing(_open_openml_url(url, data_home)) as response:
return json.loads(response.read().decode("utf-8"))
try:
return _load_json()
except HTTPError as error:
# 412 is an OpenML specific error code, indicating a generic error
# (e.g., data not found)
if error.code != 412:
raise error
# 412 error, not in except for nicer traceback
raise OpenMLError(error_message)
def _split_sparse_columns(
arff_data: ArffSparseDataType, include_columns: List
) -> ArffSparseDataType:
"""
obtains several columns from sparse arff representation. Additionally, the
column indices are re-labelled, given the columns that are not included.
(e.g., when including [1, 2, 3], the columns will be relabelled to
[0, 1, 2])
Parameters
----------
arff_data : tuple
A tuple of three lists of equal size; first list indicating the value,
second the x coordinate and the third the y coordinate.
include_columns : list
A list of columns to include.
Returns
-------
arff_data_new : tuple
Subset of arff data with only the include columns indicated by the
include_columns argument.
"""
arff_data_new: ArffSparseDataType = (list(), list(), list())
reindexed_columns = {column_idx: array_idx for array_idx, column_idx
in enumerate(include_columns)}
for val, row_idx, col_idx in zip(arff_data[0], arff_data[1], arff_data[2]):
if col_idx in include_columns:
arff_data_new[0].append(val)
arff_data_new[1].append(row_idx)
arff_data_new[2].append(reindexed_columns[col_idx])
return arff_data_new
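# Worked illustration of the re-labelling (the values below are made up):
# arff_data = ([1.0, 2.0], [0, 0], [2, 3])   # (values, row indices, col indices)
# _split_sparse_columns(arff_data, [2, 3])
# # -> ([1.0, 2.0], [0, 0], [0, 1])          # columns 2, 3 relabelled to 0, 1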
def _sparse_data_to_array(
arff_data: ArffSparseDataType, include_columns: List
) -> np.ndarray:
    # turns the sparse data back into an array (can't use the toarray() function,
    # as that only works on numeric data)
num_obs = max(arff_data[1]) + 1
y_shape = (num_obs, len(include_columns))
reindexed_columns = {column_idx: array_idx for array_idx, column_idx
in enumerate(include_columns)}
# TODO: improve for efficiency
y = np.empty(y_shape, dtype=np.float64)
for val, row_idx, col_idx in zip(arff_data[0], arff_data[1], arff_data[2]):
if col_idx in include_columns:
y[row_idx, reindexed_columns[col_idx]] = val
return y
def _convert_arff_data(
arff: ArffContainerType,
col_slice_x: List[int],
col_slice_y: List[int],
shape: Optional[Tuple] = None
) -> Tuple:
"""
converts the arff object into the appropriate matrix type (np.array or
scipy.sparse.csr_matrix) based on the 'data part' (i.e., in the
liac-arff dict, the object from the 'data' key)
Parameters
----------
arff : dict
As obtained from liac-arff object.
col_slice_x : list
The column indices that are sliced from the original array to return
as X data
    col_slice_y : list
        The column indices that are sliced from the original array to return
        as y data

    shape : tuple or None
        Expected (n_samples, n_features) shape of the dense data; required when
        arff['data'] is a generator. A first entry of -1 means the number of
        samples is not known in advance.
Returns
-------
X : np.array or scipy.sparse.csr_matrix
y : np.array
"""
arff_data = arff['data']
if isinstance(arff_data, Generator):
if shape is None:
raise ValueError(
"shape must be provided when arr['data'] is a Generator"
)
if shape[0] == -1:
count = -1
else:
count = shape[0] * shape[1]
data = np.fromiter(itertools.chain.from_iterable(arff_data),
dtype='float64', count=count)
data = data.reshape(*shape)
X = data[:, col_slice_x]
y = data[:, col_slice_y]
return X, y
elif isinstance(arff_data, tuple):
arff_data_X = _split_sparse_columns(arff_data, col_slice_x)
num_obs = max(arff_data[1]) + 1
X_shape = (num_obs, len(col_slice_x))
X = scipy.sparse.coo_matrix(
(arff_data_X[0], (arff_data_X[1], arff_data_X[2])),
shape=X_shape, dtype=np.float64)
X = X.tocsr()
y = _sparse_data_to_array(arff_data, col_slice_y)
return X, y
else:
# This should never happen
raise ValueError('Unexpected Data Type obtained from arff.')
def _feature_to_dtype(feature: Dict[str, str]):
"""Map feature to dtype for pandas DataFrame
"""
if feature['data_type'] == 'string':
return object
elif feature['data_type'] == 'nominal':
return 'category'
# only numeric, integer, real are left
elif (feature['number_of_missing_values'] != '0' or
feature['data_type'] in ['numeric', 'real']):
# cast to floats when there are any missing values
return np.float64
elif feature['data_type'] == 'integer':
return np.int64
raise ValueError('Unsupported feature: {}'.format(feature))
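# Example mappings (the feature dicts below are illustrative):
# _feature_to_dtype({'data_type': 'nominal', 'number_of_missing_values': '0'})
# # -> 'category'
# _feature_to_dtype({'data_type': 'integer', 'number_of_missing_values': '3'})
# # -> np.float64   (any missing values force a cast to float)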
def _convert_arff_data_dataframe(
arff: ArffContainerType, columns: List, features_dict: Dict[str, Any]
) -> Tuple:
"""Convert the ARFF object into a pandas DataFrame.
Parameters
----------
arff : dict
As obtained from liac-arff object.
columns : list
Columns from dataframe to return.
features_dict : dict
Maps feature name to feature info from openml.
Returns
-------
result : tuple
tuple with the resulting dataframe
"""
pd = check_pandas_support('fetch_openml with as_frame=True')
attributes = OrderedDict(arff['attributes'])
arff_columns = list(attributes)
if not isinstance(arff['data'], Generator):
raise ValueError(
"arff['data'] must be a generator when converting to pd.DataFrame."
)
# calculate chunksize
first_row = next(arff['data'])
first_df = pd.DataFrame([first_row], columns=arff_columns)
row_bytes = first_df.memory_usage(deep=True).sum()
chunksize = get_chunk_n_rows(row_bytes)
# read arff data with chunks
columns_to_keep = [col for col in arff_columns if col in columns]
dfs = []
dfs.append(first_df[columns_to_keep])
for data in _chunk_generator(arff['data'], chunksize):
dfs.append(pd.DataFrame(data, columns=arff_columns)[columns_to_keep])
df = pd.concat(dfs, ignore_index=True)
for column in columns_to_keep:
dtype = _feature_to_dtype(features_dict[column])
if dtype == 'category':
cats_without_missing = [cat for cat in attributes[column]
if cat is not None and
not is_scalar_nan(cat)]
dtype = pd.api.types.CategoricalDtype(cats_without_missing)
df[column] = df[column].astype(dtype, copy=False)
return (df, )
def _get_data_info_by_name(
name: str, version: Union[int, str], data_home: Optional[str]
):
"""
Utilizes the openml dataset listing api to find a dataset by
name/version
OpenML api function:
https://www.openml.org/api_docs#!/data/get_data_list_data_name_data_name
Parameters
----------
name : str
name of the dataset
version : int or str
If version is an integer, the exact name/version will be obtained from
OpenML. If version is a string (value: "active") it will take the first
version from OpenML that is annotated as active. Any other string
values except "active" are treated as integer.
data_home : str or None
Location to cache the response. None if no cache is required.
Returns
-------
first_dataset : json
        json representation of the first dataset object that adhered to the
search criteria
"""
if version == "active":
# situation in which we return the oldest active version
url = _SEARCH_NAME.format(name) + "/status/active/"
error_msg = "No active dataset {} found.".format(name)
json_data = _get_json_content_from_openml_api(
url, error_msg, data_home=data_home
)
res = json_data['data']['dataset']
if len(res) > 1:
warn("Multiple active versions of the dataset matching the name"
" {name} exist. Versions may be fundamentally different, "
"returning version"
" {version}.".format(name=name, version=res[0]['version']))
return res[0]
# an integer version has been provided
url = (_SEARCH_NAME + "/data_version/{}").format(name, version)
try:
json_data = _get_json_content_from_openml_api(
url, error_message=None, data_home=data_home
)
except OpenMLError:
# we can do this in 1 function call if OpenML does not require the
# specification of the dataset status (i.e., return datasets with a
# given name / version regardless of active, deactivated, etc. )
# TODO: feature request OpenML.
url += "/status/deactivated"
error_msg = "Dataset {} with version {} not found.".format(name,
version)
json_data = _get_json_content_from_openml_api(
url, error_msg, data_home=data_home
)
return json_data['data']['dataset'][0]
def _get_data_description_by_id(
data_id: int, data_home: Optional[str]
) -> Dict[str, Any]:
# OpenML API function: https://www.openml.org/api_docs#!/data/get_data_id
url = _DATA_INFO.format(data_id)
error_message = "Dataset with data_id {} not found.".format(data_id)
json_data = _get_json_content_from_openml_api(
url, error_message, data_home=data_home
)
return json_data['data_set_description']
def _get_data_features(
data_id: int, data_home: Optional[str]
) -> OpenmlFeaturesType:
# OpenML function:
# https://www.openml.org/api_docs#!/data/get_data_features_id
url = _DATA_FEATURES.format(data_id)
error_message = "Dataset with data_id {} not found.".format(data_id)
json_data = _get_json_content_from_openml_api(
url, error_message, data_home=data_home
)
return json_data['data_features']['feature']
def _get_data_qualities(
data_id: int, data_home: Optional[str]
) -> OpenmlQualitiesType:
# OpenML API function:
# https://www.openml.org/api_docs#!/data/get_data_qualities_id
url = _DATA_QUALITIES.format(data_id)
error_message = "Dataset with data_id {} not found.".format(data_id)
json_data = _get_json_content_from_openml_api(
url, error_message, data_home=data_home
)
# the qualities might not be available, but we still try to process
# the data
return json_data.get('data_qualities', {}).get('quality', [])
def _get_num_samples(data_qualities: OpenmlQualitiesType) -> int:
"""Get the number of samples from data qualities.
Parameters
----------
data_qualities : list of dict
Used to retrieve the number of instances (samples) in the dataset.
Returns
-------
n_samples : int
The number of samples in the dataset or -1 if data qualities are
unavailable.
"""
# If the data qualities are unavailable, we return -1
default_n_samples = -1
qualities = {d['name']: d['value'] for d in data_qualities}
return int(float(qualities.get('NumberOfInstances', default_n_samples)))
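# Usage sketch with a hand-built qualities list mirroring the OpenML
# "data qualities" JSON (a list of {'name': ..., 'value': ...} dicts):
def _example_get_num_samples():
    qualities = [{'name': 'NumberOfInstances', 'value': '150.0'}]
    assert _get_num_samples(qualities) == 150
    # an empty qualities list yields the -1 sentinel
    assert _get_num_samples([]) == -1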
def _load_arff_response(
url: str,
data_home: Optional[str],
return_type, encode_nominal: bool,
parse_arff: Callable[[ArffContainerType], Tuple],
md5_checksum: str
) -> Tuple:
"""Load arff data with url and parses arff response with parse_arff"""
response = _open_openml_url(url, data_home)
with closing(response):
# Note that if the data is dense, no reading is done until the data
# generator is iterated.
actual_md5_checksum = hashlib.md5()
def _stream_checksum_generator(response):
for line in response:
actual_md5_checksum.update(line)
yield line.decode('utf-8')
stream = _stream_checksum_generator(response)
arff = _arff.load(stream,
return_type=return_type,
encode_nominal=encode_nominal)
parsed_arff = parse_arff(arff)
# consume remaining stream, if early exited
for _ in stream:
pass
if actual_md5_checksum.hexdigest() != md5_checksum:
raise ValueError("md5 checksum of local file for " + url +
" does not match description. "
"Downloaded file could have been modified / "
"corrupted, clean cache and retry...")
return parsed_arff
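# Self-contained sketch of the hash-while-streaming idea above: every raw
# line feeds the md5 digest before being decoded for the parser
# (io.BytesIO stands in for the HTTP response here):
def _example_stream_checksum():
    import io
    response = io.BytesIO(b"line one\nline two\n")
    digest = hashlib.md5()
    def _gen(resp):
        for line in resp:
            digest.update(line)
            yield line.decode('utf-8')
    decoded = list(_gen(response))  # the ARFF parser would consume this
    return decoded, digest.hexdigest()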
def _download_data_to_bunch(
url: str,
sparse: bool,
data_home: Optional[str],
*,
as_frame: bool,
features_list: List,
data_columns: List[int],
target_columns: List,
shape: Optional[Tuple[int, int]],
md5_checksum: str
):
"""Download OpenML ARFF and convert to Bunch of data
"""
# NB: this function is long in order to handle retry for any failure
# during the streaming parse of the ARFF.
# Prepare which columns and data types should be returned for the X and y
features_dict = {feature['name']: feature for feature in features_list}
# XXX: col_slice_y should be all nominal or all numeric
_verify_target_data_type(features_dict, target_columns)
col_slice_y = [int(features_dict[col_name]['index'])
for col_name in target_columns]
col_slice_x = [int(features_dict[col_name]['index'])
for col_name in data_columns]
for col_idx in col_slice_y:
feat = features_list[col_idx]
nr_missing = int(feat['number_of_missing_values'])
if nr_missing > 0:
raise ValueError('Target column {} has {} missing values. '
'Missing values are not supported for target '
'columns. '.format(feat['name'], nr_missing))
# Access an ARFF file on the OpenML server. Documentation:
# https://www.openml.org/api_data_docs#!/data/get_download_id
if sparse is True:
return_type = _arff.COO
else:
return_type = _arff.DENSE_GEN
frame = nominal_attributes = None
parse_arff: Callable
postprocess: Callable
if as_frame:
columns = data_columns + target_columns
parse_arff = partial(_convert_arff_data_dataframe, columns=columns,
features_dict=features_dict)
def postprocess(frame):
X = frame[data_columns]
if len(target_columns) >= 2:
y = frame[target_columns]
elif len(target_columns) == 1:
y = frame[target_columns[0]]
else:
y = None
return X, y, frame, nominal_attributes
else:
def parse_arff(arff):
X, y = _convert_arff_data(arff, col_slice_x, col_slice_y, shape)
# nominal attributes is a dict mapping from the attribute name to
# the possible values. Includes also the target column (which will
# be popped off below, before it will be packed in the Bunch
# object)
nominal_attributes = {k: v for k, v in arff['attributes']
if isinstance(v, list) and
k in data_columns + target_columns}
return X, y, nominal_attributes
def postprocess(X, y, nominal_attributes):
is_classification = {col_name in nominal_attributes
for col_name in target_columns}
if not is_classification:
# No target
pass
elif all(is_classification):
y = np.hstack([
np.take(
np.asarray(nominal_attributes.pop(col_name),
dtype='O'),
y[:, i:i + 1].astype(int, copy=False))
for i, col_name in enumerate(target_columns)
])
elif any(is_classification):
raise ValueError('Mix of nominal and non-nominal targets is '
'not currently supported')
            # reshape y back to a 1-D array if there is only 1 target column;
            # set back to None if there are no target columns
if y.shape[1] == 1:
y = y.reshape((-1,))
elif y.shape[1] == 0:
y = None
return X, y, frame, nominal_attributes
out = _retry_with_clean_cache(url, data_home)(
_load_arff_response)(url, data_home,
return_type=return_type,
encode_nominal=not as_frame,
parse_arff=parse_arff,
md5_checksum=md5_checksum)
X, y, frame, nominal_attributes = postprocess(*out)
return Bunch(data=X, target=y, frame=frame,
categories=nominal_attributes,
feature_names=data_columns,
target_names=target_columns)
def _verify_target_data_type(features_dict, target_columns):
# verifies the data type of the y array in case there are multiple targets
# (throws an error if these targets do not comply with sklearn support)
if not isinstance(target_columns, list):
raise ValueError('target_column should be list, '
'got: %s' % type(target_columns))
found_types = set()
for target_column in target_columns:
if target_column not in features_dict:
            raise KeyError('Could not find target_column={}'.format(
                target_column))
if features_dict[target_column]['data_type'] == "numeric":
found_types.add(np.float64)
else:
found_types.add(object)
# note: we compare to a string, not boolean
if features_dict[target_column]['is_ignore'] == 'true':
warn('target_column={} has flag is_ignore.'.format(
target_column))
if features_dict[target_column]['is_row_identifier'] == 'true':
warn('target_column={} has flag is_row_identifier.'.format(
target_column))
if len(found_types) > 1:
raise ValueError('Can only handle homogeneous multi-target datasets, '
'i.e., all targets are either numeric or '
'categorical.')
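# Sketch of the homogeneity rule enforced above, with hypothetical feature
# metadata; mixing numeric and nominal targets raises ValueError:
def _example_verify_targets():
    features = {
        'y1': {'data_type': 'numeric', 'is_ignore': 'false',
               'is_row_identifier': 'false'},
        'y2': {'data_type': 'nominal', 'is_ignore': 'false',
               'is_row_identifier': 'false'},
    }
    _verify_target_data_type(features, ['y1'])  # single numeric target: ok
    try:
        _verify_target_data_type(features, ['y1', 'y2'])
    except ValueError:
        pass  # mixed numeric/nominal targets are rejected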
def _valid_data_column_names(features_list, target_columns):
    # Logic for determining which columns can be learned on. Per the OpenML
    # guide, columns carrying the `is_row_identifier` or `is_ignore` flag
    # cannot be learned on. Target columns are excluded as well.
valid_data_column_names = []
for feature in features_list:
if (feature['name'] not in target_columns
and feature['is_ignore'] != 'true'
and feature['is_row_identifier'] != 'true'):
valid_data_column_names.append(feature['name'])
return valid_data_column_names
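# Usage sketch: row identifiers, ignored columns and targets are all
# filtered out of the learnable columns (feature dicts are hypothetical):
def _example_valid_columns():
    features = [
        {'name': 'x', 'is_ignore': 'false', 'is_row_identifier': 'false'},
        {'name': 'id', 'is_ignore': 'false', 'is_row_identifier': 'true'},
        {'name': 'y', 'is_ignore': 'false', 'is_row_identifier': 'false'},
    ]
    assert _valid_data_column_names(features, ['y']) == ['x']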
def fetch_openml(
name: Optional[str] = None,
*,
version: Union[str, int] = 'active',
data_id: Optional[int] = None,
data_home: Optional[str] = None,
target_column: Optional[Union[str, List]] = 'default-target',
cache: bool = True,
return_X_y: bool = False,
as_frame: Union[str, bool] = 'auto'
):
"""Fetch dataset from openml by name or dataset id.
Datasets are uniquely identified by either an integer ID or by a
combination of name and version (i.e. there might be multiple
versions of the 'iris' dataset). Please give either name or data_id
(not both). In case a name is given, a version can also be
provided.
Read more in the :ref:`User Guide <openml>`.
.. versionadded:: 0.20
.. note:: EXPERIMENTAL
The API is experimental (particularly the return value structure),
and might have small backward-incompatible changes without notice
or warning in future releases.
Parameters
----------
name : str, default=None
String identifier of the dataset. Note that OpenML can have multiple
datasets with the same name.
version : int or 'active', default='active'
Version of the dataset. Can only be provided if also ``name`` is given.
If 'active' the oldest version that's still active is used. Since
there may be more than one active version of a dataset, and those
versions may fundamentally be different from one another, setting an
exact version is highly recommended.
data_id : int, default=None
OpenML ID of the dataset. The most specific way of retrieving a
dataset. If data_id is not given, name (and potential version) are
used to obtain a dataset.
data_home : str, default=None
Specify another download and cache folder for the data sets. By default
all scikit-learn data is stored in '~/scikit_learn_data' subfolders.
target_column : str, list or None, default='default-target'
Specify the column name in the data to use as target. If
        'default-target', the standard target column as stored on the server
is used. If ``None``, all columns are returned as data and the
target is ``None``. If list (of strings), all columns with these names
are returned as multi-target (Note: not all scikit-learn classifiers
can handle all types of multi-output combinations)
cache : bool, default=True
Whether to cache downloaded datasets using joblib.
return_X_y : bool, default=False
If True, returns ``(data, target)`` instead of a Bunch object. See
below for more information about the `data` and `target` objects.
as_frame : bool or 'auto', default='auto'
If True, the data is a pandas DataFrame including columns with
appropriate dtypes (numeric, string or categorical). The target is
a pandas DataFrame or Series depending on the number of target_columns.
The Bunch will contain a ``frame`` attribute with the target and the
data. If ``return_X_y`` is True, then ``(data, target)`` will be pandas
        DataFrames or Series as described above.
If as_frame is 'auto', the data and target will be converted to
DataFrame or Series as if as_frame is set to True, unless the dataset
is stored in sparse format.
.. versionchanged:: 0.24
The default value of `as_frame` changed from `False` to `'auto'`
in 0.24.
Returns
-------
data : :class:`~sklearn.utils.Bunch`
Dictionary-like object, with the following attributes.
data : np.array, scipy.sparse.csr_matrix of floats, or pandas DataFrame
The feature matrix. Categorical features are encoded as ordinals.
target : np.array, pandas Series or DataFrame
The regression target or classification labels, if applicable.
Dtype is float if numeric, and object if categorical. If
``as_frame`` is True, ``target`` is a pandas object.
DESCR : str
The full description of the dataset
feature_names : list
The names of the dataset columns
target_names: list
The names of the target columns
.. versionadded:: 0.22
categories : dict or None
Maps each categorical feature name to a list of values, such
that the value encoded as i is ith in the list. If ``as_frame``
is True, this is None.
details : dict
More metadata from OpenML
frame : pandas DataFrame
Only present when `as_frame=True`. DataFrame with ``data`` and
``target``.
(data, target) : tuple if ``return_X_y`` is True
.. note:: EXPERIMENTAL
This interface is **experimental** and subsequent releases may
change attributes without notice (although there should only be
minor changes to ``data`` and ``target``).
Missing values in the 'data' are represented as NaN's. Missing values
in 'target' are represented as NaN's (numerical target) or None
(categorical target)
"""
if cache is False:
# no caching will be applied
data_home = None
else:
data_home = get_data_home(data_home=data_home)
data_home = join(data_home, 'openml')
# check valid function arguments. data_id XOR (name, version) should be
# provided
if name is not None:
# OpenML is case-insensitive, but the caching mechanism is not
# convert all data names (str) to lower case
name = name.lower()
if data_id is not None:
raise ValueError(
"Dataset data_id={} and name={} passed, but you can only "
"specify a numeric data_id or a name, not "
"both.".format(data_id, name))
data_info = _get_data_info_by_name(name, version, data_home)
data_id = data_info['did']
elif data_id is not None:
# from the previous if statement, it is given that name is None
if version != "active":
raise ValueError(
"Dataset data_id={} and version={} passed, but you can only "
"specify a numeric data_id or a version, not "
"both.".format(data_id, version))
else:
raise ValueError(
"Neither name nor data_id are provided. Please provide name or "
"data_id.")
data_description = _get_data_description_by_id(data_id, data_home)
if data_description['status'] != "active":
warn("Version {} of dataset {} is inactive, meaning that issues have "
"been found in the dataset. Try using a newer version from "
"this URL: {}".format(
data_description['version'],
data_description['name'],
data_description['url']))
if 'error' in data_description:
warn("OpenML registered a problem with the dataset. It might be "
"unusable. Error: {}".format(data_description['error']))
if 'warning' in data_description:
warn("OpenML raised a warning on the dataset. It might be "
"unusable. Warning: {}".format(data_description['warning']))
return_sparse = False
if data_description['format'].lower() == 'sparse_arff':
return_sparse = True
if as_frame == 'auto':
as_frame = not return_sparse
if as_frame and return_sparse:
raise ValueError('Cannot return dataframe with sparse data')
# download data features, meta-info about column types
features_list = _get_data_features(data_id, data_home)
if not as_frame:
for feature in features_list:
if 'true' in (feature['is_ignore'], feature['is_row_identifier']):
continue
if feature['data_type'] == 'string':
raise ValueError('STRING attributes are not supported for '
'array representation. Try as_frame=True')
if target_column == "default-target":
# determines the default target based on the data feature results
# (which is currently more reliable than the data description;
# see issue: https://github.com/openml/OpenML/issues/768)
target_columns = [feature['name'] for feature in features_list
if feature['is_target'] == 'true']
elif isinstance(target_column, str):
# for code-simplicity, make target_column by default a list
target_columns = [target_column]
elif target_column is None:
target_columns = []
elif isinstance(target_column, list):
target_columns = target_column
else:
raise TypeError("Did not recognize type of target_column"
"Should be str, list or None. Got: "
"{}".format(type(target_column)))
data_columns = _valid_data_column_names(features_list,
target_columns)
shape: Optional[Tuple[int, int]]
# determine arff encoding to return
if not return_sparse:
# The shape must include the ignored features to keep the right indexes
# during the arff data conversion.
data_qualities = _get_data_qualities(data_id, data_home)
shape = _get_num_samples(data_qualities), len(features_list)
else:
shape = None
# obtain the data
url = _DATA_FILE.format(data_description['file_id'])
bunch = _download_data_to_bunch(url, return_sparse, data_home,
as_frame=bool(as_frame),
features_list=features_list, shape=shape,
target_columns=target_columns,
data_columns=data_columns,
md5_checksum=data_description[
"md5_checksum"])
if return_X_y:
return bunch.data, bunch.target
description = "{}\n\nDownloaded from openml.org.".format(
data_description.pop('description'))
bunch.update(
DESCR=description, details=data_description,
url="https://www.openml.org/d/{}".format(data_id))
return bunch
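# Usage sketch (not executed at import; fetching needs network access, and
# the name/version/data_id values below are only illustrative):
def _example_fetch_openml():
    bunch = fetch_openml(name='iris', version=1, as_frame=True)
    X, y = bunch.data, bunch.target  # DataFrame / Series
    # equivalently, skip the Bunch and address the dataset by id:
    X, y = fetch_openml(data_id=61, return_X_y=True, as_frame=True)
    return X, y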
| bsd-3-clause | 66,852,054,886,861,020 | 35.845989 | 79 | 0.604017 | false |
yoazmenda/Hearthstone_deck_builder | run_games.py | 1 | 1724 | import json
from hearthbreaker.agents.basic_agents import RandomAgent
from hearthbreaker.cards.heroes import hero_for_class
from hearthbreaker.constants import CHARACTER_CLASS
from hearthbreaker.engine import Game, Deck, card_lookup
from hearthbreaker.cards import *
import timeit
def load_deck(filename):
cards = []
character_class = CHARACTER_CLASS.MAGE
with open(filename, "r") as deck_file:
contents = deck_file.read()
items = contents.splitlines()
for line in items[0:]:
parts = line.split(" ", 1)
count = int(parts[0])
for i in range(0, count):
card = card_lookup(parts[1])
if card.character_class != CHARACTER_CLASS.ALL:
character_class = card.character_class
cards.append(card)
    if len(cards) > 30:
        # decks larger than 30 cards are passed through unchanged here
        pass
return Deck(cards, hero_for_class(character_class))
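# Sketch of the deck file format load_deck expects: one "<count> <card name>"
# entry per line. The file name here is arbitrary and the card names must
# exist in hearthbreaker's card database:
def _example_load_deck(path="example.hsdeck"):
    with open(path, "w") as f:
        f.write("2 Flame Imp\n1 Doomguard\n")
    return load_deck(path)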
def do_stuff():
_count = 0
def play_game():
nonlocal _count
_count += 1
new_game = game.copy()
try:
new_game.start()
except Exception as e:
print(json.dumps(new_game.__to_json__(), default=lambda o: o.__to_json__(), indent=1))
print(new_game._all_cards_played)
raise e
#winner
#print(new_game.players[0].hero.dead)
del new_game
if _count % 1000 == 0:
print("---- game #{} ----".format(_count))
deck1 = load_deck("zoo.hsdeck")
deck2 = load_deck("zoo.hsdeck")
game = Game([deck1, deck2], [RandomAgent(), RandomAgent()])
print(timeit.timeit(play_game, 'gc.enable()', number=2000))
| mit | -2,154,089,804,845,221,400 | 27.733333 | 98 | 0.563805 | false |
so3500/volttron-kafka | CloudAgent/cloudagent/agent.py | 1 | 19234 | import datetime
import logging
import os
import sys
import datetime
import json
from dateutil.parser import parse
import multiprocessing
import ast
import random
# cloud
from pymongo import MongoClient
# kafka
from kafka import KafkaConsumer, KafkaProducer
from kafka.errors import KafkaError
from volttron.platform.vip.agent import Agent, Core, PubSub
from volttron.platform.messaging import topics
from volttron.platform.agent import utils
utils.setup_logging()
_log = logging.getLogger(__name__)
__version__ = "0.2"
# refer to the agent creation walkthrough
# link : http://volttron.readthedocs.io/en/4.0.1/devguides/agent_development/Agent-Development.html
# refer to the example agent
# link : http://volttron.readthedocs.io/en/4.0.1/devguides/agent_development/Agent-Configuration-Store.html#example-agent
def cloud_agent(config_path, **kwargs):
'''
Function: Return CloudAgent object with configuration information
Args: Same with Class Args
Returns: CloudAgent object
Note: None
Created: JinhoSon, 2017-04-14
Deleted: .
'''
# get config information
config = utils.load_config(config_path)
source = config.get('source')
destination_ip = config.get('destination_ip')
destination_port = config.get('destination_port')
services_topic_list = config.get('services_topic_list')
database_name = config.get('database_name')
collection_name = config.get('collection_name')
command_topic = config.get('command_topic')
cloud_broker_ip = config.get('cloud_broker_ip')
cloud_broker_port = config.get('cloud_broker_port')
cloud_producer_topic = config.get('cloud_producer_topic')
cloud_consumer_topic = config.get('cloud_consumer_topic')
if 'all' in services_topic_list:
services_topic_list = [topics.DRIVER_TOPIC_BASE, topics.LOGGER_BASE,
topics.ACTUATOR, topics.ANALYSIS_TOPIC_BASE]
return CloudAgent(source,
destination_ip,
destination_port,
services_topic_list,
database_name,
collection_name,
command_topic,
cloud_broker_ip,
cloud_broker_port,
cloud_producer_topic,
cloud_consumer_topic,
**kwargs)
class CloudAgent(Agent):
'''
----------------------------------------------------------------------------
Agent summary
----------------------------------------------------------------------------
Name: CloudAgent
Version: 0.2
Function:
1. Subscribe data from message bus
2. Send device data to Cloud(MongoDB)
3. Send command history to Cloud(MongoDB)
4. Send message(command) to Cloud(Kafka consumer)
5. Receive message(command) from Cloud(Kafka producer)
6. Publish data to message bus(test for command)
7. Command to device point using RPC
Args:
source (str): zone name
destination_ip (str): MongoDB server ip in Cloud
destination_port (str): MongoDB server port in Cloud
services_topic_list (list): Topic Data sended to MongoDB server in Cloud
database_name (str): MongoDB database name(like database)
collection_name (str): MongoDB collection name(like table)
command_topic (str): When CloudAgent receives a command from another agent,
the topic that is uesd when other agents publih to MessageBus
cloud_broker_ip (str): Kafka Broker ip in Cloud
cloud_broker_port (str): Kafka Broker port in Cloud
cloud_producer_topic (str): Topic for messaging(commanding) from Cloud to VOLTTRON
cloud_consumer_topic (str): Topic for messaging(commanding) from VOLTTRON to Cloud
Returns:
None
Note:
Version 0.1: Add - Function 1, 2
Version 0.2: Add - Function 3, 4, 5, 6, 7
'''
'''
History
=====
Create '__init__' (by JinhoSon, 2017-04-14)
Create 'post_data' (by SungonLee, 2017-04-14)
Create 'on_message_topic' (by SungonLee, 2017-04-20)
Create 'subscriber' (by SungonLee, 2017-04-20)
Create 'actuate_something' (by SungonLee, 2017-07-20)
Create 'publish_command' (by SungonLee, 2017-09-10)
Create 'command_to_cloud' (by SungonLee, 2017-09-10)
Modify '__init__' (by SungonLee, 2017-09-20)
Create 'command_to_cloud_' (by SungonLee, 2017-09-20)
Delete 'command_to_cloud_' (by SungonLee, 2017-09-23)
'''
def __init__(self, source,
destination_ip,
destination_port,
services_topic_list,
database_name,
collection_name,
command_topic,
cloud_broker_ip,
cloud_broker_port,
cloud_producer_topic,
cloud_consumer_topic,
**kwargs):
'''
Function:
1. initiallizing the configuration information
2. Create Connection with MongoDB server, Kafka Consumer, Kafka Producer in Cloud
Args: Same with Class Args
Returns: None
Note:
self.connection: connection with MongoDB in Cloud
self.consumer: connection with kafka consumer in Cloud
self.producer: connection with kafka producer in Cloud
Created: JinhoSon, 2017-04-14
Modified: SungonLee, 2017-09-20
Deleted: .
'''
super(CloudAgent, self).__init__(**kwargs)
# set config info
self.source = source
self.destination_ip = destination_ip
self.destination_port = destination_port
self.services_topic_list = services_topic_list
self.database_name = database_name
self.collection_name = collection_name
self.command_topic = command_topic
self.cloud_broker_ip = cloud_broker_ip
self.cloud_broker_port = cloud_broker_port
self.cloud_producer_topic = cloud_producer_topic
self.cloud_consumer_topic = cloud_consumer_topic
self.default_config = {"source": source,
"destination_ip": destination_ip,
"destination_port": destination_port,
"services_topic_list": services_topic_list,
"database_name": database_name,
"collection_name": collection_name,
"command_topic": command_topic,
"cloud_broker_ip": cloud_broker_ip,
"cloud_broker_port": cloud_broker_port,
"cloud_producer_topic": cloud_producer_topic,
"cloud_consumer_topic": cloud_consumer_topic
}
_log.info('default_config: {}'.format(self.default_config))
self.vip.config.set_default("config", self.default_config)
# setting up callback_method for configuration store interface
self.vip.config.subscribe(self.configure_new, actions="NEW", pattern="cloud/*")
self.vip.config.subscribe(self.configure_update, actions=["UPDATE",], pattern="cloud/*")
self.vip.config.subscribe(self.configure_delete, actions=["DELETE",], pattern="cloud/*")
self.new_value_ = 0
# connect with local(or remote) mongodb
self.connection = MongoClient(self.destination_ip, int(self.destination_port))
self.db = self.connection[str(self.database_name)]
self.collection = self.db[str(self.collection_name)]
# kafka
self.cloud_producer_addr = '{0}:{1}'.format(self.cloud_broker_ip, self.cloud_broker_port)
self.consumer = KafkaConsumer(bootstrap_servers=[self.cloud_producer_addr])
self.consumer.subscribe([self.cloud_producer_topic])
# kafak producer - command volttron to cloud
# produce json messages
self.cloud_consumer_addr = '{0}:{1}'.format(self.cloud_broker_ip, self.cloud_broker_port)
self.producer = KafkaProducer(bootstrap_servers=[self.cloud_consumer_addr],
value_serializer=lambda v: json.dumps(v).encode('utf-8')
)
# configuration callbacks
    # link : http://volttron.readthedocs.io/en/4.0.1/devguides/agent_development/Agent-Configuration-Store.html
# Ensure that we use default values from anything missing in the configuration
def configure_new(self, config_name, action, contents):
_log.debug("configure_new")
config = self.default_config.copy()
config.update(contents)
# update cloud agent config
def configure_update(self, config_name, action, contents):
_log.debug("configure_update")
# delete cloud agent config
def configure_delete(self, config_name, action, contents):
_log.debug("configure_delete")
def post_data(self, peer=None, sender=None, bus=None, topic=None, headers=None, message=None):
'''
Function: Send device data to Cloud(MongoDB).
Args:
peer: the ZMQ identity of the bus owner sender is identity of the publishing peer
sender: identity of agent publishing messages to messagebus
bus:
topic: the full message topic
headers: case-insensitive dictionary (mapping) of message headers
message: possibly empty list of message parts
Returns: None
Note:
callback method for subscribing.
subscribe message topic: actuator, record, datalogger and device topics send data to MongoDB(Cloud or Local)
Created: JinhoSon, 2017-04-14
Modified: SungonLee, 2017-5-20
Deleted: .
'''
try:
            _log.info('Post_data: subscribe from message bus, topic:{0}, message:{1}, sender:{2}, bus:{3}'
                      .format(topic, message, sender, bus))
post = {
'author': 'volttron.cloudagnet',
'source': self.source,
'date': str(datetime.datetime.now()),
'topic': topic,
'headers': headers,
'message': message,
}
post_id = self.collection.insert(post)
_log.debug('mongodb insertion success topic : {}, message : {}'
.format(topic, message))
except Exception as e:
_log.error('Post_data: {}'.format(e))
def command_to_cloud(self, peer, sender, bus, topic, headers, message):
'''
Function:
Send Command to Cloud.
Send Command history to Cloud(MongoDB).
Args: Same with 'post_data'
Returns: None
Note:
Callback method for subscribing.
Subscribe message topic: 'command-to-cloud' send command to cloud,
producer(CloudAgent)-> kafka broker(Cloud) -> consumer(Cloud)
Created: SungonLee, 2017-09-10
Deleted: .
'''
try:
            _log.info('Command_to_cloud: subscribe from message bus, topic:{0}, message:{1}, sender:{2}, bus:{3}'
                      .format(topic, message, sender, bus))
new_value = message[0]
msg = {'from': 'CloudAgent', 'to':'Cloud'
,'message': 'message from VOLTTRON to Cloud', 'new_value': new_value}
# Send command to Consumer(in Cloud)
self.producer.send(self.cloud_consumer_topic, msg)
# Send command data to MongoDB(in Cloud)
self.post_data(topic=self.cloud_consumer_topic, message=msg)
except Exception as e:
_log.error('Command_to_cloud: {}'.format(e))
@Core.receiver("onstart")
def on_message_topic(self, sender, **kwargs):
'''
Function: Resister callback method for sending data(device data, command history) to Cloud(MongoDB).
Args: .
Returns: None
Note:
This method is executed after '__init__' method.
Subscribes to the platform message bus on the actuator, record, datalogger, and device topics.
Created: JinhoSon, 2017-04-14
Modified: SungonLee, 2017-05-20
Deleted: .
'''
_log.debug("sender {}, Kwargs {}".format(sender, kwargs))
# Define method for resistering callback method
def subscriber(subscription, callback_method):
'''
Args:
subscription: topic (e.g. "devices/fake-campus/fake-building/fake-device/PowerState")
callback_method: method resistered
Note:
callback_mothod: 'post_data', 'command_to_cloud'
'''
_log.debug("Subscribing to topic : {}".format(subscription))
self.vip.pubsub.subscribe(peer='pubsub',
prefix=subscription,
callback=callback_method)
# Resister callback method with 'subscriber'
for topic_subscriptions in self.services_topic_list:
subscriber(topic_subscriptions, self.post_data)
subscriber(self.command_topic, self.command_to_cloud)
@Core.periodic(1)
def actuate_something(self):
'''
Function:
Receive message(command) from Cloud(Kafka broker).
Use RPC to set device point value with message infomation.
Args: None
Returns: None
Note: None
Created: SungonLee, 2017-07-20
Modified: SungonLee, 2017-09-20
Deleted: .
'''
        # partition type : namedtuple
        # if timeout_ms is 0, immediately check whether there is any message in the broker
partition = self.consumer.poll(timeout_ms=0, max_records=None)
try:
if len(partition) > 0:
for p in partition:
for response in partition[p]:
# convert string to dictionary
response_dict = ast.literal_eval(response.value)
_log.info('Actuate_something: Receive message from cloud message: {}, new_value: {}'
.format(response_dict, response_dict['new_value']))
new_value = response_dict['new_value']
device_point = response_dict['device_point']
# Use RPC to get point-value in device
result = self.vip.rpc.call(
'platform.actuator',
'get_point',
device_point
).get(timeout=10)
_log.info("Actuate_something: Reading Before commmand: {}".format(result))
# Use RPC to set point-value in device
result = self.vip.rpc.call(
'platform.actuator',
'set_point',
self.core.identity,
device_point,
new_value,
).get(timeout=10)
_log.info("Actuate_something: Reading After command: {}".format(result))
# Send command data to MongoDB(in Cloud)
msg = {'from': 'Cloud',
'to':'CloudAgent',
'message': 'message from Cloud to VOLTTRON',
'device_point': device_point,
'new_value': new_value}
self.post_data(topic=self.cloud_producer_topic, message=msg)
else:
_log.info('Actuate_something: Not receive command from cloud')
except Exception as e:
_log.error('Actuate_something: {}'.format(e))
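    # Sketch of the command message this method expects from the Cloud
    # broker. Field values are examples only; 'device_point' must name a
    # point registered with the platform actuator:
    #
    #   {'from': 'Cloud', 'to': 'CloudAgent',
    #    'message': 'message from Cloud to VOLTTRON',
    #    'device_point': 'fake-campus/fake-building/fake-device/PowerState',
    #    'new_value': 1}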
@Core.periodic(5)
def publish_command(self):
'''
Function:
Publish message(command) to MessageBus(VOLTTRON)
after that CloudAgent subscribes this message from MessageBus
after that CloudAgent sends this message(command) to Cloud.
Args: .
Returns: None
Note:
Test method for publishing example message to MessageBus.
Publish message(command) to MessageBus(VOLTTRON) with topic in config file 'command_topic'.
Period for Publishing message can be exchanged(current 5s).
Created: SungonLee, 2017-09-20
Deleted: .
'''
try:
# Create time, message, value info
headers = {
'date': str(datetime.datetime.now())
}
message = [
self.new_value_,
{
'message': 'message VOLTTRON to Cloud',
'new_value': self.new_value_,
}
]
self.new_value_ += 1
topic = self.command_topic
self.vip.pubsub.publish('pubsub', topic, headers, message)
            _log.info('Publish_command: publish to message bus, topic:{0}, new_value_:{1}, message:{2}'
                      .format(topic, self.new_value_, 'message VOLTTRON to Cloud'))
except Exception as e:
_log.error('Publish_command: {}'.format(e))
# @Core.periodic(5)
# def command_to_cloud_(self):
# '''
# Function:
# Send message(command) to Cloud(Kafka broker)
#
# Args: .
#
# Returns: None
#
# Note:
# Test method for sending example message to Cloud.
# Period for sending message can be exchanged(current 5s).
#
# Created: SungonLee, 2017-09-20
# Deleted: SungonLee, 2017-09-23
# '''
# try:
# new_value = random.randrange(200, 300)
# msg = {'title': 'cloud-title', 'message': 'volttron_to_cloud', 'new_value': new_value}
# # j_msg = json.dumps(msg)
# # print('mag: {}\nj_msg: {}\n\n'.format(msg, j_msg))
# _log.info('Command msg: {}\n'.format(msg))
# # sent('topic', value)
# self.producer.send('cloud-topic', msg)
#
# except Exception as e:
# _log.error('Command_to_cloud: {}'.format(e))
def main(argv=sys.argv):
'''Main method called to start the agent.'''
utils.vip_main(cloud_agent, identity='cloudagent',
version=__version__)
if __name__ == '__main__':
# Entry point for script
try:
sys.exit(main())
except KeyboardInterrupt:
pass
| mit | 2,953,979,375,699,708,400 | 38.014199 | 124 | 0.547728 | false |
donlorenzo/AdvancedConfigParser | src/AdvancedConfigParser.py | 1 | 17728 | # -*- coding: utf-8 -*-
# Copyright (c) 2010, 2014 Lorenz Quack
# This code is released under the MIT License:
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
u"""
AdvancedConfigParser
parse config files written in a .ini-file like style.
In addition to ini files this module has the following advanced features:
* arbitrarily nested subsections
* various (nested) types including int, float, str, list, dict
* various calculations in values
* refer to other keys in values
Example:
global_var = True
[Section_1]
pi = 3.141
[[Sub_Sec_1]]
tau = 2 * pi
[whatever]
foo = [Section_1.pi, Section_1.Sub_Section_1.tau, global_var]
bar = max(foo)
baz = foo if Section_1.pi < 2**2 < Section_1.Sub_Sec_1.tau/2 or True else bar
Configuration can be loaded from strings (parse_string()),
files (parse_file()) or file-like objects (parse_stream()).
Access to the sections and options is done by attribute access:
>>> config = AdvancedConfigParser.parse_file("filename")
>>> print(config.global_var)
>>> print(config.Section_1.pi + config.whatever.bar)
"""
try:
import __builtin__ as builtins
except ImportError:
import builtins
import io
import re
import ast
import operator
from ast_to_src import ast_to_src
def parse_file(filename):
with open(filename) as f:
return parse_stream(f)
def parse_string(s):
return parse_stream(io.StringIO(s))
def parse_stream(stream):
"""
parse the stream into a hirarchical tree of (sub-)sections and options.
return the root/global section.
"""
root = current_section = Section()
current_section._acp_name = "<global>"
current_nesting_level = 0
line = 0
while True:
buf = ""
tmp = stream.readline()
line += 1
if tmp == "":
break
buf += tmp
stripped_buf = buf.strip()
# preserve empty lines
if not stripped_buf:
current_section._acp_add_empty_line()
# ignore comments
elif stripped_buf.startswith("#"):
current_section._acp_add_comment(stripped_buf)
# handle section header
elif stripped_buf.startswith("["):
result = re.match(r"(\[+)([^\d\W]\w*)(\]+)", stripped_buf)
if result is None:
msg = "malformed section header in line {line}:\n{tmp}"
raise SyntaxError(msg.format(**locals()))
if len(result.group(1)) != len(result.group(3)):
msg = "section braket mismatch in line {line}:\n{tmp}"
raise SyntaxError(msg.format(**locals()))
level = min(len(result.group(1)), len(result.group(3)))
if level > current_nesting_level + 1:
msg = "wrong section nesting in line {line}"
raise SyntaxError(msg.format(**locals()))
while current_nesting_level >= level:
current_section = current_section._acp_parent
current_nesting_level -= 1
section_name = ast.parse(result.group(2)).body[0].value.id
if section_name in list(current_section._acp_section_names()):
msg = 'duplicate section "{section_name}".'.format(**locals())
raise SyntaxError(msg)
new_section = Section()
new_section._acp_name = section_name
current_section._acp_add_child(new_section)
current_section = new_section
current_nesting_level += 1
# handle options
else:
node = None
while node is None and tmp != "":
try:
node = ast.parse(stripped_buf)
except SyntaxError:
tmp = stream.readline()
buf += tmp
stripped_buf = buf.strip()
node = node.body[0]
assert isinstance(node, ast.Assign)
option_name = node.targets[0].id
if option_name in list(current_section._acp_option_names()):
msg = ('duplicate option "{option_name}" in '
'section "{current_section._acp_name}".')
raise SyntaxError(msg.format(**locals()))
new_option = Option()
new_option._acp_name = option_name
new_option._acp_value = node.value
current_section._acp_add_child(new_option)
return root
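# Minimal end-to-end sketch; the config text is made up and exercises a
# nested section plus a reference resolved against the global scope:
def _example_parse():
    cfg = parse_string(u"answer = 6 * 7\n"
                       u"[Sec]\n"
                       u"double = 2 * answer\n")
    assert cfg.answer == 42
    assert cfg.Sec.double == 84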
class Section(object):
"""
Section objects allow access to their sub-sections and options via
attribute access and subscript.
new sections and options may be added via "_acp_add_child()".
"""
def __init__(self):
self.__dict__["_acp_name"] = ""
self.__dict__["_acp_parent"] = None
self.__dict__["_acp_order"] = []
self.__dict__["_acp_nesting_level"] = 0
def __str__(self):
return '<Section "{self._acp_name}">'.format(**locals())
__repr__ = __str__
    def __setattr__(self, attr, val):
        try:
            obj = object.__getattribute__(self, attr)
        except AttributeError:
            # attribute does not exist yet; fall through to a normal set
            obj = None
        if isinstance(obj, Option):
            obj._acp_value = val
        else:
            super(Section, self).__setattr__(attr, val)
def __getattribute__(self, attr, raw=False):
obj = super(Section, self).__getattribute__(attr)
if isinstance(obj, Option) and not raw:
return obj._acp_value
else:
return obj
def __getitem__(self, key):
try:
return getattr(self, key)
except AttributeError as e:
raise KeyError(str(e))
def _acp_add_child(self, child):
child._acp_nesting_level = self._acp_nesting_level + 1
if child._acp_parent is None:
child._acp_parent = self
if child._acp_name in self.__dict__:
msg = "duplicate object: {child_name}"
raise SyntaxError(msg.format(child_name=child._acp_name))
self.__dict__[child._acp_name] = child
self.__dict__["_acp_order"].append(child._acp_name)
def _acp_add_empty_line(self):
self.__dict__["_acp_order"].append("\n")
def _acp_add_comment(self, comment):
self.__dict__["_acp_order"].append(comment)
def _acp_sections(self):
for section in (section for section in self.__dict__.values()
if isinstance(section, Section)):
yield section
def _acp_section_names(self):
for section_name in (sn for (sn, s) in self.__dict__.items()
if isinstance(s, Section)):
yield section_name
def _acp_options(self):
for option in (option for option in self.__dict__.values()
if isinstance(option, Option)):
yield option
def _acp_option_names(self):
for option_name in (o_name for o_name, option in self.__dict__.items()
if isinstance(option, Option)):
yield option_name
def _acp_children(self):
for child in (child for child in self.__dict__.values()
if isinstance(child, (Section, Option))):
yield child
def dump(self):
return self.pretty_print(do_indent=False)
def pretty_print(self, indent=0, do_indent=True):
if self._acp_name != "<global>":
template = "{indentation}{left}{section_name}{right}\n"
s = template.format(indentation=" " * indent,
left="[" * self._acp_nesting_level,
right="]" * self._acp_nesting_level,
section_name=self._acp_name)
if do_indent:
indent += 1
else:
s = ""
for child_name in self._acp_order:
if child_name == "\n":
s += "\n"
elif child_name.strip().startswith("#"):
s += "{indent}{comment}\n".format(indent=" " * indent,
comment=child_name)
else:
child = getattr(self, child_name)
if isinstance(child, Section):
s += child.pretty_print(indent)
else:
child_raw = self._acp_get_raw_option(child_name)
template = "{indentation}{option_name} = {option_raw}\n"
s += template.format(indentation=" " * indent,
option_name=child_name,
option_raw=child_raw)
return s
def _acp_get_raw_option(self, option_name):
return self.__getattribute__(option_name, True)._acp_raw_value
class LazyEval(object):
"""
evaluates the ast nodes lazy when used as a descriptor.
when we find that all involved ast-nodes are static we cache the result.
"""
def __init__(self):
self.cache = {}
def __get__(self, instance, owner):
# see if we already cached the result from a previous evaluation
if instance in self.cache:
return self.cache[instance]
# dynamically evaluate the ast-nodes
val, has_refs = self._acp_eval(instance._acp_parent,
instance._acp_ast_node)
# if the ast-nodes have no external references cache the result
if not has_refs:
self.cache[instance] = val
return val
def __set__(self, instance, value):
# if value is a ast-node invalidate the cache
if isinstance(value, ast.AST):
instance._acp_ast_node = value
try:
del self.cache[instance]
except KeyError:
pass
# else it is a static value which can be put directly into the cache
else:
self.cache[instance] = value
def _acp_eval(self, parent, node):
"""
dynamically and recursively evaluate the ast-nodes.
returns a 2-tuple. first is the actual value, second a bool indicating
if this ast-node has external dependencies and should not be cached.
"""
# first try simple conversion of literals
try:
return ast.literal_eval(node), False
except (SyntaxError, ValueError):
pass
# handle external references
if isinstance(node, (ast.Name, ast.Attribute)):
ref = ""
while isinstance(node, ast.Attribute):
ref = "." + node.attr + ref
node = node.value
ref = node.id + ref
return self._acp_resolve_reference(ref, parent), True
# handle lists, tuples and dicts
        elif isinstance(node, (ast.List, ast.Tuple, ast.Dict)):
            vals = []
            has_refs = False
            for child_node in ast.iter_child_nodes(node):
                tmp = self._acp_eval(parent, child_node)
                if not tmp:
                    continue
                vals.append(tmp[0])
                # any child carrying a reference taints the whole container
                has_refs |= tmp[1]
            if isinstance(node, ast.List):
                return list(vals), has_refs
            elif isinstance(node, ast.Tuple):
                return tuple(vals), has_refs
            # ast.Dict: iter_child_nodes yields all key nodes, then all values
            half = len(vals) // 2
            return dict(zip(vals[:half], vals[half:])), has_refs
# handle the following math operators +, -, *, /, //, %, **, |, &, ^
elif isinstance(node, ast.BinOp):
lhs, lhs_has_refs = self._acp_eval(parent, node.left)
rhs, rhs_has_refs = self._acp_eval(parent, node.right)
ops = {ast.Add: operator.add, ast.Sub: operator.sub,
ast.Mult: operator.mul, ast.Div: operator.truediv,
ast.FloorDiv: operator.floordiv, ast.Mod: operator.mod,
ast.Pow: operator.pow, ast.LShift: operator.lshift,
ast.RShift: operator.rshift, ast.BitOr: operator.or_,
ast.BitXor: operator.xor, ast.BitAnd: operator.and_,}
if node.op.__class__ in ops:
return (ops[node.op.__class__](lhs, rhs),
lhs_has_refs | rhs_has_refs)
else:
msg = 'op "{op_name}" not supported yet'
raise SyntaxError(msg.format(op_name=str(node.op.__class__)))
# handle calls to some selected builtin functions
elif isinstance(node, ast.Call):
if node.func.id in ("abs", "all", "any", "bin", "bool", "chr",
"complex", "dict", "divmod", "enumerate",
"float", "hex", "int", "len", "list", "max",
"min", "oct", "ord", "pow", "range", "reversed",
"round", "set", "sorted", "str", "sum", "tuple",
"type", "unichr", "zip", ):
has_refs = False
args = []
for arg_node in node.args:
arg, temp_has_refs = self._acp_eval(parent, arg_node)
args.append(arg)
has_refs |= temp_has_refs
kwargs = {}
for keyword_node in node.keywords:
kwargs[keyword_node.arg], temp_has_refs = self._acp_eval(parent, keyword_node.value)
has_refs |= temp_has_refs
return (builtins.__dict__[node.func.id](*args, **kwargs),
has_refs)
# handle ternary if operator
elif isinstance(node, ast.IfExp):
test, test_has_refs = self._acp_eval(parent, node.test)
if test:
result, has_refs = self._acp_eval(parent, node.body)
else:
result, has_refs = self._acp_eval(parent, node.orelse)
return result, has_refs | test_has_refs
# handle compares
elif isinstance(node, ast.Compare):
astOp2FuncOp = {ast.Eq: operator.eq, ast.NotEq: operator.ne,
ast.Lt: operator.lt, ast.LtE: operator.le,
ast.Gt: operator.gt, ast.GtE: operator.ge,
ast.Is: operator.is_, ast.IsNot: operator.is_not,
# don't use contains because arguments are reversed
ast.In: lambda a, b: a in b,
ast.NotIn: lambda a, b: a not in b}
left, left_has_refs = self._acp_eval(parent, node.left)
has_refs = left_has_refs
for ast_op, ast_right in zip(node.ops, node.comparators):
right, right_has_refs = self._acp_eval(parent, ast_right)
has_refs |= right_has_refs
op = astOp2FuncOp[ast_op.__class__]
if op(left, right):
left = right
else:
return False, has_refs
return True, has_refs
# handle boolean operators
elif isinstance(node, ast.BoolOp):
has_refs = False
if node.op.__class__ == ast.And:
for value in node.values:
v, value_has_refs = self._acp_eval(parent, value)
has_refs |= value_has_refs
if not v:
return False, has_refs
return True, has_refs
elif node.op.__class__ == ast.Or:
for value in node.values:
v, value_has_refs = self._acp_eval(parent, value)
has_refs |= value_has_refs
if v:
return True, has_refs
return False, has_refs
raise RuntimeError("unreachable")
# not sure what this is about...
elif isinstance(node, ast.Load):
pass
else:
raise RuntimeError("unhandled node: " + str(node))
@classmethod
def _acp_resolve_reference(cls, ref, parent):
"""
resolves external references by walking up the tree
until we find a complete match
"""
attrs = ref.split(".")
while parent is not None:
try:
obj = parent
for attr in attrs:
obj = getattr(obj, attr)
return obj
except (KeyError, AttributeError):
parent = parent._acp_parent
raise AttributeError(ref)
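# Caching sketch: a literal option is evaluated once and then served from
# LazyEval's cache, while an option referencing another key stays dynamic
# (the option names here are made up):
def _example_lazy_eval():
    cfg = parse_string(u"a = 1\nb = a + 1\n")
    assert cfg.b == 2   # `b` references `a`, so it is re-evaluated each time
    cfg.a = 5           # a static assignment goes straight into the cache
    assert cfg.b == 6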
class Option(object):
def __init__(self):
self._acp_name = ""
self._acp_parent = None
self._acp_has_refs = True
self._acp_nesting_level = 0
self._acp_ast_node = None
def _acp_get_raw_value(self):
return ast_to_src(self._acp_ast_node)
_acp_value = LazyEval()
_acp_raw_value = property(_acp_get_raw_value)
def __str__(self):
return '<Option {self._acp_name}>'.format(**locals())
__repr__ = __str__
| mit | -2,125,530,160,059,667,500 | 38.838202 | 104 | 0.542137 | false |
rmanoni/mi-instrument | mi/instrument/kut/ek60/ooicore/driver.py | 1 | 39253 | """
@package mi.instrument.kut.ek60.ooicore.driver
@file /mi/instrument/kut/ek60/ooicore/driver.py
@author Richard Han
@brief Driver for the ooicore
Release notes:
This Driver supports the Kongsberg UnderWater Technology's EK60 Instrument.
"""
__author__ = 'Richard Han & Craig Risien'
__license__ = 'Apache 2.0'
import ftplib
import json
import tempfile
import urllib2
import yaml
from mi.core.common import BaseEnum
from mi.core.exceptions import InstrumentParameterException, InstrumentException, SampleException
from mi.core.exceptions import InstrumentConnectionException
from mi.core.instrument.data_particle import DataParticle, CommonDataParticleType, DataParticleKey
from mi.core.instrument.driver_dict import DriverDictKey
from mi.core.instrument.instrument_driver import SingleConnectionInstrumentDriver
from mi.core.instrument.instrument_driver import DriverEvent
from mi.core.instrument.instrument_driver import DriverAsyncEvent
from mi.core.instrument.instrument_driver import DriverProtocolState
from mi.core.instrument.instrument_driver import DriverParameter
from mi.core.instrument.instrument_driver import ResourceAgentState
from mi.core.instrument.instrument_fsm import ThreadSafeFSM
from mi.core.instrument.instrument_protocol import CommandResponseInstrumentProtocol
from mi.core.instrument.protocol_param_dict import ParameterDictType
from mi.core.log import get_logger
from mi.core.log import get_logging_metaclass
log = get_logger()
# newline.
NEWLINE = '\r\n'
# Default Instrument's IP Address
DEFAULT_HOST = "128.193.64.201"
YAML_FILE_NAME = "driver_schedule.yaml"
DEFAULT_PORT = "80"
USER_NAME = "ooi"
PASSWORD = "994ef22"
DEFAULT_CONFIG = {
'file_prefix': "Driver DEFAULT CONFIG_PREFIX",
'file_path': "DEFAULT_FILE_PATH", # relative to filesystem_root/data
    'max_file_size': 288,        # bytes; small test default (50 MB would be 50 * 1024 * 1024)
'intervals': [{
'name': "default",
'type': "constant",
'start_at': "00:00",
'duration': "00:15:00",
'repeat_every': "01:00",
'stop_repeating_at': "23:55",
'interval': 1000,
'max_range': 80,
'frequency': {
38000: {
'mode': 'active',
'power': 100,
'pulse_length': 256
},
120000: {
'mode': 'active',
'power': 100,
'pulse_length': 64
},
200000: {
'mode': 'active',
'power': 120,
'pulse_length': 64
}
}
}]
}
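# Sketch of turning a schedule dict of the shape above into YAML, on the
# assumption that the driver ships schedules to the instrument as a YAML
# document named YAML_FILE_NAME:
def _example_schedule_yaml():
    return yaml.safe_dump(DEFAULT_CONFIG, default_flow_style=False)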
###
# Driver Constant Definitions
###
# String constants
CONNECTED = "connected"
CURRENT_RAW_FILENAME = "current_raw_filename"
CURRENT_RAW_FILESIZE = "current_raw_filesize"
CURRENT_RUNNING_INTERVAL = "current_running_interval"
CURRENT_UTC_TIME = "current_utc_time"
DURATION = "duration"
ER60_CHANNELS = "er60_channels"
ER60_STATUS = "er60_status"
EXECUTABLE = "executable"
FILE_PATH = "file_path"
FILE_PREFIX = "file_prefix"
FREQUENCY = "frequency"
FREQ_120K = "120000"
FREQ_200K = "200000"
FREQ_38K = "38000"
FS_ROOT = "fs_root"
GPTS_ENABLED = "gpts_enabled"
HOST = "host"
INTERVAL = "interval"
INTERVALS = "intervals"
RAW_OUTPUT = "raw_output"
MAX_FILE_SIZE = "max_file_size"
MAX_RANGE = "max_range"
MODE = "mode"
NAME = "name"
NEXT_SCHEDULED_INTERVAL = "next_scheduled_interval"
PID = "pid"
PORT = "port"
POWER = "power"
PULSE_LENGTH = "pulse_length"
SAMPLE_INTERVAL = "sample_interval"
SAMPLE_RANGE = "sample_range"
SAVE_INDEX = "save_index"
SAVE_BOTTOM = "save_bottom"
SAVE_RAW = "save_raw"
SCHEDULE = "schedule"
SCHEDULE_FILENAME = "schedule_filename"
SCHEDULED_INTERVALS_REMAINING = "scheduled_intervals_remaining"
START_AT = "start_at"
STOP_REPEATING_AT = "stop_repeating_at"
TYPE = "type"
class DataParticleType(BaseEnum):
"""
Data particle types produced by this driver
"""
RAW = CommonDataParticleType.RAW
ZPLSC_STATUS = 'zplsc_status'
class ProtocolState(BaseEnum):
"""
Instrument protocol states
"""
UNKNOWN = DriverProtocolState.UNKNOWN
COMMAND = DriverProtocolState.COMMAND
AUTOSAMPLE = DriverProtocolState.AUTOSAMPLE
class ProtocolEvent(BaseEnum):
"""
Protocol events
"""
ENTER = DriverEvent.ENTER
EXIT = DriverEvent.EXIT
GET = DriverEvent.GET
SET = DriverEvent.SET
DISCOVER = DriverEvent.DISCOVER
START_AUTOSAMPLE = DriverEvent.START_AUTOSAMPLE
STOP_AUTOSAMPLE = DriverEvent.STOP_AUTOSAMPLE
ACQUIRE_STATUS = DriverEvent.ACQUIRE_STATUS
class Capability(BaseEnum):
"""
Protocol events that should be exposed to users (subset of above).
"""
START_AUTOSAMPLE = ProtocolEvent.START_AUTOSAMPLE
STOP_AUTOSAMPLE = ProtocolEvent.STOP_AUTOSAMPLE
ACQUIRE_STATUS = ProtocolEvent.ACQUIRE_STATUS
GET = ProtocolEvent.GET
SET = ProtocolEvent.SET
class Parameter(DriverParameter):
"""
Device specific parameters.
"""
SCHEDULE = "schedule"
FTP_IP_ADDRESS = "ftp_ip_address"
FTP_USERNAME = "ftp_username"
FTP_PASSWORD = "ftp_password"
FTP_PORT = "ftp_port"
class Prompt(BaseEnum):
"""
Device i/o prompts..
"""
class Command(BaseEnum):
"""
Instrument command strings
"""
ACQUIRE_STATUS = 'acquire_status'
START_AUTOSAMPLE = 'start_autosample'
STOP_AUTOSAMPLE = 'stop_autosample'
GET = 'get_param'
SET = 'set_param'
###############################################################################
# Data Particles
###############################################################################
class ZPLSCStatusParticleKey(BaseEnum):
ZPLSC_CONNECTED = "zplsc_connected" # Connected to a running ER 60 instance
ZPLSC_ACTIVE_38K_MODE = "zplsc_active_38k_mode" # 38K Transducer transmit mode
ZPLSC_ACTIVE_38K_POWER = "zplsc_active_38k_power" # 38K Transducer transmit power in W
ZPLSC_ACTIVE_38K_PULSE_LENGTH = "zplsc_active_38k_pulse_length" # 38K Transducer transmit pulse length in seconds
ZPLSC_ACTIVE_38K_SAMPLE_INTERVAL = "zplsc_active_38k_sample_interval" # Sample interval in seconds
ZPLSC_ACTIVE_120K_MODE = "zplsc_active_120k_mode" # 120K Transducer transmit mode
ZPLSC_ACTIVE_120K_POWER = "zplsc_active_120k_power" # 120K Transducer transmit power in W
ZPLSC_ACTIVE_120K_PULSE_LENGTH = "zplsc_active_120k_pulse_length" # 120K Transducer Transmit pulse length in seconds
ZPLSC_ACTIVE_120K_SAMPLE_INTERVAL = "zplsc_active_120k_sample_interval" # 120K Sample Interval
ZPLSC_ACTIVE_200K_MODE = "zplsc_active_200k_mode" # 200K Transducer transmit mode
ZPLSC_ACTIVE_200K_POWER = "zplsc_active_200k_power" # 200K Transducer transmit power in W
ZPLSC_ACTIVE_200K_PULSE_LENGTH = "zplsc_active_200k_pulse_length" # 200K Transducer transmit pulse length in seconds
ZPLSC_ACTIVE_200K_SAMPLE_INTERVAL = "zplsc_active_200k_sample_interval" # 200K Transducer sample interval
ZPLSC_CURRENT_UTC_TIME = "zplsc_current_utc_time" # Current UTC Time
ZPLSC_EXECUTABLE = "zplsc_executable" # Executable used to launch ER60
ZPLSC_FS_ROOT = "zplsc_fs_root" # Root directory where data/logs/configs are stored
ZPLSC_NEXT_SCHEDULED_INTERVAL = "zplsc_next_scheduled_interval" # UTC time of next scheduled interval
ZPLSC_HOST = "zplsc_host" # Host IP Address
ZPLSC_PID = "zplsc_pid" # PID of running ER60 process
ZPLSC_PORT = "zplsc_port" # Host port number
ZPLSC_CURRENT_RAW_FILENAME = "zplsc_current_raw_filename" # File name of the current .raw file
ZPLSC_CURRENT_RAW_FILESIZE = "zplsc_current_raw_filesize" # File size of current .raw file
ZPLSC_FILE_PATH = "zplsc_file_path" # File storage path
ZPLSC_FILE_PREFIX = "zplsc_file_prefix" # Current file prefix
ZPLSC_MAX_FILE_SIZE = "zplsc_max_file_size" # Maximum file size
ZPLSC_SAMPLE_RANGE = "zplsc_sample_range" # Recording range
ZPLSC_SAVE_BOTTOM = "zplsc_save_bottom" # Save bottom file
ZPLSC_SAVE_INDEX = "zplsc_save_index" # Save index file
ZPLSC_SAVE_RAW = "zplsc_save_raw" # Save raw file
ZPLSC_SCHEDULED_INTERVALS_REMAINING = "zplsc_scheduled_intervals_remaining" # Number of intervals remaining in running schedule
ZPLSC_GPTS_ENABLED = "zplsc_gpts_enabled" # GPTs enabled
ZPLSC_SCHEDULE_FILENAME = "zplsc_schedule_filename" # Filename for .yaml schedule file
class ZPLSCStatusParticle(DataParticle):
"""
Routines for parsing raw data into a status particle structure. Override
the building of values, and the rest should come along for free.
Sample:
{'connected': True,
'er60_channels': {'GPT 38 kHz 00907207b7b1 6-1 OOI.38|200': {'frequency': 38000,
'mode': 'active',
'power': 100.0,
'pulse_length': 0.000256,
'sample_interval': 6.4e-05},
'GPT 120 kHz 00907207b7dc 1-1 ES120-7CD': {'frequency': 120000,
'mode': 'active',
'power': 100.0,
'pulse_length': 6.4e-05,
'sample_interval': 1.6e-05},
'GPT 200 kHz 00907207b7b1 6-2 OOI38|200': {'frequency': 200000,
'mode': 'active',
'power': 120.0,
'pulse_length': 6.4e-05,
'sample_interval': 1.6e-05}},
'er60_status': {'current_running_interval': None,
'current_utc_time': '2014-07-08 22:34:18.667000',
'executable': 'c:/users/ooi/desktop/er60.lnk',
'fs_root': 'D:/',
'host': '157.237.15.100',
'next_scheduled_interval': None,
'pid': 1864,
'port': 56635,
'raw_output': {'current_raw_filename': 'OOI-D20140707-T214500.raw',
'current_raw_filesize': None,
'file_path': 'D:\\data\\QCT_1',
'file_prefix': 'OOI',
'max_file_size': 52428800,
'sample_range': 220.0,
'save_bottom': True,
'save_index': True,
'save_raw': True},
'scheduled_intervals_remaining': 0},
'gpts_enabled': False,
'schedule': {},
'schedule_filename': 'qct_configuration_example_1.yaml'}
"""
__metaclass__ = get_logging_metaclass(log_level='trace')
_data_particle_type = DataParticleType.ZPLSC_STATUS
def _encode_value(self, name, value, encoding_function):
"""
Encode a value using the encoding function, if it fails store the error in a queue
Override to handle None values.
"""
encoded_val = None
if value is not None:
try:
encoded_val = encoding_function(value)
except Exception:
log.error("Data particle error encoding. Name:%s Value:%s", name, value)
self._encoding_errors.append({name: value})
return {DataParticleKey.VALUE_ID: name,
DataParticleKey.VALUE: encoded_val}
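    # Sketch of the None handling above: a None value is emitted with a
    # null VALUE instead of being pushed through the encoder, so (roughly):
    #
    #   self._encode_value('zplsc_pid', None, int)
    #   -> {DataParticleKey.VALUE_ID: 'zplsc_pid', DataParticleKey.VALUE: None}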
def _build_parsed_values(self):
"""
Parse ZPLSC Status response and return the ZPLSC Status particles
@throws SampleException If there is a problem with sample
"""
try:
log.debug("status raw_data = %s", self.raw_data)
config = self.raw_data
if not isinstance(config, dict):
raise SampleException("ZPLSC status data is not a dictionary" % self.raw_data)
active_200k_mode = None
active_200k_power = None
active_200k_pulse_length = None
active_200k_sample_interval = None
active_120k_mode = None
active_120k_power = None
active_120k_pulse_length = None
active_120k_sample_interval = None
active_38k_mode = None
active_38k_power = None
active_38k_pulse_length = None
active_38k_sample_interval = None
connected = config.get(CONNECTED)
er60_channels = config.get(ER60_CHANNELS)
if er60_channels is not None:
for key in er60_channels:
if '200 kHz' in key:
active_200k_mode = er60_channels[key].get(MODE)
active_200k_power = er60_channels[key].get(POWER)
active_200k_pulse_length = er60_channels[key].get(PULSE_LENGTH)
active_200k_sample_interval = er60_channels[key].get(SAMPLE_INTERVAL)
elif '120 kHz' in key:
active_120k_mode = er60_channels[key].get(MODE)
active_120k_power = er60_channels[key].get(POWER)
active_120k_pulse_length = er60_channels[key].get(PULSE_LENGTH)
active_120k_sample_interval = er60_channels[key].get(SAMPLE_INTERVAL)
elif '38 kHz' in key:
active_38k_mode = er60_channels[key].get(MODE)
active_38k_power = er60_channels[key].get(POWER)
active_38k_pulse_length = er60_channels[key].get(PULSE_LENGTH)
active_38k_sample_interval = er60_channels[key].get(SAMPLE_INTERVAL)
current_utc_time = None
executable = None
fs_root = None
next_scheduled_interval = 'None'
host = None
pid = '0'
port = None
current_raw_filename = None
current_raw_filesize = 0
file_path = None
file_prefix = None
max_file_size = None
sample_range = None
save_bottom = None
save_index = None
save_raw = None
scheduled_intervals_remaining = None
er60_status = config.get(ER60_STATUS)
if er60_status is not None:
current_utc_time = er60_status.get(CURRENT_UTC_TIME)
executable = er60_status.get(EXECUTABLE)
fs_root = er60_status.get(FS_ROOT)
if er60_status.get(NEXT_SCHEDULED_INTERVAL) is not None:
next_scheduled_interval = er60_status.get(NEXT_SCHEDULED_INTERVAL)
host = er60_status.get(HOST)
if er60_status.get(PID) is not None:
pid = er60_status.get(PID)
port = er60_status.get(PORT)
raw_output = er60_status.get(RAW_OUTPUT)
if raw_output is not None:
current_raw_filename = raw_output.get(CURRENT_RAW_FILENAME)
if raw_output.get(CURRENT_RAW_FILESIZE) is not None:
current_raw_filesize = raw_output.get(CURRENT_RAW_FILESIZE)
file_path = raw_output.get(FILE_PATH)
file_prefix = raw_output.get(FILE_PREFIX)
max_file_size = raw_output.get(MAX_FILE_SIZE)
sample_range = raw_output.get(SAMPLE_RANGE)
save_bottom = raw_output.get(SAVE_BOTTOM)
save_index = raw_output.get(SAVE_INDEX)
save_raw = raw_output.get(SAVE_RAW)
scheduled_intervals_remaining = er60_status.get(SCHEDULED_INTERVALS_REMAINING)
gpts_enabled = config.get(GPTS_ENABLED)
schedule_filename = config.get(SCHEDULE_FILENAME)
except KeyError:
raise SampleException("ValueError while converting ZPLSC Status: [%s]" % self.raw_data)
result = [
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_CONNECTED, connected, int),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_200K_MODE, active_200k_mode, str),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_200K_POWER, active_200k_power, float),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_200K_PULSE_LENGTH, active_200k_pulse_length, float),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_200K_SAMPLE_INTERVAL, active_200k_sample_interval, float),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_120K_MODE, active_120k_mode, str),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_120K_POWER, active_120k_power, float),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_120K_PULSE_LENGTH, active_120k_pulse_length, float),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_120K_SAMPLE_INTERVAL, active_120k_sample_interval, float),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_38K_MODE, active_38k_mode, str),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_38K_POWER, active_38k_power, float),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_38K_PULSE_LENGTH, active_38k_pulse_length, float),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_ACTIVE_38K_SAMPLE_INTERVAL, active_38k_sample_interval, float),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_CURRENT_UTC_TIME, current_utc_time, str),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_EXECUTABLE, executable, str),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_FS_ROOT, fs_root, str),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_NEXT_SCHEDULED_INTERVAL, next_scheduled_interval, str),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_HOST, host, str),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_PID, pid, str),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_PORT, port, int),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_CURRENT_RAW_FILENAME, current_raw_filename, str),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_CURRENT_RAW_FILESIZE, current_raw_filesize, int),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_FILE_PATH, file_path, str),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_FILE_PREFIX, file_prefix, str),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_MAX_FILE_SIZE, max_file_size, int),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SAMPLE_RANGE, sample_range, float),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SAVE_BOTTOM, save_bottom, int),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SAVE_INDEX, save_index, int),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SAVE_RAW, save_raw, int),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SCHEDULED_INTERVALS_REMAINING, scheduled_intervals_remaining, int),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_GPTS_ENABLED, gpts_enabled, int),
self._encode_value(ZPLSCStatusParticleKey.ZPLSC_SCHEDULE_FILENAME, schedule_filename, str)
]
log.debug("build_parsed_value: %s", result)
return result
###############################################################################
# Driver
###############################################################################
class InstrumentDriver(SingleConnectionInstrumentDriver):
"""
InstrumentDriver subclass
Subclasses SingleConnectionInstrumentDriver with connection state machine.
"""
########################################################################
# Protocol builder.
########################################################################
def _build_protocol(self):
"""
Construct the driver protocol state machine.
"""
self._protocol = Protocol(Prompt, NEWLINE, self._driver_event)
###########################################################################
# Protocol
###########################################################################
class Protocol(CommandResponseInstrumentProtocol):
"""
Instrument protocol class
Subclasses CommandResponseInstrumentProtocol
"""
__metaclass__ = get_logging_metaclass(log_level='trace')
def __init__(self, prompts, newline, driver_event):
"""
Protocol constructor.
@param prompts A BaseEnum class containing instrument prompts.
@param newline The newline.
@param driver_event Driver process event callback.
"""
# Construct protocol superclass.
CommandResponseInstrumentProtocol.__init__(self, prompts, newline, driver_event)
# Build protocol state machine.
self._protocol_fsm = ThreadSafeFSM(ProtocolState, ProtocolEvent,
ProtocolEvent.ENTER, ProtocolEvent.EXIT)
# Add event handlers for protocol state machine.
self._protocol_fsm.add_handler(ProtocolState.UNKNOWN, ProtocolEvent.ENTER, self._handler_unknown_enter)
self._protocol_fsm.add_handler(ProtocolState.UNKNOWN, ProtocolEvent.DISCOVER, self._handler_unknown_discover)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.ENTER, self._handler_command_enter)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.START_AUTOSAMPLE, self._handler_command_autosample)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.ACQUIRE_STATUS, self._handler_command_acquire_status)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.GET, self._handler_command_get)
self._protocol_fsm.add_handler(ProtocolState.COMMAND, ProtocolEvent.SET, self._handler_command_set)
self._protocol_fsm.add_handler(ProtocolState.AUTOSAMPLE, ProtocolEvent.STOP_AUTOSAMPLE, self._handler_autosample_stop)
# Construct the parameter dictionary containing device parameters,
# current parameter values, and set formatting functions.
self._build_driver_dict()
self._build_command_dict()
self._build_param_dict()
# Add sample handlers.
        # Start the state machine in the UNKNOWN state.
self._protocol_fsm.start(ProtocolState.UNKNOWN)
        # Commands sent to the device, to be filtered from responses for telnet direct access.
self._sent_cmds = []
def _build_param_dict(self):
"""
Populate the parameter dictionary with parameters.
For each parameter key, add match string, match lambda function,
and value formatting function for set commands.
"""
self._param_dict.add(Parameter.SCHEDULE,
r'schedule:\s+(.*)',
lambda match: match.group(1),
str,
type=ParameterDictType.STRING,
display_name="Schedule",
description="Large block of text used to create the .yaml file defining the sampling schedule.",
startup_param=True,
default_value=yaml.dump(DEFAULT_CONFIG, default_flow_style=False))
self._param_dict.add(Parameter.FTP_IP_ADDRESS,
                             r'ftp address:\s+(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})',
lambda match: match.group(1),
str,
type=ParameterDictType.STRING,
display_name="FTP IP Address",
description="IP address the driver uses to connect to the instrument FTP server.",
startup_param=True,
default_value=DEFAULT_HOST)
self._param_dict.add(Parameter.FTP_USERNAME,
r'username:(.*)',
lambda match: match.group(1),
str,
type=ParameterDictType.STRING,
display_name="FTP User Name",
description="Username used to connect to the FTP server.",
startup_param=True,
default_value=USER_NAME)
self._param_dict.add(Parameter.FTP_PASSWORD,
r'password:(.*)',
lambda match: match.group(1),
str,
type=ParameterDictType.STRING,
display_name="FTP Password",
description="Password used to connect to the FTP server.",
startup_param=True,
default_value=PASSWORD)
self._param_dict.add(Parameter.FTP_PORT,
r'port:(.*)',
lambda match: match.group(1),
str,
type=ParameterDictType.STRING,
display_name="FTP Port",
description="Location on the OOI infrastructure where .raw files and echogram images will be stored.",
startup_param=True,
default_value=DEFAULT_PORT)
def _build_driver_dict(self):
"""
Populate the driver dictionary with options
"""
self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, True)
def _build_command_dict(self):
"""
Populate the command dictionary with command.
"""
self._cmd_dict.add(Capability.START_AUTOSAMPLE, display_name="Start Autosample")
self._cmd_dict.add(Capability.STOP_AUTOSAMPLE, display_name="Stop Autosample")
self._cmd_dict.add(Capability.ACQUIRE_STATUS, display_name="Acquire Status")
def _filter_capabilities(self, events):
"""
Return a list of currently available capabilities.
"""
return [x for x in events if Capability.has(x)]
########################################################################
# Unknown handlers.
########################################################################
def _handler_unknown_enter(self, *args, **kwargs):
"""
Enter unknown state.
"""
# Tell driver superclass to send a state change event.
# Superclass will query the state.
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
def _handler_unknown_exit(self, *args, **kwargs):
"""
Exit unknown state.
"""
pass
def _handler_unknown_discover(self, *args, **kwargs):
"""
Discover current state
@retval (next_state, next_agent_state)
"""
# Try to get the status to check if the instrument is alive
host = self._param_dict.get_config_value(Parameter.FTP_IP_ADDRESS)
port = self._param_dict.get_config_value(Parameter.FTP_PORT)
response = self._url_request(host, port, '/status.json')
if response is None:
error_msg = "_handler_unknown_discover: Unable to connect to host: %s" % host
log.error(error_msg)
raise InstrumentConnectionException(error_msg)
return ProtocolState.COMMAND, ResourceAgentState.IDLE
########################################################################
# Command handlers.
########################################################################
def _handler_command_enter(self, *args, **kwargs):
"""
Enter command state.
@throws InstrumentTimeoutException if the device cannot be woken.
        @throws InstrumentProtocolException if the update commands are not recognized.
"""
self._init_params()
# Tell driver superclass to send a state change event.
# Superclass will query the state.
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
def _handler_command_exit(self, *args, **kwargs):
"""
Exit command state.
"""
pass
def _handler_command_get(self, *args, **kwargs):
"""
Get parameters while in the command state.
@param params List of the parameters to pass to the state
@retval returns (next_state, result) where result is a dict {}. No
agent state changes happening with Get, so no next_agent_state
@throw InstrumentParameterException for invalid parameter
"""
result_vals = {}
# Retrieve required parameter.
# Raise if no parameter provided, or not a dict.
try:
params = args[0]
except IndexError:
raise InstrumentParameterException('_handler_command_get requires a parameter dict.')
if Parameter.ALL in params:
log.debug("Parameter ALL in params")
params = Parameter.list()
params.remove(Parameter.ALL)
log.debug("_handler_command_get: params = %s", params)
if params is None or not isinstance(params, list):
raise InstrumentParameterException("GET parameter list not a list!")
# fill the return values from the update
for param in params:
if not Parameter.has(param):
raise InstrumentParameterException("Invalid parameter!")
            result_vals[param] = self._param_dict.get(param)
result = result_vals
log.debug("Get finished, next_state: %s, result: %s", None, result)
return None, result
def _handler_command_set(self, *args, **kwargs):
"""
Set parameter
@retval next state, result
"""
startup = False
try:
params = args[0]
except IndexError:
raise InstrumentParameterException('_handler_command_set: command requires a parameter dict.')
try:
startup = args[1]
except IndexError:
pass
if not isinstance(params, dict):
raise InstrumentParameterException('Set parameters not a dict.')
# For each key, val in the params, set the param dictionary.
old_config = self._param_dict.get_config()
self._set_params(params, startup)
new_config = self._param_dict.get_config()
if old_config != new_config:
self._driver_event(DriverAsyncEvent.CONFIG_CHANGE)
return None, None
def _set_params(self, *args, **kwargs):
"""
Issue commands to the instrument to set various parameters
"""
try:
params = args[0]
except IndexError:
raise InstrumentParameterException('Set command requires a parameter dict.')
# verify param is not readonly param
self._verify_not_readonly(*args, **kwargs)
for key, val in params.iteritems():
log.debug("KEY = %s VALUE = %s", key, val)
self._param_dict.set_value(key, val)
if key == Parameter.SCHEDULE:
self._ftp_schedule_file()
# Load the schedule file
host = self._param_dict.get(Parameter.FTP_IP_ADDRESS)
port = self._param_dict.get_config_value(Parameter.FTP_PORT)
log.debug("_set_params: stop the current schedule file")
self._url_request(host, port, '/stop_schedule', data={})
log.debug("_set_params: upload driver YAML file to host %s", host)
res = self._url_request(host, port, '/load_schedule', data=json.dumps({'filename': YAML_FILE_NAME}))
log.debug("_set_params: result from load = %s", res)
log.debug("set complete, update params")
def _ftp_schedule_file(self):
"""
Construct a YAML schedule file and
ftp the file to the Instrument server
"""
# Create a temporary file and write the schedule YAML information to the file
try:
config_file = tempfile.TemporaryFile()
log.debug("temporary file created")
if config_file is None or not isinstance(config_file, file):
raise InstrumentException("config_file is not a temp file!")
config_file.write(self._param_dict.get(Parameter.SCHEDULE))
config_file.seek(0)
log.debug("finished writing config file:\n%r", self._param_dict.get(Parameter.SCHEDULE))
except Exception as e:
log.error("Create schedule YAML file exception: %s", e)
raise e
# FTP the schedule file to the ZPLSC server
host = ''
try:
log.debug("Create a ftp session")
host = self._param_dict.get_config_value(Parameter.FTP_IP_ADDRESS)
log.debug("Got host ip address %s", host)
ftp_session = ftplib.FTP()
ftp_session.connect(host)
ftp_session.login(USER_NAME, PASSWORD)
log.debug("ftp session was created...")
ftp_session.set_pasv(False)
ftp_session.cwd("config")
ftp_session.storlines('STOR ' + YAML_FILE_NAME, config_file)
files = ftp_session.dir()
log.debug("*** Config yaml file sent: %s", files)
ftp_session.quit()
config_file.close()
        except (ftplib.socket.error, ftplib.socket.gaierror) as e:
log.error("ERROR: cannot reach FTP Host %s: %s ", host, e)
raise InstrumentException("ERROR: cannot reach FTP Host %s " % host)
log.debug("*** FTP %s to ftp host %s successfully", YAML_FILE_NAME, host)
def _url_request(self, host, port, page, data=None):
"""
Loads a schedule file previously uploaded to the instrument and sets it as
the active instrument configuration
"""
result = None
url = "https://%s:%d/%s" % (host, port, page)
try:
if data is not None:
log.debug("Request data: %s", data)
req = urllib2.Request(url, data=data, headers={'Content-Type': 'application/json'})
else:
log.debug("No request data")
req = urllib2.Request(url)
log.debug("Request url: %s", req.__dict__)
f = urllib2.urlopen(req, timeout=10)
res = f.read()
f.close()
        except urllib2.URLError as e:
            # urllib2.HTTPError is a subclass of URLError, so this handles both.
            log.error("Failed to open url %s. %s", url, e)
            return result
try:
result = json.loads(res)
except ValueError:
log.error("Request from url %s is not in valid json format, returned: %s.", url, res)
return result
def _handler_command_autosample(self, *args, **kwargs):
"""
Start autosample mode
@retval next_state, (next_resource_state, result) tuple
"""
# FTP the driver schedule file to the instrument server
self._ftp_schedule_file()
# Stop the current running schedule file just in case one is running and
# load the driver schedule file
host = self._param_dict.get(Parameter.FTP_IP_ADDRESS)
port = self._param_dict.get_config_value(Parameter.FTP_PORT)
log.debug("_handler_command_autosample: stop the current schedule file")
self._url_request(host, port, '/stop_schedule', data={})
log.debug("_handler_command_autosample: upload driver YAML file to host %s", host)
res = self._url_request(host, port, '/load_schedule', data=json.dumps({'filename': YAML_FILE_NAME}))
log.debug(" result from load = %s", res)
        if res is None or res.get('result') != 'OK':
            raise InstrumentException('_handler_command_autosample: Load Instrument Schedule File Error.')
        res = self._url_request(host, port, '/start_schedule', data={})
        if res is None or res.get('result') != 'OK':
            raise InstrumentException('_handler_command_autosample: Start Schedule File Error.')
return ProtocolState.AUTOSAMPLE, (ResourceAgentState.STREAMING, None)
def _handler_command_acquire_status(self, *args, **kwargs):
"""
Acquire status from the instrument
@retval next_state, (next_resource_state, result) tuple
"""
host = self._param_dict.get_config_value(Parameter.FTP_IP_ADDRESS)
port = self._param_dict.get_config_value(Parameter.FTP_PORT)
response = self._url_request(host, port, '/status.json')
if response:
log.debug("_handler_command_acquire_status: response from status = %r", response)
particle = ZPLSCStatusParticle(response, port_timestamp=self._param_dict.get_current_timestamp())
self._driver_event(DriverAsyncEvent.SAMPLE, particle.generate())
else:
log.error("_handler_command_acquire_status: Failed to acquire status from instrument.")
return None, (None, None)
########################################################################
# Autosample handlers
########################################################################
def _handler_autosample_enter(self, *args, **kwargs):
"""
Enter autosample mode
"""
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
def _handler_autosample_stop(self):
"""
Stop autosample mode
@retval next_state, (next_resource_state, result) tuple
"""
host = self._param_dict.get_config_value(Parameter.FTP_IP_ADDRESS)
port = self._param_dict.get_config_value(Parameter.FTP_PORT)
log.debug("_handler_autosample_stop: stop the current schedule file")
res = self._url_request(host, port, '/stop_schedule', data={})
log.debug("handler_autosample_stop: stop schedule returns %r", res)
return ProtocolState.COMMAND, (ResourceAgentState.COMMAND, None) | bsd-2-clause | 1,710,098,018,871,872,300 | 42.567148 | 132 | 0.572542 | false |
EmbodiedCognition/pagoda | pagoda/physics.py | 1 | 47156 | '''This module contains convenience wrappers for ODE objects.'''
from __future__ import division
import collections
import numpy as np
import ode
BodyState = collections.namedtuple(
'BodyState', 'name position quaternion linear_velocity angular_velocity')
class Registrar(type):
'''A metaclass that builds a registry of its subclasses.'''
def __init__(cls, name, bases, dct):
if not hasattr(cls, '_registry'):
cls._registry = {}
else:
key = name.lower()
for i in range(3, len(name) + 1):
cls._registry[key[:i]] = cls
super(Registrar, cls).__init__(name, bases, dct)
def build(cls, key, *args, **kwargs):
return cls._registry[key.lower()](*args, **kwargs)
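# A minimal sketch of how the registry is used: subclass names are registered
# under every lowercase prefix of length >= 3, so (assuming a world object `w`)
# all of the following resolve to the same Capsule class:
#
#   Body.build('capsule', 'arm', w, radius=0.1, length=0.5)
#   Body.build('caps', 'arm', w, radius=0.1, length=0.5)
#   Body.build('cap', 'arm', w, radius=0.1, length=0.5)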
class Body(Registrar(str('Base'), (), {})):
'''This class wraps things that participate in the ODE physics simulation.
This class basically provides lots of Python-specific properties that call
the equivalent ODE getters and setters for things like position, rotation,
etc.
'''
def __init__(self, name, world, density=1000., mass=None, **shape):
self.name = name
self.world = world
self.shape = shape
m = ode.Mass()
self.init_mass(m, density, mass)
self.ode_body = ode.Body(world.ode_world)
self.ode_body.setMass(m)
self.ode_geom = getattr(ode, 'Geom%s' % self.__class__.__name__)(
world.ode_space, **shape)
self.ode_geom.setBody(self.ode_body)
def __str__(self):
return '{0.__class__.__name__} {0.name} at {1}'.format(
self, self.position.round(3))
@property
def mass(self):
'''The ODE mass object for this body.'''
return self.ode_body.getMass()
@property
def state(self):
'''The state of this body includes:
- name of the body (str)
- position (3-tuple)
- quaternion (4-tuple)
- linear velocity (3-tuple)
- angular velocity (3-tuple)
'''
return BodyState(self.name,
tuple(self.position),
tuple(self.quaternion),
tuple(self.linear_velocity),
tuple(self.angular_velocity))
@state.setter
def state(self, state):
'''Set the state of this body.
Parameters
----------
state : BodyState tuple
The desired state of the body.
'''
assert self.name == state.name, \
'state name "{}" != body name "{}"'.format(state.name, self.name)
self.position = state.position
self.quaternion = state.quaternion
self.linear_velocity = state.linear_velocity
self.angular_velocity = state.angular_velocity
@property
def position(self):
'''The (x, y, z) coordinates of the center of this body.'''
return np.array(self.ode_body.getPosition())
@position.setter
def position(self, position):
'''Set the (x, y, z) coordinates of the center of this body.
Parameters
----------
position : 3-tuple of float
The coordinates of the desired center of this body.
'''
self.ode_body.setPosition(tuple(position))
@property
def rotation(self):
'''The rotation matrix for this body.'''
return np.array(self.ode_body.getRotation()).reshape((3, 3))
@rotation.setter
def rotation(self, rotation):
'''Set the rotation of this body using a rotation matrix.
Parameters
----------
rotation : sequence of 9 floats
The desired rotation matrix for this body.
'''
if isinstance(rotation, np.ndarray):
rotation = rotation.ravel()
self.ode_body.setRotation(tuple(rotation))
@property
def quaternion(self):
'''The (w, x, y, z) rotation quaternion for this body.'''
return np.array(self.ode_body.getQuaternion())
@quaternion.setter
def quaternion(self, quaternion):
self.ode_body.setQuaternion(tuple(quaternion))
@property
def linear_velocity(self):
'''Current linear velocity of this body (in world coordinates).'''
return np.array(self.ode_body.getLinearVel())
@linear_velocity.setter
def linear_velocity(self, velocity):
'''Set the linear velocity for this body.
Parameters
----------
velocity : 3-tuple of float
The desired velocity for this body, in world coordinates.
'''
self.ode_body.setLinearVel(tuple(velocity))
@property
def angular_velocity(self):
'''Current angular velocity of this body (in world coordinates).'''
return np.array(self.ode_body.getAngularVel())
@angular_velocity.setter
def angular_velocity(self, velocity):
'''Set the angular velocity for this body.
Parameters
----------
velocity : 3-tuple of float
The desired angular velocity for this body, in world coordinates.
'''
self.ode_body.setAngularVel(tuple(velocity))
@property
def force(self):
'''Current net force acting on this body (in world coordinates).'''
return np.array(self.ode_body.getForce())
@force.setter
def force(self, force):
'''Set the force acting on this body.
Parameters
----------
force : 3-tuple of float
The desired force acting on this body, in world coordinates.
'''
self.ode_body.setForce(tuple(force))
@property
def torque(self):
'''Current net torque acting on this body (in world coordinates).'''
return np.array(self.ode_body.getTorque())
@torque.setter
def torque(self, torque):
'''Set the torque acting on this body.
Parameters
----------
torque : 3-tuple of float
The desired torque acting on this body, in world coordinates.
'''
self.ode_body.setTorque(tuple(torque))
@property
def is_kinematic(self):
'''True iff this body is kinematic.'''
return self.ode_body.isKinematic()
@is_kinematic.setter
def is_kinematic(self, is_kinematic):
'''Set the kinematic/dynamic attribute for this body.
        In pagoda, kinematic bodies have infinite mass; they are unaffected by
        forces, but they still interact with other bodies via collisions.
Parameters
----------
is_kinematic : bool
If True, this body will be set to kinematic. If False, it will be
set to dynamic.
'''
if is_kinematic:
self.ode_body.setKinematic()
else:
self.ode_body.setDynamic()
@property
def follows_gravity(self):
'''True iff this body follows gravity.'''
return self.ode_body.getGravityMode()
@follows_gravity.setter
def follows_gravity(self, follows_gravity):
'''Set whether this body follows gravity.
Parameters
----------
follows_gravity : bool
This body will follow gravity iff this parameter is True.
'''
self.ode_body.setGravityMode(follows_gravity)
def rotate_to_body(self, x):
'''Rotate the given vector to the same orientation as this body.
Parameters
----------
x : 3-tuple of float
A point in three dimensions.
Returns
-------
xrot : 3-tuple of float
The same point after rotation into the orientation of this body.
'''
return np.dot(x, self.rotation)
def body_to_world(self, position):
'''Convert a body-relative offset to world coordinates.
Parameters
----------
position : 3-tuple of float
A tuple giving body-relative offsets.
Returns
-------
position : 3-tuple of float
A tuple giving the world coordinates of the given offset.
'''
return np.array(self.ode_body.getRelPointPos(tuple(position)))
def world_to_body(self, position):
'''Convert a point in world coordinates to a body-relative offset.
Parameters
----------
position : 3-tuple of float
A world coordinates position.
Returns
-------
offset : 3-tuple of float
A tuple giving the body-relative offset of the given position.
'''
return np.array(self.ode_body.getPosRelPoint(tuple(position)))
def relative_offset_to_world(self, offset):
'''Convert a relative body offset to world coordinates.
Parameters
----------
offset : 3-tuple of float
The offset of the desired point, given as a relative fraction of the
size of this body. For example, offset (0, 0, 0) is the center of
the body, while (0.5, -0.2, 0.1) describes a point halfway from the
center towards the maximum x-extent of the body, 20% of the way from
the center towards the minimum y-extent, and 10% of the way from the
center towards the maximum z-extent.
Returns
-------
position : 3-tuple of float
A position in world coordinates of the given body offset.
'''
return np.array(self.body_to_world(offset * self.dimensions / 2))
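    # A worked sketch: for an unrotated box with lengths (2, 4, 6) centered at
    # the origin, relative_offset_to_world((1, 0, -1)) scales the offset by half
    # the dimensions, giving the world point (1, 0, -3) -- the midpoint of the
    # bottom edge of the box's +x face.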
def add_force(self, force, relative=False, position=None, relative_position=None):
'''Add a force to this body.
Parameters
----------
force : 3-tuple of float
A vector giving the forces along each world or body coordinate axis.
relative : bool, optional
If False, the force values are assumed to be given in the world
coordinate frame. If True, they are assumed to be given in the
body-relative coordinate frame. Defaults to False.
position : 3-tuple of float, optional
If given, apply the force at this location in world coordinates.
Defaults to the current position of the body.
relative_position : 3-tuple of float, optional
If given, apply the force at this relative location on the body. If
given, this method ignores the ``position`` parameter.
'''
b = self.ode_body
if relative_position is not None:
op = b.addRelForceAtRelPos if relative else b.addForceAtRelPos
op(force, relative_position)
elif position is not None:
op = b.addRelForceAtPos if relative else b.addForceAtPos
op(force, position)
else:
op = b.addRelForce if relative else b.addForce
op(force)
def add_torque(self, torque, relative=False):
'''Add a torque to this body.
Parameters
----------
force : 3-tuple of float
A vector giving the torque along each world or body coordinate axis.
relative : bool, optional
If False, the torque values are assumed to be given in the world
coordinate frame. If True, they are assumed to be given in the
body-relative coordinate frame. Defaults to False.
'''
op = self.ode_body.addRelTorque if relative else self.ode_body.addTorque
op(torque)
def join_to(self, joint, other_body=None, **kwargs):
'''Connect this body to another one using a joint.
This method creates a joint to fasten this body to the other one. See
:func:`World.join`.
Parameters
----------
joint : str
The type of joint to use when connecting these bodies.
other_body : :class:`Body` or str, optional
The other body to join with this one. If not given, connects this
body to the world.
'''
self.world.join(joint, self, other_body, **kwargs)
def connect_to(self, joint, other_body, offset=(0, 0, 0), other_offset=(0, 0, 0),
**kwargs):
'''Move another body next to this one and join them together.
This method will move the ``other_body`` so that the anchor points for
the joint coincide. It then creates a joint to fasten the two bodies
together. See :func:`World.move_next_to` and :func:`World.join`.
Parameters
----------
joint : str
The type of joint to use when connecting these bodies.
other_body : :class:`Body` or str
The other body to join with this one.
offset : 3-tuple of float, optional
The body-relative offset where the anchor for the joint should be
placed. Defaults to (0, 0, 0). See :func:`World.move_next_to` for a
description of how offsets are specified.
other_offset : 3-tuple of float, optional
The offset on the second body where the joint anchor should be
placed. Defaults to (0, 0, 0). Like ``offset``, this is given as an
offset relative to the size and shape of ``other_body``.
'''
anchor = self.world.move_next_to(self, other_body, offset, other_offset)
self.world.join(joint, self, other_body, anchor=anchor, **kwargs)
class Box(Body):
@property
def lengths(self):
return self.shape['lengths']
@property
def dimensions(self):
return np.array(self.lengths).squeeze()
@property
def volume(self):
return np.prod(self.lengths)
def init_mass(self, m, density, mass):
if mass:
density = mass / self.volume
m.setBox(density, *self.lengths)
class Sphere(Body):
@property
def radius(self):
return self.shape['radius']
@property
def dimensions(self):
d = 2 * self.radius
return np.array([d, d, d]).squeeze()
@property
def volume(self):
return 4 / 3 * np.pi * self.radius ** 3
def init_mass(self, m, density, mass):
if mass:
density = mass / self.volume
m.setSphere(density, self.radius)
class Cylinder(Body):
@property
def radius(self):
return self.shape['radius']
@property
def length(self):
return self.shape['length']
@property
def dimensions(self):
d = 2 * self.radius
return np.array([d, d, self.length]).squeeze()
@property
def volume(self):
return self.length * np.pi * self.radius ** 2
def init_mass(self, m, density, mass):
if mass:
density = mass / self.volume
m.setCylinder(density, 3, self.radius, self.length)
class Capsule(Body):
@property
def radius(self):
return self.shape['radius']
@property
def length(self):
return self.shape['length']
@property
def dimensions(self):
d = 2 * self.radius
return np.array([d, d, d + self.length]).squeeze()
@property
def volume(self):
return 4 / 3 * np.pi * self.radius ** 3 + \
self.length * np.pi * self.radius ** 2
def init_mass(self, m, density, mass):
if mass:
density = mass / self.volume
m.setCapsule(density, 3, self.radius, self.length)
def _get_params(target, param, dof):
'''Get the given param from each of the DOFs for a joint.'''
return [target.getParam(getattr(ode, 'Param{}{}'.format(param, s)))
for s in ['', '2', '3'][:dof]]
def _set_params(target, param, values, dof):
'''Set the given param for each of the DOFs for a joint.'''
if not isinstance(values, (list, tuple, np.ndarray)):
values = [values] * dof
assert dof == len(values)
for s, value in zip(['', '2', '3'][:dof], values):
target.setParam(getattr(ode, 'Param{}{}'.format(param, s)), value)
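# A minimal sketch of how these helpers expand across degrees of freedom: for a
# 3-DOF motor, _set_params(motor, 'Vel', [0.1, 0.2, 0.3], 3) calls setParam with
# ode.ParamVel, ode.ParamVel2, and ode.ParamVel3 in turn, while a scalar such as
# _set_params(motor, 'FMax', 100, 3) broadcasts the same value to all three axes.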
class Joint(Registrar(str('Base'), (), {})):
'''Base class for joints connecting two bodies.
In ODE, :class:`Body` objects represent mass/inertia properties, while
:class:`Joint` and :class:`Motor` objects represent mathematical constraints
that govern how specific pairs of bodies interact. For example, a
:class:`BallJoint` that connects two bodies will force the anchor point for
those two bodies to remain in the same location in world coordinates -- any
linear force that displaces one of the bodies will also cause a force to be
applied to the second body, because of the constraint imposed by the ball
joint. As another example, a :class:`Slider` that connects two bodies allows
those two bodies to displace relative to one another along a single axis,
but not to rotate with respect to one another -- any torque applied to one
body will also cause a torque to be applied to the other body.
Constraints can be applied to angular degrees of freedom (e.g.,
:class:`AMotor`), linear degrees of freedom (e.g., :class:`BallJoint`,
:class:`LMotor`), or both (e.g., :class:`PistonJoint`).
Both joints and motors apply constraints to pairs of bodies, but they are
quite different in many ways and so are represented using specific
subclasses. This superclass is just a mixin to avoid repeating the getters
and setters that are common between motors and joints.
'''
ADOF = 0
LDOF = 0
@property
def feedback(self):
'''Feedback buffer (list of 3-tuples) for this ODE motor/joint.'''
return self.ode_obj.getFeedback()
@property
def positions(self):
'''List of positions for linear degrees of freedom.'''
return [self.ode_obj.getPosition(i) for i in range(self.LDOF)]
@property
def position_rates(self):
'''List of position rates for linear degrees of freedom.'''
return [self.ode_obj.getPositionRate(i) for i in range(self.LDOF)]
@property
def angles(self):
'''List of angles for rotational degrees of freedom.'''
return [self.ode_obj.getAngle(i) for i in range(self.ADOF)]
@property
def angle_rates(self):
'''List of angle rates for rotational degrees of freedom.'''
return [self.ode_obj.getAngleRate(i) for i in range(self.ADOF)]
@property
def axes(self):
'''List of axes for this object's degrees of freedom.'''
return [np.array(self.ode_obj.getAxis(i))
for i in range(self.ADOF or self.LDOF)]
@axes.setter
def axes(self, axes):
'''Set the axes for this object's degrees of freedom.
Parameters
----------
axes : list of axes specifications
A list of axis values to set. This list must have the same number of
elements as the degrees of freedom of the underlying ODE object.
Each element can be
(a) None, which has no effect on the corresponding axis, or
(b) three floats specifying the axis to set.
'''
assert self.ADOF == len(axes) or self.LDOF == len(axes)
for i, axis in enumerate(axes):
if axis is not None:
self.ode_obj.setAxis(i, 0, axis)
@property
def lo_stops(self):
'''List of lo stop values for this object's degrees of freedom.'''
return _get_params(self.ode_obj, 'LoStop', self.ADOF + self.LDOF)
@lo_stops.setter
def lo_stops(self, lo_stops):
'''Set the lo stop values for this object's degrees of freedom.
Parameters
----------
lo_stops : float or sequence of float
A lo stop value to set on all degrees of freedom, or a list
containing one such value for each degree of freedom. For rotational
degrees of freedom, these values must be in radians.
'''
_set_params(self.ode_obj, 'LoStop', lo_stops, self.ADOF + self.LDOF)
@property
def hi_stops(self):
'''List of hi stop values for this object's degrees of freedom.'''
return _get_params(self.ode_obj, 'HiStop', self.ADOF + self.LDOF)
@hi_stops.setter
def hi_stops(self, hi_stops):
'''Set the hi stop values for this object's degrees of freedom.
Parameters
----------
hi_stops : float or sequence of float
A hi stop value to set on all degrees of freedom, or a list
containing one such value for each degree of freedom. For rotational
degrees of freedom, these values must be in radians.
'''
_set_params(self.ode_obj, 'HiStop', hi_stops, self.ADOF + self.LDOF)
@property
def velocities(self):
'''List of target velocity values for rotational degrees of freedom.'''
return _get_params(self.ode_obj, 'Vel', self.ADOF + self.LDOF)
@velocities.setter
def velocities(self, velocities):
'''Set the target velocities for this object's degrees of freedom.
Parameters
----------
velocities : float or sequence of float
A target velocity value to set on all degrees of freedom, or a list
containing one such value for each degree of freedom. For rotational
degrees of freedom, these values must be in radians / second.
'''
_set_params(self.ode_obj, 'Vel', velocities, self.ADOF + self.LDOF)
@property
def max_forces(self):
'''List of max force values for rotational degrees of freedom.'''
return _get_params(self.ode_obj, 'FMax', self.ADOF + self.LDOF)
@max_forces.setter
def max_forces(self, max_forces):
'''Set the maximum forces for this object's degrees of freedom.
Parameters
----------
max_forces : float or sequence of float
A maximum force value to set on all degrees of freedom, or a list
containing one such value for each degree of freedom.
'''
_set_params(self.ode_obj, 'FMax', max_forces, self.ADOF + self.LDOF)
@property
def erps(self):
'''List of ERP values for this object's degrees of freedom.'''
return _get_params(self.ode_obj, 'ERP', self.ADOF + self.LDOF)
@erps.setter
def erps(self, erps):
'''Set the ERP values for this object's degrees of freedom.
Parameters
----------
erps : float or sequence of float
An ERP value to set on all degrees of freedom, or a list
containing one such value for each degree of freedom.
'''
_set_params(self.ode_obj, 'ERP', erps, self.ADOF + self.LDOF)
@property
def cfms(self):
'''List of CFM values for this object's degrees of freedom.'''
return _get_params(self.ode_obj, 'CFM', self.ADOF + self.LDOF)
@cfms.setter
def cfms(self, cfms):
'''Set the CFM values for this object's degrees of freedom.
Parameters
----------
cfms : float or sequence of float
A CFM value to set on all degrees of freedom, or a list
containing one such value for each degree of freedom.
'''
_set_params(self.ode_obj, 'CFM', cfms, self.ADOF + self.LDOF)
@property
def stop_cfms(self):
'''List of lo/hi stop CFM values.'''
return _get_params(self.ode_obj, 'StopCFM', self.ADOF + self.LDOF)
@stop_cfms.setter
def stop_cfms(self, stop_cfms):
'''Set the CFM values for this object's DOF limits.
Parameters
----------
stop_cfms : float or sequence of float
A CFM value to set on all degrees of freedom limits, or a list
containing one such value for each degree of freedom limit.
'''
_set_params(self.ode_obj, 'StopCFM', stop_cfms, self.ADOF + self.LDOF)
@property
def stop_erps(self):
'''List of lo/hi stop ERP values.'''
return _get_params(self.ode_obj, 'StopERP', self.ADOF + self.LDOF)
@stop_erps.setter
def stop_erps(self, stop_erps):
'''Set the ERP values for this object's DOF limits.
Parameters
----------
stop_erps : float or sequence of float
An ERP value to set on all degrees of freedom limits, or a list
containing one such value for each degree of freedom limit.
'''
_set_params(self.ode_obj, 'StopERP', stop_erps, self.ADOF + self.LDOF)
def enable_feedback(self):
'''Enable feedback on this ODE object.'''
self.ode_obj.setFeedback(True)
def disable_feedback(self):
'''Disable feedback on this ODE object.'''
self.ode_obj.setFeedback(False)
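# A minimal usage sketch (assuming `joint` wraps a 1-DOF hinge): limit rotation
# to +/- 45 degrees and configure a motor-style velocity target:
#
#   joint.lo_stops = [-np.pi / 4]
#   joint.hi_stops = [np.pi / 4]
#   joint.velocities = [0.0]
#   joint.max_forces = [50.0]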
class Dynamic(Joint):
'''This class wraps an ODE motor -- either an LMotor or an AMotor.
Parameters
----------
name : str
A name for this object in the world.
world : :class:`World`
A world object to which this motor belongs.
body_a : :class:`Body`
A first body connected to this joint.
body_b : :class:`Body`, optional
A second body connected to this joint. If not given, the joint will
connect the first body to the world.
feedback : bool, optional
Feedback will be enabled on this motor iff this is True. Defaults to
False.
dof : int, optional
Number of degrees of freedom in this motor. Defaults to 3.
jointgroup : ode.JointGroup, optional
A joint group to which this motor belongs. Defaults to the default joint
group in the world.
'''
def __init__(self, name, world, body_a, body_b=None, feedback=False, dof=3,
jointgroup=None):
self.name = name
self.ode_obj = self.MOTOR_FACTORY(world.ode_world, jointgroup=jointgroup)
self.ode_obj.attach(body_a.ode_body, body_b.ode_body if body_b else None)
self.ode_obj.setNumAxes(dof)
self.cfms = 1e-8
if feedback:
self.enable_feedback()
else:
self.disable_feedback()
class AMotor(Dynamic):
'''An angular motor applies torques to change an angle in the physics world.
AMotors can be created in "user" mode---in which case the user must supply
all axis and angle values---or, for 3-DOF motors, in "euler" mode---in which
case the first and last axes must be specified, and ODE computes the middle
axis automatically.
'''
MOTOR_FACTORY = ode.AMotor
def __init__(self, *args, **kwargs):
mode = kwargs.pop('mode', 'user')
if isinstance(mode, str):
mode = ode.AMotorEuler if mode.lower() == 'euler' else ode.AMotorUser
super(AMotor, self).__init__(*args, **kwargs)
self.ode_obj.setMode(mode)
@property
def ADOF(self):
'''Number of angular degrees of freedom for this motor.'''
return self.ode_obj.getNumAxes()
@property
def axes(self):
'''List of axes for this object's degrees of freedom.'''
return [np.array(self.ode_obj.getAxis(i)) for i in range(self.ADOF)]
@axes.setter
def axes(self, axes):
'''Set the axes for this object's degrees of freedom.
Parameters
----------
axes : list of axis parameters
A list of axis values to set. This list must have the same number of
elements as the degrees of freedom of the underlying ODE object.
Each element can be
(a) None, which has no effect on the corresponding axis, or
(b) three floats specifying the axis to set, or
(c) a dictionary with an "axis" key specifying the axis to set and
an optional "rel" key (defaults to 0) specifying the relative
body to set the axis on.
'''
assert len(axes) == self.ADOF
for i, ax in enumerate(axes):
if ax is None:
continue
if not isinstance(ax, dict):
ax = dict(axis=ax)
self.ode_obj.setAxis(i, ax.get('rel', 0), ax['axis'])
def add_torques(self, torques):
'''Add the given torques along this motor's axes.
Parameters
----------
torques : sequence of float
A sequence of torque values to apply to this motor's axes.
'''
self.ode_obj.addTorques(*torques)
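# A minimal usage sketch (assuming `motor` is a 3-DOF AMotor): apply a torque
# about each configured axis, e.g. once per control step:
#
#   motor.add_torques([0.5, 0.0, -0.25])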
class LMotor(Dynamic):
'''An LMotor applies forces to change a position in the physics world.'''
MOTOR_FACTORY = ode.LMotor
@property
def LDOF(self):
'''Number of linear degrees of freedom for this motor.'''
return self.ode_obj.getNumAxes()
class Kinematic(Joint):
'''This class wraps kinematic ODE joints with some Python properties.
Parameters
----------
name : str
Name of the joint to create. This is only to make the joint discoverable
in the world.
world : :class:`World`
Wrapper for the world in which this joint exists.
body_a : :class:`Body`
Wrapper for the first body that this joint connects.
body_b : :class:`Body`, optional
Wrapper for the second body that this joint connects. If this is None,
the joint will connect ``body_a`` to the ``world``.
anchor : 3-tuple of floats, optional
Anchor in world coordinates for the joint. Optional for :class:`Fixed`
joint.
feedback : bool, optional
If this is True, a force feedback structure will be enabled for this
joint, which will make it possible to record the forces that this joint
exerts on its two bodies. By default, no structure will be allocated.
jointgroup : ODE joint group, optional
Add the joint to this group. Defaults to the default world joint group.
'''
def __init__(self, name, world, body_a, body_b=None, anchor=None,
feedback=False, jointgroup=None, amotor=True, lmotor=True):
self.name = name
build = getattr(ode, '{}Joint'.format(self.__class__.__name__))
self.ode_obj = build(world.ode_world, jointgroup=jointgroup)
self.ode_obj.attach(body_a.ode_body, body_b.ode_body if body_b else None)
if anchor is not None:
self.ode_obj.setAnchor(tuple(anchor))
self.ode_obj.setParam(ode.ParamCFM, 0)
self.amotor = None
if self.ADOF > 0 and amotor:
self.amotor = AMotor(name=name + ':amotor',
world=world,
body_a=body_a,
body_b=body_b,
feedback=feedback,
jointgroup=jointgroup,
dof=self.ADOF,
mode='euler' if self.ADOF == 3 else 'user')
self.lmotor = None
if self.LDOF > 0 and lmotor:
self.lmotor = LMotor(name=name + ':lmotor',
world=world,
body_a=body_a,
body_b=body_b,
feedback=feedback,
jointgroup=jointgroup,
dof=self.LDOF)
if feedback:
self.enable_feedback()
else:
self.disable_feedback()
def __str__(self):
return self.name
@property
def anchor(self):
'''3-tuple specifying location of this joint's anchor.'''
return np.array(self.ode_obj.getAnchor())
@property
def anchor2(self):
'''3-tuple specifying location of the anchor on the second body.'''
return np.array(self.ode_obj.getAnchor2())
def add_torques(self, *torques):
'''Add the given torques along this joint's axes.
Parameters
----------
torques : sequence of float
A sequence of torque values to apply to this motor's axes.
'''
self.amotor.add_torques(*torques)
class Fixed(Kinematic):
ADOF = 0
LDOF = 0
class Slider(Kinematic):
ADOF = 0
LDOF = 1
@property
def positions(self):
'''List of positions for this joint's linear degrees of freedom.'''
return [self.ode_obj.getPosition()]
@property
def position_rates(self):
'''List of position rates for this joint's degrees of freedom.'''
return [self.ode_obj.getPositionRate()]
@property
def axes(self):
'''Axis of displacement for this joint.'''
return [np.array(self.ode_obj.getAxis())]
@axes.setter
def axes(self, axes):
'''Set the linear axis of displacement for this joint.
Parameters
----------
axes : list containing one 3-tuple of floats
A list of the axes for this joint. For a slider joint, which has one
degree of freedom, this must contain one 3-tuple specifying the X,
Y, and Z axis for the joint.
'''
self.lmotor.axes = [axes[0]]
self.ode_obj.setAxis(tuple(axes[0]))
class Hinge(Kinematic):
ADOF = 1
LDOF = 0
@property
def angles(self):
'''List of angles for this joint's rotational degrees of freedom.'''
return [self.ode_obj.getAngle()]
@property
def angle_rates(self):
'''List of angle rates for this joint's degrees of freedom.'''
return [self.ode_obj.getAngleRate()]
@property
def axes(self):
'''Axis of rotation for this joint.'''
return [np.array(self.ode_obj.getAxis())]
@axes.setter
def axes(self, axes):
'''Set the angular axis of rotation for this joint.
Parameters
----------
axes : list containing one 3-tuple of floats
A list of the axes for this joint. For a hinge joint, which has one
degree of freedom, this must contain one 3-tuple specifying the X,
Y, and Z axis for the joint.
'''
self.amotor.axes = [axes[0]]
self.ode_obj.setAxis(tuple(axes[0]))
class Piston(Kinematic):
ADOF = 1
LDOF = 1
@property
def axes(self):
'''Axis of rotation and displacement for this joint.'''
return [np.array(self.ode_obj.getAxis())]
@axes.setter
def axes(self, axes):
self.amotor.axes = [axes[0]]
self.lmotor.axes = [axes[0]]
        self.ode_obj.setAxis(tuple(axes[0]))
class Universal(Kinematic):
ADOF = 2
LDOF = 0
@property
def axes(self):
'''A list of axes of rotation for this joint.'''
return [np.array(self.ode_obj.getAxis1()),
np.array(self.ode_obj.getAxis2())]
@axes.setter
def axes(self, axes):
self.amotor.axes = [axes[0], axes[1]]
setters = [self.ode_obj.setAxis1, self.ode_obj.setAxis2]
for axis, setter in zip(axes, setters):
if axis is not None:
setter(tuple(axis))
@property
def angles(self):
'''A list of two angles for this joint's degrees of freedom.'''
return [self.ode_obj.getAngle1(), self.ode_obj.getAngle2()]
@property
def angle_rates(self):
'''A list of two angle rates for this joint's degrees of freedom.'''
return [self.ode_obj.getAngle1Rate(), self.ode_obj.getAngle2Rate()]
class Ball(Kinematic):
ADOF = 3
LDOF = 0
def __init__(self, name, *args, **kwargs):
super(Ball, self).__init__(name, *args, **kwargs)
# we augment ball joints with an additional motor that allows us to set
# rotation limits.
keys = 'name world body_a body_b feedback dof jointgroup'.split()
self.alimit = AMotor(name + ':alimit', *args, dof=self.ADOF, mode='euler',
**{k: v for k, v in kwargs.items() if k in keys})
@property
def angles(self):
return self.alimit.angles
@property
def angle_rates(self):
return self.alimit.angle_rates
@property
def axes(self):
return self.alimit.axes
@axes.setter
def axes(self, axes):
if len(axes) == 2:
axes = dict(rel=1, axis=axes[0]), None, dict(rel=2, axis=axes[1])
self.amotor.axes = axes
self.alimit.axes = axes
@property
def lo_stops(self):
return self.alimit.lo_stops
@lo_stops.setter
def lo_stops(self, lo_stops):
self.alimit.lo_stops = lo_stops
@property
def hi_stops(self):
return self.alimit.hi_stops
@hi_stops.setter
def hi_stops(self, hi_stops):
self.alimit.hi_stops = hi_stops
def make_quaternion(theta, *axis):
'''Given an angle and an axis, create a quaternion.'''
x, y, z = axis
r = np.sqrt(x * x + y * y + z * z)
st = np.sin(theta / 2.)
ct = np.cos(theta / 2.)
return [x * st / r, y * st / r, z * st / r, ct]
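# A worked sketch: a 90-degree rotation about the z axis gives
# make_quaternion(np.pi / 2, 0, 0, 1) == [0, 0, sin(pi/4), cos(pi/4)], roughly
# [0.0, 0.0, 0.7071, 0.7071]. Note that this helper places the scalar (cosine)
# component last.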
def center_of_mass(bodies):
'''Given a set of bodies, compute their center of mass in world coordinates.
'''
    x = np.zeros(3)
t = 0.
for b in bodies:
m = b.mass
x += b.body_to_world(m.c) * m.mass
t += m.mass
return x / t
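# A minimal usage sketch (assuming `world` is a populated World instance):
#
#   com = center_of_mass(world.bodies)  # 3-vector in world coordinates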
class World(object):
'''A wrapper for an ODE World object, for running in a simulator.'''
def __init__(self, dt=1. / 60, max_angular_speed=20):
self.ode_world = ode.World()
self.ode_world.setMaxAngularSpeed(max_angular_speed)
self.ode_space = ode.QuadTreeSpace((0, 0, 0), (100, 100, 20), 10)
self.ode_floor = ode.GeomPlane(self.ode_space, (0, 0, 1), 0)
self.ode_contactgroup = ode.JointGroup()
self.frame_no = 0
self.dt = dt
self.elasticity = 0.1
self.friction = 2000
self.gravity = 0, 0, -9.81
self.cfm = 1e-6
self.erp = 0.7
self._bodies = {}
self._joints = {}
@property
def gravity(self):
'''Current gravity vector in the world.'''
return self.ode_world.getGravity()
@gravity.setter
def gravity(self, gravity):
'''Set the gravity vector in the world.
Parameters
----------
gravity : 3-tuple of float
The vector where gravity should point.
'''
return self.ode_world.setGravity(gravity)
@property
def cfm(self):
'''Current global CFM value.'''
return self.ode_world.getCFM()
@cfm.setter
def cfm(self, cfm):
'''Set the global CFM value.
Parameters
----------
cfm : float
The desired global CFM value.
'''
return self.ode_world.setCFM(cfm)
@property
def erp(self):
'''Current global ERP value.'''
return self.ode_world.getERP()
@erp.setter
def erp(self, erp):
'''Set the global ERP value.
Parameters
----------
erp : float
The desired global ERP value.
'''
return self.ode_world.setERP(erp)
@property
def bodies(self):
'''Sequence of all bodies in the world, sorted by name.'''
for k in sorted(self._bodies):
yield self._bodies[k]
@property
def joints(self):
'''Sequence of all joints in the world, sorted by name.'''
for k in sorted(self._joints):
yield self._joints[k]
def get_body(self, key):
'''Get a body by key.
Parameters
----------
key : str, None, or :class:`Body`
The key for looking up a body. If this is None or a :class:`Body`
instance, the key itself will be returned.
Returns
-------
body : :class:`Body`
The body in the world with the given key.
'''
return self._bodies.get(key, key)
def get_joint(self, key):
'''Get a joint by key.
Parameters
----------
key : str
The key for a joint to look up.
Returns
-------
joint : :class:`Joint`
The joint in the world with the given key, or None if there is no
such joint.
'''
return self._joints.get(key, None)
def create_body(self, shape, name=None, **kwargs):
'''Create a new body.
Parameters
----------
shape : str
The "shape" of the body to be created. This should name a type of
body object, e.g., "box" or "cap".
name : str, optional
The name to use for this body. If not given, a default name will be
constructed of the form "{shape}{# of objects in the world}".
Returns
-------
body : :class:`Body`
The created body object.
'''
shape = shape.lower()
if name is None:
for i in range(1 + len(self._bodies)):
name = '{}{}'.format(shape, i)
if name not in self._bodies:
break
self._bodies[name] = Body.build(shape, name, self, **kwargs)
return self._bodies[name]
def join(self, shape, body_a, body_b=None, name=None, **kwargs):
'''Create a new joint that connects two bodies together.
Parameters
----------
shape : str
The "shape" of the joint to use for joining together two bodies.
This should name a type of joint, such as "ball" or "piston".
body_a : str or :class:`Body`
The first body to join together with this joint. If a string is
given, it will be used as the name of a body to look up in the
world.
body_b : str or :class:`Body`, optional
If given, identifies the second body to join together with
``body_a``. If not given, ``body_a`` is joined to the world.
name : str, optional
If given, use this name for the created joint. If not given, a name
will be constructed of the form
"{body_a.name}^{shape}^{body_b.name}".
Returns
-------
joint : :class:`Joint`
The joint object that was created.
'''
ba = self.get_body(body_a)
bb = self.get_body(body_b)
shape = shape.lower()
if name is None:
name = '{}^{}^{}'.format(ba.name, shape, bb.name if bb else '')
self._joints[name] = Joint.build(
shape, name, self, body_a=ba, body_b=bb, **kwargs)
return self._joints[name]
def move_next_to(self, body_a, body_b, offset_a, offset_b):
'''Move one body to be near another one.
After moving, the location described by ``offset_a`` on ``body_a`` will
be coincident with the location described by ``offset_b`` on ``body_b``.
Parameters
----------
body_a : str or :class:`Body`
The body to use as a reference for moving the other body. If this is
a string, it is treated as the name of a body to look up in the
world.
body_b : str or :class:`Body`
The body to move next to ``body_a``. If this is a string, it is
treated as the name of a body to look up in the world.
offset_a : 3-tuple of float
The offset of the anchor point, given as a relative fraction of the
size of ``body_a``. See :func:`Body.relative_offset_to_world`.
offset_b : 3-tuple of float
The offset of the anchor point, given as a relative fraction of the
size of ``body_b``.
Returns
-------
anchor : 3-tuple of float
The location of the shared point, which is often useful to use as a
joint anchor.
'''
ba = self.get_body(body_a)
bb = self.get_body(body_b)
if ba is None:
return bb.relative_offset_to_world(offset_b)
if bb is None:
return ba.relative_offset_to_world(offset_a)
anchor = ba.relative_offset_to_world(offset_a)
offset = bb.relative_offset_to_world(offset_b)
bb.position = bb.position + anchor - offset
return anchor
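    # A minimal sketch of the offset convention: for two unit boxes "a" and "b",
    # move_next_to('a', 'b', (0, 0, 1), (0, 0, -1)) places the bottom center of
    # box "b" at the top center of box "a" and returns that shared world point,
    # which is convenient to pass to join() as the joint anchor.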
def get_body_states(self):
'''Return the complete state of all bodies in the world.
Returns
-------
states : list of state information tuples
A list of body state information for each body in the world. See
:func:`Body.state`.
'''
return [b.state for b in self.bodies]
def set_body_states(self, states):
'''Set the states of some bodies in the world.
Parameters
----------
states : sequence of states
A complete state tuple for one or more bodies in the world. See
:func:`get_body_states`.
'''
for state in states:
self.get_body(state.name).state = state
def step(self, substeps=2):
'''Step the world forward by one frame.
Parameters
----------
substeps : int, optional
Split the step into this many sub-steps. This helps to prevent the
time delta for an update from being too large.
'''
self.frame_no += 1
dt = self.dt / substeps
for _ in range(substeps):
self.ode_contactgroup.empty()
self.ode_space.collide(None, self.on_collision)
self.ode_world.step(dt)
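    # A minimal end-to-end sketch (module-level usage, not part of this class):
    #
    #   w = World(dt=1. / 120)
    #   w.create_body('box', 'a', lengths=(1, 1, 1))
    #   w.create_body('box', 'b', lengths=(1, 1, 1))
    #   anchor = w.move_next_to('a', 'b', (0, 0, 1), (0, 0, -1))
    #   w.join('hinge', 'a', 'b', anchor=anchor)
    #   for _ in range(240):  # simulate two seconds at 120 Hz
    #       w.step()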
def needs_reset(self):
'''Return True iff the world needs to be reset.'''
return False
def reset(self):
'''Reset the state of the world.'''
pass
def on_key_press(self, key, modifiers, keymap):
'''Handle an otherwise unhandled keypress event (from a GUI).'''
if key == keymap.ENTER:
self.reset()
return True
def are_connected(self, body_a, body_b):
'''Determine whether the given bodies are currently connected.
Parameters
----------
body_a : str or :class:`Body`
One body to test for connectedness. If this is a string, it is
treated as the name of a body to look up.
body_b : str or :class:`Body`
One body to test for connectedness. If this is a string, it is
treated as the name of a body to look up.
Returns
-------
connected : bool
Return True iff the two bodies are connected.
'''
return bool(ode.areConnected(
self.get_body(body_a).ode_body,
self.get_body(body_b).ode_body))
def on_collision(self, args, geom_a, geom_b):
'''Callback function for the collide() method.
Parameters
----------
args : None
Arguments passed when the callback was registered. Not used.
geom_a : ODE geometry
The geometry object of one of the bodies that has collided.
geom_b : ODE geometry
The geometry object of one of the bodies that has collided.
'''
body_a = geom_a.getBody()
body_b = geom_b.getBody()
if ode.areConnected(body_a, body_b) or \
(body_a and body_a.isKinematic()) or \
(body_b and body_b.isKinematic()):
return
for c in ode.collide(geom_a, geom_b):
c.setBounce(self.elasticity)
c.setMu(self.friction)
ode.ContactJoint(self.ode_world, self.ode_contactgroup, c).attach(
geom_a.getBody(), geom_b.getBody())
| mit | -2,953,832,453,207,946,000 | 32.349364 | 86 | 0.58192 | false |
IQSS/geoconnect | gc_apps/gis_shapefiles/views_02_visualize.py | 1 | 4711 | from __future__ import print_function
import logging
from django.http import HttpResponse
from django.views.generic import View
from django.template.loader import render_to_string
from django.conf import settings
from gc_apps.gis_shapefiles.models import ShapefileInfo
from gc_apps.worldmap_layers.models import WorldMapLayerInfo
from gc_apps.worldmap_connect.send_shapefile_service import SendShapefileService
from gc_apps.geo_utils.geoconnect_step_names import GEOCONNECT_STEP_KEY, STEP2_STYLE,\
PANEL_TITLE_MAP_DATA_FILE, PANEL_TITLE_STYLE_MAP
from shared_dataverse_information.layer_classification.forms import\
ClassifyLayerForm, ATTRIBUTE_VALUE_DELIMITER
from gc_apps.geo_utils.message_helper_json import MessageHelperJSON
from gc_apps.gis_tabular.views import build_map_html
LOGGER = logging.getLogger(__name__)
from gc_apps.geo_utils.msg_util import msg, msgt
"""
Handle AJAX requests to Visualize a Layer
    - Upon successful visualization, several pieces of the page are updated, including
- page title
- breadcrumb
- main content panel
"""
def render_ajax_basic_err_msg(err_note, shapefile_info=None):
"""Convenience method for returning an error message via AJAX"""
d = { 'DATAVERSE_SERVER_URL' : settings.DATAVERSE_SERVER_URL\
, 'ERR_NOTE' : err_note\
, 'shapefile_info' : shapefile_info\
}
return render_to_string('gis_shapefiles/view_02_ajax_basic_err.html', d)
class ViewAjaxVisualizeShapefile(View):
"""
Given the md5 of a ShapefileInfo, attempt to visualize the file on WorldMap
Return a JSON response
"""
def get(self, request, shp_md5):
"""Use the SendShapefileService to create a map from a shapefile.
- SendShapefileService takes care of details starting with retrieving
the ShapefileInfo object
"""
# OK if shp_md5 is None, SendShapefileService creates error message
#
send_shp_service = SendShapefileService(**dict(shp_md5=shp_md5))
# Send the shapefile to WorldMap
#
success = send_shp_service.send_shapefile_to_worldmap()
# -----------------------------------
# Did it work? NOPE! Failed along the way!
# -----------------------------------
if not success:
err_note = ('Sorry! The shapefile mapping did not work.'
'<br /><span class="small">{0}</span>').format(\
'<br />'.join(send_shp_service.err_msgs))
LOGGER.error(err_note)
err_note_html = render_ajax_basic_err_msg(err_note,\
send_shp_service.shapefile_info)
json_msg = MessageHelperJSON.get_json_fail_msg(err_note_html, dict(id_main_panel_content=err_note_html))
return HttpResponse(json_msg, content_type="application/json", status=200)
# -----------------------------------
# Yes! We have a new map layer
# -----------------------------------
worldmap_shapefile_layerinfo = send_shp_service.get_worldmap_layerinfo()
shapefile_info = worldmap_shapefile_layerinfo.get_gis_data_info()
assert worldmap_shapefile_layerinfo is not None,\
"Failure in SendShapefileService! Said success but not worldmap_layerinfo (WorldMapShapefileLayerInfo)"
# -----------------------------------------
# Build the Map HTML to replace the form
# -----------------------------------------
map_html, user_message_html = build_map_html(request, worldmap_shapefile_layerinfo)
if map_html is None: # Failed! Send an error
LOGGER.error("Failed to create map HTML using WorldMapShapefileLayerInfo: %s (%d)",\
worldmap_shapefile_layerinfo, worldmap_shapefile_layerinfo.id)
user_msg = 'Sorry! Failed to create map. Please try again. (code: s3)'
json_msg = MessageHelperJSON.get_json_fail_msg(user_msg)
return HttpResponse(json_msg, content_type="application/json", status=200)
# -----------------------------------------
# Looks good. In the JSON response, send
# back the map HTML
# -----------------------------------------
data_dict = dict(\
map_html=map_html,
user_message_html=user_message_html,
id_main_panel_title=PANEL_TITLE_STYLE_MAP,
message='Success! The shapefile was successfully mapped!')
json_msg = MessageHelperJSON.get_json_success_msg("great job", data_dict=data_dict)
return HttpResponse(json_msg, content_type="application/json", status=200)
| apache-2.0 | 1,538,729,477,637,049,300 | 37.300813 | 116 | 0.612397 | false |
nickmilon/milonpy | milonpy/utils/basic2.py | 1 | 14147 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
#######################################################
'''
module: utilities.basic2
Created:Aug 21, 2012
author: nickmilon
Description: Simple utilities (2) and Vars - Very Limited IMPORTS HERE!
'''
#######################################################
from sys import stdout
from datetime import datetime , timedelta
from basic import FMT_dtGen,FMT_tGen, color_txt ,color_switch_txt,dictDot
from time import sleep, time, mktime, strftime, gmtime
import re
from random import random
def re_is_sameLen(txt,rexp):return len(txt)==len(rexp.findall(txt))
def re_is_same(txt,rexp):return txt==u''.join(rexp.findall(txt))
def re_diff(txt,rexp):return ''.join(list(set([c for c in txt]) - set(rexp.findall(txt))))
#re_gr=re.compile(ur'[\u03AC-\u03CE]|[;\s]', re.IGNORECASE| re.VERBOSE| re.UNICODE |re.MULTILINE)
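# Usage sketch for the helpers above (hypothetical pattern): test whether a
# string consists only of word characters, and list the characters that fail.
#   rexp = re.compile(ur'\w', re.UNICODE)
#   re_is_same(u'abc', rexp)    # -> True
#   re_diff(u'ab c!', rexp)     # -> u' !' (set-based, so order is not guaranteed)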
def lst_randomize(lst):
"returns list in random order"
return [i[1] for i in [[random(),i] for i in sorted(lst)] ]
def time_seconds_since_epoch(dt=None):
if dt is None:dt=datetime.utcnow()
return mktime(dt.timetuple())+1e-6*dt.microsecond
def autoRetry(exceptionOrTuple,retries=3,sleepSeconds=1, BackOfFactor=1,loggerFun=None):
""" exceptionOrTuple= exception or tuple of exceptions,BackOfFactor=factor to back off on each retry loggerFun i.e. logger.info """
def wrapper(func):
def fun_call(*args, **kwargs):
tries = 0
while tries < retries:
try:
return func(*args, **kwargs)
except exceptionOrTuple, e:
tries += 1
if loggerFun:loggerFun("exception [%s] e=[%s] handled tries :%d sleeping[%f]" % (exceptionOrTuple ,e,tries,sleepSeconds * tries * BackOfFactor) )
sleep(sleepSeconds * tries * BackOfFactor) #* tries)
raise
return fun_call
return wrapper
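# Usage sketch (hypothetical function and logger): retry a flaky network call
# up to 3 times with a linearly growing sleep, logging each handled failure.
#   @autoRetry(IOError, retries=3, sleepSeconds=1, loggerFun=log.info)
#   def fetch():
#       ...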
def parseJSfunFromFile(filepath,functionName):
"""
helper function to get a js function string from a file containing js functions. Function must be named starting in first column and file must end with //eof//
lazyloads re
"""
with open( filepath) as fin:
r=re.search("(^.*?)(?P<fun>function\s+?%s.*?)(^fun|//eof//)" % functionName,fin.read(),re.MULTILINE|re.DOTALL)
return r.group('fun').strip() if r else False
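# Usage sketch (hypothetical file and function names): pull a single named
# function out of a file of JS functions that ends with //eof//.
#   map_fun = parseJSfunFromFile('map_reduce.js', 'mapDocs')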
def stdout_switchColor(color):
stdout.write (color_switch_txt(color))
def stdout_write_flush(txt,stAfter="\r",color=None):
if color:txt= color_txt(color,txt)
stdout.write("%s%s" %(txt,stAfter) )
stdout.flush()
class timeElapsed(object):
""" overwrite str_dtimes str_deltas to return "" to exclude this form output string
@todo: logging handler
"""
def __init__(self, cnt_max=1,name_str=""):
self.name_str=name_str
self.cnt_max= cnt_max
self.dt_start=datetime.utcnow()
self.dt_last=self.dt_start
self.dt_current=self.dt_start
self.cnt=0
self.cnt_last=0
self.cnt_last_dif=0
self.perc_done=0.0
self.time_remaining=0
self.time_elapsed_since_start=timedelta(0)
self.time_elapsed_since_last=timedelta(0)
self.time_remaining =timedelta(0)
self.units=['sec','min','hour']
self.set_cnt_max(cnt_max)
def set_cnt_max(self,val):
self.cnt_max=val
self.frmt_str="%s%d%s" %("%",len(str(val)),"d" )
def set_auto_unit(self,velocity,unit_idx=0):
if velocity < 1 and unit_idx < 2:
velocity=velocity * 60
unit_idx+=1
return self.set_auto_unit(velocity, unit_idx)
else:
return velocity, self.units[unit_idx]
def frmt_max(self,val):
return self.frmt_str % val
def update(self,cur_val,getStr=True,):
cur_val=float(cur_val)
if cur_val > self.cnt_max:self.set_cnt_max(self.cnt_max+int(cur_val/10))
self.dt_current=datetime.utcnow()
self.time_elapsed_since_start = self.dt_current- self.dt_start
self.time_elapsed_since_last=self.dt_current- self.dt_last
        self.cnt_last_dif = cur_val - self.cnt_last
self.perc_done=cur_val/self.cnt_max
self.time_remaining =timedelta(seconds=int ( self.time_elapsed_since_start.total_seconds() * ( (1-self.perc_done)/self.perc_done)))
self.cnt=cur_val
self.v_start= self.cnt/self.time_elapsed_since_start.total_seconds()
self.v_last= self.cnt_last_dif/self.time_elapsed_since_last.total_seconds()
self.dt_last=self.dt_current
self.cnt_last=cur_val
return self.toStr() if getStr else True
def update_last(self,cur_val,getStr=True):
self.cnt_max=cur_val
return self.update(cur_val,getStr)
def str_counters(self):
return u"|%s of %s" %(self.frmt_max(self.cnt), self.frmt_max(self.cnt_max))
def str_dtimes(self):
return u"⌚ %s %s %s" % (self.dt_start.strftime(FMT_dtGen),self.dt_current.strftime(FMT_tGen), (self.dt_current+self.time_remaining).strftime(FMT_tGen))
def str_tdeltas(self):
return u"⌛ %s %s %s" %(self._str_tdelta(self.time_elapsed_since_start),self._str_tdelta(self.time_elapsed_since_last), self._str_tdelta(self.time_remaining) )
@staticmethod
def _str_tdelta(tdelta):
str_td=str(tdelta)
tmp=str_td.find(".")
if tmp !=-1 : str_td= str_td[:tmp]
return u"%8s" % str_td
def toStr(self):
return u"[%s:%6.2f%%%s%s%s]" %(self.name_str,100* self.perc_done, self.str_counters(),
self.str_tdeltas(),self.str_dtimes() )
class SubToEvent(object):
    ''' lightweight Event handler modeled after Peter Thatcher's http://www.valuedlessons.com/2008/04/events-in-python.html
usage:
watcher = SubToEvent()
def log_docs(doc):print doc
watcher += log_docs
watcher += lambda x:str(x)
        watcher('a doc')   # __call__ is aliased to fire
'''
def __init__(self,channelName=''):
self.channelName=channelName
self.handlers = set()
def handle(self, handler):
self.handlers.add(handler)
return self
def unhandle(self, handler):
try:
self.handlers.remove(handler)
except:
raise ValueError("No_such_handler")
return self
def fire(self, *args, **kargs):
for handler in self.handlers:
handler(*args, **kargs)
def fireTopic(self,topic=None,verb=None,payload=None):
self.fire ((self.channelName,topic,verb,payload))
def getHandlerCount(self):
return len(self.handlers)
__iadd__ = handle
__isub__ = unhandle
__call__ = fire
__len__ = getHandlerCount
class multiOrderedDict(object):
'''
deletes can't be multi
'''
def __init__(self,lst):
self.lstDic=lst
def __getitem__ (self,key):
return self._getOrSetDictItem(key)
def __setitem__(self, key, val):
return self._getOrSetDictItem(key,True,val)
def __delitem__ (self, key):
return self._getOrSetDictItem(key,delete=True)
def get(self,key,orVal=None):
try:
return self[key]
except KeyError:
return orVal
def keys(self):
return[i[0] for i in self.lstDic if self.isKey(i[0])]
def values(self):
return [self[i] for i in self.keys()]
def isKey(self,k):
return True
def _getOrSetDictItem (self,key,setVal=False,newVal=None,multi=False,delete=False):
idx=[]
for n,i in enumerate(self.lstDic):
if i[0]==key and self.isKey(i[0]):
idx.append(n)
if setVal:self.lstDic[n]=[i[0],newVal]
if not multi: break
if len(idx)>0:
if delete:
self.lstDic.pop(idx[0]) #can't be multi
return None
rt= [self.lstDic[i][1:] for i in idx ]
if multi:
return rt
else:
return rt[0][0]
else:
if setVal:
self.lstDic.append([key,newVal])
return newVal
else:
raise KeyError (key)
def toDict(self):
return dict(zip(self.keys(),self.values()))
def toString(self):
return str(self.toDict())
__str__ = toString
class confFileDict(multiOrderedDict):
def __init__(self,path,skipBlanks=True,skipRemarks=True):
self.path=path
with open(self.path) as fin:
rlines=fin.readlines()
if skipBlanks:rlines=[i for i in rlines if not i=='\n']
if skipRemarks:rlines=[i for i in rlines if not i.startswith("#")]
lstDic=[ map(lambda x: x.strip(), i.split("=") ) for i in rlines]
super(confFileDict, self).__init__(lstDic)
def isKey(self,key):
return key !='' and not key.startswith("#")
def toStr(self):
s=''
for i in self.lstDic:
s+= "=".join(i)+'\n'
return s.rstrip()
def toFile(self,path=None):
if not path:path=self.path
with open(path, 'w') as fl:
            fl.write(self.toStr())
def PrintTiming(func):
"""set up a decorator function for timing"""
def wrapper(*args, **kargs):
        t1 = time()
        res = func(*args, **kargs)
        tel = time() - t1
        timeformated = strftime("%H:%M:%S", gmtime(tel))
print '-'*5 + '%s took %0.3f ms' % (func.func_name + str(kargs) + str(args), (tel)*1000.0) + '|' + timeformated + '|'+ '-'*10
return res
return wrapper
def totalsVertical(orgD,resD,funct,initFunc):
    '''Apply funct to resD dict values by orgD values, creating keys in resD if they do not exist.
    Useful for vertical percentages and totals.
    Attention: it is destructive, replacing resD with the results.
    e.g. to increment resD values by orgD values:
        totalsVertical(orgDict, resultsDict, lambda x, y: x + y, lambda x: 0)
    to find the percentage each resD value is of the corresponding orgD value:
        totalsVertical(orgD, resD, lambda x, y: 100 * y / x if x != 0 else None, None)
    '''
for k in orgD.keys():
if isinstance(orgD[k],dict):
if resD.get(k):
totalsVertical(orgD[k],resD[k],funct,initFunc)
else:
if initFunc:
resD[k]=totalsVertical(orgD[k],dictDot({}),funct,initFunc)
else: continue
elif isinstance(orgD[k],(float,int)):
if resD.get(k,False) is False :
if initFunc:
resD[k]=initFunc(orgD[k])
else:
continue
resD[k] = funct(orgD[k],resD[k])
else:
if initFunc:resD[k]=orgD[k]
return resD
def totalsVertSimple(orgD,resD,funct):
''' simplified and faster version of totalsVertical assumes all key/values of orgD are present in resD
'''
for k in orgD.keys():
if isinstance(orgD[k],dict):totalsVertSimple(orgD[k],resD[k],funct)
elif isinstance(orgD[k],(float,int)):orgD[k]=funct(orgD[k],resD[k])
return orgD
def totalsHorizontal(value,a_dict,funct=lambda x,y:100*x/y):
for k in a_dict.keys():
if isinstance(a_dict[k],dict):totalsHorizontal(value,a_dict[k])
elif isinstance(a_dict[k],(float,int)):a_dict[k]=funct(a_dict[k],value)
return a_dict
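# Usage sketch: with the default funct, turn every numeric leaf of a nested
# dict into a percentage of a grand total.
#   totalsHorizontal(grand_total, counts_by_category)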
class TextWrapper(object):
''' http://jrgraphix.net/r/Unicode/ '''
elipsis=u"\u2026" # "…"
numbers=u"₀₁₂₃₄₅₆₇₈₉₊₋₌₍₎"
def __init__(self, maxLen=140,minLen=100, contChr=u'⎘',inclNumbers=True,strFlag=u'',strFirst=u'',strRest=u'',strAll=u''):
self.contChr=contChr
self.inlNumbers=inclNumbers
self.strFlag=strFlag
self.strFirst=strFirst
self.strRest=strRest
self.strAll=strAll
self.maxLen=maxLen
self.minLen=minLen
def compineStr(self,s,cnt,totalCnt=None):
return "%s%s%s%s%s" %(self.strFlag,self.formatNumOfTotal(cnt+1,totalCnt) if self.inlNumbers else u'', self.strAll, self.strFirst if cnt==0 else self.strRest,s)
def splits(self,astr):
n=self.maxLen-1- len(self.contChr)
minLen=self.minLen
cnt=0
s=self.compineStr(astr, cnt)
while len(s) > n:
cnt+=1
rf=s[0:n].rfind(u'\n',minLen)
if rf == -1:rf=s[0:n].rfind(u'.',minLen)
if rf == -1:rf=s[0:n].rfind(u' ',minLen)
spltn = rf+1 if rf !=-1 else n
#print "(%3d) %3d %3d %3d [%s]" %(cnt, rf,n,spltn,s[0:n])
rt=s[:spltn].rstrip()
remainingStr=s[spltn:]
if self.contChr !=u'':
if len(remainingStr)>1:rt+=self.contChr
else:
rt+=remainingStr
remainingStr=u''
yield rt
s=self.compineStr(remainingStr, cnt) if remainingStr !=u'' else u''
yield s
def formatNumOfTotal(self,cnt, totalCnt=None):
return u"%s∕%s" %(self.formatNum(cnt),u'??' if totalCnt is None else self.formatNum(totalCnt)) #'∕' is not '/' but math '\u2215'
def formatNum(self,num):
header=map(int,str(num))
rt=[self.numbers[i] for i in header]
return ''.join(rt)
def format(self,text):
rt=[]
for i in self.splits(text):
if i !=u'':rt.append(i)
if self.inlNumbers:
rt2=[]
maxCnt=len(rt)
for cnt,vl in enumerate(rt):
old= self.formatNumOfTotal(cnt+1,None)
new= u'' if maxCnt == 1 else self.formatNumOfTotal(cnt+1,maxCnt)
if new !=u'':new += u' '* (len(old)-len(new))
rt2.append(vl.replace(old, new , 1))
return rt2
return rt
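# Usage sketch: break a long message into tweet-sized (maxLen) numbered
# chunks with a continuation character.
#   tw = TextWrapper(maxLen=140, minLen=100)
#   for chunk in tw.format(long_text):
#       print chunk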
################## tests
def test_timeElapsed(x):
et=timeElapsed(x,"foo")
for i in range(1,x):
sleep(1)
print et.update(i, True)
print et.update_last(i)
###################
| apache-2.0 | -7,230,937,652,603,792,000 | 37.752747 | 168 | 0.57072 | false |
AnthonyCheetham/naco_ispy | data_handling_scripts/queue_cal_analysis.py | 1 | 1931 | # -*- coding: utf-8 -*-
"""
Created on Fri Nov 11 10:41:32 2016
Program to run through the calibration folders and queue all data for analysis.
It isn't yet smart enough to check which ones are done already
@author: cheetham
"""
import naco_ispy,subprocess,os,argparse,glob
parser = argparse.ArgumentParser(description='This program queues up all unprocessed NACO ISPY calibration data for analysis.')
parser.add_argument('-dry_run', dest="dry_run",action='store_const',const=True,
default=False, help='Dont actually queue the analysis, but print the commands it will do')
parser.add_argument('--num', action="store", dest="num", type=int, default=-1,
help='Maximum number of datasets to process')
# Get the input arguments
args = parser.parse_args()
num = args.num
data_folder = '/data/NACO/'
# db_filename = '/data/NACO/calib_table.dat'
# data_folder='/Users/cheetham/data/naco_data/GTO/'
#db_filename='/Users/cheetham/data/data_archive/GTO/obs_table.dat'
dry_run = args.dry_run
# First, load the target database
# calib_db = naco_ispy.databases.calib_table(filename=db_filename, data_folder=data_folder)
scripts_directory = os.path.expanduser('~/code/naco_ispy/processing_scripts/')
# Instead of using the database, use glob to find all folders
all_folders = glob.glob(data_folder+'Calib/*/')
# Loop through the targets in the database
# num defaults to -1, meaning "no limit"; a bare [0:num] slice would silently drop the last folder
folders = all_folders if num < 0 else all_folders[:num]
for targ_ix, targ_folder in enumerate(folders):
# Check what we want to process
process_script = scripts_directory+'naco_calibrations.slurm'
# The command to run:
cmd = "echo 'bash "+process_script+"' | at -q b now"
# Change to the right directory
os.chdir(targ_folder)
if dry_run:
print('Queueing analysis for '+targ_folder)
print(' '+cmd)
else:
# Execute the processing command
subprocess.call(cmd,shell=True)
| gpl-3.0 | -3,123,554,130,894,556,000 | 32.310345 | 127 | 0.684102 | false |
Maronato/SpottedBot | custom_auth/facebook_methods.py | 1 | 4677 | import facebook
from django.conf import settings
from django.shortcuts import reverse
from urllib.parse import urlencode, quote, unquote
from django.contrib.auth import login
from django.contrib import messages
app_id = settings.SOCIAL_FACEBOOK_KEY
app_secret = settings.SOCIAL_FACEBOOK_SECRET
def get_graph():
"""Get App Graph Object.
returns a graph object containing an app token from the registered facebook app
"""
graph = facebook.GraphAPI(version='3.1')
graph.access_token = graph.get_app_access_token(app_id, app_secret)
return graph
def canv_url(request):
"""Return Canvas URL.
Generates the canvas_url used by facebook to redirect after auth
"""
    # Facebook redirect URIs must use https, regardless of the protocol of
    # the current request
    return 'https://' + request.get_host() + reverse('social_login:facebook_login_response')
def auth_url(request):
"""Auth URL.
Returns the facebook auth url using the current app's domain
"""
canvas_url = canv_url(request)
# Permissions set by user. Default is none
perms = settings.SOCIAL_FACEBOOK_PERMISSIONS
url = "https://www.facebook.com/dialog/oauth?"
# Payload
kvps = {'client_id': app_id, 'redirect_uri': canvas_url}
# Add 'next' as state if provided
next_param = f"next_url={quote(request.GET.get('next', ''))}"
# Add 'redirected' as state if provided
redirected_param = f"redirected={request.GET.get('redirected', '')}"
if request.GET.get('next', False):
kvps['state'] = next_param
redirected_param = f',{redirected_param}'
if request.GET.get('redirected', False):
kvps['state'] = kvps.get('state', '') + redirected_param
# Format permissions if needed
if perms:
kvps['scope'] = ",".join(perms)
# Return the url
return url + urlencode(kvps)
def debug_token(token):
"""Debug Token.
Returns debug string from token
"""
return get_graph().debug_access_token(token, app_id, app_secret)
def login_successful(code, request):
"""Login Successful.
    Process successful login by creating or updating a user using Facebook's response
"""
canvas_url = canv_url(request)
graph = get_graph()
# Get token info from user
try:
token_info = graph.get_access_token_from_code(code, canvas_url, app_id, app_secret)
except facebook.GraphAPIError:
# For some reason, the auth code has already been used, redirect to login again
return 'auth code used'
# Extract token from token info
access_token = token_info['access_token']
# Debug the token, as per documentation
debug = debug_token(access_token)['data']
# Get the user's scope ID from debug data
social_id = debug['user_id']
token_expires = debug.get('expires_at') - debug.get('issued_at')
if debug.get('expires_at') == 0:
token_expires = 99999999
scopes = debug.get('scopes', [])
# Get some user info like name and url
extra_data = graph.get_object(str(social_id) + '/?fields=name,first_name,last_name,link')
name = extra_data['name']
first_name = extra_data['first_name']
last_name = extra_data['last_name']
link = extra_data.get('link', '')
# Call FacebookUser's method to create or update based on social_id, that returns an facebookuser object
from .models import FacebookUser
new = FacebookUser.create_or_update(social_id, access_token, token_expires, first_name, last_name, name, link, scopes)
# Try to login the user
if new.user.is_active:
login(request, new.user)
messages.add_message(request, messages.SUCCESS, 'Olá, ' + first_name + '!')
else:
messages.add_message(request, messages.ERROR, 'Essa conta foi desativada!')
return request
def login_canceled(request):
# If the user has canceled the login process, or something else happened, do nothing and display error message
messages.add_message(request, messages.ERROR, 'Oops! Algo de errado aconteceu :( Se isso se repetir, fale conosco!')
return request
def decode_state_data(state):
if not state:
return {}
parts = state.split(',')
data = {}
for part in parts:
p = part.split('=')
data[p[0]] = unquote(p[1])
return data
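# e.g. decode_state_data('next_url=%2Fhome,redirected=1')
#   -> {'next_url': '/home', 'redirected': '1'}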
def code_already_used_url(next_url, redirected):
state = {}
if next_url:
state['next'] = next_url
state['redirected'] = int(redirected) + 1 if redirected else 0
return reverse('social_login:facebook_login') + '?' + urlencode(state)
| agpl-3.0 | 7,889,079,061,397,053,000 | 30.38255 | 122 | 0.663388 | false |
LegoStormtroopr/canard | SQBLWidgets/sqblUI/statementText.py | 1 | 1584 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui/statementText.ui'
#
# Created: Sat Jul 25 12:17:11 2015
# by: PyQt4 UI code generator 4.9.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(400, 300)
self.verticalLayout = QtGui.QVBoxLayout(Form)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.label = QtGui.QLabel(Form)
self.label.setWordWrap(True)
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayout.addWidget(self.label)
self.statementText = QtGui.QTextEdit(Form)
self.statementText.setStyleSheet(_fromUtf8("margin-left:8px;"))
self.statementText.setObjectName(_fromUtf8("statementText"))
self.verticalLayout.addWidget(self.statementText)
self.label.setBuddy(self.statementText)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
self.label.setText(QtGui.QApplication.translate("Form", "<html><head/><body><p><span style=\" font-weight:600;\">Statement Text</span> - <small>The text shown to a respondent.</small></p></body></html>", None, QtGui.QApplication.UnicodeUTF8))
| gpl-3.0 | -4,289,232,995,782,310,000 | 39.615385 | 250 | 0.696338 | false |
escapewindow/signingscript | src/signingscript/vendored/mozbuild/mozbuild/action/generate_symbols_file.py | 3 | 3333 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, print_function, unicode_literals
import argparse
import buildconfig
import os
from StringIO import StringIO
from mozbuild.preprocessor import Preprocessor
from mozbuild.util import DefinesAction
def generate_symbols_file(output, *args):
    '''Preprocess ``input`` and write a platform-appropriate symbols file (MSVC .def file, GNU ld version script, or Apple ld symbol list) to ``output``.'''
parser = argparse.ArgumentParser()
parser.add_argument('input')
parser.add_argument('-D', action=DefinesAction)
parser.add_argument('-U', action='append', default=[])
args = parser.parse_args(args)
input = os.path.abspath(args.input)
pp = Preprocessor()
pp.context.update(buildconfig.defines['ALLDEFINES'])
if args.D:
pp.context.update(args.D)
for undefine in args.U:
if undefine in pp.context:
del pp.context[undefine]
# Hack until MOZ_DEBUG_FLAGS are simply part of buildconfig.defines
if buildconfig.substs.get('MOZ_DEBUG'):
pp.context['DEBUG'] = '1'
# Ensure @DATA@ works as expected (see the Windows section further below)
if buildconfig.substs['OS_TARGET'] == 'WINNT':
pp.context['DATA'] = 'DATA'
else:
pp.context['DATA'] = ''
pp.out = StringIO()
pp.do_filter('substitution')
pp.do_include(input)
symbols = [s.strip() for s in pp.out.getvalue().splitlines() if s.strip()]
libname, ext = os.path.splitext(os.path.basename(output.name))
if buildconfig.substs['OS_TARGET'] == 'WINNT':
# A def file is generated for MSVC link.exe that looks like the
# following:
# LIBRARY library.dll
# EXPORTS
# symbol1
# symbol2
# ...
#
# link.exe however requires special markers for data symbols, so in
# that case the symbols look like:
# data_symbol1 DATA
# data_symbol2 DATA
# ...
#
# In the input file, this is just annotated with the following syntax:
# data_symbol1 @DATA@
# data_symbol2 @DATA@
# ...
# The DATA variable is "simply" expanded by the preprocessor, to
# nothing on non-Windows, such that we only get the symbol name on
# those platforms, and to DATA on Windows, so that the "DATA" part
# is, in fact, part of the symbol name as far as the symbols variable
# is concerned.
assert ext == '.def'
output.write('LIBRARY %s\nEXPORTS\n %s\n'
% (libname, '\n '.join(symbols)))
elif buildconfig.substs.get('GCC_USE_GNU_LD'):
# A linker version script is generated for GNU LD that looks like the
# following:
# liblibrary.so {
# global:
# symbol1;
# symbol2;
# ...
# local:
# *;
# };
output.write('%s {\nglobal:\n %s;\nlocal:\n *;\n};'
% (libname, ';\n '.join(symbols)))
elif buildconfig.substs['OS_TARGET'] == 'Darwin':
# A list of symbols is generated for Apple ld that simply lists all
# symbols, with an underscore prefix.
output.write(''.join('_%s\n' % s for s in symbols))
return set(pp.includes)
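# Usage sketch (hypothetical paths/defines; requires a configured mozbuild
# environment so that ``buildconfig`` resolves):
#   with open('xul.def', 'w') as output:
#       generate_symbols_file(output, 'symbols.in', '-DMOZ_DEBUG=1')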
| mpl-2.0 | 6,821,171,240,241,072,000 | 35.228261 | 78 | 0.60426 | false |
samueldeng/crosslan | headquarter/iptman.py | 1 | 2445 | import iptc
import logging
logging.basicConfig(format='[%(levelname)s]\t%(asctime)s\t%(message)s', datefmt='%m/%d/%Y %I:%M:%S %p',
level=logging.DEBUG)
log = logging.getLogger("iptman")
class IptMan():
def __init__(self):
pass
@staticmethod
def insert_rule(port):
try:
# New Rule.
rule = iptc.Rule()
rule.protocol = "tcp"
# Add match to the rule.
match = iptc.Match(rule, "tcp")
match.sport = str(port)
rule.add_match(match)
# Add target to the rule.
target = iptc.Target(rule, "ACCEPT")
rule.target = target
# Insert rule to the OUTPUT chain in filter Table.
output_chain = iptc.Chain(iptc.Table(iptc.Table.FILTER), "OUTPUT")
output_chain.insert_rule(rule)
except Exception, e:
raise e
@staticmethod
def delete_rule(port):
try:
filter_table = iptc.Table(iptc.Table.FILTER)
output_chain = iptc.Chain(filter_table, "OUTPUT")
rule_del = None
for rule in output_chain.rules:
sport = str(rule.matches[0].parameters["sport"])
if sport == str(port):
rule_del = rule
break
if rule_del is not None:
output_chain.delete_rule(rule_del)
except Exception, e:
raise e
@staticmethod
def get_rule_counter(port):
try:
filter_table = iptc.Table(iptc.Table.FILTER)
filter_table.refresh()
output_chain = iptc.Chain(filter_table, "OUTPUT")
bytes_counts = None
for rule in output_chain.rules:
sport = str(rule.matches[0].parameters["sport"])
# log.debug(rule.get_counters())
if sport == str(port):
counter = rule.get_counters()
packets = counter[0]
bytes_counts = counter[1]
log.debug("packet #:" + str(packets))
log.debug("bytes #:" + str(bytes_counts))
break
if bytes_counts is None:
raise Exception("NotFoundPort")
return bytes_counts
except Exception, e:
raise e
def unit_test():
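    # Smoke-test sketch (hypothetical port; requires root and a live iptables):
    #   port = 12345
    #   IptMan.insert_rule(port)
    #   IptMan.get_rule_counter(port)
    #   IptMan.delete_rule(port)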
pass
if __name__ == "__main__":
unit_test() | gpl-2.0 | 5,179,356,324,944,784,000 | 27.44186 | 103 | 0.503476 | false |
google-research/falken | service/generated_flatbuffers/tflite/LessEqualOptions.py | 1 | 2218 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: tflite
import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()
class LessEqualOptions(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsLessEqualOptions(cls, buf, offset):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = LessEqualOptions()
x.Init(buf, n + offset)
return x
@classmethod
def LessEqualOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
# LessEqualOptions
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
def LessEqualOptionsStart(builder): builder.StartObject(0)
def LessEqualOptionsEnd(builder): return builder.EndObject()
class LessEqualOptionsT(object):
# LessEqualOptionsT
def __init__(self):
pass
@classmethod
def InitFromBuf(cls, buf, pos):
lessEqualOptions = LessEqualOptions()
lessEqualOptions.Init(buf, pos)
return cls.InitFromObj(lessEqualOptions)
@classmethod
def InitFromObj(cls, lessEqualOptions):
x = LessEqualOptionsT()
x._UnPack(lessEqualOptions)
return x
# LessEqualOptionsT
def _UnPack(self, lessEqualOptions):
if lessEqualOptions is None:
return
# LessEqualOptionsT
def Pack(self, builder):
LessEqualOptionsStart(builder)
lessEqualOptions = LessEqualOptionsEnd(builder)
return lessEqualOptions
| apache-2.0 | 3,640,308,164,800,651,300 | 29.805556 | 114 | 0.708747 | false |
Southpaw-TACTIC/Team | src/python/Lib/site-packages/PySide/examples/itemviews/addressbook/addresswidget.py | 1 | 10279 | #!/usr/bin/python
"""**************************************************************************
**
** Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
** All rights reserved.
** Contact: Nokia Corporation ([email protected])
**
** This file is part of the examples of the Qt Toolkit.
**
** You may use this file under the terms of the BSD license as follows:
**
** "Redistribution and use in source and binary forms, with or without
** modification, are permitted provided that the following conditions are
** met:
** * Redistributions of source code must retain the above copyright
** notice, this list of conditions and the following disclaimer.
** * Redistributions in binary form must reproduce the above copyright
** notice, this list of conditions and the following disclaimer in
** the documentation and/or other materials provided with the
** distribution.
** * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
** the names of its contributors may be used to endorse or promote
** products derived from this software without specific prior written
** permission.
**
** THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
** "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
** LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
** A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
** OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
** SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
** LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
** DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
** THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
** (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
** OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
**
*****************************************************************************
** February 2011
** - addressbook example ported to PySide by Arun Srinivasan
** <[email protected]>
**************************************************************************"""
try:
import cpickle as pickle
except ImportError:
import pickle
from PySide.QtCore import (Qt, Signal, QRegExp, QModelIndex)
from PySide.QtGui import (QWidget, QTabWidget, QItemSelectionModel,
QMessageBox, QTableView, QSortFilterProxyModel,
QAbstractItemView, QItemSelection)
from tablemodel import TableModel
from newaddresstab import NewAddressTab
from adddialogwidget import AddDialogWidget
class AddressWidget(QTabWidget):
""" The central widget of the application. Most of the addressbook's
functionality is contained in this class.
"""
selectionChanged = Signal(QItemSelection)
def __init__(self, parent=None):
""" Initialize the AddressWidget. """
super(AddressWidget, self).__init__(parent)
self.tableModel = TableModel()
self.newAddressTab = NewAddressTab()
self.newAddressTab.sendDetails.connect(self.addEntry)
self.addTab(self.newAddressTab, "Address Book")
self.setupTabs()
def addEntry(self, name=None, address=None):
""" Add an entry to the addressbook. """
if name is None and address is None:
addDialog = AddDialogWidget()
if addDialog.exec_():
name = addDialog.name
address = addDialog.address
address = {"name": name, "address": address}
addresses = self.tableModel.addresses[:]
# The QT docs for this example state that what we're doing here
# is checking if the entered name already exists. What they
# (and we here) are actually doing is checking if the whole
# name/address pair exists already - ok for the purposes of this
# example, but obviously not how a real addressbook application
# should behave.
try:
addresses.remove(address)
QMessageBox.information(self, "Duplicate Name",
"The name \"%s\" already exists." % name)
except ValueError:
# The address didn't already exist, so let's add it to the model.
# Step 1: create the row
self.tableModel.insertRows(0)
# Step 2: get the index of the newly created row and use it.
# to set the name
ix = self.tableModel.index(0, 0, QModelIndex())
self.tableModel.setData(ix, address["name"], Qt.EditRole)
# Step 3: lather, rinse, repeat for the address.
ix = self.tableModel.index(0, 1, QModelIndex())
self.tableModel.setData(ix, address["address"], Qt.EditRole)
# Remove the newAddressTab, as we now have at least one
# address in the model.
self.removeTab(self.indexOf(self.newAddressTab))
# The screenshot for the QT example shows nicely formatted
# multiline cells, but the actual application doesn't behave
# quite so nicely, at least on Ubuntu. Here we resize the newly
# created row so that multiline addresses look reasonable.
tableView = self.currentWidget()
tableView.resizeRowToContents(ix.row())
def editEntry(self):
""" Edit an entry in the addressbook. """
tableView = self.currentWidget()
proxyModel = tableView.model()
selectionModel = tableView.selectionModel()
# Get the name and address of the currently selected row.
indexes = selectionModel.selectedRows()
for index in indexes:
row = proxyModel.mapToSource(index).row()
ix = self.tableModel.index(row, 0, QModelIndex())
name = self.tableModel.data(ix, Qt.DisplayRole)
ix = self.tableModel.index(row, 1, QModelIndex())
address = self.tableModel.data(ix, Qt.DisplayRole)
# Open an addDialogWidget, and only allow the user to edit the address.
addDialog = AddDialogWidget()
addDialog.setWindowTitle("Edit a Contact")
addDialog.nameText.setReadOnly(True)
addDialog.nameText.setText(name)
addDialog.addressText.setText(address)
# If the address is different, add it to the model.
if addDialog.exec_():
newAddress = addDialog.address
if newAddress != address:
ix = self.tableModel.index(row, 1, QModelIndex())
self.tableModel.setData(ix, newAddress, Qt.EditRole)
def removeEntry(self):
""" Remove an entry from the addressbook. """
tableView = self.currentWidget()
proxyModel = tableView.model()
selectionModel = tableView.selectionModel()
# Just like editEntry, but this time remove the selected row.
indexes = selectionModel.selectedRows()
for index in indexes:
row = proxyModel.mapToSource(index).row()
self.tableModel.removeRows(row)
# If we've removed the last address in the model, display the
# newAddressTab
if self.tableModel.rowCount() == 0:
self.insertTab(0, self.newAddressTab, "Address Book")
def setupTabs(self):
""" Setup the various tabs in the AddressWidget. """
groups = ["ABC", "DEF", "GHI", "JKL", "MNO", "PQR", "STU", "VW", "XYZ"]
for group in groups:
proxyModel = QSortFilterProxyModel(self)
proxyModel.setSourceModel(self.tableModel)
proxyModel.setDynamicSortFilter(True)
tableView = QTableView()
tableView.setModel(proxyModel)
tableView.setSortingEnabled(True)
tableView.setSelectionBehavior(QAbstractItemView.SelectRows)
tableView.horizontalHeader().setStretchLastSection(True)
tableView.verticalHeader().hide()
tableView.setEditTriggers(QAbstractItemView.NoEditTriggers)
tableView.setSelectionMode(QAbstractItemView.SingleSelection)
# This here be the magic: we use the group name (e.g. "ABC") to
# build the regex for the QSortFilterProxyModel for the group's
# tab. The regex will end up looking like "^[ABC].*", only
# allowing this tab to display items where the name starts with
# "A", "B", or "C". Notice that we set it to be case-insensitive.
reFilter = "^[%s].*" % group
proxyModel.setFilterRegExp(QRegExp(reFilter, Qt.CaseInsensitive))
proxyModel.setFilterKeyColumn(0) # Filter on the "name" column
proxyModel.sort(0, Qt.AscendingOrder)
tableView.selectionModel().selectionChanged.connect(self.selectionChanged)
self.addTab(tableView, group)
# Note: the QT example uses a QDataStream for the saving and loading.
# Here we're using a python dictionary to store the addresses, which
    # can't be streamed using QDataStream, so we just use cPickle for this
# example.
    def readFromFile(self, filename):
        """ Read contacts in from a file. """
        addresses = []
        try:
            f = open(filename, "rb")
            try:
                addresses = pickle.load(f)
            finally:
                f.close()
        except IOError:
            QMessageBox.information(self, "Unable to open file",
                    "Unable to open file: %s" % filename)
            return
        if len(addresses) == 0:
            QMessageBox.information(self, "No contacts in file",
                    "No contacts in file: %s" % filename)
        else:
            for address in addresses:
                self.addEntry(address["name"], address["address"])
    def writeToFile(self, filename):
        """ Save all contacts in the model to a file. """
        try:
            f = open(filename, "wb")
            try:
                pickle.dump(self.tableModel.addresses, f)
            finally:
                f.close()
        except IOError:
            QMessageBox.information(self, "Unable to open file",
                    "Unable to open file: %s" % filename)
if __name__ == "__main__":
import sys
from PySide.QtGui import QApplication
app = QApplication(sys.argv)
addressWidget = AddressWidget()
addressWidget.show()
sys.exit(app.exec_())
| epl-1.0 | -137,049,467,144,635,650 | 40.447581 | 86 | 0.628369 | false |
googleapis/python-grafeas | grafeas/grafeas_v1/services/grafeas/transports/grpc_asyncio.py | 1 | 26599 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import packaging.version
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.protobuf import empty_pb2 # type: ignore
from grafeas.grafeas_v1.types import grafeas
from .base import GrafeasTransport, DEFAULT_CLIENT_INFO
from .grpc import GrafeasGrpcTransport
class GrafeasGrpcAsyncIOTransport(GrafeasTransport):
"""gRPC AsyncIO backend transport for Grafeas.
`Grafeas <https://grafeas.io>`__ API.
Retrieves analysis results of Cloud components such as Docker
container images.
Analysis results are stored as a series of occurrences. An
``Occurrence`` contains information about a specific analysis
instance on a resource. An occurrence refers to a ``Note``. A note
contains details describing the analysis and is generally stored in
a separate project, called a ``Provider``. Multiple occurrences can
refer to the same note.
For example, an SSL vulnerability could affect multiple images. In
this case, there would be one note for the vulnerability and an
occurrence for each image with the vulnerability referring to that
note.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
def __init__(
self,
*,
host: str = "",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def get_occurrence(
self,
) -> Callable[[grafeas.GetOccurrenceRequest], Awaitable[grafeas.Occurrence]]:
r"""Return a callable for the get occurrence method over gRPC.
Gets the specified occurrence.
Returns:
Callable[[~.GetOccurrenceRequest],
Awaitable[~.Occurrence]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_occurrence" not in self._stubs:
self._stubs["get_occurrence"] = self.grpc_channel.unary_unary(
"/grafeas.v1.Grafeas/GetOccurrence",
request_serializer=grafeas.GetOccurrenceRequest.serialize,
response_deserializer=grafeas.Occurrence.deserialize,
)
return self._stubs["get_occurrence"]
@property
def list_occurrences(
self,
) -> Callable[
[grafeas.ListOccurrencesRequest], Awaitable[grafeas.ListOccurrencesResponse]
]:
r"""Return a callable for the list occurrences method over gRPC.
Lists occurrences for the specified project.
Returns:
Callable[[~.ListOccurrencesRequest],
Awaitable[~.ListOccurrencesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_occurrences" not in self._stubs:
self._stubs["list_occurrences"] = self.grpc_channel.unary_unary(
"/grafeas.v1.Grafeas/ListOccurrences",
request_serializer=grafeas.ListOccurrencesRequest.serialize,
response_deserializer=grafeas.ListOccurrencesResponse.deserialize,
)
return self._stubs["list_occurrences"]
@property
def delete_occurrence(
self,
) -> Callable[[grafeas.DeleteOccurrenceRequest], Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the delete occurrence method over gRPC.
Deletes the specified occurrence. For example, use
this method to delete an occurrence when the occurrence
is no longer applicable for the given resource.
Returns:
Callable[[~.DeleteOccurrenceRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_occurrence" not in self._stubs:
self._stubs["delete_occurrence"] = self.grpc_channel.unary_unary(
"/grafeas.v1.Grafeas/DeleteOccurrence",
request_serializer=grafeas.DeleteOccurrenceRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_occurrence"]
@property
def create_occurrence(
self,
) -> Callable[[grafeas.CreateOccurrenceRequest], Awaitable[grafeas.Occurrence]]:
r"""Return a callable for the create occurrence method over gRPC.
Creates a new occurrence.
Returns:
Callable[[~.CreateOccurrenceRequest],
Awaitable[~.Occurrence]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_occurrence" not in self._stubs:
self._stubs["create_occurrence"] = self.grpc_channel.unary_unary(
"/grafeas.v1.Grafeas/CreateOccurrence",
request_serializer=grafeas.CreateOccurrenceRequest.serialize,
response_deserializer=grafeas.Occurrence.deserialize,
)
return self._stubs["create_occurrence"]
@property
def batch_create_occurrences(
self,
) -> Callable[
[grafeas.BatchCreateOccurrencesRequest],
Awaitable[grafeas.BatchCreateOccurrencesResponse],
]:
r"""Return a callable for the batch create occurrences method over gRPC.
Creates new occurrences in batch.
Returns:
Callable[[~.BatchCreateOccurrencesRequest],
Awaitable[~.BatchCreateOccurrencesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "batch_create_occurrences" not in self._stubs:
self._stubs["batch_create_occurrences"] = self.grpc_channel.unary_unary(
"/grafeas.v1.Grafeas/BatchCreateOccurrences",
request_serializer=grafeas.BatchCreateOccurrencesRequest.serialize,
response_deserializer=grafeas.BatchCreateOccurrencesResponse.deserialize,
)
return self._stubs["batch_create_occurrences"]
@property
def update_occurrence(
self,
) -> Callable[[grafeas.UpdateOccurrenceRequest], Awaitable[grafeas.Occurrence]]:
r"""Return a callable for the update occurrence method over gRPC.
Updates the specified occurrence.
Returns:
Callable[[~.UpdateOccurrenceRequest],
Awaitable[~.Occurrence]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_occurrence" not in self._stubs:
self._stubs["update_occurrence"] = self.grpc_channel.unary_unary(
"/grafeas.v1.Grafeas/UpdateOccurrence",
request_serializer=grafeas.UpdateOccurrenceRequest.serialize,
response_deserializer=grafeas.Occurrence.deserialize,
)
return self._stubs["update_occurrence"]
@property
def get_occurrence_note(
self,
) -> Callable[[grafeas.GetOccurrenceNoteRequest], Awaitable[grafeas.Note]]:
r"""Return a callable for the get occurrence note method over gRPC.
Gets the note attached to the specified occurrence.
Consumer projects can use this method to get a note that
belongs to a provider project.
Returns:
Callable[[~.GetOccurrenceNoteRequest],
Awaitable[~.Note]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_occurrence_note" not in self._stubs:
self._stubs["get_occurrence_note"] = self.grpc_channel.unary_unary(
"/grafeas.v1.Grafeas/GetOccurrenceNote",
request_serializer=grafeas.GetOccurrenceNoteRequest.serialize,
response_deserializer=grafeas.Note.deserialize,
)
return self._stubs["get_occurrence_note"]
@property
def get_note(self) -> Callable[[grafeas.GetNoteRequest], Awaitable[grafeas.Note]]:
r"""Return a callable for the get note method over gRPC.
Gets the specified note.
Returns:
Callable[[~.GetNoteRequest],
Awaitable[~.Note]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_note" not in self._stubs:
self._stubs["get_note"] = self.grpc_channel.unary_unary(
"/grafeas.v1.Grafeas/GetNote",
request_serializer=grafeas.GetNoteRequest.serialize,
response_deserializer=grafeas.Note.deserialize,
)
return self._stubs["get_note"]
@property
def list_notes(
self,
) -> Callable[[grafeas.ListNotesRequest], Awaitable[grafeas.ListNotesResponse]]:
r"""Return a callable for the list notes method over gRPC.
Lists notes for the specified project.
Returns:
Callable[[~.ListNotesRequest],
Awaitable[~.ListNotesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_notes" not in self._stubs:
self._stubs["list_notes"] = self.grpc_channel.unary_unary(
"/grafeas.v1.Grafeas/ListNotes",
request_serializer=grafeas.ListNotesRequest.serialize,
response_deserializer=grafeas.ListNotesResponse.deserialize,
)
return self._stubs["list_notes"]
@property
def delete_note(
self,
) -> Callable[[grafeas.DeleteNoteRequest], Awaitable[empty_pb2.Empty]]:
r"""Return a callable for the delete note method over gRPC.
Deletes the specified note.
Returns:
Callable[[~.DeleteNoteRequest],
Awaitable[~.Empty]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_note" not in self._stubs:
self._stubs["delete_note"] = self.grpc_channel.unary_unary(
"/grafeas.v1.Grafeas/DeleteNote",
request_serializer=grafeas.DeleteNoteRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_note"]
@property
def create_note(
self,
) -> Callable[[grafeas.CreateNoteRequest], Awaitable[grafeas.Note]]:
r"""Return a callable for the create note method over gRPC.
Creates a new note.
Returns:
Callable[[~.CreateNoteRequest],
Awaitable[~.Note]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_note" not in self._stubs:
self._stubs["create_note"] = self.grpc_channel.unary_unary(
"/grafeas.v1.Grafeas/CreateNote",
request_serializer=grafeas.CreateNoteRequest.serialize,
response_deserializer=grafeas.Note.deserialize,
)
return self._stubs["create_note"]
@property
def batch_create_notes(
self,
) -> Callable[
[grafeas.BatchCreateNotesRequest], Awaitable[grafeas.BatchCreateNotesResponse]
]:
r"""Return a callable for the batch create notes method over gRPC.
Creates new notes in batch.
Returns:
Callable[[~.BatchCreateNotesRequest],
Awaitable[~.BatchCreateNotesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "batch_create_notes" not in self._stubs:
self._stubs["batch_create_notes"] = self.grpc_channel.unary_unary(
"/grafeas.v1.Grafeas/BatchCreateNotes",
request_serializer=grafeas.BatchCreateNotesRequest.serialize,
response_deserializer=grafeas.BatchCreateNotesResponse.deserialize,
)
return self._stubs["batch_create_notes"]
@property
def update_note(
self,
) -> Callable[[grafeas.UpdateNoteRequest], Awaitable[grafeas.Note]]:
r"""Return a callable for the update note method over gRPC.
Updates the specified note.
Returns:
Callable[[~.UpdateNoteRequest],
Awaitable[~.Note]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_note" not in self._stubs:
self._stubs["update_note"] = self.grpc_channel.unary_unary(
"/grafeas.v1.Grafeas/UpdateNote",
request_serializer=grafeas.UpdateNoteRequest.serialize,
response_deserializer=grafeas.Note.deserialize,
)
return self._stubs["update_note"]
@property
def list_note_occurrences(
self,
) -> Callable[
[grafeas.ListNoteOccurrencesRequest],
Awaitable[grafeas.ListNoteOccurrencesResponse],
]:
r"""Return a callable for the list note occurrences method over gRPC.
Lists occurrences referencing the specified note.
Provider projects can use this method to get all
occurrences across consumer projects referencing the
specified note.
Returns:
Callable[[~.ListNoteOccurrencesRequest],
Awaitable[~.ListNoteOccurrencesResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_note_occurrences" not in self._stubs:
self._stubs["list_note_occurrences"] = self.grpc_channel.unary_unary(
"/grafeas.v1.Grafeas/ListNoteOccurrences",
request_serializer=grafeas.ListNoteOccurrencesRequest.serialize,
response_deserializer=grafeas.ListNoteOccurrencesResponse.deserialize,
)
return self._stubs["list_note_occurrences"]
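    # Usage sketch (hypothetical request values; each property above returns
    # the raw gRPC stub, so calling it yields an awaitable response):
    #
    #   request = grafeas.GetNoteRequest(name="projects/p/notes/n")
    #   note = await transport.get_note(request)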
__all__ = ("GrafeasGrpcAsyncIOTransport",)
| apache-2.0 | -3,522,779,740,541,284,000 | 41.626603 | 89 | 0.617956 | false |
terranum-ch/GraphLink | graphlink/ui/gkui_node_manager.py | 1 | 2716 | #!/usr/bin/python
import os
import wx
from ..core.gk_node import GKNode
from .gkui_node_dlg import GKUINodeEditDialog
class GKUINodeManager(object):
def __init__(self, parentframe, listctrl):
self.m_listctrl = listctrl
assert (self.m_listctrl is not None), "listctrl is None!"
self.m_parent_frame = parentframe
self.m_nodes = []
self.m_node_paths = []
def add_node_path(self, nodepath):
"""specify search path for nodes"""
if nodepath not in self.m_node_paths:
self.m_node_paths.append(nodepath)
def has_node_paths(self):
"""return True if some nodes path are defined"""
if len(self.m_node_paths) == 0:
return False
return True
def add_node_to_list(self, node):
"""add node to the internal list if it isn't already present"""
if node not in self.m_nodes:
self.m_nodes.append(node)
def get_node_count(self):
"""get the number of nodes"""
return len(self.m_nodes)
def reload_path(self):
"""clear the list ctrl and parse the node paths"""
for path in self.m_node_paths:
if os.path.exists(path) is False:
                wx.LogError("{} doesn't exist!".format(path))
            else:
                for myfile in os.listdir(path):
                    if myfile.endswith(".gkn"): # node files
                        node = GKNode()
                        node_file = os.path.join(path, myfile)  # listdir returns bare names
                        if node.load_from_file(node_file) is False:
                            wx.LogWarning("Error loading: {}".format(node_file))
else:
self.add_node_to_list(node)
# reload the node list
self.reload_list()
def reload_list(self):
"""reload the node list"""
self.m_listctrl.DeleteAllItems()
for index, node in enumerate(self.m_nodes):
self.m_listctrl.Append([index + 1, node.m_name])
def add_node_dialog(self):
"""display the add node dialog"""
mynode = GKNode()
myDlg = GKUINodeEditDialog(self.m_parent_frame, mynode)
if myDlg.ShowModal() == wx.ID_SAVE:
self.add_node_to_list(mynode)
self.reload_list()
def edit_node_dialog(self):
"""display the edit node dialog"""
my_node_index = self.m_listctrl.GetFirstSelected()
if my_node_index == -1:
wx.LogWarning("Nothing selected, select à node first!")
return False
my_node = self.m_nodes[my_node_index]
assert(my_node)
myDlg = GKUINodeEditDialog(self.m_parent_frame, my_node)
if myDlg.ShowModal() == wx.ID_SAVE:
self.reload_list()
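# Example wiring (sketch; assumes an existing wx frame and a wx.ListCtrl):
#
#   manager = GKUINodeManager(frame, listctrl)
#   manager.add_node_path("/path/to/nodes")
#   manager.reload_path()  # loads *.gkn files and refreshes the list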
| apache-2.0 | -1,177,501,102,955,131,400 | 31.710843 | 77 | 0.559116 | false |
gratefulfrog/lib | python/chempy/cex.py | 1 | 17584 | #A* -------------------------------------------------------------------
#B* This file contains source code for the PyMOL computer program
#C* copyright 1998-2000 by Warren Lyford Delano of DeLano Scientific.
#D* -------------------------------------------------------------------
#E* It is unlawful to modify or remove this copyright notice.
#F* -------------------------------------------------------------------
#G* Please see the accompanying LICENSE file for further information.
#H* -------------------------------------------------------------------
#I* Additional authors of this source file include:
#-* Scott Dixon, Metaphorics, LLC
#-*
#-*
#Z* -------------------------------------------------------------------
"""
Author: Scott Dixon, Metaphorics, LLC
This source code is contributed to the public domain and may be freely
copied and distributed for research, profit, fun or any other reason,
with these restrictions: (1) unmodified or functionally equivalent code
derived from this code must contain this notice, (2) all derived code
must acknowledge the author and institution, and (3) no liability is
assumed by the author(s) for any use or misuse of this software.
CEX input routines. Reads each CEX object into a test based tree.
Provides a CEX smiles interpreter class which can be specialized to create
appropriate molecule object """
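# Typical read loop (sketch; assumes a CEX file object -- see the classes below):
#
#   cxs = CEXstream(open("input.cex"))
#   tree = readTree(cxs)
#   while tree is not None:
#       spew(tree)
#       tree = readTree(cxs)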
import string
class CEXstream:
"""Input stream which read from file object"""
(START, COMMENT, QUOTE, NOTQUOTE, GOTQUOTE, TAG, VALUE, END) = range(8)
TAG_CHAR = string.letters + string.digits + "$_/"
def __init__(self,file):
self.file = file
self.dt=None
self.oldch = 0
self.buff = ""
self.p = 0
self.len = 0
def readEntry(self):
"""Read one tag<value> entry from stream"""
# find nonblank character
str = ""
p = 0
while 1:
try:
if self.buff[p] not in string.whitespace:
break
p = p + 1
except IndexError:
self.buff = self.file.read(1000)
p = 0
if len(self.buff) == 0:
return (None, None)
self.buff = self.buff[p:]
if self.buff[0] == "|":
self.buff = self.buff[1:]
return ("|","")
while 1:
try:
while 1:
p = string.index(self.buff,">") + 1
str = str + self.buff[:p]
self.buff = self.buff[p:]
if string.count(str,'"') %2 == 0:
break
except (ValueError, IndexError):
str = str + self.buff
self.buff = self.file.read(1000)
if len(self.buff)==0:
if string.find(str,"|") >= 0:
return ("|","")
else:
return (None, None)
else: break
s = string.find(str,"<")
if s < 0:
return (None, None)
else:
return (str[:s],str[s+1:-1])
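    # Example: for the stream '$MOL<caffeine>/NAME<caffeine>|', successive
    # readEntry() calls return ('$MOL', 'caffeine'), ('/NAME', 'caffeine'),
    # then ('|', '') marking the end of the object.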
class CEXsmilesError(Exception):
def __init__(self,smiles,p,msg):
self.args="Smiles error: " + msg + "\n" + smiles + "\n" + p*" " + "^"
class CEXsmilesParser:
"""A simple CEX smiles parser adapted from Dave Weininger's C version in the
CEX toolkit"""
MX_NESTING=4096
MX_RINGS=1000
ptab = {"*":0,
"H":1, "He":2, "Li":3, "Be":4, "B":5, "C":6, "N":7, "O":8, "F":9, "Ne":10,
"Na":11, "Mg":12, "Al":13, "Si":14, "P":15, "S":16, "Cl":17, "Ar":18, "K":19, "Ca":20,
"Sc:":21, "Ti":22, "V":23, "Cr":24, "Mn":25, "Fe":26, "Co":27, "Ni":28, "Cu":29, "Zn":30,
"Ga":31, "Ge":32, "As":33, "Se":34, "Br":35, "Kr":36, "Rb":37, "Sr":38, "Y":39, "Zr":40,
"Nb":41, "Mo":42, "Tc":43, "Ru":44, "Rh":45, "Pd":46, "Ag":47, "Cd":48, "In":49, "Sn":50,
"Sb":51, "Te":52, "I":53, "Xe":54, "Cs":55, "Ba":56, "La":57, "Ce":58, "Pr":59, "Nd":60,
"Pm":61, "Sm":62, "Eu":63, "Gd":64, "Tb":65, "Dy":66, "Ho":67, "Er":68, "Tm":69, "Yb":70,
"Lu":71, "Hf":72, "Ta":73, "W":74, "Re":75, "Os":76, "Ir":77, "Pt":78, "Au":79, "Hg":80,
"Tl":81, "Pb":82, "Bi":83, "Po":84, "At":85, "Rn":86, "Fr":87, "Ra":88, "Ac":89, "Th":90,
"Pa":91, "U":92, "Np":93, "Pu":94, "Am":95, "Cm":96, "Bk":97, "Cf":98, "Es":99, "Fm":100,
"Md":101, "No":102, "Lr":103, "Rf":104, "Ha":105}
stab = {0:"*",
1:"H", 2:"He", 3:"Li", 4:"Be", 5:"B", 6:"C", 7:"N", 8:"O", 9:"F", 10:"Ne",
11:"Na", 12:"Mg", 13:"Al", 14:"Si", 15:"P", 16:"S", 17:"Cl", 18:"Ar", 19:"K", 20:"Ca",
21:"Sc:", 22:"Ti", 23:"V", 24:"Cr", 25:"Mn", 26:"Fe", 27:"Co", 28:"Ni", 29:"Cu", 30:"Zn",
31:"Ga", 32:"Ge", 33:"As", 34:"Se", 35:"Br", 36:"Kr", 37:"Rb", 38:"Sr", 39:"Y", 40:"Zr",
41:"Nb", 42:"Mo", 43:"Tc", 44:"Ru", 45:"Rh", 46:"Pd", 47:"Ag", 48:"Cd", 49:"In", 50:"Sn",
51:"Sb", 52:"Te", 53:"I", 54:"Xe", 55:"Cs", 56:"Ba", 57:"La", 58:"Ce", 59:"Pr", 60:"Nd",
61:"Pm", 62:"Sm", 63:"Eu", 64:"Gd", 65:"Tb", 66:"Dy", 67:"Ho", 68:"Er", 69:"Tm", 70:"Yb",
71:"Lu", 72:"Hf", 73:"Ta", 74:"W", 75:"Re", 76:"Os", 77:"Ir", 78:"Pt", 79:"Au", 80:"Hg",
81:"Tl", 82:"Pb", 83:"Bi", 84:"Po", 85:"At", 86:"Rn", 87:"Fr", 88:"Ra", 89:"Ac", 90:"Th",
91:"Pa", 92:"U", 93:"Np", 94:"Pu", 95:"Am", 96:"Cm", 97:"Bk", 98:"Cf", 99:"Es", 100:"Fm",
101:"Md", 102:"No", 103:"Lr", 104:"Rf", 105:"Ha"}
def sym2num(self,sym):
try:
return CEXsmilesParser.ptab[sym]
except KeyError:
return -1
def num2sym(self,num):
try:
return CEXsmilesParser.stab[num]
except KeyError:
return ""
def needquote(self,atnum):
if atnum in (0,5,6,7,8,9,15,16,17,35,53): return 0
else: return 1
def __init__(self):
self.atomN = 0
def MakeAtom(self, atnum):
print "Atom %d, atomic number %d" % (self.atomN, atnum)
self.atomN = self.atomN + 1
return self.atomN-1
def MakeBond(self, at1, at2, bo):
print "Bond between %d and %d, order %d" % (at1, at2,bo)
def SetHcount(self, atom, count):
print "Explicit H count %d for atom %d" % (count, atom)
def SetFormalCharge(self, atom, charge):
print "Charge for atom %d is %d" % (atom, charge)
def SetAtomicMass(self, atom, mass):
print "Mass from atom %d is %d" % (atom, mass)
def parse(self,smiles):
self.smiles=smiles + 3*"\0" # guard zone for illegal smiles
self.__init__()
        self.ringat = [None]*CEXsmilesParser.MX_RINGS
        self.fromat = [None]*CEXsmilesParser.MX_NESTING  # indexed by nesting level
        self.ringbo = [0]*CEXsmilesParser.MX_RINGS  # indexed by ring number
self.molname = ""
lev = 0
atnum = -1
imph = -1
bo = 0
charge = 0
quoted = 0
mass = 0
        # adapted from Dave Weininger's code in the CEX toolkits
p = 0
while p < len(self.smiles):
pp = p + 1
ch = self.smiles[p]
if ch == "(":
self.fromat[lev + 1] = self.fromat[lev]
lev = lev + 1
elif ch == ")": lev = lev - 1
elif ch == "[":
if quoted:
# error, no closing ]
raise CEXsmilesError(smiles,p,"No closing ]")
else:
quoted = 1
if self.smiles[pp] in string.digits:
p = pp
while self.smiles[p+1] in string.digits:
p = p + 1
mass = string.atoi(self.smiles[pp:p+1])
elif ch == "]":
if not quoted:
# error, no opening ]
raise CEXsmilesError(smiles,p,"No opening ]")
else:
quoted = 0
elif ch == ".":
self.fromat[lev] = None # disconnected parts
# bond types
elif ch == "=": bo = 2
elif ch == "#": bo = 3
elif ch == "-" and not quoted: bo = 1
# atom charge
elif ch == "-" or ch == "+":
if not quoted:
# error charge not in []
raise CEXsmilesError(smiles,p,"Charge not in []")
elif self.fromat[lev] is None:
# error charge precedes atomic symbol
raise CEXsmilesError(smiles,p,"Charge precedes atomic symbol")
else:
charge = 0
sign = 1
if ch == "-": sign = -1
while self.smiles[p+1] in string.digits:
charge = 10*charge + string.atoi(self.smiles[p+1])
p = p + 1
if charge == 0: charge = 1
charge = sign*charge
# allow for multiple + and - specifiers
while self.smiles[p+1] == "+":
charge = charge + 1
p = p + 1
while self.smiles[p+1] == "-":
charge = charge - 1
p = p + 1
if charge != 0: self.SetFormalCharge(atom, charge)
elif ch in string.digits or ch == "%" or ch == "^":
# deal with ring closures
if ch == "%":
if self.smiles[p+1] in string.digits and self.smiles[p+2] in string.digits:
ir = string.atoi(self.smiles[p+1:p+3])
p = p + 2
else:
# error expect 2 digits after %
raise CEXsmilesError(smiles,p,"Expect 2 digits after %")
elif ch == "^":
if self.smiles[p+1] in string.digits and self.smiles[p+2] in string.digits and self.smiles[p+3] in string.digits:
ir = string.atoi(self.smiles[p+1:p+4])
p = p + 3
else:
#error expect 3 digits after ^
raise CEXsmilesError(smiles,p,"Expect 3 digits after ^")
else:
ir = string.atoi(ch)
if self.ringat[ir] is None:
self.ringat[ir] = self.fromat[lev]
self.ringbo[ir] = bo
elif bo and self.ringbo[ir] and bo != self.ringbo[ir]:
#error conflicting closure bond orders
raise CEXsmilesError(smiles,p,"Conflicting closure bond orders")
else:
if not bo: bo = 1
if self.ringbo[ir]: bo = self.ringbo[ir]
self.MakeBond(self.fromat[lev],self.ringat[ir],bo)
self.ringat[ir] = None
self.ringbo[ir] = 0
bo = 0
elif ch in "*ABCDEFGHIKLMNOPRSTUVWXYZ":
# recognize atomic symbols
atnum = -1
if self.smiles[pp] in string.lowercase:
atnum = self.sym2num(self.smiles[p:p+2])
if atnum > -1: p = p + 1
else: atnum = self.sym2num(self.smiles[p])
if atnum < 0:
#error bad atomic symbol
raise CEXsmilesError(smiles,p,"Bad atomic symbol")
if not quoted and self.needquote(atnum):
# error symbol needs []'s
raise CEXsmilesError(smiles,p,"Symbol needs []")
atom = self.MakeAtom(atnum)
if not bo: bo = 1
if (self.fromat[lev] is not None) and atom != self.fromat[lev]:
self.MakeBond(atom,self.fromat[lev],bo)
self.fromat[lev] = atom
if not quoted: imph = -1
if mass > 0: self.SetAtomicMass(atom, mass)
if quoted and atom is not None:
#deal with explict hydrogen counts
if self.smiles[p+1] != "H":
imph = 0
else:
imph = 1
p = p + 1
j = p
while self.smiles[p+1] in string.digits:
p = p + 1
if j < p: imph = string.atoi(self.smiles[j+1:p+1])
if imph >= 0: self.SetHcount(atom,imph)
# reset default attributes to undefined
bo = 0
charge = 0
mass = 0
imph = -1
elif ch in string.whitespace:
# extract molecul name from following text
self.molname = self.smiles[p+1:-3]
break
elif ch == "\0":
pass #ignore guard characters
else:
# everything else is an error
# error invalid character
raise CEXsmilesError(smiles,p,"Invalid character")
# end of while
p = p + 1
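# Minimal use of the parser (prints atoms/bonds via the Make*/Set* hooks,
# which subclasses override to build a real molecule object):
#
#   CEXsmilesParser().parse("C1CCCCC1 cyclohexane")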
class CEXprop:
def __init__(self, tag, value):
self.name = tag
self.value = value
def __str__(self):
return self.name + "<" + self.value + ">"
class CEXchild(CEXprop):
def __init__(self, tag, value):
CEXprop.__init__(self, tag, value)
self.proplist = []
def __str__(self):
str = self.name + "<" + self.value + ">"
for p in self.properties():
str = str + "\n" + p.__str__()
return str
def addProp(self, prop):
self.proplist.append(prop)
def properties(self):
return self.proplist
class CEXroot(CEXchild):
def __init__(self, tag, value):
CEXchild.__init__(self, tag, value)
self.childlist = []
def addChild(self, child):
self.childlist.append(child)
def children(self):
return self.childlist
def __str__(self):
str = self.name + "<" + self.value + ">"
for p in self.properties():
str = str + "\n" + p.__str__()
for p in self.children():
str = str + "\n" + p.__str__()
return str
def readTree(cxstream):
"""Read tree of CEX object from stream"""
(tag, value) = cxstream.readEntry()
if not tag: return None
if tag[0] != "$": return None
root = CEXroot(tag,value)
(tag, value) = cxstream.readEntry()
if tag == None: return None
while 1:
if tag == "|": break
if tag == None: break
if tag[0] == "/":
root.addProp(CEXprop(tag, value))
(tag, value) = cxstream.readEntry()
else:
# Hardwired for root/child two level hierarchy
child = CEXchild(tag, value)
while 1:
(tag, value) = cxstream.readEntry()
if tag == "|": break
if tag == None: break
if tag[0] == "/":
child.addProp(CEXprop(tag, value))
continue
else: break
root.addChild(child)
return root
def __follow_child(rec):
print " " + rec.name, rec.value
for prop in rec.properties():
print " " + prop.name, prop.value
def spew(rec):
print rec.name, rec.value
for prop in rec.properties():
print prop.name, prop.value
for child in rec.children():
__follow_child(child)
def selectChildren(rec, string):
return filter(lambda x, string=string: x.name==string, rec.children())
def selectProperty(rec, string):
for prop in rec.properties():
if prop.name == string: return prop
if __name__ == "__main__":
import StringIO
def test(string):
print "test: ",string
s = StringIO.StringIO(string)
c = CEXstream(s)
print c.readEntry()
s.close()
test("|")
test("tag<value>")
test(" tag<value>")
test("$tag<value>")
test("/tag<value>")
test("/tag_tag<value>")
test('tag<"value">')
test('tag<"value>">')
test('tag<"""value>">')
def test2(string):
print "test2: ", string
s = StringIO.StringIO(string)
c = CEXstream(s)
tree = readTree(c)
spew(tree)
test2("$root<test>|")
test2("$root<test>/prop<value>|")
test2("$root<test>child<value>|")
test2("$root<test>/prop<value>/prop2<value2>|")
test2("$root<test>/prop<value>/prop2<value2>child<valuec>|")
test2("$root<test>/prop<value>/prop2<value2>child<valuec>/cprop<cv>|")
def test2a(string):
print "test2a: ", string
s = StringIO.StringIO(string)
c = CEXstream(s)
tree = readTree(c)
spew(tree)
tree = readTree(c)
spew(tree)
test2a("$root<test>/prop<value>/prop2<value2>child<valuec>/cprop<cv>|$root2<test2>/prop<val>child<val>|")
def test3(string):
print "test3: ",string
parser = CEXsmilesParser()
try:
parser.parse(string)
print parser.molname
except CEXsmilesError, data:
print data
test3("[C+2]")
test3("[C++]")
test3("[C+-]")
test3("[C-2]")
test3("[C--]")
test3("[C-+]")
test3("[CH3+2]")
test3("N1#CC1")
test3("N1#[CH3+2]C=1")
test3("C%12CC%12")
test3("C^123CC^123")
test3("N1#[13CH3+2]C=1 test")
test3("[N+1]C")
test3("[N+]C")
test3("N=[N+]=[N-]")
test3("CC[[N]")
test3("C=1CC-1")
test3("[C]]")
test3("C@1")
test3("C+2")
test3("[+2C]")
test3("Si")
test3("[Tx]")
test3("C%1CC%1")
test3("C^12CC^12")
test3("[NH2+]")
| gpl-2.0 | -1,504,338,603,752,551,400 | 38.162584 | 133 | 0.469177 | false |
chrplr/AIP2015 | resources/python-scripts/dualscope.py | 1 | 28502 | #!/usr/bin/env python
"""
Oscilloscope + spectrum analyser in Python.
------------------------------------------------------------
Copyright (C) 2008, Roger Fearick, University of Cape Town
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
------------------------------------------------------------
Version 0.7c
Dependencies:
numpy -- numerics, fft
PyQt4, PyQwt5 -- gui, graphics
pyaudio -- sound card -- Enthought unstable branch!
This code provides an oscilloscope and spectrum analyzer using
the PC sound card as input.
The interface, based on qwt, uses a familiar 'knob based' layout
so that it approximates an analogue scope.
Two traces are provided with input via the sound card "line in" jack.
Traces can be averaged to reduce the influence of noise.
The cross-correlation between the inputs can be computed.
The spectrum analyser has both log (dB) scale and linear scale.
A cross hair status display permits the reading of values off the screen.
Printing is provided.
"""
# dualscope6.py derived from dualscopy5.py 11/8/05
# adds autocorrelation
# Update for Qt4: 4-11/10/2007 rwf
# dualscope7.py: use pyaudio 27/2/08 rwf
import sys
from PyQt4 import Qt
from PyQt4 import Qwt5 as Qwt
from numpy import *
import numpy.fft as FFT
import pyaudio
import icons # part of this package -- toolbar icons
# audio setup
CHUNK = 8192 # input buffer size in frames
FORMAT = pyaudio.paInt16
CHANNELS = 2
RATE = 48000 # depends on sound card: 96000 might be possible
# scope configuration
BOTHLR=0
LEFT=1
RIGHT=2
soundbuffersize=CHUNK
samplerate=float(RATE)
scopeheight=350
LRchannel=BOTHLR
PENWIDTH=2
# status messages
freezeInfo = 'Freeze: Press mouse button and drag'
cursorInfo = 'Cursor Pos: Press mouse button in plot region'
# utility classes
class LogKnob(Qwt.QwtKnob):
"""
Provide knob with log scale
"""
def __init__(self, *args):
apply(Qwt.QwtKnob.__init__, (self,) + args)
self.setScaleEngine(Qwt.QwtLog10ScaleEngine())
def setRange(self,minR,maxR):
self.setScale(minR,maxR)
Qwt.QwtKnob.setRange(self, log10(minR), log10(maxR), 0.333333)
def setValue(self,val):
Qwt.QwtKnob.setValue(self,log10(val))
class LblKnob:
"""
Provide knob with a label
"""
def __init__(self, wgt, x,y, name, logscale=0):
if logscale:
self.knob=LogKnob(wgt)
else:
self.knob=Qwt.QwtKnob(wgt)
color=Qt.QColor(200,200,210)
self.knob.palette().setColor(Qt.QPalette.Active,
Qt.QPalette.Button,
color )
self.lbl=Qt.QLabel(name, wgt)
self.knob.setGeometry(x, y, 140, 100)
# oooh, eliminate this ...
if name[0]=='o': self.knob.setKnobWidth(40)
self.lbl.setGeometry(x, y+90, 140, 15)
self.lbl.setAlignment(Qt.Qt.AlignCenter)
def setRange(self,*args):
apply(self.knob.setRange, args)
def setValue(self,*args):
apply(self.knob.setValue, args)
def setScaleMaxMajor(self,*args):
apply(self.knob.setScaleMaxMajor, args)
class Scope(Qwt.QwtPlot):
"""
Oscilloscope display widget
"""
def __init__(self, *args):
apply(Qwt.QwtPlot.__init__, (self,) + args)
self.setTitle('Scope');
self.setCanvasBackground(Qt.Qt.white)
# grid
self.grid = Qwt.QwtPlotGrid()
self.grid.enableXMin(True)
self.grid.setMajPen(Qt.QPen(Qt.Qt.gray, 0, Qt.Qt.SolidLine))
self.grid.attach(self)
# axes
self.enableAxis(Qwt.QwtPlot.yRight);
self.setAxisTitle(Qwt.QwtPlot.xBottom, 'Time [s]');
self.setAxisTitle(Qwt.QwtPlot.yLeft, 'Amplitude [V]');
self.setAxisMaxMajor(Qwt.QwtPlot.xBottom, 10);
self.setAxisMaxMinor(Qwt.QwtPlot.xBottom, 0);
self.setAxisScaleEngine(Qwt.QwtPlot.yRight, Qwt.QwtLinearScaleEngine());
self.setAxisMaxMajor(Qwt.QwtPlot.yLeft, 10);
self.setAxisMaxMinor(Qwt.QwtPlot.yLeft, 0);
self.setAxisMaxMajor(Qwt.QwtPlot.yRight, 10);
self.setAxisMaxMinor(Qwt.QwtPlot.yRight, 0);
# curves for scope traces: 2 first so 1 is on top
self.curve2 = Qwt.QwtPlotCurve('Trace2')
self.curve2.setPen(Qt.QPen(Qt.Qt.magenta,PENWIDTH))
self.curve2.setYAxis(Qwt.QwtPlot.yRight)
self.curve2.attach(self)
self.curve1 = Qwt.QwtPlotCurve('Trace1')
self.curve1.setPen(Qt.QPen(Qt.Qt.blue,PENWIDTH))
self.curve1.setYAxis(Qwt.QwtPlot.yLeft)
self.curve1.attach(self)
# default settings
self.triggerval=0.0
self.maxamp=1.0
self.maxamp2=1.0
self.freeze=0
self.average=0
self.autocorrelation=0
self.avcount=0
self.datastream = None
self.offset1=0.0
self.offset2=0.0
# set data
# NumPy: f, g, a and p are arrays!
self.dt=1.0/samplerate
self.f = arange(0.0, 1.0, self.dt)
self.a1 = 0.0*self.f
self.a2 = 0.0*self.f
self.curve1.setData(self.f, self.a1)
self.curve2.setData(self.f, self.a2)
# start self.timerEvent() callbacks running
self.startTimer(100)
# plot
self.replot()
# convenience methods for knob callbacks
def setMaxAmp(self, val):
self.maxamp=val
def setMaxAmp2(self, val):
self.maxamp2=val
def setMaxTime(self, val):
self.maxtime=val
def setOffset1(self, val):
self.offset1=val
def setOffset2(self, val):
self.offset2=val
def setTriggerLevel(self, val):
self.triggerval=val
# plot scope traces
def setDisplay(self):
l=len(self.a1)
if LRchannel==BOTHLR:
self.curve1.setData(self.f[0:l], self.a1[:l]+self.offset1*self.maxamp)
self.curve2.setData(self.f[0:l], self.a2[:l]+self.offset2*self.maxamp2)
elif LRchannel==RIGHT:
self.curve1.setData([0.0,0.0], [0.0,0.0])
self.curve2.setData(self.f[0:l], self.a2[:l]+self.offset2*self.maxamp2)
elif LRchannel==LEFT:
self.curve1.setData(self.f[0:l], self.a1[:l]+self.offset1*self.maxamp)
self.curve2.setData([0.0,0.0], [0.0,0.0])
self.replot()
def getValue(self, index):
return self.f[index],self.a[index]
def setAverage(self, state):
self.average = state
self.avcount=0
def setAutoc(self, state):
self.autocorrelation = state
self.avcount=0
def setFreeze(self, freeze):
self.freeze = 1-self.freeze
def setDatastream(self, datastream):
self.datastream = datastream
# timer callback that does the work
def timerEvent(self,e): # Scope
if self.datastream == None: return
x=self.datastream.read(CHUNK)
if self.freeze==1 or self.avcount>16: return
X=fromstring(x,dtype='h')
if len(X) == 0: return
P=array(X,dtype='d')/32768.0
val=self.triggerval*self.maxamp
i=0
R=P[0::2]
L=P[1::2]
if self.autocorrelation:
lenX=len(R)
if lenX == 0: return
if lenX!=soundbuffersize:
print lenX
window=blackman(lenX)
A1=FFT.fft(R*window) #lenX
A2=FFT.fft(L*window) #lenX
B2=(A1*conjugate(A2))/10.0
R=FFT.ifft(B2).real
else: # normal scope
# set trigger levels
for i in range(len(R)-1):
if R[i]<val and R[i+1]>=val: break
if i > len(R)-2: i=0
R=R[i:]
L=L[i:]
if self.average == 0:
self.a1=R
self.a2=L
else:
self.avcount+=1
if self.avcount==1:
self.sumR=R
self.sumL=L
else:
lp=min(len(R),len(self.sumR))
self.sumR=self.sumR[:lp]+R[:lp]
self.sumL=self.sumL[:lp]+L[:lp]
self.a1=self.sumR/self.avcount
self.a2=self.sumL/self.avcount
self.setDisplay()
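    # Note on the correlation branch above: FFT(R) * conj(FFT(L)), inverse
    # transformed, is the circular cross-correlation of the two windowed
    # channels; the /10.0 is just a display scale factor.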
inittime=0.01
initamp=0.1
class ScopeFrame(Qt.QFrame):
"""
Oscilloscope widget --- contains controls + display
"""
def __init__(self, *args):
apply(Qt.QFrame.__init__, (self,) + args)
# the following: setPal.. doesn't seem to work on Win
try:
self.setPaletteBackgroundColor( QColor(240,240,245))
except: pass
knobpos=scopeheight+30
self.setFixedSize(700, scopeheight+150)
self.freezeState = 0
self.knbLevel = LblKnob(self,560,50,"Trigger level")
self.knbTime = LblKnob(self,560, 220,"Time", 1)
self.knbSignal = LblKnob(self,150, knobpos, "Signal1",1)
self.knbSignal2 = LblKnob(self,450, knobpos, "Signal2",1)
self.knbOffset1=LblKnob(self,10, knobpos,"offset1")
self.knbOffset2=LblKnob(self,310, knobpos,"offset2")
self.knbTime.setRange(0.0001, 1.0)
self.knbTime.setValue(0.01)
self.knbSignal.setRange(0.0001, 1.0)
self.knbSignal.setValue(0.1)
self.knbSignal2.setRange(0.0001, 1.0)
self.knbSignal2.setValue(0.1)
self.knbOffset2.setRange(-1.0, 1.0, 0.001)
self.knbOffset2.setValue(0.0)
self.knbOffset1.setRange(-1.0, 1.0, 0.001)
self.knbOffset1.setValue(0.0)
self.knbLevel.setRange(-1.0, 1.0, 0.001)
self.knbLevel.setValue(0.1)
self.knbLevel.setScaleMaxMajor(10)
self.plot = Scope(self)
self.plot.setGeometry(10, 10, 550, scopeheight)
self.picker = Qwt.QwtPlotPicker(
Qwt.QwtPlot.xBottom,
Qwt.QwtPlot.yLeft,
Qwt.QwtPicker.PointSelection | Qwt.QwtPicker.DragSelection,
Qwt.QwtPlotPicker.CrossRubberBand,
Qwt.QwtPicker.ActiveOnly, #AlwaysOn,
self.plot.canvas())
self.picker.setRubberBandPen(Qt.QPen(Qt.Qt.green))
self.picker.setTrackerPen(Qt.QPen(Qt.Qt.cyan))
self.connect(self.knbTime.knob, Qt.SIGNAL("valueChanged(double)"),
self.setTimebase)
self.knbTime.setValue(0.01)
self.connect(self.knbSignal.knob, Qt.SIGNAL("valueChanged(double)"),
self.setAmplitude)
self.connect(self.knbSignal2.knob, Qt.SIGNAL("valueChanged(double)"),
self.setAmplitude2)
self.knbSignal.setValue(0.1)
self.connect(self.knbLevel.knob, Qt.SIGNAL("valueChanged(double)"),
self.setTriggerlevel)
self.connect(self.knbOffset1.knob, Qt.SIGNAL("valueChanged(double)"),
self.plot.setOffset1)
self.connect(self.knbOffset2.knob, Qt.SIGNAL("valueChanged(double)"),
self.plot.setOffset2)
self.knbLevel.setValue(0.1)
self.plot.setAxisScale( Qwt.QwtPlot.xBottom, 0.0, 10.0*inittime)
self.plot.setAxisScale( Qwt.QwtPlot.yLeft, -5.0*initamp, 5.0*initamp)
self.plot.setAxisScale( Qwt.QwtPlot.yRight, -5.0*initamp, 5.0*initamp)
self.plot.show()
def _calcKnobVal(self,val):
ival=floor(val)
frac=val-ival
if frac >=0.9:
frac=1.0
elif frac>=0.66:
frac=log10(5.0)
elif frac>=log10(2.0):
frac=log10(2.0)
else: frac=0.0
dt=10**frac*10**ival
return dt
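    # _calcKnobVal snaps the knob's log10 position onto a 1-2-5 sequence,
    # e.g. positions 0.0, 0.35, 0.7 and 0.95 map to 1, 2, 5 and 10.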
def setTimebase(self, val):
dt=self._calcKnobVal(val)
self.plot.setAxisScale( Qwt.QwtPlot.xBottom, 0.0, 10.0*dt)
self.plot.replot()
def setAmplitude(self, val):
dt=self._calcKnobVal(val)
self.plot.setAxisScale( Qwt.QwtPlot.yLeft, -5.0*dt, 5.0*dt)
self.plot.setMaxAmp( 5.0*dt )
self.plot.replot()
def setAmplitude2(self, val):
dt=self._calcKnobVal(val)
self.plot.setAxisScale( Qwt.QwtPlot.yRight, -5.0*dt, 5.0*dt)
self.plot.setMaxAmp2( 5.0*dt )
self.plot.replot()
def setTriggerlevel(self, val):
self.plot.setTriggerLevel(val)
self.plot.setDisplay()
#--------------------------------------------------------------------
class FScope(Qwt.QwtPlot):
"""
Power spectrum display widget
"""
def __init__(self, *args):
apply(Qwt.QwtPlot.__init__, (self,) + args)
self.setTitle('Power spectrum');
self.setCanvasBackground(Qt.Qt.white)
# grid
self.grid = Qwt.QwtPlotGrid()
self.grid.enableXMin(True)
self.grid.setMajPen(Qt.QPen(Qt.Qt.gray, 0, Qt.Qt.SolidLine));
self.grid.attach(self)
# axes
self.setAxisTitle(Qwt.QwtPlot.xBottom, 'Frequency [Hz]');
self.setAxisTitle(Qwt.QwtPlot.yLeft, 'Power [dB]');
self.setAxisMaxMajor(Qwt.QwtPlot.xBottom, 10);
self.setAxisMaxMinor(Qwt.QwtPlot.xBottom, 0);
self.setAxisMaxMajor(Qwt.QwtPlot.yLeft, 10);
self.setAxisMaxMinor(Qwt.QwtPlot.yLeft, 0);
# curves
self.curve2 = Qwt.QwtPlotCurve('PSTrace2')
self.curve2.setPen(Qt.QPen(Qt.Qt.magenta,PENWIDTH))
self.curve2.setYAxis(Qwt.QwtPlot.yLeft)
self.curve2.attach(self)
self.curve1 = Qwt.QwtPlotCurve('PSTrace1')
self.curve1.setPen(Qt.QPen(Qt.Qt.blue,PENWIDTH))
self.curve1.setYAxis(Qwt.QwtPlot.yLeft)
self.curve1.attach(self)
self.triggerval=0.0
self.maxamp=1.0
self.freeze=0
self.average=0
self.avcount=0
self.logy=1
self.datastream=None
self.dt=1.0/samplerate
self.df=1.0/(soundbuffersize*self.dt)
self.f = arange(0.0, samplerate, self.df)
self.a = 0.0*self.f
self.p = 0.0*self.f
self.curve1.setData(self.f, self.a)
self.setAxisScale( Qwt.QwtPlot.xBottom, 0.0, 10.0*initfreq)
self.setAxisScale( Qwt.QwtPlot.yLeft, -120.0, 0.0)
self.startTimer(100)
self.replot()
    def resetBuffer(self):
        self.df=1.0/(soundbuffersize*self.dt)
        self.f = arange(0.0, 20000.0, self.df)  # arrayrange is the obsolete Numeric name
        self.a = 0.0*self.f
        self.p = 0.0*self.f
        self.curve1.setData(self.f, self.a)  # setData takes (x, y)
def setMaxAmp(self, val):
if val>0.6:
self.setAxisScale( Qwt.QwtPlot.yLeft, -120.0, 0.0)
self.logy=1
else:
self.setAxisScale( Qwt.QwtPlot.yLeft, 0.0, 10.0*val)
self.logy=0
self.maxamp=val
def setMaxTime(self, val):
self.maxtime=val
def setTriggerLevel(self, val):
self.triggerval=val
def setDisplay(self):
n=soundbuffersize/2
if LRchannel==BOTHLR:
self.curve1.setData(self.f[0:n], self.a[:n])
self.curve2.setData(self.f[0:n], self.a2[:n])
elif LRchannel==RIGHT:
self.curve1.setData([0.0,0.0], [0.0,0.0])
self.curve2.setData(self.f[0:n], self.a2[:n])
elif LRchannel==LEFT:
self.curve1.setData(self.f[0:n], self.a[:n])
self.curve2.setData([0.0,0.0], [0.0,0.0])
self.replot()
def getValue(self, index):
return self.f[index],self.a[index]
def setAverage(self, state):
self.average = state
self.avcount=0
def setFreeze(self, freeze):
self.freeze = 1-self.freeze
def setDatastream(self, datastream):
self.datastream = datastream
def timerEvent(self,e): # FFT
if self.datastream == None: return
x=self.datastream.read(CHUNK)
if self.freeze==1: return
X=fromstring(x,dtype='h')
if len(X) == 0: return
P=array(X,dtype='d')/32768.0
val=self.triggerval*self.maxamp
i=0
R=P[0::2]
L=P[1::2]
lenX=len(R)
if lenX == 0: return
if lenX!=(CHUNK): print 'size fail',lenX
window=blackman(lenX)
sumw=sum(window*window)
A=FFT.fft(R*window) #lenX
B=(A*conjugate(A)).real
A=FFT.fft(L*window) #lenX
B2=(A*conjugate(A)).real
sumw*=2.0 # sym about Nyquist (*4); use rms (/2)
sumw/=self.dt # sample rate
B=B/sumw
B2=B2/sumw
if self.logy:
P1=log10(B)*10.0+20.0#60.0
P2=log10(B2)*10.0+20.0#60.0
else:
P1=B
P2=B2
if self.average == 0:
self.a=P1
self.a2=P2
else:
self.avcount+=1
if self.avcount==1:
self.sumP1=P1
self.sumP2=P2
else:
self.sumP1=self.sumP1+P1
self.sumP2=self.sumP2+P2
self.a=self.sumP1/self.avcount
self.a2=self.sumP2/self.avcount
self.setDisplay()
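    # Normalization sketch: sumw = sum(window**2) compensates the Blackman
    # window power; the *2.0 folds in the symmetric half of the spectrum and
    # the /self.dt scales by the sample rate, so B approximates a one-sided
    # power spectral density before the optional dB conversion.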
initfreq=100.0
class FScopeFrame(Qt.QFrame):
"""
Power spectrum widget --- contains controls + display
"""
def __init__(self , *args):
apply(Qt.QFrame.__init__, (self,) + args)
knobpos=scopeheight+30
        # the following: setPal.. doesn't seem to work on Win
try:
self.setPaletteBackgroundColor( QColor(240,240,245))
except: pass
self.setFixedSize(700, scopeheight+150)
self.freezeState = 0
self.knbSignal = LblKnob(self,160, knobpos, "Signal",1)
self.knbTime = LblKnob(self,310, knobpos,"Frequency", 1)
self.knbTime.setRange(1.0, 2000.0)
self.knbSignal.setRange(0.0000001, 1.0)
self.plot = FScope(self)
self.plot.setGeometry(10, 10, 500, scopeheight)
self.picker = Qwt.QwtPlotPicker(
Qwt.QwtPlot.xBottom,
Qwt.QwtPlot.yLeft,
Qwt.QwtPicker.PointSelection | Qwt.QwtPicker.DragSelection,
Qwt.QwtPlotPicker.CrossRubberBand,
Qwt.QwtPicker.ActiveOnly, #AlwaysOn,
self.plot.canvas())
self.picker.setRubberBandPen(Qt.QPen(Qt.Qt.green))
self.picker.setTrackerPen(Qt.QPen(Qt.Qt.cyan))
self.connect(self.knbTime.knob, Qt.SIGNAL("valueChanged(double)"),
self.setTimebase)
self.knbTime.setValue(1000.0)
self.connect(self.knbSignal.knob, Qt.SIGNAL("valueChanged(double)"),
self.setAmplitude)
self.knbSignal.setValue(1.0)
self.plot.show()
def _calcKnobVal(self,val):
ival=floor(val)
frac=val-ival
if frac >=0.9:
frac=1.0
elif frac>=0.66:
frac=log10(5.0)
elif frac>=log10(2.0):
frac=log10(2.0)
else: frac=0.0
dt=10**frac*10**ival
return dt
def setTimebase(self, val):
dt=self._calcKnobVal(val)
self.plot.setAxisScale( Qwt.QwtPlot.xBottom, 0.0, 10.0*dt)
self.plot.replot()
def setAmplitude(self, val):
dt=self._calcKnobVal(val)
self.plot.setMaxAmp( dt )
self.plot.replot()
#---------------------------------------------------------------------
class FScopeDemo(Qt.QMainWindow):
"""
Application container widget
Contains scope and power spectrum analyser in tabbed windows.
Enables switching between the two.
Handles toolbar and status.
"""
def __init__(self, *args):
apply(Qt.QMainWindow.__init__, (self,) + args)
self.freezeState = 0
self.changeState = 0
self.averageState = 0
self.autocState = 0
self.scope = ScopeFrame(self)
self.current = self.scope
self.pwspec = FScopeFrame(self)
self.pwspec.hide()
self.stack=Qt.QTabWidget(self)
self.stack.addTab(self.scope,"scope")
self.stack.addTab(self.pwspec,"fft")
self.setCentralWidget(self.stack)
toolBar = Qt.QToolBar(self)
self.addToolBar(toolBar)
sb=self.statusBar()
sbfont=Qt.QFont("Helvetica",12)
sb.setFont(sbfont)
self.btnFreeze = Qt.QToolButton(toolBar)
self.btnFreeze.setText("Freeze")
self.btnFreeze.setIcon(Qt.QIcon(Qt.QPixmap(icons.stopicon)))
self.btnFreeze.setCheckable(True)
self.btnFreeze.setToolButtonStyle(Qt.Qt.ToolButtonTextUnderIcon)
toolBar.addWidget(self.btnFreeze)
self.btnPrint = Qt.QToolButton(toolBar)
self.btnPrint.setText("Print")
self.btnPrint.setIcon(Qt.QIcon(Qt.QPixmap(icons.print_xpm)))
self.btnPrint.setToolButtonStyle(Qt.Qt.ToolButtonTextUnderIcon)
toolBar.addWidget(self.btnPrint)
self.btnMode = Qt.QToolButton(toolBar)
self.btnMode.setText("fft")
self.btnMode.setIcon(Qt.QIcon(Qt.QPixmap(icons.pwspec)))
self.btnMode.setCheckable(True)
self.btnMode.setToolButtonStyle(Qt.Qt.ToolButtonTextUnderIcon)
toolBar.addWidget(self.btnMode)
self.btnAvge = Qt.QToolButton(toolBar)
self.btnAvge.setText("average")
self.btnAvge.setIcon(Qt.QIcon(Qt.QPixmap(icons.avge)))
self.btnAvge.setCheckable(True)
self.btnAvge.setToolButtonStyle(Qt.Qt.ToolButtonTextUnderIcon)
toolBar.addWidget(self.btnAvge)
self.btnAutoc = Qt.QToolButton(toolBar)
self.btnAutoc.setText("correlate")
self.btnAutoc.setIcon(Qt.QIcon(Qt.QPixmap(icons.avge)))
self.btnAutoc.setCheckable(True)
self.btnAutoc.setToolButtonStyle(Qt.Qt.ToolButtonTextUnderIcon)
toolBar.addWidget(self.btnAutoc)
self.lstLabl = Qt.QLabel("Buffer:",toolBar)
toolBar.addWidget(self.lstLabl)
self.lstChan = Qt.QComboBox(toolBar)
self.lstChan.insertItem(0,"8192")
self.lstChan.insertItem(1,"16k")
self.lstChan.insertItem(2,"32k")
toolBar.addWidget(self.lstChan)
self.lstLR = Qt.QLabel("Channels:",toolBar)
toolBar.addWidget(self.lstLR)
self.lstLRmode = Qt.QComboBox(toolBar)
self.lstLRmode.insertItem(0,"LR")
self.lstLRmode.insertItem(1,"L")
self.lstLRmode.insertItem(2,"R")
toolBar.addWidget(self.lstLRmode)
self.connect(self.btnPrint, Qt.SIGNAL('clicked()'), self.printPlot)
self.connect(self.btnFreeze, Qt.SIGNAL('toggled(bool)'), self.freeze)
self.connect(self.btnMode, Qt.SIGNAL('toggled(bool)'), self.mode)
self.connect(self.btnAvge, Qt.SIGNAL('toggled(bool)'), self.average)
self.connect(self.btnAutoc, Qt.SIGNAL('toggled(bool)'),
self.autocorrelation)
self.connect(self.lstChan, Qt.SIGNAL('activated(int)'), self.fftsize)
self.connect(self.lstLRmode, Qt.SIGNAL('activated(int)'), self.channel)
self.connect(self.scope.picker,
Qt.SIGNAL('moved(const QPoint&)'),
self.moved)
self.connect(self.scope.picker,
Qt.SIGNAL('appended(const QPoint&)'),
self.appended)
self.connect(self.pwspec.picker,
Qt.SIGNAL('moved(const QPoint&)'),
self.moved)
self.connect(self.pwspec.picker,
Qt.SIGNAL('appended(const QPoint&)'),
self.appended)
self.connect(self.stack,
Qt.SIGNAL('currentChanged(int)'),
self.mode)
self.showInfo(cursorInfo)
def showInfo(self, text):
self.statusBar().showMessage(text)
    def printPlot(self):
        printer = Qt.QPrinter()
        dialog = Qt.QPrintDialog(printer, self)
        if dialog.exec_() == Qt.QDialog.Accepted:
            # PyQwt5 renders a plot to a paint device via print_()
            self.current.plot.print_(printer)
def fftsize(self, item):
pass
## global s, soundbuffersize
## s.stop()
## s.close()
## if item==2:
## soundbuffersize=8192*3
## elif item==1:
## soundbuffersize=8192*2
## else:
## soundbuffersize=8192
## s=f.stream(48000,2,'int16',soundbuffersize,1)
## s.open()
## s.start()
## self.pwspec.plot.resetBuffer()
## if self.current==self.pwspec:
## self.pwspec.plot.setDatastream(s)
## self.pwspec.plot.avcount=0
## else:
## self.scope.plot.setDatastream(s)
def channel(self, item):
global LRchannel
if item==2:
LRchannel=RIGHT
elif item==1:
LRchannel=LEFT
else:
LRchannel=BOTHLR
def freeze(self, on):
if on:
self.freezeState = 1
self.btnFreeze.setText("Run")
self.btnFreeze.setIcon(Qt.QIcon(Qt.QPixmap(icons.goicon)))
else:
self.freezeState = 0
self.btnFreeze.setText("Freeze")
self.btnFreeze.setIcon(Qt.QIcon(Qt.QPixmap(icons.stopicon)))
self.scope.plot.setFreeze(self.freezeState)
self.pwspec.plot.setFreeze(self.freezeState)
def average(self, on):
if on:
self.averageState = 1
self.btnAvge.setText("single")
self.btnAvge.setIcon(Qt.QIcon(Qt.QPixmap(icons.single)))
else:
self.averageState = 0
self.btnAvge.setText("average")
self.btnAvge.setIcon(Qt.QIcon(Qt.QPixmap(icons.avge)))
self.scope.plot.setAverage(self.averageState)
self.pwspec.plot.setAverage(self.averageState)
def autocorrelation(self, on):
if on:
self.autocState = 1
self.btnAutoc.setText("normal")
self.btnAutoc.setIcon(Qt.QIcon(Qt.QPixmap(icons.single)))
else:
self.autocState = 0
self.btnAutoc.setText("correlate")
self.btnAutoc.setIcon(Qt.QIcon(Qt.QPixmap(icons.avge)))
self.scope.plot.setAutoc(self.autocState)
def mode(self, on):
if on:
self.changeState=1
self.current=self.pwspec
self.btnMode.setText("scope")
self.btnMode.setIcon(Qt.QIcon(Qt.QPixmap(icons.scope)))
else:
self.changeState=0
self.current=self.scope
self.btnMode.setText("fft")
self.btnMode.setIcon(Qt.QIcon(Qt.QPixmap(icons.pwspec)))
if self.changeState==1:
self.stack.setCurrentIndex(self.changeState)
self.scope.plot.setDatastream(None)
self.pwspec.plot.setDatastream(stream)
else:
self.stack.setCurrentIndex(self.changeState)
self.pwspec.plot.setDatastream(None)
self.scope.plot.setDatastream(stream)
def moved(self, e):
if self.changeState==1:
name='Freq'
else:
name='Time'
frequency = self.current.plot.invTransform(Qwt.QwtPlot.xBottom, e.x())
amplitude = self.current.plot.invTransform(Qwt.QwtPlot.yLeft, e.y())
if name=='Time':
df=self.scope.plot.dt
i=int(frequency/df)
ampa=self.scope.plot.a1[i]
ampb=self.scope.plot.a2[i]
else:
df=self.pwspec.plot.df
i=int(frequency/df)
ampa=self.pwspec.plot.a[i]
ampb=self.pwspec.plot.a2[i]
self.showInfo('%s=%g, cursor=%g, A=%g, B=%g' %
(name,frequency, amplitude,ampa,ampb))
def appended(self, e):
print 's'
# Python semantics: self.pos = e.pos() does not work; force a copy
self.xpos = e.x()
self.ypos = e.y()
self.moved(e) # fake a mouse move to show the cursor position
# open sound card data stream
p = pyaudio.PyAudio()
stream = p.open(format = FORMAT,
channels = CHANNELS,
rate = RATE,
input = True,
frames_per_buffer = CHUNK)
# Admire!
app = Qt.QApplication(sys.argv)
demo=FScopeDemo()
demo.scope.plot.setDatastream(stream)
demo.show()
app.exec_()
stream.stop_stream()
stream.close()
p.terminate()
| gpl-2.0 | 2,410,628,168,513,785,300 | 32.026651 | 83 | 0.586941 | false |
alexgorban/models | official/modeling/tf_utils.py | 1 | 5438 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Common TF utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
import tensorflow as tf
from tensorflow.python.util import deprecation
from official.modeling import activations
@deprecation.deprecated(
None,
"tf.keras.layers.Layer supports multiple positional args and kwargs as "
"input tensors. pack/unpack inputs to override __call__ is no longer "
"needed."
)
def pack_inputs(inputs):
"""Pack a list of `inputs` tensors to a tuple.
Args:
inputs: a list of tensors.
Returns:
a tuple of tensors. if any input is None, replace it with a special constant
tensor.
"""
inputs = tf.nest.flatten(inputs)
outputs = []
for x in inputs:
if x is None:
outputs.append(tf.constant(0, shape=[], dtype=tf.int32))
else:
outputs.append(x)
return tuple(outputs)
@deprecation.deprecated(
None,
"tf.keras.layers.Layer supports multiple positional args and kwargs as "
"input tensors. pack/unpack inputs to override __call__ is no longer "
"needed."
)
def unpack_inputs(inputs):
"""unpack a tuple of `inputs` tensors to a tuple.
Args:
inputs: a list of tensors.
Returns:
a tuple of tensors. if any input is a special constant tensor, replace it
with None.
"""
inputs = tf.nest.flatten(inputs)
outputs = []
for x in inputs:
if is_special_none_tensor(x):
outputs.append(None)
else:
outputs.append(x)
x = tuple(outputs)
# To trick the very pointless 'unbalanced-tuple-unpacking' pylint check
# from triggering.
if len(x) == 1:
return x[0]
return tuple(outputs)
def is_special_none_tensor(tensor):
"""Checks if a tensor is a special None Tensor."""
return tensor.shape.ndims == 0 and tensor.dtype == tf.int32
# TODO(hongkuny): consider moving custom string-map lookup to keras api.
def get_activation(identifier):
"""Maps a identifier to a Python function, e.g., "relu" => `tf.nn.relu`.
It checks string first and if it is one of customized activation not in TF,
the corresponding activation will be returned. For non-customized activation
names and callable identifiers, always fallback to tf.keras.activations.get.
Args:
identifier: String name of the activation function or callable.
Returns:
A Python function corresponding to the activation function.
"""
if isinstance(identifier, six.string_types):
name_to_fn = {
"gelu": activations.gelu,
"simple_swish": activations.simple_swish,
"hard_swish": activations.hard_swish,
"identity": activations.identity,
}
identifier = str(identifier).lower()
if identifier in name_to_fn:
return tf.keras.activations.get(name_to_fn[identifier])
return tf.keras.activations.get(identifier)
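# Example: get_activation("gelu") resolves to the customized
# activations.gelu, while get_activation("relu") falls through to
# tf.keras.activations.get("relu").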
def get_shape_list(tensor, expected_rank=None, name=None):
"""Returns a list of the shape of tensor, preferring static dimensions.
Args:
tensor: A tf.Tensor object to find the shape of.
expected_rank: (optional) int. The expected rank of `tensor`. If this is
      specified and the `tensor` has a different rank, an exception will be
thrown.
name: Optional name of the tensor for the error message.
Returns:
A list of dimensions of the shape of tensor. All static dimensions will
be returned as python integers, and dynamic dimensions will be returned
as tf.Tensor scalars.
"""
if expected_rank is not None:
assert_rank(tensor, expected_rank, name)
shape = tensor.shape.as_list()
non_static_indexes = []
for (index, dim) in enumerate(shape):
if dim is None:
non_static_indexes.append(index)
if not non_static_indexes:
return shape
dyn_shape = tf.shape(tensor)
for index in non_static_indexes:
shape[index] = dyn_shape[index]
return shape
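# Example: for a tensor of static shape [None, 128], get_shape_list returns
# [<scalar int32 Tensor>, 128] -- known dimensions come back as Python ints,
# unknown ones as scalar tensors taken from tf.shape.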
def assert_rank(tensor, expected_rank, name=None):
"""Raises an exception if the tensor rank is not of the expected rank.
Args:
tensor: A tf.Tensor to check the rank of.
expected_rank: Python integer or list of integers, expected rank.
name: Optional name of the tensor for the error message.
Raises:
    ValueError: If the actual rank doesn't match the expected rank.
"""
expected_rank_dict = {}
if isinstance(expected_rank, six.integer_types):
expected_rank_dict[expected_rank] = True
else:
for x in expected_rank:
expected_rank_dict[x] = True
actual_rank = tensor.shape.ndims
if actual_rank not in expected_rank_dict:
raise ValueError(
"For the tensor `%s`, the actual tensor rank `%d` (shape = %s) is not "
"equal to the expected tensor rank `%s`" %
(name, actual_rank, str(tensor.shape), str(expected_rank)))
| apache-2.0 | 7,756,442,032,010,609,000 | 30.074286 | 80 | 0.690143 | false |
PyFilesystem/pyfilesystem | fs/rpcfs.py | 1 | 11326 | """
fs.rpcfs
========
This module provides the class 'RPCFS' to access a remote FS object over
XML-RPC. You probably want to use this in conjunction with the 'RPCFSServer'
class from the :mod:`~fs.expose.xmlrpc` module.
"""
import xmlrpclib
import socket
import base64
from fs.base import *
from fs.errors import *
from fs.path import *
from fs import iotools
from fs.filelike import StringIO
import six
from six import PY3, b
def re_raise_faults(func):
"""Decorator to re-raise XML-RPC faults as proper exceptions."""
def wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except (xmlrpclib.Fault), f:
#raise
# Make sure it's in a form we can handle
print f.faultString
bits = f.faultString.split(" ")
if bits[0] not in ["<type", "<class"]:
raise f
# Find the class/type object
bits = " ".join(bits[1:]).split(">:")
cls = bits[0]
msg = ">:".join(bits[1:])
cls = cls.strip('\'')
print "-" + cls
cls = _object_by_name(cls)
# Re-raise using the remainder of the fault code as message
if cls:
if issubclass(cls, FSError):
raise cls('', msg=msg)
else:
raise cls(msg)
raise f
except socket.error, e:
raise RemoteConnectionError(str(e), details=e)
return wrapper
def _object_by_name(name, root=None):
"""Look up an object by dotted-name notation."""
bits = name.split(".")
if root is None:
try:
obj = globals()[bits[0]]
except KeyError:
try:
obj = __builtins__[bits[0]]
except KeyError:
obj = __import__(bits[0], globals())
else:
obj = getattr(root, bits[0])
if len(bits) > 1:
return _object_by_name(".".join(bits[1:]), obj)
else:
return obj
class ReRaiseFaults:
"""XML-RPC proxy wrapper that re-raises Faults as proper Exceptions."""
def __init__(self, obj):
self._obj = obj
def __getattr__(self, attr):
val = getattr(self._obj, attr)
if callable(val):
val = re_raise_faults(val)
self.__dict__[attr] = val
return val
class RPCFS(FS):
"""Access a filesystem exposed via XML-RPC.
This class provides the client-side logic for accessing a remote FS
object, and is dual to the RPCFSServer class defined in fs.expose.xmlrpc.
Example::
fs = RPCFS("http://my.server.com/filesystem/location/")
"""
_meta = {'thread_safe' : True,
'virtual': False,
'network' : True,
}
def __init__(self, uri, transport=None):
"""Constructor for RPCFS objects.
The only required argument is the URI of the server to connect
to. This will be passed to the underlying XML-RPC server proxy
object, along with the 'transport' argument if it is provided.
:param uri: address of the server
"""
super(RPCFS, self).__init__(thread_synchronize=True)
self.uri = uri
self._transport = transport
self.proxy = self._make_proxy()
self.isdir('/')
@synchronize
def _make_proxy(self):
kwds = dict(allow_none=True, use_datetime=True)
if self._transport is not None:
proxy = xmlrpclib.ServerProxy(self.uri, self._transport, **kwds)
else:
proxy = xmlrpclib.ServerProxy(self.uri, **kwds)
return ReRaiseFaults(proxy)
def __str__(self):
return '<RPCFS: %s>' % (self.uri,)
def __repr__(self):
return '<RPCFS: %s>' % (self.uri,)
@synchronize
def __getstate__(self):
state = super(RPCFS, self).__getstate__()
try:
del state['proxy']
except KeyError:
pass
return state
def __setstate__(self, state):
super(RPCFS, self).__setstate__(state)
self.proxy = self._make_proxy()
def encode_path(self, path):
"""Encode a filesystem path for sending over the wire.
Unfortunately XMLRPC only supports ASCII strings, so this method
must return something that can be represented in ASCII. The default
is base64-encoded UTF8.
"""
return six.text_type(base64.b64encode(path.encode("utf8")), 'ascii')
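    # e.g. encode_path(u"/foo") -> u"L2Zvbw==" (base64 of the UTF-8 bytes).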
def decode_path(self, path):
"""Decode paths arriving over the wire."""
return six.text_type(base64.b64decode(path.encode('ascii')), 'utf8')
@synchronize
def getmeta(self, meta_name, default=NoDefaultMeta):
if default is NoDefaultMeta:
meta = self.proxy.getmeta(meta_name)
else:
meta = self.proxy.getmeta_default(meta_name, default)
if isinstance(meta, basestring):
# To allow transport of meta with invalid xml chars (like null)
meta = self.encode_path(meta)
return meta
@synchronize
def hasmeta(self, meta_name):
return self.proxy.hasmeta(meta_name)
@synchronize
@iotools.filelike_to_stream
def open(self, path, mode='r', buffering=-1, encoding=None, errors=None, newline=None, line_buffering=False, **kwargs):
# TODO: chunked transport of large files
epath = self.encode_path(path)
if "w" in mode:
self.proxy.set_contents(epath, xmlrpclib.Binary(b("")))
if "r" in mode or "a" in mode or "+" in mode:
try:
data = self.proxy.get_contents(epath, "rb").data
except IOError:
if "w" not in mode and "a" not in mode:
raise ResourceNotFoundError(path)
if not self.isdir(dirname(path)):
raise ParentDirectoryMissingError(path)
                self.proxy.set_contents(epath, xmlrpclib.Binary(b("")))
else:
data = b("")
f = StringIO(data)
if "a" not in mode:
f.seek(0, 0)
else:
f.seek(0, 2)
oldflush = f.flush
oldclose = f.close
oldtruncate = f.truncate
def newflush():
self._lock.acquire()
try:
oldflush()
self.proxy.set_contents(epath, xmlrpclib.Binary(f.getvalue()))
finally:
self._lock.release()
def newclose():
self._lock.acquire()
try:
f.flush()
oldclose()
finally:
self._lock.release()
def newtruncate(size=None):
self._lock.acquire()
try:
oldtruncate(size)
f.flush()
finally:
self._lock.release()
f.flush = newflush
f.close = newclose
f.truncate = newtruncate
return f
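    # Note: the StringIO returned above is patched so that flush()/close()
    # push the whole buffer back via a single set_contents() RPC --
    # writes are buffered locally, not streamed to the server.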
@synchronize
def exists(self, path):
path = self.encode_path(path)
return self.proxy.exists(path)
@synchronize
def isdir(self, path):
path = self.encode_path(path)
return self.proxy.isdir(path)
@synchronize
def isfile(self, path):
path = self.encode_path(path)
return self.proxy.isfile(path)
@synchronize
def listdir(self, path="./", wildcard=None, full=False, absolute=False, dirs_only=False, files_only=False):
enc_path = self.encode_path(path)
if not callable(wildcard):
entries = self.proxy.listdir(enc_path,
wildcard,
full,
absolute,
dirs_only,
files_only)
entries = [self.decode_path(e) for e in entries]
else:
entries = self.proxy.listdir(enc_path,
None,
False,
False,
dirs_only,
files_only)
entries = [self.decode_path(e) for e in entries]
entries = [e for e in entries if wildcard(e)]
if full:
entries = [relpath(pathjoin(path, e)) for e in entries]
elif absolute:
entries = [abspath(pathjoin(path, e)) for e in entries]
return entries
@synchronize
def makedir(self, path, recursive=False, allow_recreate=False):
path = self.encode_path(path)
return self.proxy.makedir(path, recursive, allow_recreate)
@synchronize
def remove(self, path):
path = self.encode_path(path)
return self.proxy.remove(path)
@synchronize
def removedir(self, path, recursive=False, force=False):
path = self.encode_path(path)
return self.proxy.removedir(path, recursive, force)
@synchronize
def rename(self, src, dst):
src = self.encode_path(src)
dst = self.encode_path(dst)
return self.proxy.rename(src, dst)
@synchronize
def settimes(self, path, accessed_time, modified_time):
path = self.encode_path(path)
return self.proxy.settimes(path, accessed_time, modified_time)
@synchronize
def getinfo(self, path):
path = self.encode_path(path)
info = self.proxy.getinfo(path)
return info
@synchronize
def desc(self, path):
path = self.encode_path(path)
return self.proxy.desc(path)
@synchronize
    def getxattr(self, path, attr, default=None):
        path = self.encode_path(path)
        attr = self.encode_path(attr)
        # these must go through self.proxy; RPCFS has no self.fs attribute
        return self.proxy.getxattr(path, attr, default)
    @synchronize
    def setxattr(self, path, attr, value):
        path = self.encode_path(path)
        attr = self.encode_path(attr)
        return self.proxy.setxattr(path, attr, value)
    @synchronize
    def delxattr(self, path, attr):
        path = self.encode_path(path)
        attr = self.encode_path(attr)
        return self.proxy.delxattr(path, attr)
    @synchronize
    def listxattrs(self, path):
        path = self.encode_path(path)
        return [self.decode_path(a) for a in self.proxy.listxattrs(path)]
@synchronize
def copy(self, src, dst, overwrite=False, chunk_size=16384):
src = self.encode_path(src)
dst = self.encode_path(dst)
return self.proxy.copy(src, dst, overwrite, chunk_size)
@synchronize
def move(self, src, dst, overwrite=False, chunk_size=16384):
src = self.encode_path(src)
dst = self.encode_path(dst)
return self.proxy.move(src, dst, overwrite, chunk_size)
@synchronize
def movedir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=16384):
src = self.encode_path(src)
dst = self.encode_path(dst)
return self.proxy.movedir(src, dst, overwrite, ignore_errors, chunk_size)
@synchronize
def copydir(self, src, dst, overwrite=False, ignore_errors=False, chunk_size=16384):
src = self.encode_path(src)
dst = self.encode_path(dst)
return self.proxy.copydir(src, dst, overwrite, ignore_errors, chunk_size)
| bsd-3-clause | 6,380,686,112,742,397,000 | 30.373961 | 123 | 0.555183 | false |
orlenko/bccf | src/pybb/permissions.py | 1 | 6478 | # -*- coding: utf-8 -*-
"""
Extensible permission system for pybbm
"""
from django.utils.importlib import import_module
from django.db.models import Q
from pybb import defaults
def _resolve_class(name):
""" resolves a class function given as string, returning the function """
if not name: return False
modname, funcname = name.rsplit('.', 1)
return getattr(import_module(modname), funcname)()
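# Example: _resolve_class('pybb.permissions.DefaultPermissionHandler')
# imports this module and returns a DefaultPermissionHandler *instance*
# (note the trailing call); an empty name yields False.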
class DefaultPermissionHandler(object):
"""
Default Permission handler. If you want to implement custom permissions (for example,
private forums based on some application-specific settings), you can inherit from this
class and override any of the `filter_*` and `may_*` methods. Methods starting with
`may` are expected to return `True` or `False`, whereas methods starting with `filter_*`
should filter the queryset they receive, and return a new queryset containing only the
objects the user is allowed to see.
To activate your custom permission handler, set `settings.PYBB_PERMISSION_HANDLER` to
the full qualified name of your class, e.g. "`myapp.pybb_adapter.MyPermissionHandler`".
"""
#
# permission checks on categories
#
def filter_categories(self, user, qs):
""" return a queryset with categories `user` is allowed to see """
return qs.filter(hidden=False) if not user.is_staff else qs
def may_view_category(self, user, category):
""" return True if `user` may view this category, False if not """
return user.is_staff or not category.hidden
#
# permission checks on forums
#
def filter_forums(self, user, qs):
""" return a queryset with forums `user` is allowed to see """
return qs.filter(Q(hidden=False) & Q(category__hidden=False)) if not user.is_staff else qs
def may_view_forum(self, user, forum):
""" return True if user may view this forum, False if not """
return user.is_staff or ( forum.hidden == False and forum.category.hidden == False )
def may_create_topic(self, user, forum):
""" return True if `user` is allowed to create a new topic in `forum` """
return user.has_perm('pybb.add_post')
#
# permission checks on topics
#
def filter_topics(self, user, qs):
""" return a queryset with topics `user` is allowed to see """
if not user.is_staff:
qs = qs.filter(Q(forum__hidden=False) & Q(forum__category__hidden=False))
if not user.is_superuser:
if user.is_authenticated():
qs = qs.filter(Q(forum__moderators=user) | Q(user=user) | Q(on_moderation=False)).distinct()
else:
qs = qs.filter(on_moderation=False)
return qs
def may_view_topic(self, user, topic):
""" return True if user may view this topic, False otherwise """
if user.is_superuser:
return True
if not user.is_staff and (topic.forum.hidden or topic.forum.category.hidden):
return False # only staff may see hidden forum / category
if topic.on_moderation:
            return user.is_authenticated() and (user == topic.user or user in topic.forum.moderators.all())
return True
def may_moderate_topic(self, user, topic):
return user.is_superuser or user in topic.forum.moderators.all()
def may_close_topic(self, user, topic):
""" return True if `user` may close `topic` """
return self.may_moderate_topic(user, topic)
def may_open_topic(self, user, topic):
""" return True if `user` may open `topic` """
return self.may_moderate_topic(user, topic)
def may_stick_topic(self, user, topic):
""" return True if `user` may stick `topic` """
return self.may_moderate_topic(user, topic)
def may_unstick_topic(self, user, topic):
""" return True if `user` may unstick `topic` """
return self.may_moderate_topic(user, topic)
def may_create_post(self, user, topic):
""" return True if `user` is allowed to create a new post in `topic` """
if topic.forum.hidden and (not user.is_staff):
            # if the forum is hidden, only staff may post
return False
if topic.closed and (not user.is_staff):
# if topic is closed, only staff may post
return False
# only user which have 'pybb.add_post' permission may post
return user.has_perm('pybb.add_post')
def may_post_as_admin(self, user):
""" return True if `user` may post as admin """
return user.is_staff
#
# permission checks on posts
#
def filter_posts(self, user, qs):
""" return a queryset with posts `user` is allowed to see """
# first filter by topic availability
if not user.is_staff:
qs = qs.filter(Q(topic__forum__hidden=False) & Q(topic__forum__category__hidden=False))
if not defaults.PYBB_PREMODERATION or user.is_superuser:
# superuser may see all posts, also if premoderation is turned off moderation
# flag is ignored
return qs
elif user.is_authenticated():
# post is visible if user is author, post is not on moderation, or user is moderator
# for this forum
qs = qs.filter(Q(user=user) | Q(on_moderation=False) | Q(topic__forum__moderators=user))
else:
# anonymous user may not see posts which are on moderation
qs = qs.filter(on_moderation=False)
return qs
def may_view_post(self, user, post):
""" return True if `user` may view `post`, False otherwise """
if user.is_superuser:
return True
if post.on_moderation:
return post.user == user or user in post.topic.forum.moderators.all()
return True
def may_edit_post(self, user, post):
""" return True if `user` may edit `post` """
return user.is_superuser or post.user == user or self.may_moderate_topic(user, post.topic)
def may_delete_post(self, user, post):
""" return True if `user` may delete `post` """
return self.may_moderate_topic(user, post.topic)
#
# permission checks on users
#
def may_block_user(self, user, user_to_block):
""" return True if `user` may block `user_to_block` """
return user.has_perm('pybb.block_users')
perms = _resolve_class(defaults.PYBB_PERMISSION_HANDLER) | unlicense | -5,898,669,304,835,996,000 | 38.506098 | 108 | 0.629824 | false |
ktok07b6/polyphony | polyphony/compiler/libs.py | 1 | 3330 | single_port_ram = """module SinglePortRam #
(
parameter DATA_WIDTH = 8,
parameter ADDR_WIDTH = 4,
parameter RAM_DEPTH = 1 << ADDR_WIDTH
)
(
input clk,
input rst,
input [ADDR_WIDTH-1:0] ram_addr,
input [DATA_WIDTH-1:0] ram_d,
input ram_we,
output [DATA_WIDTH-1:0] ram_q
);
reg [DATA_WIDTH-1:0] mem [0:RAM_DEPTH-1];
reg [ADDR_WIDTH-1:0] read_addr;
assign ram_q = mem[read_addr];
always @ (posedge clk) begin
if (ram_we)
mem[ram_addr] <= ram_d;
read_addr <= ram_addr;
end
endmodule
"""
bidirectional_single_port_ram = """module BidirectionalSinglePortRam #
(
parameter DATA_WIDTH = 8,
parameter ADDR_WIDTH = 4,
parameter RAM_LENGTH = 16,
parameter RAM_DEPTH = 1 << (ADDR_WIDTH-1)
)
(
input clk,
input rst,
input [ADDR_WIDTH-1:0] ram_addr,
input [DATA_WIDTH-1:0] ram_d,
input ram_we,
output [DATA_WIDTH-1:0] ram_q,
output [ADDR_WIDTH-1:0] ram_len
);
reg [DATA_WIDTH-1:0] mem [0:RAM_DEPTH-1];
reg [ADDR_WIDTH-1:0] read_addr;
/*
integer i;
initial begin
for (i = 0; i < RAM_DEPTH; i = i + 1)
mem[i] = 0;
end
*/
function [ADDR_WIDTH-1:0] address (
input [ADDR_WIDTH-1:0] in_addr
);
begin
if (in_addr[ADDR_WIDTH-1] == 1'b1) begin
address = RAM_LENGTH + in_addr;
end else begin
address = in_addr;
end
end
endfunction // address
wire [ADDR_WIDTH-1:0] a;
assign a = address(ram_addr);
assign ram_q = mem[read_addr];
assign ram_len = RAM_LENGTH;
always @ (posedge clk) begin
if (ram_we)
mem[a] <= ram_d;
read_addr <= a;
end
endmodule
"""
fifo = """module FIFO #
(
parameter integer DATA_WIDTH = 32,
parameter integer ADDR_WIDTH = 2,
parameter integer LENGTH = 4
)
(
input clk,
input rst,
input [DATA_WIDTH - 1 : 0] din,
input write,
output full,
output [DATA_WIDTH - 1 : 0] dout,
input read,
output empty,
output will_full,
output will_empty
);
reg [ADDR_WIDTH - 1 : 0] head;
reg [ADDR_WIDTH - 1 : 0] tail;
reg [ADDR_WIDTH : 0] count;
wire we;
assign we = write && !full;
reg [DATA_WIDTH - 1 : 0] mem [0 : LENGTH - 1];
initial begin : initialize_mem
integer i;
for (i = 0; i < LENGTH; i = i + 1) begin
mem[i] = 0;
end
end
always @(posedge clk) begin
if (we) mem[head] <= din;
end
assign dout = mem[tail];
assign full = count >= LENGTH;
assign empty = count == 0;
assign will_full = write && !read && count == LENGTH-1;
assign will_empty = read && !write && count == 1;
always @(posedge clk) begin
if (rst == 1) begin
head <= 0;
tail <= 0;
count <= 0;
end else begin
if (write && read) begin
if (count == LENGTH) begin
count <= count - 1;
tail <= (tail == (LENGTH - 1)) ? 0 : tail + 1;
end else if (count == 0) begin
count <= count + 1;
head <= (head == (LENGTH - 1)) ? 0 : head + 1;
end else begin
count <= count;
head <= (head == (LENGTH - 1)) ? 0 : head + 1;
tail <= (tail == (LENGTH - 1)) ? 0 : tail + 1;
end
end else if (write) begin
if (count < LENGTH) begin
count <= count + 1;
head <= (head == (LENGTH - 1)) ? 0 : head + 1;
end
end else if (read) begin
if (count > 0) begin
count <= count - 1;
tail <= (tail == (LENGTH - 1)) ? 0 : tail + 1;
end
end
end
end
endmodule
"""
| mit | 682,272,391,786,811,300 | 20.907895 | 70 | 0.571471 | false |
tonioo/modoboa | modoboa/core/forms.py | 1 | 4606 | # -*- coding: utf-8 -*-
"""Core forms."""
from __future__ import unicode_literals
from django import forms
from django.contrib.auth import (
forms as auth_forms, get_user_model, password_validation
)
from django.db.models import Q
from django.utils.translation import ugettext as _, ugettext_lazy
from modoboa.core.models import User
from modoboa.parameters import tools as param_tools
class LoginForm(forms.Form):
"""User login form."""
username = forms.CharField(
label=ugettext_lazy("Username"),
widget=forms.TextInput(attrs={"class": "form-control"})
)
password = forms.CharField(
label=ugettext_lazy("Password"),
widget=forms.PasswordInput(attrs={"class": "form-control"})
)
rememberme = forms.BooleanField(
initial=False,
required=False
)
class ProfileForm(forms.ModelForm):
"""Form to update User profile."""
oldpassword = forms.CharField(
label=ugettext_lazy("Old password"), required=False,
widget=forms.PasswordInput(attrs={"class": "form-control"})
)
newpassword = forms.CharField(
label=ugettext_lazy("New password"), required=False,
widget=forms.PasswordInput(attrs={"class": "form-control"})
)
confirmation = forms.CharField(
label=ugettext_lazy("Confirmation"), required=False,
widget=forms.PasswordInput(attrs={"class": "form-control"})
)
class Meta(object):
model = User
fields = ("first_name", "last_name", "language",
"phone_number", "secondary_email")
widgets = {
"first_name": forms.TextInput(attrs={"class": "form-control"}),
"last_name": forms.TextInput(attrs={"class": "form-control"})
}
def __init__(self, update_password, *args, **kwargs):
super(ProfileForm, self).__init__(*args, **kwargs)
if not update_password:
del self.fields["oldpassword"]
del self.fields["newpassword"]
del self.fields["confirmation"]
def clean_oldpassword(self):
if self.cleaned_data["oldpassword"] == "":
return self.cleaned_data["oldpassword"]
if param_tools.get_global_parameter("authentication_type") != "local":
return self.cleaned_data["oldpassword"]
if not self.instance.check_password(self.cleaned_data["oldpassword"]):
raise forms.ValidationError(_("Old password mismatchs"))
return self.cleaned_data["oldpassword"]
def clean_confirmation(self):
newpassword = self.cleaned_data["newpassword"]
confirmation = self.cleaned_data["confirmation"]
if not newpassword and not confirmation:
return confirmation
if newpassword != confirmation:
raise forms.ValidationError(_("Passwords mismatch"))
password_validation.validate_password(confirmation, self.instance)
return confirmation
def save(self, commit=True):
user = super(ProfileForm, self).save(commit=False)
if commit:
if self.cleaned_data.get("confirmation", "") != "":
user.set_password(
self.cleaned_data["confirmation"],
self.cleaned_data["oldpassword"]
)
user.save()
return user
class APIAccessForm(forms.Form):
"""Form to control API access."""
enable_api_access = forms.BooleanField(
label=ugettext_lazy("Enable API access"), required=False)
def __init__(self, *args, **kwargs):
"""Initialize form."""
user = kwargs.pop("user")
super(APIAccessForm, self).__init__(*args, **kwargs)
self.fields["enable_api_access"].initial = hasattr(user, "auth_token")
class PasswordResetForm(auth_forms.PasswordResetForm):
"""Custom password reset form."""
def get_users(self, email):
"""Return matching user(s) who should receive a reset."""
return (
get_user_model()._default_manager.filter(
email__iexact=email, is_active=True)
.exclude(Q(secondary_email__isnull=True) | Q(secondary_email=""))
)
def send_mail(self, subject_template_name, email_template_name,
context, from_email, to_email,
html_email_template_name=None):
"""Send message to secondary email instead."""
to_email = context["user"].secondary_email
super(PasswordResetForm, self).send_mail(
subject_template_name, email_template_name,
context, from_email, to_email, html_email_template_name)
| isc | -1,737,334,883,102,439,700 | 34.160305 | 78 | 0.621146 | false |
praekelt/txtalert | txtalert/apps/bookings/views.py | 1 | 7598 | from django.http import HttpResponse, Http404, HttpResponseRedirect
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.core.paginator import Paginator
import logging
from django.utils import timezone
from txtalert.core.models import Visit, PleaseCallMe, MSISDN, AuthProfile, Patient
from txtalert.core.forms import RequestCallForm
from txtalert.core.utils import normalize_msisdn
from datetime import date, datetime
from functools import wraps
def effective_page_range_for(page,paginator,delta=3):
return [p for p in range(page.number-delta,page.number+delta+1)
if (p > 0 and p <= paginator.num_pages)]
def auth_profile_required(func):
@wraps(func)
def wrapper(request, *args, **kwargs):
try:
return func(request, *args, **kwargs)
except AuthProfile.DoesNotExist:
return render_to_response('auth_profile_error.html', {
}, context_instance = RequestContext(request))
return wrapper
@login_required
@auth_profile_required
def index(request):
profile = request.user.get_profile()
return render_to_response("index.html", {
'profile': profile,
'patient': profile.patient,
}, context_instance = RequestContext(request))
@login_required
def appointment_change(request, visit_id):
profile = request.user.get_profile()
visit = get_object_or_404(Visit, pk=visit_id)
change_requested = request.POST.get('when')
if change_requested == 'later':
visit.reschedule_later()
messages.add_message(request, messages.INFO,
"Your request to change the appointment has been sent to " \
"the clinic. You will be notified as soon as possible.")
elif change_requested == 'earlier':
visit.reschedule_earlier()
messages.add_message(request, messages.INFO,
"Your request to change the appointment has been sent to " \
"the clinic. You will be notified as soon as possible.")
return render_to_response("appointment/change.html", {
'profile': profile,
'patient': profile.patient,
'visit': visit,
'change_requested': change_requested,
}, context_instance = RequestContext(request))
@login_required
def appointment_upcoming(request):
profile = request.user.get_profile()
patient = profile.patient
paginator = Paginator(patient.visit_set.upcoming(), 5)
page = paginator.page(request.GET.get('p', 1))
return render_to_response("appointment/upcoming.html", {
'profile': profile,
'patient': patient,
'paginator': paginator,
'page': page,
'effective_page_range': effective_page_range_for(page, paginator)
}, context_instance = RequestContext(request))
@login_required
def appointment_history(request):
profile = request.user.get_profile()
patient = profile.patient
paginator = Paginator(patient.visit_set.past().order_by('-date'), 5)
page = paginator.page(request.GET.get('p', 1))
return render_to_response("appointment/history.html", {
'profile': profile,
'patient': profile.patient,
'paginator': paginator,
'page': page,
'effective_page_range': effective_page_range_for(page, paginator)
}, context_instance=RequestContext(request))
@login_required
def attendance_barometer(request):
profile = request.user.get_profile()
patient = profile.patient
visits = patient.visit_set.all()
attended = visits.filter(status='a').count()
missed = visits.filter(status='m').count()
total = visits.filter(date__lt=date.today()).count()
if total:
attendance = int(float(attended) / float(total) * 100)
else:
        attendance = 0
return render_to_response("attendance_barometer.html", {
'profile': profile,
'patient': patient,
'attendance': attendance,
'attended': attended,
'missed': missed,
'total': total
}, context_instance=RequestContext(request))
def request_call(request):
if request.POST:
form = RequestCallForm(request.POST)
if form.is_valid():
clinic = form.cleaned_data['clinic']
# normalize
msisdn = normalize_msisdn(form.cleaned_data['msisdn'])
# orm object
msisdn_record, _ = MSISDN.objects.get_or_create(msisdn=msisdn)
pcm = PleaseCallMe(user=clinic.user, clinic=clinic,
msisdn=msisdn_record, timestamp=timezone.now(),
message='Please call me!', notes='Call request issued via txtAlert Bookings')
pcm.save()
messages.add_message(request, messages.INFO,
'Your call request has been registered. '\
'The clinic will call you back as soon as possible.')
return HttpResponseRedirect(reverse('bookings:request_call'))
else:
form = RequestCallForm(initial={
'msisdn': '' if request.user.is_anonymous() else request.user.username
})
if request.user.is_anonymous():
profile = patient = None
else:
profile = request.user.get_profile()
patient = profile.patient
return render_to_response('request_call.html', {
'profile': profile,
'patient': patient,
'form': form,
}, context_instance=RequestContext(request))
def widget_landing(request):
if 'patient_id' in request.GET \
and 'msisdn' in request.GET:
try:
msisdn = normalize_msisdn(request.GET.get('msisdn'))
patient_id = request.GET.get('patient_id')
patient = Patient.objects.get(active_msisdn__msisdn=msisdn,
te_id=patient_id)
try:
visit = patient.next_visit()
except Visit.DoesNotExist:
visit = None
visits = patient.visit_set.all()
context = {
'msisdn': msisdn,
'patient_id': patient_id,
'patient': patient,
'name': patient.name,
'surname': patient.surname,
'next_appointment': visit.date if visit else '',
'visit_id': visit.pk if visit else '',
'clinic': visit.clinic.name if visit else '',
'attendance': int((1.0 - patient.risk_profile) * 100),
'total': visits.count(),
'attended': visits.filter(status='a').count(),
'rescheduled': visits.filter(status='r').count(),
'missed': visits.filter(status='m').count(),
}
except Patient.DoesNotExist:
context = {
'patient_id': patient_id,
'msisdn': msisdn,
}
else:
context = {
'patient_id': request.GET.get('patient_id', ''),
'msisdn': request.GET.get('msisdn', ''),
}
    logging.debug(context)
return render_to_response('widget_landing.html', context,
context_instance=RequestContext(request))
def todo(request):
"""Anything that resolves to here still needs to be completed"""
return HttpResponse("This still needs to be implemented.")
def not_found(request):
"""test 404 template rendering"""
raise Http404
def server_error(request):
"""test 500 template rendering"""
raise Exception, '500 testing' | gpl-3.0 | -763,102,905,537,551,700 | 36.995 | 93 | 0.622664 | false |
Flamacue/pretix | src/tests/plugins/test_ticketoutputpdf.py | 2 | 1826 | from datetime import timedelta
from decimal import Decimal
from io import BytesIO
import pytest
from django.utils.timezone import now
from PyPDF2 import PdfFileReader
from pretix.base.models import (
Event, Item, ItemVariation, Order, OrderPosition, Organizer,
)
from pretix.plugins.ticketoutputpdf.ticketoutput import PdfTicketOutput
@pytest.fixture
def env():
o = Organizer.objects.create(name='Dummy', slug='dummy')
event = Event.objects.create(
organizer=o, name='Dummy', slug='dummy',
date_from=now(), live=True
)
o1 = Order.objects.create(
code='FOOBAR', event=event, email='[email protected]',
status=Order.STATUS_PENDING,
datetime=now(), expires=now() + timedelta(days=10),
total=Decimal('13.37'), payment_provider='banktransfer'
)
shirt = Item.objects.create(event=event, name='T-Shirt', default_price=12)
shirt_red = ItemVariation.objects.create(item=shirt, default_price=14, value="Red")
OrderPosition.objects.create(
order=o1, item=shirt, variation=shirt_red,
price=12, attendee_name=None, secret='1234'
)
OrderPosition.objects.create(
order=o1, item=shirt, variation=shirt_red,
price=12, attendee_name=None, secret='5678'
)
return event, o1
@pytest.mark.django_db
def test_generate_pdf(env, mocker):
mocked = mocker.patch('reportlab.pdfgen.canvas.Canvas.drawString')
event, order = env
event.settings.set('ticketoutput_pdf_code_x', 30)
event.settings.set('ticketoutput_pdf_code_y', 50)
event.settings.set('ticketoutput_pdf_code_s', 2)
o = PdfTicketOutput(event)
fname, ftype, buf = o.generate(order.positions.first())
assert ftype == 'application/pdf'
pdf = PdfFileReader(BytesIO(buf))
assert pdf.numPages == 1
assert mocked.called
| apache-2.0 | 6,230,654,048,277,624,000 | 33.45283 | 87 | 0.693866 | false |
abusesa/abusehelper | abusehelper/core/rules/classifier.py | 1 | 1033 | class Classifier(object):
def __init__(self):
self._rules = dict()
def inc(self, rule, class_id):
classes = self._rules.get(rule, None)
if classes is None:
classes = dict()
self._rules[rule] = classes
classes[class_id] = classes.get(class_id, 0) + 1
def dec(self, rule, class_id):
classes = self._rules.get(rule, None)
if classes is None:
return
count = classes.get(class_id, 0) - 1
if count > 0:
classes[class_id] = count
else:
classes.pop(class_id, None)
if not classes:
self._rules.pop(rule, None)
def classify(self, obj):
result = set()
cache = dict()
for rule, classes in self._rules.iteritems():
if result.issuperset(classes):
continue
if rule.match(obj, cache):
result.update(classes)
return result
def is_empty(self):
return not self._rules
| mit | 496,745,086,639,479,040 | 25.487179 | 56 | 0.516941 | false |
geobricks/pgeo | pgeo/metadata/metadata.py | 1 | 2602 | import os
import json
from pgeo.utils.json import dict_merge_and_convert_dates
from pgeo.metadata.db_metadata import DBMetadata
from pgeo.metadata.search import MongoSearch
from pgeo.utils import log
from pgeo.config.metadata.core import template as core_template
from pgeo.config.metadata.raster import template as raster_template
log = log.logger(__name__)
# REMOVE EXAMPLE
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['EARTHSTAT']}})
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['TRMM']}})
# db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['MODIS']}})
# db.layer.find({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['MODIS']}})
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['MODIS-SADC']}})
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['MODIS_TEST']}})
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['Doukkala-Seasonal-wheat']}})
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {'$in': ['Doukkala - actual evapotransipiration']}})
# with Regular expression
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {$regex: /^MOROCCO/}})
#db.layer.remove({'meContent.seCoverage.coverageSector.codes.code': {$regex: /^JRC/}})
#db.layer.find({'meContent.seCoverage.coverageSector.codes.code': {$regex: /^UMD/}})
#db.layer.find({'uid': {$regex: /^UMD/}})
class Metadata:
db_metadata = None
search = None
def __init__(self, settings):
self.settings = settings
        log.debug(settings)
self.db_metadata = DBMetadata(settings["db"]["metadata"])
self.search = MongoSearch(settings["db"]["metadata"]['connection'], settings["db"]["metadata"]["database"], settings["db"]["metadata"]['document']['layer'])
log.info("---Metadata initialization---")
log.info(self.db_metadata)
log.info(self.search)
def merge_layer_metadata(self, template_name, data):
"""
Merge user's data with the core metadata and the selected template
@param template_name: Name of the template, e.g. 'modis'
@param data: User data, in JSON format
@return: Merged JSON
"""
if template_name == "raster":
out = dict_merge_and_convert_dates(core_template, raster_template)
elif template_name == "vector":
log.error("TODO: vector template")
out = dict_merge_and_convert_dates(out, data)
#log.info(out)
return out | gpl-2.0 | -6,503,222,040,618,752,000 | 40.31746 | 164 | 0.673328 | false |
eavatar/ava | src/eavatar.ava/tests/unit/test_util.py | 1 | 2173 | # -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import
import unittest
import re
from ava.util import resource_path, time_uuid
from ava.util import webutils
class UtilTest(unittest.TestCase):
def test_resource_path(self):
rv = resource_path("path1")
print(rv)
self.assertTrue(rv.endswith("path1"))
def test_time_uuid(self):
prev = time_uuid.utcnow()
for i in xrange(1000):
now = time_uuid.utcnow()
#print(now.hex)
self.assertTrue(now > prev)
prev = now
def test_parse_authorization_header(self):
result = webutils.parse_authorization_header('EAvatar key="1234",realm="http://eavatar.me", token="abcd"')
self.assertEqual("eavatar", result['scheme'])
self.assertEqual("http://eavatar.me", result["realm"])
self.assertEqual("1234", result["key"])
self.assertEqual("abcd", result["token"])
def test_store_url_without_path(self):
store_without_path = '/a/QPndzFJTmycdfg5jxcSghX2scJnc3TNqVEfYtVTA5JVYiPQY/store'
store_with_path = '/a/QPndzFJTmycdfg5jxcSghX2scJnc3TNqVEfYtVTA5JVYiPQY/store/this/is/a/path?q=test'
pattern = '^\/a/([a-zA-Z0-9]+)/store(/[^\?]*)?'
pat = re.compile(pattern)
matches = pat.match(store_without_path)
self.assertEqual('/a/QPndzFJTmycdfg5jxcSghX2scJnc3TNqVEfYtVTA5JVYiPQY/store', matches.group(0))
self.assertEqual('QPndzFJTmycdfg5jxcSghX2scJnc3TNqVEfYtVTA5JVYiPQY', matches.group(1))
self.assertIsNone(matches.group(2))
def test_store_url_with_path(self):
store_with_path = '/a/QPndzFJTmycdfg5jxcSghX2scJnc3TNqVEfYtVTA5JVYiPQY/store/this/is/a/path?q=test'
pattern = '^\/a/(?P<aid>[a-zA-Z0-9]+)/store(?P<path>/[^\?]*)?'
pat = re.compile(pattern)
matches = pat.match(store_with_path)
self.assertEqual('/a/QPndzFJTmycdfg5jxcSghX2scJnc3TNqVEfYtVTA5JVYiPQY/store/this/is/a/path', matches.group(0))
self.assertEqual('QPndzFJTmycdfg5jxcSghX2scJnc3TNqVEfYtVTA5JVYiPQY', matches.group('aid'))
self.assertEqual('/this/is/a/path', matches.group('path')) | bsd-3-clause | 4,244,399,914,679,452,700 | 41.627451 | 118 | 0.664059 | false |
habalux/pglog2grok | pglog2grok.py | 1 | 4033 | #!/usr/bin/env python
#
# Small script for generating a logstash grok filter and patterns for postgresql
# using a non-default log_line_prefix setting.
#
# Output of this script has NOT been tested in any production environment as of yet.
#
# Copyright (c) 2014, Teemu Haapoja <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
# OF SUCH DAMAGE.
# Custom patterns
# PGLOG_TZ is a modified TZ pattern (original didn't recognize "EET" as valid)
pg_patterns = """
PGLOG_TZ (?:[PMCE][SDE]T|UTC)
PGLOG_APPLICATION_NAME .*?
PGLOG_USER_NAME .*?
PGLOG_DATABASE_NAME .*?
PGLOG_REMOTE_HOST_PORT (\[local\]|%{IP:host}\(%{POSINT:port}\))
PGLOG_REMOTE_HOST (\[local\]|%{IP:host})
PGLOG_PROCESS_ID %{POSINT}
PGLOG_TIMESTAMP %{TIMESTAMP_ISO8601} %{PGLOG_TZ:TZ}
PGLOG_COMMAND_TAG .*?
PGLOG_SQL_STATE .*?
PGLOG_SESSION_ID [0-9\.A-Fa-f]+
PGLOG_SESSION_LINE_NUMBER %{POSINT}
PGLOG_SESSION_START_TIMESTAMP %{PGLOG_TIMESTAMP}
PGLOG_VIRTUAL_TRANSACTION_ID ([\/0-9A-Fa-f]+)
PGLOG_TRANSACTION_ID ([0-9A-Fa-f])+
PGLOG_LOGLEVEL (DEBUG[1-5]|INFO|NOTICE|WARNING|ERROR|LOG|FATAL|PANIC|DETAIL)
PGLOG_MESSAGE .*
"""
def prefix_to_grok(pr):
replace_map = {
r'%a' : "%{PGLOG_APPLICATION_NAME:application_name}",
r'%u' : "%{PGLOG_USER_NAME:user_name}",
r'%d' : "%{PGLOG_DATABASE_NAME:database_name}",
r'%r' : "%{PGLOG_REMOTE_HOST_PORT:remote_host_port}",
r'%h' : "%{PGLOG_REMOTE_HOST:remote_host}",
r'%p' : "%{PGLOG_PROCESS_ID:process_id}",
r'%t' : "%{PGLOG_TIMESTAMP}",
r'%m' : "%{PGLOG_TIMESTAMP}",
r'%i' : "%{PGLOG_COMMAND_TAG:command_tag}",
r'%e' : "%{PGLOG_SQL_STATE:sql_state}",
r'%c' : "%{PGLOG_SESSION_ID:session_id}",
r'%l' : "%{PGLOG_SESSION_LINE_NUMBER:session_line_number}",
r'%s' : "%{PGLOG_SESSION_START_TIMESTAMP:session_start_timestamp}",
r'%v' : "%{PGLOG_VIRTUAL_TRANSACTION_ID:virtual_transaction_id}",
r'%x' : "%{PGLOG_TRANSACTION_ID:transaction_id}",
r'%q' : "",
}
pr = pr.replace(r'%%',r'%')
for k,v in replace_map.items():
pr = pr.replace(k,v)
return "%s%%{PGLOG_LOGLEVEL:loglevel}: %%{PGLOG_MESSAGE:message}"%(pr)
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Create a grok pattern for your postgresql configuration")
parser.add_argument('-q','--quiet', help="Be quiet, only output the grok pattern", action='store_const', const=True)
parser.add_argument('-p', '--prefix', help="log_line_prefix from YOUR postgresql.conf", required=True)
args = parser.parse_args()
if args.quiet:
print prefix_to_grok(args.prefix)
else:
print "You need to add these patterns to your logstash patterns_dir: "
print "> ==== snip === <"
print pg_patterns
print "> ==== snip === <"
print ""
print "This is the filter for your log_line_prefix:\n\n%s"%(prefix_to_grok(args.prefix))
| bsd-2-clause | -7,059,104,021,549,284,000 | 38.539216 | 117 | 0.706422 | false |
TeMPO-Consulting/mediadrop | mediacore/controllers/login.py | 1 | 4471 | # This file is a part of MediaDrop (http://www.mediadrop.net),
# Copyright 2009-2013 MediaCore Inc., Felix Schwarz and other contributors.
# For the exact contribution history, see the git revision log.
# The source code contained in this file is licensed under the GPLv3 or
# (at your option) any later version.
# See LICENSE.txt in the main project directory, for more information.
from formencode import Invalid
from pylons import request, tmpl_context
from mediacore.forms.login import LoginForm
from mediacore.lib.base import BaseController
from mediacore.lib.helpers import redirect, url_for
from mediacore.lib.i18n import _
from mediacore.lib.decorators import expose, observable
from mediacore.plugin import events
import logging
log = logging.getLogger(__name__)
login_form = LoginForm()
class LoginController(BaseController):
@expose('login.html')
@observable(events.LoginController.login)
def login(self, came_from=None, **kwargs):
if request.environ.get('repoze.who.identity'):
redirect(came_from or '/')
# the friendlyform plugin requires that these values are set in the
# query string
form_url = url_for('/login/submit',
came_from=(came_from or '').encode('utf-8'),
__logins=str(self._is_failed_login()))
login_errors = None
if self._is_failed_login():
login_errors = Invalid('dummy', None, {}, error_dict={
'_form': Invalid(_('Invalid username or password.'), None, {}),
'login': Invalid('dummy', None, {}),
'password': Invalid('dummy', None, {}),
})
return dict(
login_form = login_form,
form_action = form_url,
form_values = kwargs,
login_errors = login_errors,
)
@expose()
def login_handler(self):
"""This is a dummy method.
Without a dummy method, Routes will throw a NotImplemented exception.
Calls that would route to this method are intercepted by
repoze.who, as defined in mediacore.lib.auth
"""
pass
@expose()
def logout_handler(self):
"""This is a dummy method.
Without a dummy method, Routes will throw a NotImplemented exception.
Calls that would route to this method are intercepted by
repoze.who, as defined in mediacore.lib.auth
"""
pass
@expose()
@observable(events.LoginController.post_login)
def post_login(self, came_from=None, **kwargs):
if not request.identity:
# The FriendlyForm plugin will always issue a redirect to
# /login/continue (post login url) even for failed logins.
# If 'came_from' is a protected page (i.e. /admin) we could just
# redirect there and the login form will be displayed again with
# our login error message.
# However if the user tried to login from the front page, this
# mechanism doesn't work so go to the login method directly here.
self._increase_number_of_failed_logins()
return self.login(came_from=came_from)
if came_from:
redirect(came_from)
# It is important to return absolute URLs (if app mounted in subdirectory)
if request.perm.contains_permission(u'edit') or request.perm.contains_permission(u'admin'):
redirect(url_for('/admin', qualified=True))
redirect(url_for('/', qualified=True))
@expose()
@observable(events.LoginController.post_logout)
def post_logout(self, came_from=None, **kwargs):
redirect('/')
def _is_failed_login(self):
# repoze.who.logins will always be an integer even if the HTTP login
# counter variable contained a non-digit string
return (request.environ.get('repoze.who.logins', 0) > 0)
def _increase_number_of_failed_logins(self):
request.environ['repoze.who.logins'] += 1
def __call__(self, environ, start_response):
"""Invoke the Controller"""
# BaseController.__call__ dispatches to the Controller method
# the request is routed to. This routing information is
# available in environ['pylons.routes_dict']
request.identity = request.environ.get('repoze.who.identity')
tmpl_context.identity = request.identity
return BaseController.__call__(self, environ, start_response)
| gpl-3.0 | -8,258,448,679,975,395,000 | 39.645455 | 99 | 0.641244 | false |
cryptica/slapnet | benchmarks/scalable/PhilosophersCM84/make_net.py | 1 | 2045 | #!/usr/bin/python3
import sys
print("""
petri net "The drinking philosophers for n=2" {
places {
p1h p1e
p2h p2e
req1p1 req1p2
req2p1 req2p2
fork1p1 fork1p2
fork2p1 fork2p2
fork1clean fork1dirty
fork2clean fork2dirty
}
transitions {
p1req1 p1req2 p1give1 p1give2 p1eat p1done
p2req1 p2req2 p2give1 p2give2 p2eat p2done
//p1done(2),p1eat(2),p1give1,p1give2,p1req1,p1req2,
//p2give1,p2give2,p2req1,p2req2
}
arcs {
{ p1h req1p1 fork1p2 } -> p1req1 -> { p1h req1p2 fork1p2 }
{ p1h req2p1 fork2p2 } -> p1req2 -> { p1h req2p2 fork2p2 }
{ p1h req1p1 fork1p1 fork1dirty } -> p1give1 -> { p1h req1p1 fork1p2 fork1clean }
{ p1h req2p1 fork2p1 fork2dirty } -> p1give2 -> { p1h req2p1 fork2p2 fork2clean }
{ p1h fork1p1 fork2p1 fork1clean fork2clean } -> p1eat -> { p1e fork1p1 fork2p1 fork1dirty fork2dirty }
{ p1e } -> p1done -> { p1h }
{ p2h req1p2 fork1p1 } -> p2req1 -> { p2h req1p1 fork1p1 }
{ p2h req2p2 fork2p1 } -> p2req2 -> { p2h req2p1 fork2p1 }
{ p2h req1p2 fork1p2 fork1dirty } -> p2give1 -> { p2h req1p2 fork1p1 fork1clean }
{ p2h req2p2 fork2p2 fork2dirty } -> p2give2 -> { p2h req2p2 fork2p1 fork2clean }
{ p2h fork1p2 fork2p2 fork1clean fork2clean } -> p2eat -> { p2e fork1p2 fork2p2 fork1dirty fork2dirty }
{ p2e } -> p2done -> { p2h }
}
initial {
p1h p2h
fork1dirty fork2dirty
fork1p1 fork2p1 req1p2 req2p2
}
}
liveness property "philosopher 1 does not starve" {
p1req1 + p1req2 + p1give1 + p1give2 + p1eat + p1done > 0 &&
p2req1 + p2req2 + p2give1 + p2give2 + p2eat + p2done > 0 &&
p1eat = 0
}
liveness property "philosopher 2 does not starve" {
p1req1 + p1req2 + p1give1 + p1give2 + p1eat + p1done > 0 &&
p2req1 + p2req2 + p2give1 + p2give2 + p2eat + p2done > 0 &&
p2eat = 0
}
safety property "mutual exclusion" {
p1e >= 1 && p2e >= 1
}
""")
| gpl-3.0 | -4,088,092,992,242,717,000 | 33.083333 | 112 | 0.605868 | false |
uwosh/uwosh.intranet.policy | uwosh/intranet/policy/importexport.py | 1 | 5459 |
from Products.CMFCore.utils import getToolByName
from Products.LDAPMultiPlugins import manage_addLDAPMultiPlugin
def install(context):
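    # GenericSetup import handler: the marker-file check below ensures this
    # step only runs when this package's own profile is being imported.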
if not context.readDataFile('uwosh.intranet.policy.txt'):
return
setupLDAPPlugin(context)
def setupLDAPPlugin(context):
ldap_plugin_id = 'ldap_authentication'
SUBTREE = 2 # this value comes from the zmi "Add LDAP Multi Plugin" html source
acl_users = context.getSite().acl_users
if hasattr(acl_users, ldap_plugin_id):
logger = context.getLogger('uwosh.intranet.policy')
logger.warning('Not configuring LDAP plugin, because "acl_users.%s" already exists.' % ldap_plugin_id)
return
manage_addLDAPMultiPlugin(
acl_users,
id=ldap_plugin_id,
title='LDAP Authentication',
LDAP_server='ldap.uwosh.edu:389',
login_attr='uid',
uid_attr='uid',
users_base='ou=people,o=uwosh.edu,dc=uwosh,dc=edu',
users_scope=SUBTREE,
roles='Anonymous',
groups_base='ou=groups,o=uwosh.edu,dc=uwosh,dc=edu',
groups_scope=SUBTREE,
binduid='',
bindpwd='',
binduid_usage=False,
rdn_attr='uid',
local_groups=False,
use_ssl=False,
encryption='SHA',
read_only=True
)
ldap_auth = getattr(acl_users, ldap_plugin_id)
ldap_schema = {
'cn': {
'public_name': 'fullname',
'binary': False,
'ldap_name': 'cn',
'friendly_name': 'Canonical Name',
'multivalued': False
},
'mail': {
'public_name': 'email',
'binary': False,
'ldap_name': 'mail',
'friendly_name': 'Email Address',
'multivalued': False
},
'sn': {
'public_name': 'lastname',
'binary': False,
'ldap_name': 'sn',
'friendly_name': 'Last Name',
'multivalued': False
},
'givenName': {
'public_name': 'firstname',
'binary': False,
'ldap_name': 'givenName',
'friendly_name': 'First Name',
'multivalued': False
},
'uid': {
'public_name': '',
'binary': False,
'ldap_name': 'uid',
'friendly_name': 'uid',
'multivalued': False
},
'eduPersonAffiliation': {
'public_name': 'eduPersonAffiliation',
'binary': False,
'ldap_name': 'eduPersonAffiliation',
'friendly_name': 'eduPersonAffiliation',
'multivalued': True
},
'eduPersonPrimaryAffiliation': {
'public_name': 'eduPersonPrimaryAffiliation',
'binary': False,
'ldap_name': 'eduPersonPrimaryAffiliation',
'friendly_name': 'eduPersonPrimaryAffiliation',
'multivalued': False
},
'ou': {
'public_name': 'ou',
'binary': False,
'ldap_name': 'ou',
'friendly_name': 'Organizational Unit',
'multivalued': False
},
'uwodepartmentassoc': {
'public_name': 'uwodepartmentassoc',
'binary': False,
'ldap_name': 'uwodepartmentassoc',
'friendly_name': 'UWO Department Association',
'multivalued': False
},
'l': {
'public_name': 'location',
'binary': False,
'ldap_name': 'l',
'friendly_name': 'Location',
'multivalued': False
},
'telephoneNumber': {
'public_name': 'phone',
'binary': False,
'ldap_name': 'telephoneNumber',
'friendly_name': 'Phone Number',
'multivalued': False
},
'mailUserStatus': {
'public_name': 'mailUserStatus',
'binary': False,
'ldap_name': 'mailUserStatus',
'friendly_name': 'Mail User Status',
'multivalued': False
},
'uwomailstop': {
'public_name': 'uwomailstop',
'binary': False,
'ldap_name': 'uwomailstop',
'friendly_name': 'UWO Mail Stop',
'multivalued': False
},
'displayName': {
'public_name': 'displayname',
'binary': False,
'ldap_name': 'displayName',
'friendly_name': 'Display Name',
'multivalued': False
},
}
ldap_auth.acl_users.setSchemaConfig(ldap_schema)
ldap_auth.acl_users._user_objclasses = ['inetOrgPerson']
ldap_auth.manage_activateInterfaces(['IUserEnumerationPlugin', 'IPropertiesPlugin', 'IAuthenticationPlugin'])
movePluginToHeadOfList(acl_users.plugins, 'IPropertiesPlugin', ldap_plugin_id)
def movePluginToHeadOfList(plugin_registry, plugin_type, plugin_id):
interface = plugin_registry._getInterfaceFromName(plugin_type)
index = plugin_registry._getPlugins(interface).index(plugin_id)
while index > 0:
plugin_registry.movePluginsUp(interface, [plugin_id])
new_index = plugin_registry._getPlugins(interface).index(plugin_id)
if new_index >= index:
# The plugin didn't move up. We calmly sidestep the infinite loop.
break
index = new_index
| gpl-2.0 | -1,045,420,193,652,714,100 | 32.913043 | 113 | 0.52702 | false |
realopenit/bubble | bubble/commands/cmd_examples.py | 1 | 1437 | # -*- coding: utf-8 -*-
# Part of bubble. See LICENSE file for full copyright and licensing details.
import click
from ..cli import pass_bubble
from ..util.examples import all_examples_functions
@click.command('examples',
short_help='Show example for doing some task in bubble(experimental)')
@click.option('--name',
'-n',
default=None,
help='show the example with the name')
@click.option('--all',
'-a',
is_flag=True,
default=False,
help='show all the examples')
@pass_bubble
def cli(ctx, name,all):
"""Show example for doing some task in bubble(experimental)"""
ctx.gbc.say('all_example_functions',stuff=all_examples_functions, verbosity=1000)
for example in all_examples_functions:
if all or (name and example['name'] == name):
if all:
ctx.gbc.say('example',stuff=example, verbosity=100)
name = example['name']
#click.echo_via_pager(example['fun']())
click.echo("#"*80)
click.echo("### start of bubble example: "+name)
click.echo("#"*80)
click.echo(example['fun']())
click.echo("#"*80)
click.echo("### end of bubble example: "+name)
click.echo("#"*80)
click.echo()
else:
click.echo("available example: " + example['name'])
| gpl-3.0 | -2,532,512,840,913,987,000 | 33.214286 | 85 | 0.557411 | false |
yw374cornell/e-mission-server | emission/core/wrapper/stop.py | 1 | 1696 | import logging
import emission.core.wrapper.wrapperbase as ecwb
class Stop(ecwb.WrapperBase):
props = {"trip_id": ecwb.WrapperBase.Access.WORM, # trip_id of the parent trip
"enter_ts": ecwb.WrapperBase.Access.WORM, # the timestamp of entry (in secs)
"enter_local_dt": ecwb.WrapperBase.Access.WORM, # searchable datetime in timezone of entry
"enter_fmt_time": ecwb.WrapperBase.Access.WORM, # formatted entry time in timezone of place
"exit_ts": ecwb.WrapperBase.Access.WORM, # the timestamp of exit (in secs)
"exit_local_dt": ecwb.WrapperBase.Access.WORM, # searchable datetime in timezone of exit
"exit_fmt_time": ecwb.WrapperBase.Access.WORM, # formatted time in timezone of place
"ending_section": ecwb.WrapperBase.Access.WORM, # the id of the trip just before this
"starting_section": ecwb.WrapperBase.Access.WORM, # the id of the trip just after this
"enter_loc": ecwb.WrapperBase.Access.WORM, # the location in geojson format
"exit_loc": ecwb.WrapperBase.Access.WORM, # the location in geojson format
"source": ecwb.WrapperBase.Access.WORM, # the method used to generate this place
"duration": ecwb.WrapperBase.Access.WORM} # the duration for which we were in this place
enums = {}
geojson = ["enter_loc", "exit_loc"]
nullable = ["enter_ts", "enter_fmt_time", "ending_section", # for the start of a chain
"exit_ts", "exit_fmt_time", "starting_section"] # for the end of a chain
local_dates = ['enter_local_dt', 'exit_local_dt']
def _populateDependencies(self):
pass
| bsd-3-clause | -4,098,555,770,510,610,400 | 64.230769 | 104 | 0.653892 | false |
RNAcentral/rnacentral-import-pipeline | tests/cli/pdb_test.py | 1 | 1261 | # -*- coding: utf-8 -*-
"""
Copyright [2009-2018] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import pytest
from click.testing import CliRunner
from rnacentral_pipeline.cli import pdb
@pytest.mark.parametrize(
"command,output,pdbs",
[
("data", "pdb.json", ("1S72",)),
("extra", "pdb-extra.json", ("1S72",)),
],
)
def test_can_fetch_expected_data(command, output, pdbs):
runner = CliRunner()
with runner.isolated_filesystem():
args = [command, output]
args.extend(pdbs)
result = runner.invoke(pdb.cli, args)
assert result.exit_code == 0, result.output
assert not result.exception
with open(output, "rb") as raw:
assert raw.read()
| apache-2.0 | 7,726,899,241,946,992,000 | 29.756098 | 72 | 0.694687 | false |
ekapujiw2002/kweb | kwebhelper.py | 1 | 34980 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# helper file for kweb Minimal Kiosk Browser
# Copyright 2013-2014 by Guenter Kreidl
# free software without any warranty
# you can do with it what you like
# version 1.4
import os,urllib,sys,subprocess,threading,time
import Tkinter as tk
# GLOBAL OPTIONS
# use external settings file, if not empty
#settings = ''
settings = '/usr/local/bin/kwebhelper_settings.py'
# where the downloads, PDF files etc. go, make sure a "Downloads" folder exists there
#homedir = '/media/volume'
homedir = ''
# if empty, the user's home dir will be taken
# OMXPLAYER AUDIO VIDEO OPTIONS
omxoptions = []
# for selecting the sound output, uncomment one of these:
#omxoptions = ['-o','hdmi']
#omxoptions = ['-o','local']
# more options are also possible of course
# special options for watching live tv streams (omxplayer > 0.32)
omx_livetv_options = ['--live']
# add the start of your live tv stream links to this list to enable live tv options
live_tv = []
# like this:
#live_tv = ['http://192.168.0.5:9082']
# set this to false, if you want to allow more than one omxplayer instance
kill_omxplayer = True
#kill_omxplayer = False
# mimetypes: if given, this will restrict what omxplayer will be given to play:
mimetypes = []
# normally omxplayer is started from a terminal (xterm), to clear the screen and get full keyboard control
# Set the following to "False" to use omxplayer without starting a terminal first
omxplayer_in_terminal_for_video = True
#omxplayer_in_terminal_for_video = False
omxplayer_in_terminal_for_audio = True
#omxplayer_in_terminal_for_audio = False
# options for m3u playlists, to check that they contain only audio files or streams
audioextensions = ['mp3','aac','flac','wav','wma','cda','ogg','ogm','ac3','ape']
try_stream_as_audio = False
# if set to "True", the following list will be used for checking for video files
videoextensions = ['asf','avi','mpg','mp4','mpeg','m2v','m1v','vob','divx','xvid','mov','m4v','m2p','mkv','m2ts','ts','mts','wmv','webm']
# Play audio files or playlists that contain only audio files in omxaudioplayer GUI:
useAudioplayer = True
# options for omxplayer to be used when playing audio
omxaudiooptions = []
# volume setting when starting omxaudioplayer ranging from -20 to 4 ( -60 to +12 db)
defaultaudiovolume = 0
# start playing and close after playing last song automatically (if "True", set to "False" to disable)
autoplay = True
autofinish = True
# Interface settings for omxaudioplayer:
# The font to be used for playlist and buttons
fontname = 'SansSerif'
# value between 10 and 22, will also determine the size of the GUI window:
fontheight = 14
# number of entries displayed in playlist window, between 5 and 25:
maxlines = 8
# width of the window, value between 40 and 80, defines the minimum number of characters of the song name
# displayed in the songlist (usually much more are shown!)
lwidth = 40
# if the following is set to "True", vlc will be used to play audio files and playlists (audio only)
useVLC = False
#useVLC = True
#COMMAND EXECUTION OPTIONS
# if this is set to "True", all Desktop (GUI) programs will be executed without starting a terminal first
check_desktop = True
#check_desktop = False
# direct commands will be executed without starting a terminal first
# use it for background commands or programs with a GUI that are not desktop programs or if check_desktop is set to "False"
direct_commands = ['kwebhelper.py','omxplayer']
# preferred terminal to run commands in, must be set
preferred_terminal = 'lxterminal'
#preferred_terminal = 'xterm'
formdata_in_terminal = False
#formdata_in_terminal = True
# set the following to "True", if you want to spare memory overhead (but you'll get more disk write accesses)
run_as_script = False
#run_as_script = True
# PDF OPTIONS
# preferred pdf reader; both must be set or emtpy
pdfprogpath = ''
pdfprog = ''
#pdfprogpath = '/usr/bin/mupdf'
#pdfprog = 'mupdf'
# additional options for pdf program (must match the selected program!):
pdfoptions = []
#pdfoptions = ['-fullscreen']
# this will allow to open pdf files on a local server as files instead of downloading them first;
# will only work with "http://localhost" links
pdfpathreplacements = {}
#pdfpathreplacements = {'http://localhost:8073/Ebooks1':'file:///var/www/Ebooks1'}
# DOWNLOAD OPTIONS
#download options for external download mode, enable one of these options:
show_download_in_terminal = True
#show_download_in_terminal = False
# ONLINE VIDEO OPTIONS
# options for pages containing video, either HTML5 video tags or all websites supported by youtube-dl
# if html5 video tags include more than one source format, select the preferred one here
preferred_html5_video_format = '.mp4'
# choose whether HTML5 URL extraction is tried first and youtube-dl extraction afterwards, or vice versa
html5_first = True
#html5_first = False
#additional youtube-dl options, e. g. selecting a resolution or file format
youtube_dl_options = []
#youtube_dl_options = ['-f','37/22/18']
# special omxplayer options for web video
youtube_omxoptions = []
# to use the same options as for other video, set
#youtube_omxoptions = omxoptions
### end of global settings
# take settings from separate file:
if settings and os.path.exists(settings):
try:
execfile(settings)
except:
pass
if not homedir:
homedir = os.path.expanduser('~')
dldir = homedir +'/Downloads'
if not os.path.exists(dldir):
os.mkdir(dldir)
# helper functions
def get_opt(options):
if '--win' in options:
pos = options.index('--win')
        # quote the geometry value that follows '--win' (it contains spaces);
        # the value at pos+1 exists whenever '--win' is not the last element
        if pos < (len(options) - 1):
            options[pos+1] = '"' + options[pos+1] + '"'
return ' '.join(options)
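# e.g. get_opt(['-o','hdmi','--win','0 0 1280 720']) -> '-o hdmi --win "0 0 1280 720"'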
def get_playlist(url, audio_as_stream):
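    # Fetch or open an .m3u/.m3u8/.pls playlist and return (audioonly, playlist);
    # audioonly stays True only while every entry looks like an audio file/stream.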
playlist = []
fn = ''
audioonly = True
go = False
if url.startswith('http://'):
try:
fn,h = urllib.urlretrieve(url)
go = True
except:
pass
elif url.startswith('file://'):
fn = url.replace('file://','').replace('%20',' ')
fn = urllib.unquote(fn)
if os.path.exists(fn):
go = True
elif os.path.exists(url):
fn = url
go = True
if go:
f = file(fn,'rb')
pl = f.read()
f.close()
if url.startswith('http://'):
os.remove(fn)
pll = pl.split('\n')
if url.lower().endswith('.m3u') or url.lower().endswith('.m3u8'):
for s in pll:
if s != '' and not s.startswith('#'):
if s.split('.')[-1].lower() in audioextensions:
pass
elif audio_as_stream and s.split('.')[-1].lower() not in videoextensions:
pass
else:
audioonly = False
playlist.append(s)
elif url.lower().endswith('.pls'):
for s in pll:
if s.startswith('File'):
aurl = s.split('=')[1].strip()
playlist.append(aurl)
return (audioonly, playlist)
def video_tag_extractor(url):
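    # Scrape the src attributes of HTML5 <video> tags from the page at url;
    # relative and file:// links are resolved, and when a tag offers several
    # sources the one matching preferred_html5_video_format is chosen.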
result = []
if url.startswith('file://'):
fpath = url.replace('file://','').replace('%20',' ')
else:
try:
fpath,h = urllib.urlretrieve(url)
except:
return result
f = file(fpath,'rb')
html = f.read()
f.close()
if '<video ' in html:
htl = html.split('<video')
for ind in range(1,len(htl)):
if not 'src="' in htl[ind]:
continue
vtl = htl[ind].split('src="')
if len(vtl) > 2:
links = []
for l in vtl[1:]:
pos = l.find('"')
links.append(l[0:pos])
link = links[0]
for li in links:
if preferred_html5_video_format and li.lower().endswith(preferred_html5_video_format):
link = li
else:
vt = vtl[1]
pos = vt.find('"')
link = vt[0:pos]
if link.startswith('http://') or link.startswith('https://') or link.startswith('rtsp://') or link.startswith('rtmp://'):
result.append(link)
elif link.startswith('file://'):
newlink = '"'+link.replace('file://','').replace('%20',' ')+'"'
result.append(newlink)
else:
urll = url.split('/')
if link.startswith('/'):
newlink = '/'.join(urll[0:3]+[link[1:]])
else:
relcount = len(urll) - 1 - link.count('../')
newlink = '/'.join(urll[0:relcount]+[link.replace('../','')])
if newlink.startswith('file://'):
newlink = '"'+newlink.replace('file://','').replace('%20',' ')+'"'
result.append(newlink)
return result
def play_ytdl(res):
vlist = res.split('\n')
if (len(vlist) == 1) or (len(vlist) == 2 and vlist[1] == ''):
vurl = vlist[0]
if kill_omxplayer:
dummy = os.system('killall omxplayer.bin > /dev/null 2>&1')
pargs = ["xterm","-fn","fixed","-fullscreen", "-maximized", "-bg", "black", "-fg", "black", "-e",'omxplayer']+youtube_omxoptions+[vurl]+['>', '/dev/null', '2>&1']
os.execv("/usr/bin/xterm",pargs)
else:
if kill_omxplayer:
script = '#!/bin/bash\nkillall omxplayer.bin > /dev/null 2>&1\n'
else:
script = '#!/bin/bash\n'
for vurl in vlist:
if vurl != '':
script += 'omxplayer ' + get_opt(youtube_omxoptions) + ' "' + vurl + '" > /dev/null 2>&1\n'
f = file(dldir+os.sep+'playall.sh','wb')
f.write(script)
f.close()
os.chmod(dldir+os.sep+'playall.sh',511)
os.execl("/usr/bin/xterm","xterm","-fn","fixed","-fullscreen", "-maximized", "-bg", "black", "-fg", "black", "-e",dldir+os.sep+'playall.sh')
def play_html5(tags):
if len(tags) == 1:
if kill_omxplayer:
dummy = os.system('killall omxplayer.bin > /dev/null 2>&1')
pargs = ["xterm","-fn","fixed","-fullscreen", "-maximized", "-bg", "black", "-fg", "black", "-e",'omxplayer']+youtube_omxoptions+[tags[0]]+['>', '/dev/null', '2>&1']
os.execv("/usr/bin/xterm",pargs)
else:
if kill_omxplayer:
script = '#!/bin/bash\nkillall omxplayer.bin > /dev/null 2>&1\n'
else:
script = '#!/bin/bash\n'
for t in tags:
script += 'omxplayer ' + get_opt(youtube_omxoptions) + ' ' + t + ' > /dev/null 2>&1\n'
f = file(dldir+os.sep+'playall.sh','wb')
f.write(script)
f.close()
os.chmod(dldir+os.sep+'playall.sh',511)
os.execl("/usr/bin/xterm","xterm","-fn","fixed","-fullscreen", "-maximized", "-bg", "black", "-fg", "black", "-e",dldir+os.sep+'playall.sh')
# omxaudioplayer GUI
class omxaudioplayer(tk.Frame):
def __init__(self, master=None, playlist=[],mode='simple',autofinish=True,volume=0,omxoptions=[],
fontheight=14,fontname='SansSerif',maxlines=8,width=40,autoplay=True):
tk.Frame.__init__(self, master)
self.set_defaults()
self.fontheight = min([max([fontheight,10]),22])
self.fontname = fontname
try:
self.font = (self.fontname,str(self.fontheight),'bold')
except:
self.font = ('SansSerif',str(self.fontheight),'bold')
self.maxlines = min([max([maxlines,5]),25])
self.defaultwidth = min([max([width,40]),80])
self.root = master
self.root.bind("<<finished>>",self.on_finished)
self.root.protocol('WM_DELETE_WINDOW', self.on_close)
self.root.title("omxaudioplayer")
self.root.resizable(False,False)
for keysym in self.keybindings:
self.root.bind(keysym,self.keyp_handler)
self.grid()
self.omxoptions = omxoptions
self.autofinish = autofinish
self.playlist = playlist
self.autoplay = autoplay
self.mode = mode
self.status = 'stopped'
self.omxprocess = None
self.omxwatcher = None
self.songpointer = 0
self.listpointer = 0
self.currentvolume = min([max([volume,-20]),4])
self.changedvolume = tk.IntVar()
self.changedvolume.set(volume)
self.playcontent = tk.StringVar()
self.playcontent.set(self.playstring)
self.createwidgets()
if self.playlist and self.autoplay:
self.playsong(0)
def set_defaults(self):
self.playstring = '>'
self.pausestring = '||'
self.stopstring = '[]'
self.rewstring = '←'
self.fwdstring = '→'
self.prevstring = '↑'
self.nextstring = '↓'
self.vchdelay = 0.05
self.keybindings = ['<KeyPress-Down>','<KeyPress-Up>','<KeyPress-space>','<KeyPress-q>','<KeyPress-Escape>',
'<KeyPress-plus>','<KeyPress-minus>','<KeyPress-Left>','<KeyPress-Right>','<KeyPress-Return>',
'<KeyPress-KP_Enter>','<KeyPress-KP_Add>','<KeyPress-KP_Subtract>']
def keyp_handler(self, event):
if event.keysym in ['space','Return','KP_Enter']:
self.playpause()
elif event.keysym in ['q','Escape']:
self.stop()
elif event.keysym == 'Down':
while self.nextbutton['state'] == tk.DISABLED:
time.sleep(0.1)
self.nextsong()
elif event.keysym == 'Up':
while self.prevbutton['state'] == tk.DISABLED:
time.sleep(0.1)
self.prevsong()
elif event.keysym == 'Left':
self.sendcommand('\x1b\x5b\x44')
elif event.keysym == 'Right':
self.sendcommand('\x1b\x5b\x43')
else:
av = 0
if event.keysym in ['plus','KP_Add']:
av = 1
elif event.keysym in ['minus','KP_Subtract']:
av = -1
if av != 0:
nv = self.changedvolume.get() + av
if nv in range(-20,5):
self.changedvolume.set(nv)
self.vol_changed(nv)
def playsong(self, index):
if not self.omxprocess:
self.prevbutton['state'] = tk.DISABLED
self.nextbutton['state'] = tk.DISABLED
self.songpointer = index
pargs = ['omxplayer', '--vol', str(self.currentvolume*300)] + self.omxoptions + [self.playlist[index]]
self.omxprocess = subprocess.Popen(pargs,stdin=subprocess.PIPE,stdout=file('/dev/null','wa'))
self.omxwatcher = threading.Timer(0,self.watch)
self.omxwatcher.start()
self.status = 'playing'
self.playcontent.set(self.pausestring)
selection = self.playlistwindow.curselection()
if not selection or index != int(selection[0]):
self.listpointer = index
self.playlistwindow.selection_clear(0, len(self.playlist)-1)
self.playlistwindow.selection_set(index)
self.playlistwindow.see(index)
time.sleep(0.3)
self.prevbutton['state'] = tk.NORMAL
self.nextbutton['state'] = tk.NORMAL
def on_close(self):
if self.omxprocess:
self.status='closing'
self.sendcommand('q')
time.sleep(0.1)
if self.omxprocess:
try:
self.omxprocess.terminate()
time.sleep(0.1)
except:
pass
if self.omxprocess:
try:
self.omxprocess.kill()
time.sleep(0.1)
except:
pass
self.root.destroy()
def on_finished(self, *args):
stat = self.status
self.status = 'stopped'
self.playcontent.set(self.playstring)
if stat != 'finished':
if self.songpointer == self.listpointer:
self.nextsong()
else:
self.songpointer = self.listpointer
self.playsong(self.songpointer)
def watch(self):
if self.omxprocess:
try:
dummy = self.omxprocess.wait()
except:
pass
self.omxprocess = None
if self.status != 'closing':
self.root.event_generate("<<finished>>")
def sendcommand(self, cmd):
if self.omxprocess:
try:
self.omxprocess.stdin.write(cmd)
except:
pass
def playpause(self):
if self.status in ['stopped','finished']:
self.songpointer = self.listpointer
self.playsong(self.songpointer)
elif self.status == 'paused':
self.sendcommand('p')
self.status = 'playing'
self.playcontent.set(self.pausestring)
elif self.status == 'playing':
self.sendcommand('p')
self.status = 'paused'
self.playcontent.set(self.playstring)
def stop(self,stat='finished'):
if self.omxprocess:
self.status = stat
self.sendcommand('q')
else:
self.playcontent.set(self.playstring)
self.status = 'stopped'
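    # omxplayer is controlled through its stdin: '\x1b\x5b\x44' and
    # '\x1b\x5b\x43' are the ANSI Left/Right arrow escape sequences, which
    # seek backwards/forwards (30 s steps in omxplayer)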
def rewind(self):
self.sendcommand('\x1b\x5b\x44')
def forward(self):
self.sendcommand('\x1b\x5b\x43')
def prevsong(self):
if self.listpointer != self.songpointer and self.status != 'stopped':
self.stop('stopped')
elif self.listpointer > 0:
self.listpointer = self.listpointer - 1
self.playlistwindow.selection_clear(0, len(self.playlist)-1)
self.playlistwindow.selection_set(self.listpointer)
if self.status == 'stopped':
self.playsong(self.listpointer)
else:
self.stop('stopped')
def nextsong(self):
if self.listpointer != self.songpointer and self.status != 'stopped':
self.stop('stopped')
elif self.listpointer < len(self.playlist)-1:
self.listpointer = self.listpointer + 1
self.playlistwindow.selection_clear(0, len(self.playlist)-1)
self.playlistwindow.selection_set(self.listpointer)
if self.status == 'stopped':
self.playsong(self.listpointer)
else:
self.stop('stopped')
elif self.autofinish:
self.on_close()
def vol_changed(self, volume):
vol = int(volume)
if self.status != 'stopped':
if vol > self.currentvolume:
diff = vol - self.currentvolume
self.currentvolume = vol
for k in range(0,diff):
self.sendcommand('+')
time.sleep(self.vchdelay)
elif vol < self.currentvolume:
diff = self.currentvolume - vol
self.currentvolume = vol
for k in range(0,diff):
self.sendcommand('-')
time.sleep(self.vchdelay)
else:
self.currentvolume = vol
def on_listbox_select(self,event):
sel = self.playlistwindow.curselection()
if sel:
self.listpointer = int(sel[0])
def on_listbox_double(self,event):
self.on_listbox_select(event)
if self.status != 'stopped':
if self.songpointer == self.listpointer:
self.stop()
self.playsong(self.listpointer)
else:
self.stop('stopped')
else:
self.playsong(self.listpointer)
def focus_out(self, event):
self.root.focus_set()
def createwidgets(self):
if len(self.playlist) > self.maxlines:
self.yScroll = tk.Scrollbar(self, orient=tk.VERTICAL)
self.yScroll['width'] = int(self.yScroll['width']) + (self.fontheight-10)
hg = self.maxlines
else:
hg = len(self.playlist)
self.playlistwindow = tk.Listbox(self, takefocus=0, selectmode = 'single', width = self.defaultwidth, height = hg, font=self.font,activestyle='none',bg='#000', fg = '#ddd', selectbackground='#60c', selectforeground='#ffffd0')
for url in self.playlist:
song = url.split('/')[-1]
self.playlistwindow.insert(tk.END, urllib.unquote(song).replace('%20',' '))
self.playlistwindow.selection_set(self.songpointer)
self.playlistwindow.bind("<<ListboxSelect>>", self.on_listbox_select)
self.playlistwindow.bind("<Double-Button-1>",self.on_listbox_double)
self.playlistwindow.bind("<FocusIn>",self.focus_out)
self.playlistwindow.grid(row=0,column=0,columnspan=7, sticky=tk.N+tk.S+tk.E+tk.W)
if len(self.playlist) > self.maxlines:
self.playlistwindow.configure(yscrollcommand=self.yScroll.set)
self.yScroll['command'] = self.playlistwindow.yview
self.yScroll.grid(row=0,column=7, sticky=tk.N+tk.S)
self.playbutton = tk.Button(self, command=self.playpause, font=self.font, textvariable = self.playcontent, width = 3, justify = tk.CENTER)
self.playbutton.grid(row=1,column=0)
self.stopbutton = tk.Button(self, command=self.stop, font=self.font, text = self.stopstring, width = 3, justify = tk.CENTER)
self.stopbutton.grid(row=1,column=1)
        # seek (rewind / fast-forward) buttons
        self.rewbutton = tk.Button(self, command=self.rewind, font=self.font, text = self.rewstring, width = 3, justify = tk.CENTER)
        self.rewbutton.grid(row=1,column=2)
        self.fwdbutton = tk.Button(self, command=self.forward, font=self.font, text = self.fwdstring, width = 3, justify = tk.CENTER)
        self.fwdbutton.grid(row=1,column=3)
        # track navigation buttons; playsong() and keyp_handler() toggle their state
        self.prevbutton = tk.Button(self, command=self.prevsong, font=self.font, text = self.prevstring, width = 3, justify = tk.CENTER)
        self.prevbutton.grid(row=1,column=4)
        self.nextbutton = tk.Button(self, command=self.nextsong, font=self.font, text = self.nextstring, width = 3, justify = tk.CENTER)
        self.nextbutton.grid(row=1,column=5)
self.volume = tk.Scale(self, command=self.vol_changed, font=self.font, length=str((self.fontheight-2)*(self.defaultwidth-30))+'p', from_ = -20, to=4, variable=self.changedvolume ,orient=tk.HORIZONTAL, resolution=1, showvalue=0)
self.volume.grid(row=1,column=6)
# main script function
args = sys.argv
if len(args) > 2:
mode = args[1]
url = args[2]
mimetype = ''
# media section: play audio, video, m3u playlists and streams
if mode == 'av':
mtflag = True
if len(args) > 3:
mimetype = args[3]
if mimetypes and mimetype not in mimetypes:
mtflag = False
url_extension = url.lower().split('.')[-1]
if url_extension in ['m3u','m3u8','pls'] and mtflag:
audioonly, playlist = get_playlist(url,try_stream_as_audio)
if playlist:
if audioonly and useVLC:
os.execl("/usr/bin/vlc","vlc",url)
elif audioonly and useAudioplayer:
if kill_omxplayer:
dummy = os.system('killall omxplayer.bin > /dev/null 2>&1')
root = tk.Tk()
player = omxaudioplayer(master=root, playlist=playlist,volume=defaultaudiovolume,omxoptions=omxaudiooptions,
autofinish=autofinish,fontheight=fontheight,fontname=fontname,maxlines=maxlines,
autoplay=autoplay,width=lwidth)
player.mainloop()
else:
if audioonly:
options = omxaudiooptions
else:
options = omxoptions
if kill_omxplayer:
script = '#!/bin/bash\nkillall omxplayer.bin > /dev/null 2>&1\n'
else:
script = '#!/bin/bash\n'
for s in playlist:
if audioonly and omxplayer_in_terminal_for_audio:
script += 'echo "now playing: '+ urllib.unquote(s.split('/')[-1]) +'"\n'
script += 'omxplayer ' + get_opt(options) + ' "' + s + '" > /dev/null 2>&1\n'
f = file(dldir+os.sep+'playall.sh','wb')
f.write(script)
f.close()
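                    # 511 decimal == 0o777: make the generated playall.sh executable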
os.chmod(dldir+os.sep+'playall.sh',511)
if omxplayer_in_terminal_for_audio and audioonly:
os.execlp(preferred_terminal,preferred_terminal,"-e",dldir+os.sep+'playall.sh')
elif omxplayer_in_terminal_for_video and not audioonly:
os.execl("/usr/bin/xterm","xterm","-fn","fixed","-fullscreen", "-maximized", "-bg", "black", "-fg", "black", "-e",dldir+os.sep+'playall.sh')
else:
os.execl(dldir+os.sep+'playall.sh','playall.sh')
elif mtflag:
url_valid = True
if url.startswith('file://'):
url = url.replace('file://','').replace('%20',' ')
url = urllib.unquote(url)
if not os.path.exists(url):
url_valid = False
elif not url.startswith('http'):
if not os.path.exists(url):
url_valid = False
if url_valid:
if url_extension in audioextensions or (try_stream_as_audio and not url_extension in videoextensions):
if useVLC:
os.execl("/usr/bin/vlc","vlc",url)
elif useAudioplayer:
if kill_omxplayer:
dummy = os.system('killall omxplayer.bin > /dev/null 2>&1')
root = tk.Tk()
player = omxaudioplayer(master=root, playlist=[url],volume=defaultaudiovolume,omxoptions=omxaudiooptions,
autofinish=autofinish,fontheight=fontheight,fontname=fontname,maxlines=maxlines,
autoplay=autoplay,width=lwidth)
player.mainloop()
else:
if kill_omxplayer:
dummy = os.system('killall omxplayer.bin > /dev/null 2>&1')
if omxplayer_in_terminal_for_audio:
pargs = [preferred_terminal,'-e','omxplayer'] + omxaudiooptions + [url]
os.execvp(preferred_terminal,pargs)
else:
pargs = ['omxplayer'] + omxaudiooptions + [url]
os.execvp('omxplayer',pargs)
else:
if kill_omxplayer:
dummy = os.system('killall omxplayer.bin > /dev/null 2>&1')
options = omxoptions
if live_tv:
for lt in live_tv:
if url.startswith(lt):
options = omx_livetv_options
break
if omxplayer_in_terminal_for_video:
pargs = ["xterm","-fn","fixed","-fullscreen", "-maximized", "-bg", "black", "-fg", "black", "-e",'omxplayer']+options+[url]+['>', '/dev/null', '2>&1']
os.execv("/usr/bin/xterm",pargs)
else:
                        pargs = ['omxplayer'] + options + [url]
os.execvp('omxplayer',pargs)
# end of media section
# pdf section (download - if needed - and open pdf file)
elif mode == 'pdf':
if not (pdfprogpath and pdfprog):
if os.path.exists('/usr/bin/xpdf'):
pdfprogpath = '/usr/bin/xpdf'
pdfprog = 'xpdf'
else:
pdfprogpath = '/usr/bin/mupdf'
pdfprog = 'mupdf'
go = False
# option to open pdf as files from http://localhost instead of downloading them first
if pdfpathreplacements and url.startswith('http://localhost'):
for k,v in pdfpathreplacements.iteritems():
if url.startswith(k):
nurl = url.replace(k,v)
if os.path.exists(urllib.unquote(nurl.replace('file://','').replace('%20',' ').split('#')[0])):
url = nurl
break
if url.startswith('file://'):
url = url.replace('file://','').replace('%20',' ')
url = urllib.unquote(url)
urll = url.split('#page=')
f = urll[0]
if os.path.exists(f):
if len(urll) > 1:
page = urll[1].split('&')[0]
os.execv(pdfprogpath,[pdfprog]+pdfoptions+[f,page])
else:
os.execv(pdfprogpath,[pdfprog]+pdfoptions+[f])
else:
if url.endswith('.pdf') or url.endswith('.PDF') or '.pdf#page' in url.lower():
urll = url.split('#page=')
fname = urllib.unquote(urll[0].split('/')[-1].replace('%20',' '))
f = dldir+os.sep+urllib.unquote(urll[0].split('/')[-1].replace('%20',' '))
if os.path.exists(f):
go = True
else:
try:
fn,h = urllib.urlretrieve(urll[0],f)
go = True
except:
pass
if go:
if len(urll) > 1:
page = urll[1].split('&')[0]
os.execv(pdfprogpath,[pdfprog]+pdfoptions+[f,page])
else:
os.execv(pdfprogpath,[pdfprog]+pdfoptions+[f])
# end of pdf section
# download section
elif mode == 'dl':
# download file using wget
if show_download_in_terminal:
os.execlp(preferred_terminal,preferred_terminal,'-e', "wget", "-P", dldir,"--no-clobber","--adjust-extension","--content-disposition",url,"--load-cookies",homedir + "/.web_cookie_jar","--no-check-certificate")
else:
os.execl("/usr/bin/wget", "wget", "-P", dldir,"--no-clobber","--adjust-extension","--content-disposition",url,"--load-cookies",homedir + "/.web_cookie_jar","--no-check-certificate")
#end of download section
# command execution section
elif mode == 'cmd':
cmd = ''
formdata = False
cpage = 'file:///homepage.html?cmd='
url = url.decode('utf-8')
if url.startswith('#'):
cmd = url[1:]
elif url.startswith(cpage):
cmd = url.replace(cpage,'')
if not cmd.startswith('formdata'):
cmd = urllib.unquote_plus(cmd).replace('%20',' ')
elif url.startswith('http://localhost') and ('/homepage.html?cmd=' in url):
cmd = url.split('/homepage.html?cmd=')[1]
if not cmd.startswith('formdata'):
cmd = urllib.unquote_plus(cmd).replace('%20',' ')
if cmd:
if cmd.startswith('formdata'):
formdata = True
cmd = cmd.split('formdata')[1].strip()
if '&' in cmd:
cmdargs = cmd.split('&')
for ind in range(0,len(cmdargs)):
if '=' in cmdargs[ind]:
cargl = cmdargs[ind].split('=')
if cargl[0].startswith('quoted') and cargl[1] != '':
cmdargs[ind] = " '" + urllib.unquote_plus(cargl[1]) + "'"
elif cargl[0].startswith('dquoted') and cargl[1] != '':
cmdargs[ind] = ' "' + urllib.unquote_plus(cargl[1]) + '"'
elif cargl[1] != '':
cmdargs[ind] = ' ' + urllib.unquote_plus(cargl[1])
else:
cmdargs[ind] = ''
else:
cmdargs[ind] = ' ' + urllib.unquote_plus(cmdargs[ind]).strip()
cmd = ''.join(cmdargs).strip()
else:
cmd = urllib.unquote_plus(cmd).strip()
cmdl = cmd.split(' ')
if len(cmdl)>1 and cmdl[0] == 'sudo':
realname = cmdl[1]
else:
realname = cmdl[0]
desktop_app = False
if check_desktop and '/' not in realname:
if os.path.exists('/usr/share/applications/'+realname+'.desktop'):
desktop_app = True
if desktop_app or (realname in direct_commands) or (formdata and not formdata_in_terminal):
cmdline = cmd.encode('utf-8')
else:
cmdline = preferred_terminal + ' -e '+cmd.encode('utf-8')
if run_as_script:
dmcount = 0
scpath = dldir+os.sep+'temp'+str(dmcount)+'.sh'
while os.path.exists(scpath):
dmcount += 1
scpath = dldir+os.sep+'temp'+str(dmcount)+'.sh'
f = file(scpath,'wb')
f.write('#!/bin/bash\n'+cmdline+'\nrm '+scpath+'\n')
f.close()
os.chmod(scpath,511)
os.execl(scpath,scpath)
else:
try:
dummy = os.system(cmdline)
except:
pass
# end of command execution section
# web video section (HTML5 and all websites supported by youtube-dl)
elif mode == 'ytdl' and os.path.exists('/usr/bin/youtube-dl'): #youtube and HTML5 videos
if html5_first:
tags = video_tag_extractor(url)
if tags: #extract embedded html5 video
play_html5(tags)
else:
yta = ['youtube-dl', '-g']+youtube_dl_options+[url]
yt = subprocess.Popen(yta,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
(res,err) = yt.communicate()
if res and not err:
play_ytdl(res)
else:
yta = ['youtube-dl', '-g']+youtube_dl_options+[url]
yt = subprocess.Popen(yta,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
(res,err) = yt.communicate()
if res and not err:
play_ytdl(res)
else:
tags = video_tag_extractor(url)
if tags: #extract embedded html5 video
play_html5(tags)
# end of web video section
| gpl-3.0 | 2,244,612,217,892,959,500 | 41.185766 | 235 | 0.53966 | false |
bwohlberg/sporco | sporco/dictlrn/cbpdndl.py | 1 | 18601 | # -*- coding: utf-8 -*-
# Copyright (C) 2015-2020 by Brendt Wohlberg <[email protected]>
# All rights reserved. BSD 3-clause License.
# This file is part of the SPORCO package. Details of the copyright
# and user license can be found in the 'LICENSE.txt' file distributed
# with the package.
"""Dictionary learning based on CBPDN sparse coding"""
from __future__ import print_function, absolute_import
import copy
import numpy as np
import sporco.cnvrep as cr
import sporco.admm.cbpdn as admm_cbpdn
import sporco.admm.ccmod as admm_ccmod
import sporco.pgm.cbpdn as pgm_cbpdn
import sporco.pgm.ccmod as pgm_ccmod
from sporco.dictlrn import dictlrn
import sporco.dictlrn.common as dc
from sporco.common import _fix_dynamic_class_lookup
from sporco.linalg import inner
from sporco.fft import (rfftn, irfftn, rfl2norm2)
__author__ = """Brendt Wohlberg <[email protected]>"""
def cbpdn_class_label_lookup(label):
"""Get a CBPDN class from a label string."""
clsmod = {'admm': admm_cbpdn.ConvBPDN,
'pgm': pgm_cbpdn.ConvBPDN}
if label in clsmod:
return clsmod[label]
else:
raise ValueError('Unknown ConvBPDN solver method %s' % label)
def ConvBPDNOptionsDefaults(method='admm'):
"""Get defaults dict for the ConvBPDN class specified by the ``method``
parameter.
"""
dflt = copy.deepcopy(cbpdn_class_label_lookup(method).Options.defaults)
if method == 'admm':
dflt.update({'MaxMainIter': 1, 'AutoRho':
{'Period': 10, 'AutoScaling': False,
'RsdlRatio': 10.0, 'Scaling': 2.0,
'RsdlTarget': 1.0}})
else:
dflt.update({'MaxMainIter': 1})
return dflt
def ConvBPDNOptions(opt=None, method='admm'):
"""A wrapper function that dynamically defines a class derived from
the Options class associated with one of the implementations of
the Convolutional BPDN problem, and returns an object
instantiated with the provided parameters. The wrapper is designed
to allow the appropriate object to be created by calling this
function using the same syntax as would be used if it were a
class. The specific implementation is selected by use of an
additional keyword argument 'method'. Valid values are as
specified in the documentation for :func:`ConvBPDN`.
"""
# Assign base class depending on method selection argument
base = cbpdn_class_label_lookup(method).Options
# Nested class with dynamically determined inheritance
class ConvBPDNOptions(base):
def __init__(self, opt):
super(ConvBPDNOptions, self).__init__(opt)
# Allow pickling of objects of type ConvBPDNOptions
_fix_dynamic_class_lookup(ConvBPDNOptions, method)
# Return object of the nested class type
return ConvBPDNOptions(opt)
def ConvBPDN(*args, **kwargs):
"""A wrapper function that dynamically defines a class derived from
one of the implementations of the Convolutional Constrained MOD
problems, and returns an object instantiated with the provided
parameters. The wrapper is designed to allow the appropriate
object to be created by calling this function using the same
syntax as would be used if it were a class. The specific
implementation is selected by use of an additional keyword
argument 'method'. Valid values are:
- ``'admm'`` :
Use the implementation defined in :class:`.admm.cbpdn.ConvBPDN`.
- ``'pgm'`` :
Use the implementation defined in :class:`.pgm.cbpdn.ConvBPDN`.
The default value is ``'admm'``.
"""
# Extract method selection argument or set default
method = kwargs.pop('method', 'admm')
# Assign base class depending on method selection argument
base = cbpdn_class_label_lookup(method)
# Nested class with dynamically determined inheritance
class ConvBPDN(base):
def __init__(self, *args, **kwargs):
super(ConvBPDN, self).__init__(*args, **kwargs)
# Allow pickling of objects of type ConvBPDN
_fix_dynamic_class_lookup(ConvBPDN, method)
# Return object of the nested class type
return ConvBPDN(*args, **kwargs)
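# Example (sketch, illustrative values): construct a sparse coding solver with
# an explicit method selection, as done in ConvBPDNDictLearn below:
#
#   xstep = ConvBPDN(D0, S, lmbda, method='pgm', dimK=1, dimN=2)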
def ccmod_class_label_lookup(label):
"""Get a CCMOD class from a label string."""
clsmod = {'ism': admm_ccmod.ConvCnstrMOD_IterSM,
'cg': admm_ccmod.ConvCnstrMOD_CG,
'cns': admm_ccmod.ConvCnstrMOD_Consensus,
'pgm': pgm_ccmod.ConvCnstrMOD}
if label in clsmod:
return clsmod[label]
else:
raise ValueError('Unknown ConvCnstrMOD solver method %s' % label)
def ConvCnstrMODOptionsDefaults(method='pgm'):
"""Get defaults dict for the ConvCnstrMOD class specified by the
``method`` parameter.
"""
dflt = copy.deepcopy(ccmod_class_label_lookup(method).Options.defaults)
if method == 'pgm':
dflt.update({'MaxMainIter': 1})
else:
dflt.update({'MaxMainIter': 1, 'AutoRho':
{'Period': 10, 'AutoScaling': False,
'RsdlRatio': 10.0, 'Scaling': 2.0,
'RsdlTarget': 1.0}})
return dflt
def ConvCnstrMODOptions(opt=None, method='pgm'):
"""A wrapper function that dynamically defines a class derived from
the Options class associated with one of the implementations of
the Convolutional Constrained MOD problem, and returns an object
instantiated with the provided parameters. The wrapper is designed
to allow the appropriate object to be created by calling this
function using the same syntax as would be used if it were a
class. The specific implementation is selected by use of an
additional keyword argument 'method'. Valid values are as
specified in the documentation for :func:`ConvCnstrMOD`.
"""
# Assign base class depending on method selection argument
base = ccmod_class_label_lookup(method).Options
# Nested class with dynamically determined inheritance
class ConvCnstrMODOptions(base):
def __init__(self, opt):
super(ConvCnstrMODOptions, self).__init__(opt)
# Allow pickling of objects of type ConvCnstrMODOptions
_fix_dynamic_class_lookup(ConvCnstrMODOptions, method)
# Return object of the nested class type
return ConvCnstrMODOptions(opt)
def ConvCnstrMOD(*args, **kwargs):
"""A wrapper function that dynamically defines a class derived from
one of the implementations of the Convolutional Constrained MOD
problems, and returns an object instantiated with the provided
parameters. The wrapper is designed to allow the appropriate
object to be created by calling this function using the same
syntax as would be used if it were a class. The specific
implementation is selected by use of an additional keyword
argument 'method'. Valid values are:
- ``'ism'`` :
Use the implementation defined in :class:`.ConvCnstrMOD_IterSM`. This
method works well for a small number of training images, but is very
slow for larger training sets.
- ``'cg'`` :
Use the implementation defined in :class:`.ConvCnstrMOD_CG`. This
method is slower than ``'ism'`` for small training sets, but has better
run time scaling as the training set grows.
- ``'cns'`` :
Use the implementation defined in :class:`.ConvCnstrMOD_Consensus`.
This method is a good choice for large training sets.
- ``'pgm'`` :
Use the implementation defined in :class:`.pgm.ccmod.ConvCnstrMOD`.
This method is the best choice for large training sets.
The default value is ``'pgm'``.
"""
# Extract method selection argument or set default
method = kwargs.pop('method', 'pgm')
# Assign base class depending on method selection argument
base = ccmod_class_label_lookup(method)
# Nested class with dynamically determined inheritance
class ConvCnstrMOD(base):
def __init__(self, *args, **kwargs):
super(ConvCnstrMOD, self).__init__(*args, **kwargs)
# Allow pickling of objects of type ConvCnstrMOD
_fix_dynamic_class_lookup(ConvCnstrMOD, method)
# Return object of the nested class type
return ConvCnstrMOD(*args, **kwargs)
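# Example (sketch, mirroring the call in ConvBPDNDictLearn below):
#
#   dstep = ConvCnstrMOD(None, S, dsz, method='cns', dimK=1, dimN=2)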
class ConvBPDNDictLearn(dictlrn.DictLearn):
r"""
Dictionary learning by alternating between sparse coding and dictionary
update stages.
|
.. inheritance-diagram:: ConvBPDNDictLearn
:parts: 2
|
The sparse coding is performed using
:class:`.admm.cbpdn.ConvBPDN` (see :cite:`wohlberg-2014-efficient`) or
:class:`.pgm.cbpdn.ConvBPDN` (see :cite:`chalasani-2013-fast` and
:cite:`wohlberg-2016-efficient`), and the dictionary update is computed
using :class:`.pgm.ccmod.ConvCnstrMOD` (see
:cite:`garcia-2018-convolutional1`) or one of the solver classes in
:mod:`.admm.ccmod` (see :cite:`wohlberg-2016-efficient` and
:cite:`sorel-2016-fast`). The coupling between sparse coding and
dictionary update stages is as in :cite:`garcia-2017-subproblem`.
Solve the optimisation problem
.. math::
\mathrm{argmin}_{\mathbf{d}, \mathbf{x}} \;
(1/2) \sum_k \left \| \sum_m \mathbf{d}_m * \mathbf{x}_{k,m} -
\mathbf{s}_k \right \|_2^2 + \lambda \sum_k \sum_m
\| \mathbf{x}_{k,m} \|_1 \quad \text{such that}
\quad \mathbf{d}_m \in C \;\; \forall m \;,
where :math:`C` is the feasible set consisting of filters with
unit norm and constrained support, via interleaved alternation
between the ADMM steps of the :class:`.admm.cbpdn.ConvBPDN` and
:func:`.ConvCnstrMOD` problems. Multi-channel variants
:cite:`wohlberg-2016-convolutional` are also supported.
After termination of the :meth:`solve` method, attribute :attr:`itstat`
is a list of tuples representing statistics of each iteration. The
fields of the named tuple ``IterationStats`` are:
``Iter`` : Iteration number
``ObjFun`` : Objective function value
``DFid`` : Value of data fidelity term :math:`(1/2) \sum_k \|
\sum_m \mathbf{d}_m * \mathbf{x}_{k,m} - \mathbf{s}_k \|_2^2`
``RegL1`` : Value of regularisation term :math:`\sum_k \sum_m
\| \mathbf{x}_{k,m} \|_1`
``Cnstr`` : Constraint violation measure
*If the ADMM solver is selected for sparse coding:*
``XPrRsdl`` : Norm of X primal residual
``XDlRsdl`` : Norm of X dual residual
``XRho`` : X penalty parameter
*If the PGM solver is selected for sparse coding:*
``X_F_Btrack`` : Value of objective function for CSC problem
``X_Q_Btrack`` : Value of quadratic approximation for CSC problem
``X_ItBt`` : Number of iterations in backtracking for CSC problem
``X_L`` : Inverse of gradient step parameter for CSC problem
*If an ADMM solver is selected for the dictionary update:*
``DPrRsdl`` : Norm of D primal residual
``DDlRsdl`` : Norm of D dual residual
``DRho`` : D penalty parameter
*If the PGM solver is selected for the dictionary update:*
``D_F_Btrack`` : Value of objective function for CDU problem
    ``D_Q_Btrack`` : Value of quadratic approximation for CDU problem
``D_ItBt`` : Number of iterations in backtracking for CDU problem
``D_L`` : Inverse of gradient step parameter for CDU problem
``Time`` : Cumulative run time
"""
class Options(dictlrn.DictLearn.Options):
"""CBPDN dictionary learning algorithm options.
Options include all of those defined in
:class:`.dictlrn.DictLearn.Options`, together with additional
options:
``AccurateDFid`` : Flag determining whether data fidelity term is
estimated from the value computed in the X update (``False``) or
is computed after every outer iteration over an X update and a D
update (``True``), which is slower but more accurate.
``DictSize`` : Dictionary size vector.
``CBPDN`` : An options class appropriate for the selected
sparse coding solver class
``CCMOD`` : An options class appropriate for the selected
dictionary update solver class
"""
defaults = copy.deepcopy(dictlrn.DictLearn.Options.defaults)
defaults.update({'DictSize': None, 'AccurateDFid': False})
def __init__(self, opt=None, xmethod=None, dmethod=None):
"""
Valid values for parameters ``xmethod`` and ``dmethod`` are
documented in functions :func:`.ConvBPDN` and
:func:`.ConvCnstrMOD` respectively.
"""
if xmethod is None:
xmethod = 'admm'
if dmethod is None:
dmethod = 'pgm'
self.xmethod = xmethod
self.dmethod = dmethod
self.defaults.update(
{'CBPDN': ConvBPDNOptionsDefaults(xmethod),
'CCMOD': ConvCnstrMODOptionsDefaults(dmethod)})
# Initialisation of CBPDN and CCMOD keys here is required to
# ensure that the corresponding options have types appropriate
# for classes in the cbpdn and ccmod modules, and are not just
# standard entries in the parent option tree
dictlrn.DictLearn.Options.__init__(self, {
'CBPDN': ConvBPDNOptions(self.defaults['CBPDN'],
method=xmethod),
'CCMOD': ConvCnstrMODOptions(self.defaults['CCMOD'],
method=dmethod)})
if opt is None:
opt = {}
self.update(opt)
def __init__(self, D0, S, lmbda=None, opt=None, xmethod=None,
dmethod=None, dimK=1, dimN=2):
"""
|
**Call graph**
.. image:: ../_static/jonga/cbpdndl_init.svg
:width: 20%
:target: ../_static/jonga/cbpdndl_init.svg
|
Parameters
----------
D0 : array_like
Initial dictionary array
S : array_like
Signal array
lmbda : float
Regularisation parameter
opt : :class:`ConvBPDNDictLearn.Options` object
Algorithm options
xmethod : string, optional (default 'admm')
String selecting sparse coding solver. Valid values are
documented in function :func:`.ConvBPDN`.
dmethod : string, optional (default 'pgm')
String selecting dictionary update solver. Valid values are
documented in function :func:`.ConvCnstrMOD`.
dimK : int, optional (default 1)
Number of signal dimensions. If there is only a single input
signal (e.g. if `S` is a 2D array representing a single image)
`dimK` must be set to 0.
dimN : int, optional (default 2)
Number of spatial/temporal dimensions
"""
if opt is None:
opt = ConvBPDNDictLearn.Options(xmethod=xmethod, dmethod=dmethod)
if xmethod is None:
xmethod = opt.xmethod
if dmethod is None:
dmethod = opt.dmethod
if opt.xmethod != xmethod or opt.dmethod != dmethod:
raise ValueError('Parameters xmethod and dmethod must have the '
'same values used to initialise the Options '
'object')
self.opt = opt
self.xmethod = xmethod
self.dmethod = dmethod
# Get dictionary size
if self.opt['DictSize'] is None:
dsz = D0.shape
else:
dsz = self.opt['DictSize']
# Construct object representing problem dimensions
cri = cr.CDU_ConvRepIndexing(dsz, S, dimK, dimN)
# Normalise dictionary
D0 = cr.Pcn(D0, dsz, cri.Nv, dimN, cri.dimCd, crp=True,
zm=opt['CCMOD', 'ZeroMean'])
# Modify D update options to include initial value for Y
optname = 'X0' if dmethod == 'pgm' else 'Y0'
opt['CCMOD'].update({optname: cr.zpad(
cr.stdformD(D0, cri.Cd, cri.M, dimN), cri.Nv)})
# Create X update object
xstep = ConvBPDN(D0, S, lmbda, opt['CBPDN'], method=xmethod,
dimK=dimK, dimN=dimN)
# Create D update object
dstep = ConvCnstrMOD(None, S, dsz, opt['CCMOD'], method=dmethod,
dimK=dimK, dimN=dimN)
# Configure iteration statistics reporting
isc = dictlrn.IterStatsConfig(
isfld=dc.isfld(xmethod, dmethod, opt),
isxmap=dc.isxmap(xmethod, opt), isdmap=dc.isdmap(dmethod),
evlmap=dc.evlmap(opt['AccurateDFid']),
hdrtxt=dc.hdrtxt(xmethod, dmethod, opt),
hdrmap=dc.hdrmap(xmethod, dmethod, opt),
fmtmap={'It_X': '%4d', 'It_D': '%4d'})
# Call parent constructor
super(ConvBPDNDictLearn, self).__init__(xstep, dstep, opt, isc)
def getdict(self, crop=True):
"""Get final dictionary. If ``crop`` is ``True``, apply
:func:`.cnvrep.bcrop` to returned array.
"""
return self.dstep.getdict(crop=crop)
def reconstruct(self, D=None, X=None):
"""Reconstruct representation."""
if D is None:
D = self.getdict(crop=False)
if X is None:
X = self.getcoef()
Df = rfftn(D, self.xstep.cri.Nv, self.xstep.cri.axisN)
Xf = rfftn(X, self.xstep.cri.Nv, self.xstep.cri.axisN)
DXf = inner(Df, Xf, axis=self.xstep.cri.axisM)
return irfftn(DXf, self.xstep.cri.Nv, self.xstep.cri.axisN)
def evaluate(self):
"""Evaluate functional value of previous iteration."""
if self.opt['AccurateDFid']:
if self.dmethod == 'pgm':
D = self.dstep.getdict(crop=False)
else:
D = self.dstep.var_y()
if self.xmethod == 'pgm':
X = self.xstep.getcoef()
else:
X = self.xstep.var_y()
Df = rfftn(D, self.xstep.cri.Nv, self.xstep.cri.axisN)
Xf = rfftn(X, self.xstep.cri.Nv, self.xstep.cri.axisN)
Sf = self.xstep.Sf
Ef = inner(Df, Xf, axis=self.xstep.cri.axisM) - Sf
dfd = rfl2norm2(Ef, self.xstep.S.shape,
axis=self.xstep.cri.axisN) / 2.0
rl1 = np.sum(np.abs(X))
return dict(DFid=dfd, RegL1=rl1,
ObjFun=dfd + self.xstep.lmbda * rl1)
else:
return None
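# Example usage (sketch; array shapes and parameter values are illustrative
# assumptions, not prescribed by the library):
#
#   opt = ConvBPDNDictLearn.Options({'MaxMainIter': 100}, xmethod='admm',
#                                   dmethod='pgm')
#   d = ConvBPDNDictLearn(D0, S, lmbda=0.2, opt=opt)
#   d.solve()
#   D = d.getdict()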
| bsd-3-clause | 8,305,897,426,569,706,000 | 34.498092 | 77 | 0.626257 | false |
kism/RViProgramLauncher | viinputdaemon.py | 1 | 18123 | # Input Daemon for the Visually Impaired
# For use with a device that outputs serial
import uinput #interface between python and the uinput kernel module
import time #for time.sleep()
import serial #the keyboard this program interfaces with uses serial
import os
import sys
# Easier debugging :^)
class termcolour:
PINK = '\033[95m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
WHITE = '\033[0m'
# Figure out what to do on the keypresses
def sendLetter(letter):
global caps
global numb
print termcolour.GREEN + 'Sent ASCII Char:' + termcolour.WHITE
if numb == True:
if letter == 'KEY_A':
device.emit_click(uinput.KEY_1)
if letter == 'KEY_B':
device.emit_click(uinput.KEY_2)
if letter == 'KEY_C':
device.emit_click(uinput.KEY_3)
if letter == 'KEY_D':
device.emit_click(uinput.KEY_4)
if letter == 'KEY_E':
device.emit_click(uinput.KEY_5)
if letter == 'KEY_F':
device.emit_click(uinput.KEY_6)
if letter == 'KEY_G':
device.emit_click(uinput.KEY_7)
if letter == 'KEY_H':
device.emit_click(uinput.KEY_8)
if letter == 'KEY_I':
device.emit_click(uinput.KEY_9)
if letter == 'KEY_J':
device.emit_click(uinput.KEY_0)
else:
if caps == 0:
device.emit_click(getattr(uinput,letter))
if caps == 1:
caps = 0
device.emit_combo([
uinput.KEY_LEFTSHIFT,
getattr(uinput,letter),
])
if caps == 2:
device.emit_combo([
uinput.KEY_LEFTSHIFT,
getattr(uinput,letter),
])
def f1(inProgram):
print termcolour.PINK + 'F1 Pressed' + termcolour.WHITE
print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
if inProgram == 'viui': # Open menu item 1
device.emit_click(uinput.KEY_1)
time.sleep(0.01)
device.emit_click(uinput.KEY_ENTER)
time.sleep(0.01)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F1'
if inProgram == 'nano': # Open Help
device.emit_combo([
uinput.KEY_LEFTCTRL,
uinput.KEY_G,
])
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Help'
if inProgram == 'newsbeuter': # Open Help
device.emit_combo([
uinput.KEY_LEFTSHIFT,
uinput.KEY_SLASH,
])
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Help'
if inProgram == 'alpine': # Open Help
device.emit_combo([
uinput.KEY_LEFTSHIFT,
uinput.KEY_SLASH,
])
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Help'
if inProgram == 'links': # Open Help
device.emit_click(uinput.KEY_F9)
time.sleep(0.1)
device.emit_click(uinput.KEY_H)
time.sleep(0.1)
device.emit_click(uinput.KEY_M)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Help'
if inProgram == 'irssi': # Open Help
device.emit_click(uinput.KEY_SLASH)
time.sleep(0.01)
device.emit_click(uinput.KEY_H)
time.sleep(0.01)
device.emit_click(uinput.KEY_E)
time.sleep(0.01)
device.emit_click(uinput.KEY_L)
time.sleep(0.01)
device.emit_click(uinput.KEY_P)
time.sleep(0.01)
device.emit_click(uinput.KEY_ENTER)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Help'
if inProgram == 'zsh': # Go to home directory
device.emit_combo([
uinput.KEY_LEFTCTRL,
uinput.KEY_C,
])
device.emit_click(uinput.KEY_C)
time.sleep(0.01)
device.emit_click(uinput.KEY_D)
time.sleep(0.01)
device.emit_click(uinput.KEY_ENTER)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Home'
if inProgram == 'man': # Help
device.emit_click(uinput.KEY_H)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Help'
def f2(inProgram):
print termcolour.PINK + 'F2 Pressed' + termcolour.WHITE
print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
if inProgram == 'viui': # Open menu item 2
device.emit_click(uinput.KEY_2)
time.sleep(0.01)
device.emit_click(uinput.KEY_ENTER)
time.sleep(0.01)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F2'
if inProgram == 'nano': # Open File
device.emit_combo([
uinput.KEY_LEFTCTRL,
uinput.KEY_R,
])
time.sleep(0.1)
device.emit_combo([
uinput.KEY_LEFTCTRL,
uinput.KEY_T,
])
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Open File'
if inProgram == 'newsbeuter': # Open Entry
device.emit_click(uinput.KEY_ENTER)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Open Entry'
if inProgram == 'alpine': # Open Index
device.emit_click(uinput.KEY_I)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Index'
if inProgram == 'links': # Go to
device.emit_click(uinput.KEY_G)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Go to'
if inProgram == 'irssi': # Connect
device.emit_click(uinput.KEY_SLASH)
time.sleep(0.01)
device.emit_click(uinput.KEY_C)
time.sleep(0.01)
device.emit_click(uinput.KEY_O)
time.sleep(0.01)
device.emit_click(uinput.KEY_N)
time.sleep(0.01)
device.emit_click(uinput.KEY_N)
time.sleep(0.01)
device.emit_click(uinput.KEY_E)
time.sleep(0.01)
device.emit_click(uinput.KEY_C)
time.sleep(0.01)
device.emit_click(uinput.KEY_T)
time.sleep(0.01)
device.emit_click(uinput.KEY_SPACE)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Connect'
if inProgram == 'zsh': # Use the mplayer alias
device.emit_click(uinput.KEY_P)
time.sleep(0.01)
device.emit_click(uinput.KEY_L)
time.sleep(0.01)
device.emit_click(uinput.KEY_A)
time.sleep(0.01)
device.emit_click(uinput.KEY_Y)
time.sleep(0.01)
device.emit_click(uinput.KEY_SPACE)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Play'
def f3(inProgram):
print termcolour.PINK + 'F3 Pressed' + termcolour.WHITE
print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
if inProgram == 'viui': # Open menu item 3
device.emit_click(uinput.KEY_3)
time.sleep(0.01)
device.emit_click(uinput.KEY_ENTER)
time.sleep(0.01)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F3'
if inProgram == 'nano': # Save file
device.emit_combo([
uinput.KEY_LEFTCTRL,
uinput.KEY_O,
])
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Save File'
if inProgram == 'newsbeuter': # Save entry to file
device.emit_click(uinput.KEY_S)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Save Story'
if inProgram == 'alpine': # Compose
device.emit_click(uinput.KEY_C)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Compose'
if inProgram == 'links': # Open menu
device.emit_click(uinput.KEY_F9)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Menu'
if inProgram == 'irssi': # Join
device.emit_click(uinput.KEY_SLASH)
time.sleep(0.01)
device.emit_click(uinput.KEY_J)
time.sleep(0.01)
device.emit_click(uinput.KEY_O)
time.sleep(0.01)
device.emit_click(uinput.KEY_I)
time.sleep(0.01)
device.emit_click(uinput.KEY_N)
time.sleep(0.01)
device.emit_click(uinput.KEY_SPACE)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Join'
def f4(inProgram):
print termcolour.PINK + 'F4 Pressed' + termcolour.WHITE
print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
if inProgram == 'viui': # Open menu item 4
device.emit_click(uinput.KEY_4)
time.sleep(0.01)
device.emit_click(uinput.KEY_ENTER)
time.sleep(0.01)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F4'
if inProgram == 'nano': # Cancel
device.emit_combo([
uinput.KEY_LEFTCTRL,
uinput.KEY_C,
])
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Cancel'
if inProgram == 'alpine': # Back
device.emit_click(uinput.KEY_M)
time.sleep(0.1)
device.emit_click(uinput.KEY_COMMA)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Back'
if inProgram == 'links': # Cancel
device.emit_click(uinput.KEY_ESC)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Cancel'
if inProgram == 'irssi': # Part
device.emit_click(uinput.KEY_SLASH)
time.sleep(0.01)
device.emit_click(uinput.KEY_P)
time.sleep(0.01)
device.emit_click(uinput.KEY_A)
time.sleep(0.01)
device.emit_click(uinput.KEY_R)
time.sleep(0.01)
device.emit_click(uinput.KEY_T)
time.sleep(0.01)
device.emit_click(uinput.KEY_ENTER)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Part'
if inProgram == 'zsh': # Cancel
device.emit_combo([
uinput.KEY_LEFTCTRL,
uinput.KEY_C,
])
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Cancel'
def f5(inProgram):
print termcolour.PINK + 'F5 Pressed' + termcolour.WHITE
print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
if inProgram == 'viui': # Open menu item 5
device.emit_click(uinput.KEY_5)
time.sleep(0.01)
device.emit_click(uinput.KEY_ENTER)
time.sleep(0.01)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F5'
if inProgram == 'nano': # Cut
device.emit_combo([
uinput.KEY_LEFTCTRL,
uinput.KEY_K,
])
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Cut'
if inProgram == 'newsbeuter': # Reload
device.emit_click(uinput.KEY_R)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Reload'
if inProgram == 'alpine': # Journal
device.emit_click(uinput.KEY_J)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Journal'
if inProgram == 'irssi': # Query
device.emit_click(uinput.KEY_SLASH)
time.sleep(0.01)
device.emit_click(uinput.KEY_Q)
time.sleep(0.01)
device.emit_click(uinput.KEY_U)
time.sleep(0.01)
device.emit_click(uinput.KEY_E)
time.sleep(0.01)
device.emit_click(uinput.KEY_R)
time.sleep(0.01)
device.emit_click(uinput.KEY_Y)
time.sleep(0.01)
        device.emit_click(uinput.KEY_SPACE)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Query'
def f6(inProgram):
print termcolour.PINK + 'F6 Pressed' + termcolour.WHITE
print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
if inProgram == 'viui': # Open menu item 6
device.emit_click(uinput.KEY_6)
time.sleep(0.01)
device.emit_click(uinput.KEY_ENTER)
time.sleep(0.01)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F6'
if inProgram == 'nano': # Uncut
device.emit_combo([
uinput.KEY_LEFTCTRL,
uinput.KEY_U,
])
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Uncut'
    if inProgram == 'newsbeuter': # Open next unread
        device.emit_click(uinput.KEY_N)
        print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Next Unread'
if inProgram == 'alpine': # Address
device.emit_click(uinput.KEY_A)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Address'
if inProgram == 'irssi': # Previous window
device.emit_combo([
            uinput.KEY_LEFTCTRL,
uinput.KEY_P,
])
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Previous window'
def f7(inProgram):
print termcolour.PINK + 'F7 Pressed' + termcolour.WHITE
print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
if inProgram == 'viui': # Open menu item 7
device.emit_click(uinput.KEY_7)
time.sleep(0.01)
device.emit_click(uinput.KEY_ENTER)
time.sleep(0.01)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F7'
if inProgram == 'nano': # Find
device.emit_combo([
uinput.KEY_LEFTCTRL,
uinput.KEY_W,
])
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Find'
if inProgram == 'newsbeuter': # Open in browser
device.emit_click(uinput.KEY_O)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Open in Browser'
if inProgram == 'alpine': # Setup email
device.emit_click(uinput.KEY_S)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Setup'
if inProgram == 'links': # Find on page
device.emit_click(uinput.KEY_SLASH)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Find'
if inProgram == 'irssi': # Next window
device.emit_combo([
            uinput.KEY_LEFTCTRL,
uinput.KEY_N,
])
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Next Window'
def f8(inProgram):
print termcolour.PINK + 'F8 Pressed' + termcolour.WHITE
print termcolour.GREEN + 'Program:' + termcolour.WHITE, inProgram
if inProgram == 'viui': # Open menu item 8
device.emit_click(uinput.KEY_8)
time.sleep(0.01)
device.emit_click(uinput.KEY_ENTER)
time.sleep(0.01)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'F8'
if inProgram == 'nano': # Exit menu or program
device.emit_combo([
uinput.KEY_LEFTCTRL,
uinput.KEY_X,
])
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Quit'
if inProgram == 'newsbeuter': # Quit
device.emit_click(uinput.KEY_Q)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Quit'
if inProgram == 'alpine': # Quit
device.emit_click(uinput.KEY_Q)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Quit'
if inProgram == 'links': # Quit
device.emit_click(uinput.KEY_Q)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Quit'
if inProgram == 'irssi': # Quit
device.emit_click(uinput.KEY_SLASH)
time.sleep(0.01)
device.emit_click(uinput.KEY_Q)
time.sleep(0.01)
device.emit_click(uinput.KEY_U)
time.sleep(0.01)
device.emit_click(uinput.KEY_I)
time.sleep(0.01)
device.emit_click(uinput.KEY_T)
time.sleep(0.01)
device.emit_click(uinput.KEY_ENTER)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Quit'
if inProgram == 'zsh': # Quit
device.emit_combo([
uinput.KEY_LEFTCTRL,
uinput.KEY_D,
])
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Quit'
if inProgram == 'man': # Quit
device.emit_click(uinput.KEY_Q)
print termcolour.GREEN + 'Command:' + termcolour.WHITE, 'Quit'
# Main Program
print termcolour.PINK + '~ Daemon initialising ~' + termcolour.WHITE
# Check if program was run with an arguement
if len(sys.argv) > 1:
print termcolour.GREEN + 'Argument:' + termcolour.WHITE, str(sys.argv[1])
program = str(sys.argv[1])
else:
print termcolour.YELLOW + 'No args, what are you even doing?' + termcolour.WHITE
program = ''
# Python-uinput is a quality interface. To find key codes, check /usr/include/linux/input.h
device = uinput.Device([
uinput.KEY_A,
uinput.KEY_B,
uinput.KEY_C,
uinput.KEY_D,
uinput.KEY_E,
uinput.KEY_F,
uinput.KEY_G,
uinput.KEY_H,
uinput.KEY_I,
uinput.KEY_J,
uinput.KEY_K,
uinput.KEY_L,
uinput.KEY_M,
uinput.KEY_N,
uinput.KEY_O,
uinput.KEY_P,
uinput.KEY_Q,
uinput.KEY_R,
uinput.KEY_S,
uinput.KEY_T,
uinput.KEY_U,
uinput.KEY_V,
uinput.KEY_W,
uinput.KEY_X,
uinput.KEY_Y,
uinput.KEY_Z,
uinput.KEY_1,
uinput.KEY_2,
uinput.KEY_3,
uinput.KEY_4,
uinput.KEY_5,
uinput.KEY_6,
uinput.KEY_7,
uinput.KEY_8,
uinput.KEY_9,
uinput.KEY_0,
uinput.KEY_TAB,
uinput.KEY_ENTER,
uinput.KEY_SPACE,
uinput.KEY_DOT,
uinput.KEY_COMMA,
uinput.KEY_SLASH,
uinput.KEY_BACKSLASH,
uinput.KEY_LEFTCTRL,
uinput.KEY_LEFTALT,
uinput.KEY_LEFTSHIFT,
uinput.KEY_BACKSPACE,
uinput.KEY_PAGEDOWN,
uinput.KEY_PAGEUP,
uinput.KEY_UP,
uinput.KEY_LEFT,
uinput.KEY_RIGHT,
uinput.KEY_DOWN,
uinput.KEY_ESC,
uinput.KEY_F1,
uinput.KEY_F2,
uinput.KEY_F3,
uinput.KEY_F4,
uinput.KEY_F5,
uinput.KEY_F6,
uinput.KEY_F7,
uinput.KEY_F8,
uinput.KEY_F9,
uinput.KEY_F10,
uinput.KEY_F11,
uinput.KEY_F12,
])
# Open serial device
ser = serial.Serial('/dev/ttyUSB0', 115200, timeout = 1)
print termcolour.GREEN + 'Serial device opened:' + termcolour.WHITE, ser.name
# Mad Hacks go here
caps = 0
numb = False
if program == 'newsbeuter':
time.sleep(2.0)
device.emit_click(uinput.KEY_R)
time.sleep(3.0)
device.emit_click(uinput.KEY_ENTER)
# Polling for input
while 1:
sbuf = ser.read()
print 'Buffer Queue =', ser.inWaiting()
print 'Read =', sbuf
    # All values are in hex, not actual ascii, lol python
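    # Inferred byte layout: bits 5..0 of each chord byte correspond to braille
    # dots 1..6, e.g. 0x20 = dot 1 = 'a', 0x30 = dots 1+2 = 'b',
    # 0x24 = dots 1+4 = 'c'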
# Braille Modifier Characters
if sbuf == '\x01': # Caps
if caps > 1:
caps = 2
else:
caps = caps + 1
print termcolour.GREEN + 'Caps:' + termcolour.WHITE, caps
if sbuf == '\x0F': # Number
if numb == True:
numb = False
else:
numb = True
print termcolour.GREEN + 'Numb:' + termcolour.WHITE, numb
# Regular Keys
if sbuf == '\x20':
sendLetter('KEY_A')
if sbuf == '\x30':
sendLetter('KEY_B')
if sbuf == '\x24':
sendLetter('KEY_C')
if sbuf == '\x26':
sendLetter('KEY_D')
if sbuf == '\x22':
sendLetter('KEY_E')
if sbuf == '\x34':
sendLetter('KEY_F')
if sbuf == '\x36':
sendLetter('KEY_G')
if sbuf == '\x32':
sendLetter('KEY_H')
if sbuf == '\x14':
sendLetter('KEY_I')
if sbuf == '\x16':
sendLetter('KEY_J')
if sbuf == '\x28':
sendLetter('KEY_K')
if sbuf == '\x38':
sendLetter('KEY_L')
if sbuf == '\x2C':
sendLetter('KEY_M')
if sbuf == '\x2E':
sendLetter('KEY_N')
if sbuf == '\x2A':
sendLetter('KEY_O')
if sbuf == '\x3C':
sendLetter('KEY_P')
if sbuf == '\x3E':
sendLetter('KEY_Q')
if sbuf == '\x3A':
sendLetter('KEY_R')
if sbuf == '\x1C':
sendLetter('KEY_S')
if sbuf == '\x1E':
sendLetter('KEY_T')
if sbuf == '\x29':
sendLetter('KEY_U')
if sbuf == '\x39':
sendLetter('KEY_V')
if sbuf == '\x17':
sendLetter('KEY_W')
if sbuf == '\x2D':
sendLetter('KEY_X')
if sbuf == '\x2F':
sendLetter('KEY_Y')
if sbuf == '\x2B':
sendLetter('KEY_Z')
if sbuf == '\x10':
device.emit_click(uinput.KEY_COMMA)
if sbuf == '\x13':
device.emit_click(uinput.KEY_DOT)
if sbuf == '\x0C':
device.emit_click(uinput.KEY_SLASH)
if sbuf == '\x60':
device.emit_click(uinput.KEY_SPACE)
caps = 0
        numb = False
# IBM Compatable PC Keys
if sbuf == '\x40':
device.emit_click(uinput.KEY_ESC)
if sbuf == '\x41':
device.emit_click(uinput.KEY_UP)
if sbuf == '\x42':
device.emit_click(uinput.KEY_LEFT)
if sbuf == '\x43':
device.emit_click(uinput.KEY_RIGHT)
if sbuf == '\x44':
device.emit_click(uinput.KEY_DOWN)
if sbuf == '\x45':
device.emit_click(uinput.KEY_ENTER)
if sbuf == '\x46':
device.emit_click(uinput.KEY_BACKSPACE)
if sbuf == '\x47':
device.emit_click(uinput.KEY_PAGEUP)
if sbuf == '\x48':
device.emit_click(uinput.KEY_PAGEDOWN)
# Macro Keys
if sbuf == '\x81': #129
f1(program)
if sbuf == '\x82': #130
f2(program)
if sbuf == '\x83': #131
f3(program)
if sbuf == '\x84': #132
f4(program)
if sbuf == '\x85': #133
f5(program)
if sbuf == '\x86': #134
f6(program)
if sbuf == '\x87': #135
f7(program)
if sbuf == '\x88': #136
f8(program) | mit | -6,499,945,008,790,168,000 | 27.81399 | 90 | 0.674668 | false |
JNeiger/robocup-software | soccer/gameplay/plays/no_opponents/offensive_pivot_kick.py | 1 | 1448 | import play
import behavior
import robocup
import skills.line_kick
import tactics.defense
import main
import constants
import enum
import role_assignment
class OffensivePivotKick(play.Play):
def __init__(self):
super().__init__(continuous=False)
self.add_transition(behavior.Behavior.State.start,
behavior.Behavior.State.running, lambda: True,
'immediately')
self.add_transition(
behavior.Behavior.State.running, behavior.Behavior.State.completed,
lambda: self.has_subbehavior_with_name('kicker') and self.subbehavior_with_name('kicker').is_done_running(),
"kicker finished")
def on_enter_running(self):
kicker = skills.pivot_kick.PivotKick()
kicker.target = constants.Field.TheirGoalSegment
kicker.aim_params = {'error_threshold': .01,
'desperate_timeout': 10,
'max_steady_ang_vel': 4}
self.add_subbehavior(kicker, 'kicker', required=True, priority=100)
def on_exit_running(self):
self.remove_subbehavior('kicker')
@classmethod
def score(cls):
gs = main.game_state()
#Currently has lower priority than basic_122. Maybe add a check to see if we have all our robots?
return 15 if gs.is_playing() else float("inf")
@classmethod
def handles_goalie(self):
return False
| apache-2.0 | -2,246,814,608,390,388,200 | 31.177778 | 120 | 0.627072 | false |
FederatedAI/FATE | examples/benchmark_quality/hetero_nn/fate-hetero_nn.py | 1 | 6666 | #
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
from tensorflow.keras import initializers
from tensorflow.keras import optimizers
from tensorflow.keras.layers import Dense
from pipeline.backend.pipeline import PipeLine
from pipeline.component import DataIO
from pipeline.component import Evaluation
from pipeline.component import HeteroNN
from pipeline.component import Intersection
from pipeline.component import Reader
from pipeline.interface import Data, Model
from pipeline.utils.tools import load_job_config, JobConfig
from pipeline.runtime.entity import JobParameters
from federatedml.evaluation.metrics import classification_metric
from fate_test.utils import extract_data, parse_summary_result
def main(config="../../config.yaml", param="./hetero_nn_breast_config.yaml", namespace=""):
# obtain config
if isinstance(config, str):
config = load_job_config(config)
if isinstance(param, str):
param = JobConfig.load_from_file(param)
parties = config.parties
guest = parties.guest[0]
host = parties.host[0]
backend = config.backend
work_mode = config.work_mode
guest_train_data = {"name": param["guest_table_name"], "namespace": f"experiment{namespace}"}
host_train_data = {"name": param["host_table_name"], "namespace": f"experiment{namespace}"}
pipeline = PipeLine().set_initiator(role='guest', party_id=guest).set_roles(guest=guest, host=host)
reader_0 = Reader(name="reader_0")
reader_0.get_party_instance(role='guest', party_id=guest).component_param(table=guest_train_data)
reader_0.get_party_instance(role='host', party_id=host).component_param(table=host_train_data)
dataio_0 = DataIO(name="dataio_0")
dataio_0.get_party_instance(role='guest', party_id=guest).component_param(with_label=True)
dataio_0.get_party_instance(role='host', party_id=host).component_param(with_label=False)
intersection_0 = Intersection(name="intersection_0")
hetero_nn_0 = HeteroNN(name="hetero_nn_0", epochs=param["epochs"],
interactive_layer_lr=param["learning_rate"], batch_size=param["batch_size"],
early_stop="diff")
hetero_nn_0.add_bottom_model(Dense(units=param["bottom_layer_units"], input_shape=(10,), activation="tanh",
kernel_initializer=initializers.RandomUniform(minval=-1, maxval=1, seed=123)))
hetero_nn_0.set_interactve_layer(
Dense(units=param["interactive_layer_units"], input_shape=(param["bottom_layer_units"],), activation="relu",
kernel_initializer=initializers.RandomUniform(minval=-1, maxval=1, seed=123)))
hetero_nn_0.add_top_model(
Dense(units=param["top_layer_units"], input_shape=(param["interactive_layer_units"],),
activation=param["top_act"],
kernel_initializer=initializers.RandomUniform(minval=-1, maxval=1, seed=123)))
opt = getattr(optimizers, param["opt"])(lr=param["learning_rate"])
hetero_nn_0.compile(optimizer=opt, metrics=param["metrics"],
loss=param["loss"])
hetero_nn_1 = HeteroNN(name="hetero_nn_1")
if param["loss"] == "categorical_crossentropy":
eval_type = "multi"
else:
eval_type = "binary"
evaluation_0 = Evaluation(name="evaluation_0", eval_type=eval_type)
pipeline.add_component(reader_0)
pipeline.add_component(dataio_0, data=Data(data=reader_0.output.data))
pipeline.add_component(intersection_0, data=Data(data=dataio_0.output.data))
pipeline.add_component(hetero_nn_0, data=Data(train_data=intersection_0.output.data))
pipeline.add_component(hetero_nn_1, data=Data(test_data=intersection_0.output.data),
model=Model(hetero_nn_0.output.model))
pipeline.add_component(evaluation_0, data=Data(data=hetero_nn_0.output.data))
pipeline.compile()
job_parameters = JobParameters(backend=backend, work_mode=work_mode)
pipeline.fit(job_parameters)
nn_0_data = pipeline.get_component("hetero_nn_0").get_output_data().get("data")
nn_1_data = pipeline.get_component("hetero_nn_1").get_output_data().get("data")
nn_0_score = extract_data(nn_0_data, "predict_result")
nn_0_label = extract_data(nn_0_data, "label")
nn_1_score = extract_data(nn_1_data, "predict_result")
nn_1_label = extract_data(nn_1_data, "label")
nn_0_score_label = extract_data(nn_0_data, "predict_result", keep_id=True)
nn_1_score_label = extract_data(nn_1_data, "predict_result", keep_id=True)
metric_summary = parse_summary_result(pipeline.get_component("evaluation_0").get_summary())
if eval_type == "binary":
metric_nn = {
"score_diversity_ratio": classification_metric.Distribution.compute(nn_0_score_label, nn_1_score_label),
"ks_2samp": classification_metric.KSTest.compute(nn_0_score, nn_1_score),
"mAP_D_value": classification_metric.AveragePrecisionScore().compute(nn_0_score, nn_1_score, nn_0_label,
nn_1_label)}
metric_summary["distribution_metrics"] = {"hetero_nn": metric_nn}
elif eval_type == "multi":
metric_nn = {
"score_diversity_ratio": classification_metric.Distribution.compute(nn_0_score_label, nn_1_score_label)}
metric_summary["distribution_metrics"] = {"hetero_nn": metric_nn}
data_summary = {"train": {"guest": guest_train_data["name"], "host": host_train_data["name"]},
"test": {"guest": guest_train_data["name"], "host": host_train_data["name"]}
}
return data_summary, metric_summary
if __name__ == "__main__":
parser = argparse.ArgumentParser("BENCHMARK-QUALITY PIPELINE JOB")
parser.add_argument("-config", type=str,
help="config file")
parser.add_argument("-param", type=str,
help="config file for params")
args = parser.parse_args()
if args.config is not None:
main(args.config, args.param)
else:
main()
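# Example invocation (sketch):
#   python fate-hetero_nn.py -config ../../config.yaml -param ./hetero_nn_breast_config.yaml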
| apache-2.0 | 4,090,263,162,999,835,600 | 46.276596 | 117 | 0.671467 | false |
SoftwareKing/zstack-woodpecker | zstackwoodpecker/zstackwoodpecker/operations/config_operations.py | 1 | 1270 | '''
All zstack config operations
@author: Youyk
'''
import apibinding.api_actions as api_actions
import apibinding.inventory as inventory
import account_operations
import os
def get_global_config_value(category, name, session_uuid = None, \
        default_value = False):
    action = api_actions.GetGlobalConfigAction()
    action.category = category
    action.name = name
    result = account_operations.execute_action_with_session(action, \
            session_uuid)
    if result:
        if default_value:
            # return the configured default instead of the current value;
            # assumes the GlobalConfig inventory exposes a defaultValue field
            return result.inventory.defaultValue
        return result.inventory.value
def get_global_config_default_value(category, name, session_uuid = None):
    return get_global_config_value(category, name, session_uuid, True)
def change_global_config(category, name, value, session_uuid = None):
default_value = get_global_config_default_value(category, name, session_uuid)
pre_value = get_global_config_value(category, name, session_uuid)
action = api_actions.UpdateGlobalConfigAction()
action.category = category
action.name = name
action.defaultValue = str(default_value)
if value:
action.value = str(value)
account_operations.execute_action_with_session(action, session_uuid)
return pre_value
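# Example (sketch; the category/name pair is illustrative):
#   old = change_global_config('ha', 'host.check.interval', '5')
#   ... run the test ...
#   change_global_config('ha', 'host.check.interval', old)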
| apache-2.0 | 3,635,980,917,824,433,000 | 28.238095 | 81 | 0.692126 | false |
wevote/WebAppPublic | position/urls.py | 1 | 3102 | # position/urls.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
from django.conf.urls import url
from . import views, views_admin
urlpatterns = [
# admin_views.py
url(r'^$', views_admin.position_list_view, name='position_list',),
url(r'^delete/$', views_admin.position_delete_process_view, name='position_delete_process',),
url(r'^edit_process/$', views_admin.position_edit_process_view, name='position_edit_process'),
url(r'^export/', views_admin.PositionsSyncOutView.as_view(), name='positions_export'),
url(r'^import/$',
views_admin.positions_import_from_master_server_view, name='positions_import_from_master_server'),
url(r'^new/$', views_admin.position_new_view, name='position_new',),
url(r'^relink_candidates_measures/$', views_admin.relink_candidates_measures_view,
name='relink_candidates_measures'),
url(r'^(?P<position_we_vote_id>wv[\w]{2}pos[\w]+)/edit/$', views_admin.position_edit_view, name='position_edit'),
url(r'^(?P<position_we_vote_id>wv[\w]{2}pos[\w]+)/summary/$',
views_admin.position_summary_view, name='position_summary'),
# # These pages are used to return the div popup page with details about all supporters, opposers, etc.
# # Any position that this voter isn't already following
# url(r'^cand/(?P<candidate_campaign_id>[0-9]+)/anyposition/$',
# views_admin.positions_display_list_related_to_candidate_campaign_any_position_view,
# name='positions_display_list_related_to_candidate_campaign_any_position_view'),
# # Candidate Supporters
# url(r'^cand/(?P<candidate_campaign_id>[0-9]+)/supporters/$',
# views_admin.positions_display_list_related_to_candidate_campaign_supporters_view,
# name='positions_display_list_related_to_candidate_campaign_supporters_view'),
# # Candidate Opposers
# url(r'^cand/(?P<candidate_campaign_id>[0-9]+)/opposers/$',
# views_admin.positions_display_list_related_to_candidate_campaign_opposers_view,
# name='positions_display_list_related_to_candidate_campaign_opposers_view'),
# # Candidate No Stance, Comments, Information only
# url(r'^cand/(?P<candidate_campaign_id>[0-9]+)/infoonlylist/$',
# views_admin.positions_display_list_related_to_candidate_campaign_information_only_view,
# name='positions_display_list_related_to_candidate_campaign_information_only_view'),
# # Candidate - Still Deciding
# url(r'^cand/(?P<candidate_campaign_id>[0-9]+)/deciders/$',
# views_admin.positions_display_list_related_to_candidate_campaign_deciders_view,
# name='positions_display_list_related_to_candidate_campaign_deciders_view'),
#
# # Measures
# url(r'^ms/(?P<contest_measure_id>[0-9]+)/oppose/$',
# views.positions_related_to_contest_measure_oppose_view,
# name='positions_related_to_contest_measure_oppose_view'),
# url(r'^ms/(?P<contest_measure_id>[0-9]+)/support/$',
# views.positions_related_to_contest_measure_support_view,
# name='positions_related_to_contest_measure_support_view'),
]
| bsd-3-clause | -570,835,136,176,265,540 | 58.653846 | 117 | 0.683752 | false |
wadobo/GECO | src/gecod/gecod/xmlrpc_frontend.py | 1 | 3242 | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Provides a xmlrpc frontend to gecod backend
'''
import backend
import secure_xmlrpc as sxmlrpc
HOST = 'localhost'
PORT = 4343
DATABASE = 'sqlite:///database.sqlite'
KEYFILE='certs/key.pem'
CERTFILE='certs/cert.pem'
def parseconfig(configfile):
global HOST, PORT, DATABASE, KEYFILE, CERTFILE
options = open(configfile).readlines()
    for opt in options:
        line = opt.strip()
        if not line or line.startswith('#'):
            continue  # tolerate blank lines and comments in the config file
        k, v = line.split('=', 1)
k = k.strip().lower()
v = v.strip()
if k == 'host':
HOST = v
elif k == 'port':
PORT = int(v)
elif k == 'database':
DATABASE = v
elif k == 'keyfile':
KEYFILE = v
elif k == 'certfile':
CERTFILE = v
class frontend:
def __init__(self):
pass
def auth(self, user, password):
'''
Return the cookie.
'''
return backend.auth(user, 'password',
password=password)
def logout(self, cookie):
backend.logout(cookie)
def register(self, user, password):
backend.register(user, password)
def unregister(self, cookie):
backend.unregister(cookie)
def change_password(self, cookie, new_password):
backend.change_password(cookie, new_password)
def change_attr(self, cookie, name, args):
'''
args is a dict with possible keys:
type, description, account, expiration, password
expiration must be a datetime
'''
backend.change_attr(cookie, name, **args)
def check_user_name(self, name):
return backend.check_user_name(name)
def set_password(self, cookie, name, password, args):
'''
args is a dict with possible keys:
type, description, account, expiration
expiration must be an integer (days)
'''
backend.set_password(cookie, name, password, **args)
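    # Illustrative call (hypothetical values, not from the original source):
    # store a web password entry that expires in 30 days.
    #   frontend().set_password(cookie, 'mail', 's3cret',
    #                           {'type': 'web', 'expiration': 30})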
def del_password(self, cookie, name):
backend.del_password(cookie, name)
def get_password(self, cookie, name):
p = backend.get_password(cookie, name)
return p
def get_passwords(self, cookie, args):
'''
args is a dict with possible keys:
name, type, updated, expiration, account
'''
p = backend.get_passwords_by(cookie, **args)
return [i for i in p]
def get_all_passwords(self, cookie):
'''
Return all passwords of user
'''
p = backend.get_all_passwords(cookie)
return [i for i in p]
def export(self, cookie):
'''
Returns a string with all passwords
ready to import
'''
return backend.export(cookie)
def restore(self, cookie, data):
'''
Restore data from a backup doit with export
'''
backend.restore(cookie, data)
def start_server():
sxmlrpc.EasyServer(HOST, PORT, frontend())
def main(config='gecod-xmlrpc.conf'):
parseconfig(config)
backend.DATABASE = DATABASE
sxmlrpc.KEYFILE = KEYFILE
sxmlrpc.CERTFILE = CERTFILE
try:
start_server()
except KeyboardInterrupt:
print "Closing"
if __name__ == '__main__':
main()
| gpl-3.0 | -6,722,768,706,466,853,000 | 22.664234 | 60 | 0.572795 | false |
prior/webinars | webinars_web/webinars/views/events.py | 1 | 10543 | from operator import attrgetter
from django.http import HttpResponse,HttpResponseRedirect,HttpResponseNotFound,HttpResponseForbidden
from django.shortcuts import render_to_response
from django.views.decorators.http import require_GET, require_POST
from webinars_web.webinars.forms.event import EventForm
from marketplace.decorators import marketplace
from sanetime import time
from django.conf import settings
import hapi.leads
#from django.core import management
from django.template import RequestContext
from webinars_web.webinars import utils
import csv
import logging
def bucket_events(hub):
from webinars_web.webinars import models as wm
events = wm.Event.objects.filter(
account__hub=hub, deleted_at__isnull=True).select_related('current_sync','account').extra(
select={'registrant_count': 'SELECT COUNT(*) FROM webinars_registrant WHERE webinars_registrant.event_id=webinars_event.id'}).extra(
select={'attendant_count': 'SELECT COUNT(*) FROM webinars_registrant WHERE webinars_registrant.event_id=webinars_event.id AND started_at IS NOT NULL'})
events = sorted(events, key=attrgetter('starts_at'), reverse=True)
event_ids_form_ids = [(ef.event_id, ef.cms_form_id) for ef in wm.EventForm.objects.filter(event__in=wm.Event.objects.filter(account__hub=hub, deleted_at__isnull=True), cms_form__is_sync_target=False)]
event_id_to_form_ids_map = {}
for event_id, form_id in event_ids_form_ids:
event_id_to_form_ids_map.setdefault(event_id,[]).append(form_id)
form_ids_lps = [(lp.cms_form_id, lp) for lp in wm.LandingPage.objects.filter(cms_form__in=set(ef[1] for ef in event_ids_form_ids))]
form_id_to_lp_map = {}
for form_id, lp in form_ids_lps:
form_id_to_lp_map.setdefault(form_id,[]).append(lp)
    for event in events: #TODO: this is creating a 2N+1 situation -- need to refactor!
event.landing_pages = []
for form_id in event_id_to_form_ids_map.get(event.id,[]):
event.landing_pages.extend(form_id_to_lp_map[form_id])
now = time()
return utils.partition(events, lambda e: (e.ended_at < now), [True,False])
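
# Sketch of the partition semantics relied on above and in show() below
# (hypothetical stand-in for webinars_web.webinars.utils.partition, shown only
# to make the bucketing readable): items are grouped by keyfunc into the
# given bucket keys.
#   def partition(items, keyfunc, keys):
#       buckets = dict((k, []) for k in keys)
#       for item in items:
#           buckets[keyfunc(item)].append(item)
#       return buckets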
@marketplace
@require_GET
def _list(request, which): # returns the partial list requested (future or past)-- used by ajax table replace
from webinars_web.webinars import models as wm
hub = wm.Hub.ensure(request.marketplace.hub_id)
buckets = bucket_events(hub)
is_future = which.lower()=='future'
is_past = not is_future
return render_to_response('events/_list.djml', {
'events': buckets[is_past],
'past': is_past,
'empty_callout': is_future
}, context_instance=RequestContext(request))
@marketplace
@require_GET
def list(request):
from webinars_web.webinars import models as wm
hub = wm.Hub.ensure(request.marketplace.hub_id, select_related=['current_sync','last_sync'])
buckets = bucket_events(hub)
return render_to_response('events/list.djml', {
'future_events': buckets[False],
'past_events': buckets[True],
'hub': hub,
}, context_instance=RequestContext(request))
def filter_registrants(registrants, segment):
if segment == 'noshows': return [r for r in registrants if not r.get('started_at')]
elif segment == 'attendees': return [r for r in registrants if r.get('started_at')]
else: return registrants
@marketplace
@require_GET
def export(request, event_id, segment):
if segment not in ['noshows', 'attendees', 'registrants']: return HttpResponseForbidden()
attrs = ['first_name', 'last_name', 'email']
from webinars_web.webinars import models as wm
registrant_set = wm.Event.objects.filter(pk=event_id)[0].registrant_set.values()
logging.debug('CSVDEBUG: event=%s, segment=%s' % (event_id, segment))
name = '%s%s' % (segment, event_id)
logging.debug('CSVDEBUG: filename=%s' % name)
people = filter_registrants(registrant_set, segment)
response = HttpResponse(mimetype='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % name
writer = csv.writer(response)
writer.writerow(['FirstName', 'LastName', 'Email'])
for p in people:
writer.writerow([p.get(attr).encode('utf-8') for attr in attrs])
return response
def get_fresh_last_modified_at(hub, guid):
leads_client = hapi.leads.LeadsClient(settings.HUBSPOT_API_KEY, hub_id=hub.id, env=settings.API_ENV)
leads = leads_client.get_leads(
time_pivot = 'lastModifiedAt',
sort = 'lastModifiedAt',
dir = 'desc',
max = 1,
form_guid = guid)
if leads:
return time(us=leads[0]['lastModifiedAt']*1000 + 1000)
else:
return time(0)
def new_or_edit(request, event_id=None):
from webinars_web.webinars import models as wm
hub = wm.Hub.ensure(request.marketplace.hub_id)
kwargs = {'hub':hub}
old_sync_leads_for_all_time = None
if event_id:
kwargs['instance']=wm.Event.objects.select_related('account').get(pk=event_id)
old_sync_leads_for_all_time = kwargs['instance'].sync_leads_for_all_time
if request.method == 'POST': # If the form has been submitted...
form = EventForm(request.POST, **kwargs) # A form bound to the POST data
if form.is_valid(): # All validation rules pass
# Process the data in form.cleaned_data
# ...
event = form.save(commit=False)
tz = kwargs.get('instance') and kwargs['instance'].starts_at.tz or hub.timezone
event.starts_at = time(form.cleaned_data['starts_at_ndt'], tz)
event.duration = int(form.cleaned_data['duration'])
event.ensure_hashcode()
event.save()
old_cms_forms = dict((cf.guid, cf) for cf in event.cms_forms.all())
new_cms_forms = dict((cf.guid, cf) for cf in form.cleaned_data['cms_forms'])
for guid in (set(new_cms_forms) - set(old_cms_forms)):
wm.EventForm.objects.create(cms_form=new_cms_forms[guid], event=event, last_last_modified_at = not event.sync_leads_for_all_time and get_fresh_last_modified_at(hub, guid) or 0, converted_at_cutoff = not event.sync_leads_for_all_time and time() or 0)
for guid in (set(old_cms_forms) - set(new_cms_forms)):
wm.EventForm.objects.filter(cms_form=old_cms_forms[guid], event=event).delete()
if old_sync_leads_for_all_time is not None and old_sync_leads_for_all_time != event.sync_leads_for_all_time:
for event_form in event.eventform_set.all():
if event.sync_leads_for_all_time:
event_form.last_last_modified_at = 0
event_form.converted_at_cutoff = 0
                # doing the else doesn't really make sense because we could already have been syncing before
event_form.save()
return HttpResponseRedirect('%sevents'%request.marketplace.base_url) # Redirect after POST
else:
wm.CmsForm.sync(hub)
form = EventForm(**kwargs) # An unbound form
return render_to_response('events/%s.djml'%(event_id and 'edit' or 'new'), {
'form': form,
}, context_instance=RequestContext(request))
@marketplace
def new(request):
return new_or_edit(request)
@marketplace
def edit(request, event_id):
return new_or_edit(request, event_id)
@marketplace
@require_POST
def destroy(request, event_id):
from webinars_web.webinars import models as wm
try:
event = wm.Event.objects.get(pk=event_id)
except Exception:
return HttpResponseNotFound()
if event.account.hub_id != request.marketplace.hub_id:
return HttpResponseForbidden()
event.deleted_at = time()
event.save()
return HttpResponse()
@marketplace
def show(request, event_id):
from webinars_web.webinars import models as wm
hub = wm.Hub.ensure(request.marketplace.hub_id)
try:
event = wm.Event.objects.select_related('account','account__hub').get(pk=event_id)
    except Exception:
return HttpResponseNotFound()
if event.account.hub_id != hub.id:
return HttpResponseForbidden()
registrants = event.registrant_set.select_related('cms_form').extra(
select = { 'durationx': 'IF(ISNULL(stopped_at) OR ISNULL(started_at), NULL, stopped_at-started_at)' },
order_by = ['-durationx']
)
for r in registrants:
r.event = event
lps = [lp for lp in wm.LandingPage.objects.filter(cms_form__event=event)]
forms_to_lps = {}
for lp in lps:
forms_to_lps.setdefault(lp.cms_form.guid,[]).append(lp)
for r in registrants:
if r.effective_duration:
if not r.cms_form or r.cms_form.is_sync_target:
r.landing_pages = []
else:
r.landing_pages = forms_to_lps[r.cms_form.guid]
now = time()
if event._time_ended_at or event.ends_at < now:
partitioned_registrants = utils.partition(registrants, lambda r: bool(r.started_at and r.stopped_at), [True, False])
return render_to_response('events/show.djml', {
'event': event,
'future': False,
'registrants': registrants,
'registrants_count': len(registrants),
'attendees': partitioned_registrants[True],
'attendees_count': len(partitioned_registrants[True]),
'noshows': partitioned_registrants[False],
'noshows_count': len(partitioned_registrants[False]),
'MARKETPLACE_SLUG': settings.MARKETPLACE_SLUG,
}, context_instance=RequestContext(request))
else:
return render_to_response('events/show.djml', {
'event': event,
'future': True,
'registrants': registrants,
'registrants_count': len(registrants),
'MARKETPLACE_SLUG': settings.MARKETPLACE_SLUG,
}, context_instance=RequestContext(request))
def sync(request, event_id):
from webinars_web.webinars import models as wm
force = request.REQUEST.get('force') and True or False
postbin = request.REQUEST.get('postbin') or None
auto = (request.REQUEST.get('auto') is None or request.REQUEST.get('auto').lower()!='false') and True or False
event = wm.Event.objects.get(pk=event_id)
sync_stages = event.trigger_sync(force=force, auto=auto)
return render_to_response('events/trigger_sync.djml', {'event':event, 'sync_stages':sync_stages, 'postbin':postbin}, context_instance=RequestContext(request))
| apache-2.0 | -1,903,053,518,049,108,200 | 44.640693 | 265 | 0.656455 | false |
edmundgentle/schoolscript | SchoolScript/bin/Debug/pythonlib/Lib/distutils/command/bdist_dumb.py | 1 | 4801 | """distutils.command.bdist_dumb
Implements the Distutils 'bdist_dumb' command (create a "dumb" built
distribution -- i.e., just an archive to be unpacked under $prefix or
$exec_prefix)."""
__revision__ = "$Id$"
import os
from distutils.core import Command
from distutils.util import get_platform
from distutils.dir_util import remove_tree, ensure_relative
from distutils.errors import *
from distutils.sysconfig import get_python_version
from distutils import log
class bdist_dumb(Command):
description = "create a \"dumb\" built distribution"
user_options = [('bdist-dir=', 'd',
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('format=', 'f',
"archive format to create (tar, ztar, gztar, zip)"),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('relative', None,
"build the archive using relative paths"
"(default: false)"),
]
boolean_options = ['keep-temp', 'skip-build', 'relative']
default_format = { 'posix': 'gztar',
'nt': 'zip',
'os2': 'zip' }
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
self.format = None
self.keep_temp = 0
self.dist_dir = None
self.skip_build = 0
self.relative = 0
def finalize_options(self):
if self.bdist_dir is None:
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'dumb')
if self.format is None:
try:
self.format = self.default_format[os.name]
except KeyError:
raise DistutilsPlatformError(
"don't know how to create dumb built distributions "
"on platform %s" % os.name)
self.set_undefined_options('bdist',
('dist_dir', 'dist_dir'),
('plat_name', 'plat_name'))
def run(self):
if not self.skip_build:
self.run_command('build')
install = self.reinitialize_command('install', reinit_subcommands=1)
install.root = self.bdist_dir
install.skip_build = self.skip_build
install.warn_dir = 0
log.info("installing to %s" % self.bdist_dir)
self.run_command('install')
# And make an archive relative to the root of the
# pseudo-installation tree.
archive_basename = "%s.%s" % (self.distribution.get_fullname(),
self.plat_name)
# OS/2 objects to any ":" characters in a filename (such as when
# a timestamp is used in a version) so change them to hyphens.
if os.name == "os2":
archive_basename = archive_basename.replace(":", "-")
pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
if not self.relative:
archive_root = self.bdist_dir
else:
if (self.distribution.has_ext_modules() and
(install.install_base != install.install_platbase)):
raise DistutilsPlatformError(
"can't make a dumb built distribution where "
"base and platbase are different (%s, %s)"
% (repr(install.install_base),
repr(install.install_platbase)))
else:
archive_root = os.path.join(self.bdist_dir,
ensure_relative(install.install_base))
# Make the archive
filename = self.make_archive(pseudoinstall_root,
self.format, root_dir=archive_root)
if self.distribution.has_ext_modules():
pyversion = get_python_version()
else:
pyversion = 'any'
self.distribution.dist_files.append(('bdist_dumb', pyversion,
filename))
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run)
| gpl-2.0 | -1,751,620,946,305,370,000 | 38.008333 | 76 | 0.519267 | false |
Fabien-B/Web_ASA_Sourdoire | www/parcelle.py | 1 | 4236 | import mysql.connector
import datetime
class Parcelle(object):
database = 'IENAC14_asa'
user = 'root'
password = 'root'
host = '127.0.0.1'
def __init__(self,id_parc,compteur=None,nom=None,lat=None,lon=None,altitude=None):
if id_parc>0:
self.load(id_parc)
else:
connection = mysql.connector.connect(user=Parcelle.user, password=Parcelle.password,host=Parcelle.host,database=Parcelle.database)
curseur = connection.cursor()
requete = 'select max(Id_parcelle) from Parcelle;'
curseur.execute(requete)
(maxId,)=curseur.fetchall()[0]
self.id = maxId + 1
self.compteur = compteur
self.nom = nom
self.lat = lat
self.lon = lon
self.altitude = altitude
curseur.close()
connection.close()
def save(self):
if self.compteur == None:
            raise ParcelleError("compteur missing; cannot create parcelle")
connection = mysql.connector.connect(user=Parcelle.user, password=Parcelle.password,host=Parcelle.host,database=Parcelle.database)
curseur = connection.cursor()
        # Use a parameterized query so the connector quotes string values
        # (nom) correctly and the insert is safe against SQL injection; the
        # previous str.format() version produced invalid SQL for string fields.
        requete = "INSERT INTO Parcelle VALUES (%s, %s, %s, %s, %s, %s);"
        curseur.execute(requete, (self.id, self.compteur, self.nom, self.lat, self.lon, self.altitude))
connection.commit()
curseur.close()
connection.close()
def load(self,id_parc):
connection = mysql.connector.connect(user=Parcelle.user, password=Parcelle.password,host=Parcelle.host,database=Parcelle.database)
curseur = connection.cursor()
requete = 'select * from Parcelle where Id_parcelle={};'.format(id_parc)
curseur.execute(requete)
try:
(_,compteur,nom,lat,lon,altitude)=curseur.fetchall()[0]
except IndexError:
raise ParcelleError("Parcelle with id {} doesn't exist".format(id_parc))
curseur.close()
connection.close()
self.id = id_parc
self.compteur = compteur
self.nom = nom
self.lat = lat
self.lon = lon
self.altitude = altitude
def release_my_ornot(self, exploitant=0):
connection = mysql.connector.connect(user=Parcelle.user, password=Parcelle.password,host=Parcelle.host,database=Parcelle.database)
curseur = connection.cursor()
actualtime = str(datetime.datetime.now())
requete = 'UPDATE Propriete SET date_fin="{1}" WHERE Id_parcelle={0} AND date_fin IS NULL;'.format(self.id, actualtime)
curseur.execute(requete)
requete = 'select max(Id_propriete) from Propriete;'
curseur.execute(requete)
(maxId,)=curseur.fetchall()[0]
if exploitant==0:
requete = 'INSERT INTO Propriete VALUES({2}, {0}, 0, "{1}", NULL);'.format(self.id, actualtime, maxId+1)
else:
requete = 'INSERT INTO Propriete VALUES({2}, {0}, {3}, "{1}", NULL);'.format(self.id, actualtime, maxId+1, exploitant.id)
curseur.execute(requete)
connection.commit()
curseur.close()
connection.close()
@staticmethod
def get_exploitant_parcelle_id(id_ex):
connection = mysql.connector.connect(user=Parcelle.user, password=Parcelle.password,host=Parcelle.host,database=Parcelle.database)
curseur = connection.cursor()
if id_ex == 0:
requete = 'select Id_parcelle FROM Parcelle;'
elif id_ex == -1: #parcelles libres
requete = 'select Parcelle.Id_parcelle FROM Parcelle,Propriete WHERE Propriete.Id_parcelle = Parcelle.Id_parcelle AND Id_exploitant = 0 AND date_fin IS NULL ORDER BY Parcelle.Id_parcelle;'
else:
requete = 'select Parcelle.Id_parcelle FROM Parcelle,Propriete WHERE Propriete.Id_parcelle = Parcelle.Id_parcelle AND Id_exploitant = {0} AND date_fin IS NULL ORDER BY Parcelle.Id_parcelle;'.format(id_ex)
curseur.execute(requete)
id_parc = curseur.fetchall()
curseur.close()
connection.close()
id_parc_list = []
for (id,) in id_parc:
id_parc_list.append(id)
return id_parc_list
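# Illustrative sketch (assumes a reachable MySQL instance with the ASA
# schema): load an existing parcel, hand it back to the association
# (exploitant 0), then list the parcels that are now free.
def _exemple_liberation(id_parc):
    p = Parcelle(id_parc)
    p.release_my_ornot(0)
    return Parcelle.get_exploitant_parcelle_id(-1)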
class ParcelleError(Exception):
pass | lgpl-3.0 | -4,204,626,003,705,896,400 | 41.37 | 216 | 0.628895 | false |
saknis/upelis | logs4.py | 1 | 5178 | #!/usr/bin/env python
import base64
import cgi
import datetime
import logging
import os
import time
#from datetime import datetime, date, time
import urllib
import wsgiref.handlers
import string
from google.appengine.api import users
from google.appengine.api.logservice import logservice
from google.appengine.ext import db
#from google.appengine.ext import webapp
import webapp2 as webapp
# This sample gets the app request logs up to the current time, displays 5 logs
# at a time, including all AppLogs, with a Next link to let the user "page"
# through the results, using the RequestLog offset property.
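# Minimal sketch of the offset round-trip used by the handler below
# (assumption: logservice offsets are opaque byte strings). They are made
# URL-safe for the Next link and decoded back on the following request.
def _encode_offset(offset_bytes):
  return base64.urlsafe_b64encode(offset_bytes)

def _decode_offset(url_param):
  return base64.urlsafe_b64decode(str(url_param))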
class MainPage(webapp.RequestHandler):
def get(self):
logging.info('Starting Main handler')
# Get the incoming offset param from the Next link to advance through
# the logs. (The first time the page is loaded, there won't be any offset.)
start_time_set=False
try:
offset = self.request.get('offset') or None
if offset:
offset = base64.urlsafe_b64decode(str(offset))
except TypeError:
offset = None
try:
start_time = self.request.get('start_time') or None
if start_time:
start_time = float(base64.urlsafe_b64decode(str(start_time)))
start_time_set=True
except TypeError:
start_time = None
start_time_set=False
try:
filter = str(self.request.get('filter')) or None
except TypeError:
filter = None
# Set up end time for our query.
# Count specifies the max number of RequestLogs shown at one time.
    # Use a boolean to control visibility of the "Next" link.
count = 1000
show_next = True
last_offset = 5000
dt=datetime.datetime.now()
tt=dt.timetuple()
year=tt[0]
month=tt[1]
ttt=time.strptime((("01 %s %s") % (month,year)), "%d %m %Y")
if not start_time_set:
end_time = time.time()
start_time = time.mktime(ttt)
else:
dt2=datetime.datetime.utcfromtimestamp(float(start_time))
tt2=dt2.timetuple()
year2=tt2[0]
month2=tt2[1]
month2=month2+1
if month2==13:
month2=1
year2=year2+1
ttt2=time.strptime((("01 %s %s") % (month2,year2)), "%d %m %Y")
end_time=time.mktime(ttt2)
dt3=datetime.datetime.utcfromtimestamp(float(start_time))
tt3=dt3.timetuple()
year3=tt3[0]
month3=tt3[1]
month3=month3-1
if month3==0:
month3=12
year3=year3-1
ttt3=time.strptime((("01 %s %s") % (month3,year3)), "%d %m %Y")
start_time_next=time.mktime(ttt3)
# Iterate through all the RequestLog objects, displaying some fields and
# iterate through all AppLogs beloging to each RequestLog count times.
# In each iteration, save the offset to last_offset; the last one when
# count is reached will be used for the link.
i = 0
for req_log in logservice.fetch(start_time=start_time,end_time=end_time, offset=offset,
minimum_log_level=logservice.LOG_LEVEL_INFO,
include_app_logs=False):
ip=req_log.ip
status=str(req_log.status)
if filter and status and not string.find(status, filter) == -1:
# self.response.out.write("<br /> REQUEST LOG <br />")
# self.respons
self.response.out.write("""%s <br />""" % (req_log.combined))
i += 1
else:
if not filter:
self.response.out.write("""%s <br />""" % (req_log.combined))
i += 1
# self.response.out.write("""IP: %s <br /> Method: %s <br />
# Resource: %s <br />""" % (req_log.ip,
# req_log.method, req_log.resource))
# self.response.out.write("Date: "+datetime.datetime.fromtimestamp(req_log.end_time).strftime('%D %T UTC') +"<br />")
last_offset= req_log.offset
for app_log in req_log.app_logs:
self.response.out.write("<br />APP LOG<br />")
statslink = ("<a href=\"http://%s/stats/details?time=%s\">%s</a>" % (os.environ['HTTP_HOST'], app_log.time,app_log.time))
self.response.out.write("<br />STATS DETAILS: %s<br />" % (statslink))
self.response.out.write("Date: "+datetime.datetime.fromtimestamp(app_log.time).strftime('%Y-%m-%d %H:%M:%S UTC') +"<br />")
self.response.out.write("<br />Message: "+app_log.message+"<br />")
if i >= count:
show_next = True
break
# Prepare the offset URL parameters, if any.
if show_next:
query = self.request.GET
query['offset'] = base64.urlsafe_b64encode(last_offset)
query['start_time'] = base64.urlsafe_b64encode(("%s")%(start_time_next))
next_link = urllib.urlencode(query)
self.response.out.write("<a href=\"/logs4?"+next_link+"\">Next</a>")
self.response.out.write("<br />")
#def main():
logging.getLogger().setLevel(logging.DEBUG)
app = webapp.WSGIApplication([
('/logs4', MainPage),
], debug=True)
# wsgiref.handlers.CGIHandler().run(application)
#if __name__ == '__main__':
# main() | lgpl-2.1 | 5,720,461,625,587,779,000 | 33 | 131 | 0.60506 | false |
agiliq/django-secure-login | secure_login/tests.py | 1 | 9858 | from django.test import TestCase
from django.contrib.auth.models import User
from django.contrib.auth import authenticate
from django.core import mail
from django.test.utils import override_settings
from django.conf import settings
from django import forms
from django.contrib.auth.backends import ModelBackend
from .models import FailedLogin
from .forms import SecureLoginForm, SecureFormMixin
import secure_login.checkers as checkers
from .backends import SecureLoginBackendMixin
class SecureLoginBackendTest(TestCase):
@override_settings(SECURE_LOGIN_CHECKERS=["secure_login.checkers.no_weak_passwords", ])
def test_no_weak_passwords(self):
bad_password = "albatross"
good_password = "a-l0ng-pa55w0rd-@^&"
user = User.objects.create(username="hello")
user.set_password(bad_password)
user.save()
self.assertFalse(authenticate(username="hello", password=bad_password))
user.set_password(good_password)
user.save()
self.assertEqual(
authenticate(username="hello", password=good_password), user)
@override_settings(SECURE_LOGIN_CHECKERS=["secure_login.checkers.no_short_passwords", ])
def test_no_short_passwords(self):
bad_password = "123"
good_password = "a-l0ng-pa55w0rd-@^&"
empty_password = ''
user = User.objects.create(username="hello")
user.set_password(bad_password)
user.save()
self.assertFalse(authenticate(username="hello", password=bad_password))
user.set_password(empty_password)
user.save()
self.assertFalse(authenticate(username="hello", password=empty_password))
user.set_password(good_password)
user.save()
self.assertEqual(
authenticate(username="hello", password=good_password), user)
@override_settings(SECURE_LOGIN_CHECKERS=["secure_login.checkers.no_username_password_same", ])
def test_no_username_password_same(self):
username = "hellohello"
bad_password = "hellohello"
good_password = "a-l0ng-pa55w0rd-@^&"
user = User.objects.create(username=username)
user.set_password(bad_password)
user.save()
self.assertFalse(
authenticate(username=username, password=bad_password))
user.set_password(good_password)
user.save()
self.assertEqual(
authenticate(username=username, password=good_password), user)
@override_settings(SECURE_LOGIN_ON_FAIL=["secure_login.on_fail.email_user", ])
def test_email_sent_on_wrong_password(self):
username = "hello"
password = "hellohello"
user = User.objects.create(username=username)
user.set_password(password)
user.save()
self.assertFalse(
authenticate(username=username, password=password + "1"))
self.assertEqual(len(mail.outbox), 1)
@override_settings(SECURE_LOGIN_ON_FAIL=["secure_login.on_fail.populate_failed_requests"], SECURE_LOGIN_CHECKERS=[])
def test_populate_failed_requests(self):
username = "hello"
password = "hellohello"
user = User.objects.create_user(username=username, password=password)
authenticate(username=username, password="not-the-correct-password")
self.assertEqual(FailedLogin.objects.count(), 1)
@override_settings(SECURE_LOGIN_ON_FAIL=["secure_login.on_fail.populate_failed_requests", "secure_login.on_fail.lockout_on_many_wrong_password", ], SECURE_LOGIN_CHECKERS=[])
def test_lockout(self):
username = "hello"
password = "hellohello"
user = User.objects.create_user(username=username, password=password)
for _ in range(11):
authenticate(
username=username, password="not-the-correct-password")
user_ = authenticate(username=username, password=password)
self.assertFalse(user_.is_active)
def test_email_based_backend(self):
username = "hello"
password = "albatross"
email = "[email protected]"
user = User.objects.create_user(username=username, password=password, email=email)
with self.settings(AUTHENTICATION_BACKENDS=["secure_login.tests.SecureEmailBackend"], SECURE_LOGIN_CHECKERS=["secure_login.checkers.no_weak_passwords"]):
self.assertEqual(authenticate(email=email, password=password), None)
with self.settings(AUTHENTICATION_BACKENDS=["secure_login.tests.SecureEmailBackend"], SECURE_LOGIN_CHECKERS=[]):
self.assertEqual(authenticate(email=email, password=password), user)
def test_multiple_backend(self):
username = "hello"
password = "albatross"
email = "[email protected]"
user = User.objects.create_user(username=username, password=password, email=email)
with self.settings(AUTHENTICATION_BACKENDS=["secure_login.tests.SecureEmailBackend", "secure_login.tests.SecureUsernameBackend"], SECURE_LOGIN_CHECKERS=["secure_login.checkers.no_weak_passwords"]):
self.assertEqual(authenticate(email=email, password=password), None)
class FormsTest(TestCase):
@override_settings(SECURE_LOGIN_CHECKERS=["secure_login.checkers.no_weak_passwords", ])
def test_no_weak_passwords(self):
checkers.no_weak_passwords.error_message = 'Your password is too weak'
bad_password = "albatross"
good_password = "a-l0ng-pa55w0rd-@^&"
user = User.objects.create(username="hello")
user.set_password(bad_password)
user.save()
form = SecureLoginForm(
data={"username": "hello", "password": bad_password})
self.assertFalse(form.is_valid())
self.assertIn(checkers.no_weak_passwords.error_message, form.errors['__all__'])
user.set_password(good_password)
user.save()
form = SecureLoginForm(
data={"username": "hello", "password": good_password})
self.assertTrue(form.is_valid())
@override_settings(SECURE_LOGIN_CHECKERS=["secure_login.checkers.no_short_passwords", ])
def test_no_short_passwords(self):
checkers.no_short_passwords.error_message = 'Your password is too short'
bad_password = "123"
good_password = "a-l0ng-pa55w0rd-@^&"
empty_password = ''
user = User.objects.create(username="hello")
user.set_password(bad_password)
user.save()
form = SecureLoginForm(
data={"username": "hello", "password": bad_password})
self.assertFalse(form.is_valid())
self.assertIn(checkers.no_short_passwords.error_message, form.errors['__all__'])
user.set_password(empty_password)
user.save()
form = SecureLoginForm(
data={"username": "hello", "password": empty_password})
self.assertFalse(form.is_valid())
user.set_password(good_password)
user.save()
form = SecureLoginForm(
data={"username": "hello", "password": good_password})
self.assertTrue(form.is_valid())
def test_register_form(self):
class RegiterForm(forms.Form):
username = forms.CharField(max_length=50)
password = forms.CharField()
email = forms.EmailField(required=False)
class SecureRegisterForm(SecureFormMixin, RegiterForm):
pass
bad_password = "albatross"
with self.settings(SECURE_LOGIN_CHECKERS=["secure_login.checkers.no_weak_passwords", ]):
form = SecureRegisterForm(
data={"username": "hello", "password": bad_password})
self.assertFalse(form.is_valid())
with self.settings(SECURE_LOGIN_CHECKERS=[]):
form = SecureRegisterForm(
data={"username": "hello", "password": bad_password})
self.assertTrue(form.is_valid())
bad_password = 123
with self.settings(SECURE_LOGIN_CHECKERS=["secure_login.checkers.no_short_passwords", ]):
form = SecureRegisterForm(
data={"username": "hello", "password": bad_password})
self.assertFalse(form.is_valid())
with self.settings(SECURE_LOGIN_CHECKERS=[]):
form = SecureRegisterForm(
data={"username": "hello", "password": bad_password})
self.assertTrue(form.is_valid())
bad_password = ''
with self.settings(SECURE_LOGIN_CHECKERS=["secure_login.checkers.no_short_passwords", ]):
form = SecureRegisterForm(
data={"username": "hello", "password": bad_password})
self.assertFalse(form.is_valid())
def test_email_login_form(self):
class EmailLoginForm(forms.Form):
email = forms.EmailField()
password = forms.CharField()
class SecureRegisterForm(SecureFormMixin, EmailLoginForm):
pass
def username_fieldname(self):
return "email"
bad_password = "albatross"
with self.settings(SECURE_LOGIN_CHECKERS=["secure_login.checkers.no_weak_passwords", ]):
form = SecureRegisterForm(
data={"email": "[email protected]", "password": bad_password})
self.assertFalse(form.is_valid())
with self.settings(SECURE_LOGIN_CHECKERS=[]):
form = SecureRegisterForm(
data={"email": "[email protected]", "password": bad_password})
self.assertTrue(form.is_valid())
class EmailBackend(object):
def authenticate(self, email, password, **kwargs):
try:
return User.objects.get(email=email)
        except User.DoesNotExist:
return None
class SecureEmailBackend(SecureLoginBackendMixin, EmailBackend):
def username_fieldname(self):
return "email"
class SecureUsernameBackend(SecureLoginBackendMixin, ModelBackend):
pass
| bsd-3-clause | -4,263,597,417,731,176,400 | 38.2749 | 205 | 0.649828 | false |
clearpathrobotics/axis_camera | nodes/axis.py | 1 | 8123 | #!/usr/bin/env python
#
# Axis camera image driver. Based on:
# https://code.ros.org/svn/wg-ros-pkg/branches/trunk_cturtle/sandbox/axis_camera
# /axis.py
#
import threading
import urllib2
import rospy
from sensor_msgs.msg import CompressedImage, CameraInfo
import camera_info_manager
class StreamThread(threading.Thread):
def __init__(self, axis):
threading.Thread.__init__(self)
self.axis = axis
self.daemon = True
self.timeoutSeconds = 2.5
def run(self):
while(True):
self.stream()
def stream(self):
while(True):
self.formURL()
self.authenticate()
if self.openURL():
self.publishFramesContinuously()
rospy.sleep(2) # if stream stays intact we shouldn't get to this
def formURL(self):
self.url = 'http://%s/mjpg/video.mjpg' % self.axis.hostname
self.url += "?fps=0&resolution=%dx%d" % (self.axis.width,
self.axis.height)
# support for Axis F34 multicamera switch
if (self.axis.camera != 0):
self.url += "&camera=%d" % self.axis.camera
rospy.logdebug('opening ' + str(self.axis))
def authenticate(self):
'''only try to authenticate if user/pass configured. I have not
used this method (yet).'''
if self.axis.password != '' and self.axis.username != '':
# create a password manager
password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
# Add the username and password, use default realm.
top_level_url = "http://" + self.axis.hostname
password_mgr.add_password(None, top_level_url, self.axis.username,
self.axis.password)
if self.axis.use_encrypted_password :
handler = urllib2.HTTPDigestAuthHandler(password_mgr)
else:
handler = urllib2.HTTPBasicAuthHandler(password_mgr)
# create "opener" (OpenerDirector instance)
opener = urllib2.build_opener(handler)
# ...and install it globally so it can be used with urlopen.
urllib2.install_opener(opener)
def openURL(self):
'''Open connection to Axis camera using http'''
try:
self.fp = urllib2.urlopen(self.url, timeout=self.timeoutSeconds)
return(True)
except urllib2.URLError, e:
            rospy.logwarn('Error opening URL %s. ' % self.url +
                          'Possible timeout. Looping until camera appears')
return(False)
def publishFramesContinuously(self):
'''Continuous loop to publish images'''
while(True):
try:
self.findBoundary()
self.getImage()
self.publishMsg()
self.publishCameraInfoMsg()
            except Exception:
rospy.loginfo('Timed out while trying to get message.')
break
def findBoundary(self):
'''The string "--myboundary" is used to denote the start of an image in
Axis cameras'''
while(True):
boundary = self.fp.readline()
if boundary=='--myboundary\r\n':
break
def getImage(self):
'''Get the image header and image itself'''
self.getHeader()
self.getImageData()
def getHeader(self):
self.header = {}
while(True):
line = self.fp.readline()
if line == "\r\n":
break
line = line.strip()
parts = line.split(": ", 1)
try:
self.header[parts[0]] = parts[1]
except:
rospy.logwarn('Problem encountered with image header. Setting '
'content_length to zero')
self.header['Content-Length'] = 0 # set content_length to zero if
# there is a problem reading header
self.content_length = int(self.header['Content-Length'])
def getImageData(self):
'''Get the binary image data itself (ie. without header)'''
if self.content_length>0:
self.img = self.fp.read(self.content_length)
self.fp.readline() # Read terminating \r\n and do nothing with it
def publishMsg(self):
'''Publish jpeg image as a ROS message'''
self.msg = CompressedImage()
self.msg.header.stamp = rospy.Time.now()
self.msg.header.frame_id = self.axis.frame_id
self.msg.format = "jpeg"
self.msg.data = self.img
self.axis.pub.publish(self.msg)
def publishCameraInfoMsg(self):
'''Publish camera info manager message'''
cimsg = self.axis.cinfo.getCameraInfo()
cimsg.header.stamp = self.msg.header.stamp
cimsg.header.frame_id = self.axis.frame_id
cimsg.width = self.axis.width
cimsg.height = self.axis.height
self.axis.caminfo_pub.publish(cimsg)
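
# For reference, each frame consumed by findBoundary/getHeader/getImageData
# above arrives in the multipart MJPEG stream as:
#
#   --myboundary\r\n
#   Content-Type: image/jpeg\r\n
#   Content-Length: <n>\r\n
#   \r\n
#   <n bytes of JPEG data>\r\n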
class Axis:
def __init__(self, hostname, username, password, width, height, frame_id,
camera_info_url, use_encrypted_password, camera):
self.hostname = hostname
self.username = username
self.password = password
self.width = width
self.height = height
self.frame_id = frame_id
self.camera_info_url = camera_info_url
self.use_encrypted_password = use_encrypted_password
self.camera = camera
# generate a valid camera name based on the hostname
self.cname = camera_info_manager.genCameraName(self.hostname)
self.cinfo = camera_info_manager.CameraInfoManager(cname = self.cname,
url = self.camera_info_url)
self.cinfo.loadCameraInfo() # required before getCameraInfo()
self.st = None
self.pub = rospy.Publisher("image_raw/compressed", CompressedImage, self, queue_size=1)
self.caminfo_pub = rospy.Publisher("camera_info", CameraInfo, self, queue_size=1)
def __str__(self):
"""Return string representation."""
return(self.hostname + ',' + self.username + ',' + self.password +
'(' + str(self.width) + 'x' + str(self.height) + ')')
def peer_subscribe(self, topic_name, topic_publish, peer_publish):
'''Lazy-start the image-publisher.'''
if self.st is None:
self.st = StreamThread(self)
self.st.start()
def main():
rospy.init_node("axis_driver")
arg_defaults = {
'hostname': '192.168.0.90', # default IP address
'username': 'root', # default login name
'password': '',
'width': 640,
'height': 480,
'frame_id': 'axis_camera',
'camera_info_url': '',
'use_encrypted_password' : False,
'camera' : 0}
args = updateArgs(arg_defaults)
Axis(**args)
rospy.spin()
def updateArgs(arg_defaults):
'''Look up parameters starting in the driver's private parameter space, but
also searching outer namespaces. Defining them in a higher namespace allows
the axis_ptz.py script to share parameters with the driver.'''
args = {}
for name, val in arg_defaults.iteritems():
full_name = rospy.search_param(name)
if full_name is None:
args[name] = val
else:
args[name] = rospy.get_param(full_name, val)
# resolve frame_id with tf_prefix (unless already absolute)
if args['frame_id'][0] != '/': # not absolute?
tf_prefix = rospy.search_param('tf_prefix')
prefix_val = ''
if tf_prefix is not None: # prefix defined?
prefix_val = rospy.get_param(tf_prefix)
if prefix_val[0] != '/': # prefix not absolute?
prefix_val = '/' + prefix_val
args['frame_id'] = prefix_val + '/' + args['frame_id']
return(args)
if __name__ == "__main__":
main()
| bsd-3-clause | 9,019,188,777,415,011,000 | 36.43318 | 95 | 0.562723 | false |
jacebrowning/gridcommand | setup.py | 1 | 1810 | #!/usr/bin/env python
"""Setup script for the package."""
import os
import sys
import setuptools
PACKAGE_NAME = 'gridcommand'
MINIMUM_PYTHON_VERSION = 3, 5
def check_python_version():
"""Exit when the Python version is too low."""
if sys.version_info < MINIMUM_PYTHON_VERSION:
sys.exit("Python {}.{}+ is required.".format(*MINIMUM_PYTHON_VERSION))
def read_package_variable(key):
"""Read the value of a variable from the package without importing."""
module_path = os.path.join(PACKAGE_NAME, '__init__.py')
with open(module_path) as module:
for line in module:
parts = line.strip().split(' ')
if parts and parts[0] == key:
return parts[-1].strip("'")
assert 0, "'{0}' not found in '{1}'".format(key, module_path)
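# Example of a line read_package_variable() matches (assumption: the package
# keeps its metadata as simple top-level assignments):
#   __version__ = '0.1'
# Splitting on spaces gives ['__version__', '=', "'0.1'"]; the value is the
# last part with the quotes stripped.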
def read_descriptions():
"""Build a description for the project from documentation files."""
try:
readme = open("README.rst").read()
changelog = open("CHANGELOG.rst").read()
except IOError:
return "<placeholder>"
else:
return readme + '\n' + changelog
check_python_version()
setuptools.setup(
name=read_package_variable('__project__'),
version=read_package_variable('__version__'),
description="TBD",
url='https://github.com/jacebrowning/gridcommand',
author='Jace Browning',
author_email='[email protected]',
packages=setuptools.find_packages(),
entry_points={'console_scripts': []},
long_description=read_descriptions(),
license='LGPL',
classifiers=[
'Development Status :: 1 - Planning',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.5',
],
install_requires=open("requirements.txt").readlines(),
)
| lgpl-3.0 | -8,046,625,268,878,089,000 | 25.617647 | 78 | 0.628729 | false |
ledbutter/ProjectEulerPython | Problem39.py | 1 | 1375 | # If p is the perimeter of a right angle triangle with integral length sides, {a,b,c}, there are exactly three solutions for p = 120.
# {20,48,52}, {24,45,51}, {30,40,50}
# For which value of p ≤ 1000 is the number of solutions maximised?
# p = 120
# sols = 0
# for a in range(1, p//2):
# for b in range(a+1, p-a):
# for c in range(b+1, p-a-b+1):
# if a**2 + b**2 == c**2 and a+b+c==p:
# print(a,b,c)
# sols += 1
# print(sols)
#def possible_perimters(p):
#http://blog.dreamshire.com/2009/04/22/project-euler-problem-39-solution/
# t_max = 0
# p_limit = 1000
# for p in range(p_limit//2, p_limit+1, 2):
# t = 0;
# for a in range(2, p//4+1):
# if p*(p - 2*a) % (2*(p-a)) == 0: t += 1
# if t > t_max: (t_max, p_max) = (t, p)
# print(p_max)
#840
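# Derivation behind the divisibility test above, worked from the two
# constraints:
#   a + b + c = p   and   a^2 + b^2 = c^2, with c = p - a - b
#   => a^2 + b^2 = (p - a - b)^2
#   => 0 = p^2 - 2pa - 2pb + 2ab
#   => b = p*(p - 2a) / (2*(p - a))
# so a pair (p, a) yields an integral b exactly when 2*(p - a) divides
# p*(p - 2a).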
#my original code would have worked but it was incredibly slow,
#this is an optimized version of that code based on the message board
from math import sqrt
max_p = max_solutions = current_solutions = 0
for p in range(500, 1001, 2):
#print(p)
current_solutions = 0
for a in range(1, p//4):
for b in range(a+1, (p-a)//2):
c = sqrt(a**2+b**2)
if a+b+c==p:
#print(a,b,c)
current_solutions += 1
if current_solutions > max_solutions:
max_p = p
max_solutions = current_solutions
print(max_p, max_solutions) | mit | 9,017,787,300,578,095,000 | 22.157895 | 133 | 0.576727 | false |
google/personfinder | app/indexing.py | 1 | 10056 | # Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Support for approximate string prefix queries.
A hit is defined when the words entered in the query are all prefixes of one
of the words in the given and family names on the record. For example, a
record with the fields:
given_name: ABC 123
family_name: DEF 456
will be retrieved by:
"ABC 456"
"45 ED"
"123 ABC"
"ABC 123 DEF"
but will not be retrieved by:
"ABC 1234"
"ABC 123 DEF 456 789"
"""
from text_query import TextQuery
from google.appengine.ext import db
import unicodedata
import logging
import model
import re
import jautils
def update_index_properties(entity):
"""Finds and updates all prefix-related properties on the given entity."""
# Using set to make sure I'm not adding the same string more than once.
names_prefixes = set()
for property in entity._fields_to_index_properties:
for value in TextQuery(getattr(entity, property)).query_words:
if property in entity._fields_to_index_by_prefix_properties:
for n in xrange(1,len(value)+1):
pref = value[:n]
if pref not in names_prefixes:
names_prefixes.add(pref)
else:
if value not in names_prefixes:
names_prefixes.add(value)
# Add alternate names to the index tokens. We choose not to index prefixes
# of alternate names so that we can keep the index size small.
    # TODO(ryok): This strategy works well for Japanese, but how about other
# languages?
names_prefixes |= get_alternate_name_tokens(entity)
# Put a cap on the number of tokens, just as a precaution.
MAX_TOKENS = 100
entity.names_prefixes = list(names_prefixes)[:MAX_TOKENS]
if len(names_prefixes) > MAX_TOKENS:
logging.debug('MAX_TOKENS exceeded for %s' %
' '.join(list(names_prefixes)))
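# Tiny illustration of the prefix-hit rule from the module docstring
# (hypothetical helper operating on plain word lists rather than Person
# entities): every query word must be a prefix of some word in the record.
def _is_prefix_hit(query_words, record_words):
    return all(any(w.startswith(q) for w in record_words)
               for q in query_words)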
def get_alternate_name_tokens(person):
"""Returns alternate name tokens and their variations."""
tokens = set(TextQuery(person.alternate_names).query_words)
# This is no-op for non-Japanese.
tokens |= set(jautils.get_additional_tokens(tokens))
return tokens
class CmpResults():
def __init__(self, query):
self.query = query
self.query_words_set = set(query.words)
def __call__(self, p1, p2):
if ((p1.primary_full_name and
p1.primary_full_name == p2.primary_full_name) or
((p1.given_name or p1.family_name) and
p1.given_name == p2.given_name and
p1.family_name == p2.family_name)):
return 0
self.set_ranking_attr(p1)
self.set_ranking_attr(p2)
r1 = self.rank(p1)
r2 = self.rank(p2)
if r1 == r2:
# if rank is the same sort by name so same names will be together
return cmp(p1._normalized_full_name.normalized,
p2._normalized_full_name.normalized)
else:
return cmp(r2, r1)
def set_ranking_attr(self, person):
"""Consider save these into to db"""
if not hasattr(person, '_normalized_given_name'):
person._normalized_given_name = TextQuery(person.given_name)
person._normalized_family_name = TextQuery(person.family_name)
person._normalized_full_name = TextQuery(person.full_name)
person._name_words = set(person._normalized_full_name.words)
person._alt_name_words = set(
TextQuery(person.alternate_names).words)
# TODO(ryok): re-consider the ranking putting more weight on full_name (a
# required field) instead of given name and family name pair (optional).
def rank(self, person):
# The normalized query words, in the order as entered.
ordered_words = self.query.normalized.split()
if (ordered_words ==
person._normalized_given_name.words +
person._normalized_family_name.words):
# Matches a Latin name exactly (given name followed by surname).
return 10
if (re.match(ur'^[\u3400-\u9fff]$', person.family_name) and
ordered_words in [
[person.family_name + person.given_name],
[person.family_name, person.given_name]
]):
# Matches a CJK name exactly (surname followed by given name).
return 10
if (re.match(ur'^[\u3400-\u9fff]+$', person.family_name) and
ordered_words in [
[person.family_name + person.given_name],
[person.family_name, person.given_name]
]):
# Matches a CJK name exactly (surname followed by given name).
# A multi-character surname is uncommon, so it is ranked a bit lower.
return 9.5
if (ordered_words ==
person._normalized_family_name.words +
person._normalized_given_name.words):
# Matches a Latin name with given and family name switched.
return 9
if (re.match(ur'^[\u3400-\u9fff]$', person.given_name) and
ordered_words in [
[person.given_name + person.family_name],
[person.given_name, person.family_name]
]):
# Matches a CJK name with surname and given name switched.
return 9
if (re.match(ur'^[\u3400-\u9fff]+$', person.given_name) and
ordered_words in [
[person.given_name + person.family_name],
[person.given_name, person.family_name]
]):
# Matches a CJK name with surname and given name switched.
# A multi-character surname is uncommon, so it's ranked a bit lower.
return 8.5
if person._name_words == self.query_words_set:
# Matches all the words in the given and family name, out of order.
return 8
if self.query.normalized in [
person._normalized_given_name.normalized,
person._normalized_family_name.normalized,
]:
# Matches the given name exactly or the family name exactly.
return 7
if person._name_words.issuperset(self.query_words_set):
# All words in the query appear somewhere in the name.
return 6
# Count the number of words in the query that appear in the name and
# also in the alternate names.
matched_words = person._name_words.union(
person._alt_name_words).intersection(self.query_words_set)
return min(5, 1 + len(matched_words))
def rank_and_order(results, query, max_results):
results.sort(CmpResults(query))
return results[:max_results]
def sort_query_words(query_words):
"""Sort query_words so that the query filters created from query_words are
more effective and consistent when truncated due to NeedIndexError, and
return the sorted list."""
# (1) Sort them lexicographically so that we return consistent search
# results for query 'AA BB CC DD' and 'DD AA BB CC' even when filters
# are truncated.
sorted_query_words = sorted(query_words)
# (2) Sort them according to popularity so that less popular query words,
# which are usually more effective filters, come first.
sorted_query_words = jautils.sorted_by_popularity(sorted_query_words)
# (3) Sort them according to the lengths so that longer query words,
# which are usually more effective filters, come first.
return sorted(sorted_query_words, key=len, reverse=True)
def search(repo, query_obj, max_results):
# As there are limits on the number of filters that we can apply and the
# number of entries we can fetch at once, the order of query words could
# potentially matter. In particular, this is the case for most Japanese
# names, many of which consist of 4 to 6 Chinese characters, each
# coresponding to an additional filter.
query_words = sort_query_words(query_obj.query_words)
logging.debug('query_words: %r' % query_words)
# First try the query with all the filters, and then keep backing off
# if we get NeedIndexError.
fetch_limit = 400
fetched = []
filters_to_try = len(query_words)
while filters_to_try:
query = model.Person.all_in_repo(repo)
for word in query_words[:filters_to_try]:
query.filter('names_prefixes =', word)
try:
fetched = query.fetch(fetch_limit)
logging.debug('query succeeded with %d filters' % filters_to_try)
break
except db.NeedIndexError:
filters_to_try -= 1
continue
logging.debug('indexing.search fetched: %d' % len(fetched))
# Now perform any filtering that App Engine was unable to do for us.
matched = []
for result in fetched:
for word in query_words:
if word not in result.names_prefixes:
break
else:
matched.append(result)
logging.debug('indexing.search matched: %d' % len(matched))
if len(fetched) == fetch_limit and len(matched) < max_results:
logging.debug('Warning: Fetch reached a limit of %d, but only %d '
'exact-matched the query (max_results = %d).' %
(fetch_limit, len(matched), max_results))
# Now rank and order the results.
return rank_and_order(matched, query_obj, max_results)
| apache-2.0 | 7,982,706,846,564,120,000 | 38.590551 | 81 | 0.625597 | false |
miguelalba89/hfdp-python | combining/observer.py | 1 | 7761 | """
Ducks problem with quackologists (observers)
Author: m1ge7
Date: 2014/03/24
"""
from abc import ABCMeta, abstractmethod
###############################################################################
#
###############################################################################
class QuackObservable:
__metaclass__ = ABCMeta
@abstractmethod
def register_observer(self, observer):
pass
@abstractmethod
def notify_observers(self):
pass
class Quackable(QuackObservable):
__metaclass__ = ABCMeta
@abstractmethod
def quack(self):
pass
class Observable(QuackObservable):
def __init__(self, duck):
self.__observers = []
self.__duck = duck
def register_observer(self, observer):
self.__observers.append(observer)
def notify_observers(self):
for obs in self.__observers:
obs.update(self.__duck)
def get_observers(self):
return self.__observers
class Observer:
__metaclass__ = ABCMeta
@abstractmethod
def update(self, duck):
pass
class Quackologist(Observer):
def update(self, duck):
print "Quackologist: " + str(duck) + " just quacked."
def __str__(self):
return "Quackologist"
###############################################################################
# Duck concrete classes
###############################################################################
class DecoyDuck(Quackable):
def __init__(self):
self.__observable = Observable(self)
def quack(self):
print "<< Silence >>"
self.notify_observers()
def register_observer(self, observer):
self.__observable.register_observer(observer)
def notify_observers(self):
self.__observable.notify_observers()
def __str__(self):
return "Decoy Duck"
class DuckCall(Quackable):
def __init__(self):
self.__observable = Observable(self)
def quack(self):
print "Kwak"
self.notify_observers()
def register_observer(self, observer):
self.__observable.register_observer(observer)
def notify_observers(self):
self.__observable.notify_observers()
def __str__(self):
return "Duck Call"
class MallardDuck(Quackable):
def __init__(self):
self.__observable = Observable(self)
def quack(self):
print "Quack"
self.notify_observers()
def register_observer(self, observer):
self.__observable.register_observer(observer)
def notify_observers(self):
self.__observable.notify_observers()
def __str__(self):
return "Mallard Duck"
class RedheadDuck(Quackable):
def __init__(self):
self.__observable = Observable(self)
def quack(self):
print "Quack"
self.notify_observers()
def register_observer(self, observer):
self.__observable.register_observer(observer)
def notify_observers(self):
self.__observable.notify_observers()
def __str__(self):
return "Redhead Duck"
class RubberDuck(Quackable):
def __init__(self):
self.__observable = Observable(self)
def quack(self):
print "Squeak"
self.notify_observers()
def register_observer(self, observer):
self.__observable.register_observer(observer)
def notify_observers(self):
self.__observable.notify_observers()
def __str__(self):
return "Rubber Duck"
###############################################################################
# Goose classes
###############################################################################
class Goose:
def honk(self):
print "Honk"
def __str__(self):
return "Goose"
class GooseAdapter(Quackable):
def __init__(self, goose):
self.__goose = goose
self.__observable = Observable(self)
def quack(self):
self.__goose.honk()
self.notify_observers()
def register_observer(self, observer):
self.__observable.register_observer(observer)
def notify_observers(self):
self.__observable.notify_observers()
def __str__(self):
return "Goose pretending to be a Duck"
###############################################################################
# QuackCounter
###############################################################################
class QuackCounter(Quackable):
number_of_quacks = 0
def __init__(self, duck):
self.__duck = duck
def quack(self):
self.__duck.quack()
QuackCounter.number_of_quacks += 1
@staticmethod
def get_quacks():
return QuackCounter.number_of_quacks
def register_observer(self, observer):
self.__duck.register_observer(observer)
def notify_observers(self):
self.__duck.notify_observers()
def __str__(self):
return str(self.__duck)
###############################################################################
# Factories
###############################################################################
class AbstractDuckFactory:
__metaclass__ = ABCMeta
@abstractmethod
def create_mallard_duck(self):
pass
@abstractmethod
def create_redhead_duck(self):
pass
@abstractmethod
def create_duck_call(self):
pass
@abstractmethod
def create_rubber_duck(self):
pass
class DuckFactory(AbstractDuckFactory):
def create_mallard_duck(self):
return MallardDuck()
def create_redhead_duck(self):
return RedheadDuck()
def create_duck_call(self):
return DuckCall()
def create_rubber_duck(self):
return RubberDuck()
class CountingDuckFactory(AbstractDuckFactory):
def create_mallard_duck(self):
return QuackCounter(MallardDuck())
def create_redhead_duck(self):
return QuackCounter(RedheadDuck())
def create_duck_call(self):
return QuackCounter(DuckCall())
def create_rubber_duck(self):
return QuackCounter(RubberDuck())
###############################################################################
# Flock
###############################################################################
class Flock(Quackable):
def __init__(self):
self.__ducks = []
def add(self, duck):
self.__ducks.append(duck)
def quack(self):
for duck in self.__ducks:
duck.quack()
def register_observer(self, observer):
for duck in self.__ducks:
duck.register_observer(observer)
    def notify_observers(self):
pass
def __str__(self):
return "Flock of Ducks"
class DuckSimulator:
def simulate_factory(self, duck_factory):
print "\nDuck Simulator: With Composite - Flocks"
flock_of_ducks = Flock()
flock_of_ducks.add(duck_factory.create_redhead_duck())
flock_of_ducks.add(duck_factory.create_duck_call())
flock_of_ducks.add(duck_factory.create_rubber_duck())
flock_of_ducks.add(GooseAdapter(Goose()))
flock_of_mallards = Flock()
for i in range(4):
flock_of_mallards.add(duck_factory.create_mallard_duck())
flock_of_ducks.add(flock_of_mallards)
print "\nDuck Simulator: With Observer"
quackologist = Quackologist()
flock_of_ducks.register_observer(quackologist)
self.simulate_duck(flock_of_ducks)
print "The ducks quacked " + str(QuackCounter.get_quacks()) + " times"
def simulate_duck(self, duck):
duck.quack()
if __name__ == '__main__':
simulator = DuckSimulator()
duck_factory = CountingDuckFactory()
simulator.simulate_factory(duck_factory)
| gpl-3.0 | 939,939,761,361,892,700 | 21.365994 | 79 | 0.541039 | false |
bolkedebruin/airflow | tests/providers/google/cloud/operators/test_compute_system_helper.py | 1 | 5190 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import argparse
import os
from tests.contrib.utils.logging_command_executor import LoggingCommandExecutor
from tests.providers.google.cloud.utils.gcp_authenticator import GCP_COMPUTE_KEY, GcpAuthenticator
GCE_INSTANCE = os.environ.get('GCE_INSTANCE', 'testinstance')
GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project')
GCE_INSTANCE_GROUP_MANAGER_NAME = os.environ.get('GCE_INSTANCE_GROUP_MANAGER_NAME',
'instance-group-test')
GCE_ZONE = os.environ.get('GCE_ZONE', 'europe-west1-b')
GCE_TEMPLATE_NAME = os.environ.get('GCE_TEMPLATE_NAME',
'instance-template-test')
GCE_NEW_TEMPLATE_NAME = os.environ.get('GCE_NEW_TEMPLATE_NAME',
'instance-template-test-new')
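# Example invocation (illustrative; assumes gcloud is installed and the GCP_*/
# GCE_* environment variables above point at a usable project):
#   python test_compute_system_helper.py --action=create-instance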
class GCPComputeTestHelper(LoggingCommandExecutor):
def delete_instance(self):
self.execute_cmd([
'gcloud', 'beta', 'compute', '--project', GCP_PROJECT_ID,
'--quiet', '--verbosity=none',
'instances', 'delete', GCE_INSTANCE, '--zone', GCE_ZONE,
])
def create_instance(self):
self.execute_cmd([
'gcloud', 'beta', 'compute', '--project', GCP_PROJECT_ID, '--quiet',
'instances', 'create', GCE_INSTANCE,
'--zone', GCE_ZONE
])
def delete_instance_group_and_template(self, silent=False):
self.execute_cmd([
'gcloud', 'beta', 'compute', '--project', GCP_PROJECT_ID,
'--quiet', '--verbosity=none',
'instance-groups', 'managed', 'delete', GCE_INSTANCE_GROUP_MANAGER_NAME,
'--zone', GCE_ZONE
], silent=silent)
self.execute_cmd([
'gcloud', 'beta', 'compute', '--project', GCP_PROJECT_ID,
'--quiet', '--verbosity=none',
'instance-templates', 'delete', GCE_NEW_TEMPLATE_NAME
], silent=silent)
self.execute_cmd([
'gcloud', 'beta', 'compute',
'--project', GCP_PROJECT_ID,
'--quiet', '--verbosity=none',
'instance-templates', 'delete', GCE_TEMPLATE_NAME
], silent=silent)
def create_instance_group_and_template(self):
self.execute_cmd([
'gcloud', 'beta', 'compute', '--project', GCP_PROJECT_ID, '--quiet',
'instance-templates', 'create', GCE_TEMPLATE_NAME
])
self.execute_cmd([
'gcloud', 'beta', 'compute', '--project', GCP_PROJECT_ID, '--quiet',
'instance-groups', 'managed', 'create', GCE_INSTANCE_GROUP_MANAGER_NAME,
'--template', GCE_TEMPLATE_NAME,
'--zone', GCE_ZONE, '--size=1'
])
self.execute_cmd([
'gcloud', 'beta', 'compute', '--project', GCP_PROJECT_ID, '--quiet',
'instance-groups', 'managed', 'wait-until-stable',
GCE_INSTANCE_GROUP_MANAGER_NAME,
'--zone', GCE_ZONE
])
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Create or delete GCE instances/instance groups for system tests.')
parser.add_argument('--action', dest='action', required=True,
choices=('create-instance', 'delete-instance',
'create-instance-group', 'delete-instance-group',
'before-tests', 'after-tests'))
action = parser.parse_args().action
helper = GCPComputeTestHelper()
gcp_authenticator = GcpAuthenticator(GCP_COMPUTE_KEY)
helper.log.info('Starting action: {}'.format(action))
gcp_authenticator.gcp_store_authentication()
try:
gcp_authenticator.gcp_authenticate()
if action == 'before-tests':
pass
elif action == 'after-tests':
pass
elif action == 'create-instance':
helper.create_instance()
elif action == 'delete-instance':
helper.delete_instance()
elif action == 'create-instance-group':
helper.create_instance_group_and_template()
elif action == 'delete-instance-group':
helper.delete_instance_group_and_template()
else:
raise Exception("Unknown action: {}".format(action))
finally:
gcp_authenticator.gcp_restore_authentication()
helper.log.info('Finishing action: {}'.format(action))
| apache-2.0 | 6,849,869,695,158,749,000 | 40.854839 | 98 | 0.603276 | false |
xuru/pyvisdk | pyvisdk/do/virtual_serial_port_file_backing_option.py | 1 | 1088 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def VirtualSerialPortFileBackingOption(vim, *args, **kwargs):
'''The data object type contains the options for backing a serial port with a host
file.'''
obj = vim.client.factory.create('ns0:VirtualSerialPortFileBackingOption')
# do some validation checking...
    if (len(args) + len(kwargs)) < 1:
        raise IndexError('Expected at least 1 argument got: %d' % len(args))
required = [ 'type' ]
optional = [ 'fileNameExtensions', 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
| mit | -870,878,209,018,097,300 | 31.029412 | 124 | 0.611213 | false |
pattisdr/osf.io | api/meetings/views.py | 1 | 9146 |
from rest_framework import generics, permissions as drf_permissions
from rest_framework.exceptions import NotFound
from django.db.models import Q, Count, Subquery, OuterRef, Case, When, Value, CharField, F, IntegerField
from django.db.models.functions import Length, Substr, Coalesce
from django.contrib.contenttypes.models import ContentType
from addons.osfstorage.models import OsfStorageFile
from api.base import permissions as base_permissions
from api.base.exceptions import InvalidFilterOperator
from api.base.filters import ListFilterMixin
from api.base.views import JSONAPIBaseView
from api.base.utils import get_object_or_error
from api.base.versioning import PrivateVersioning
from api.meetings.serializers import MeetingSerializer, MeetingSubmissionSerializer
from api.meetings.permissions import IsPublic
from api.nodes.views import NodeMixin
from framework.auth.oauth_scopes import CoreScopes
from osf.models import AbstractNode, Conference, Contributor, Tag, PageCounter
from website import settings
class MeetingMixin(object):
"""Mixin with convenience method get_meeting
"""
meeting_lookup_url_kwarg = 'meeting_id'
def get_meeting(self):
meeting = get_object_or_error(
Conference,
Q(endpoint=self.kwargs[self.meeting_lookup_url_kwarg]),
self.request,
display_name='meeting',
)
return meeting
class BaseMeetingView(JSONAPIBaseView, MeetingMixin):
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
)
required_read_scopes = [CoreScopes.MEETINGS_READ]
required_write_scopes = [CoreScopes.NULL]
model = Conference
# This view goes under the _/ namespace
versioning_class = PrivateVersioning
serializer_class = MeetingSerializer
view_category = 'meetings'
class MeetingList(BaseMeetingView, generics.ListAPIView, ListFilterMixin):
view_name = 'meeting-list'
ordering = ('-modified', ) # default ordering
ordering_fields = ('name', 'submissions_count', 'location', 'start_date',)
# overrides ListFilterMixin
def get_default_queryset(self):
tags = Tag.objects.filter(
abstractnode_tagged__is_public=True,
abstractnode_tagged__is_deleted=False,
).annotate(
num_nodes=Count(F('abstractnode_tagged')),
).filter(name=OuterRef('endpoint'))
conferences = Conference.objects.filter(is_meeting=True).annotate(
submissions_count=Subquery(
tags.values('num_nodes')[:1], output_field=IntegerField(),
),
)
return conferences.filter(submissions_count__gte=settings.CONFERENCE_MIN_COUNT)
# overrides ListAPIView
def get_queryset(self):
return self.get_queryset_from_request()
class MeetingDetail(BaseMeetingView, generics.RetrieveAPIView):
view_name = 'meeting-detail'
def get_object(self):
# No minimum submissions count for accessing meeting directly
return self.get_meeting()
class BaseMeetingSubmission(JSONAPIBaseView, MeetingMixin):
permission_classes = (
drf_permissions.IsAuthenticatedOrReadOnly,
base_permissions.TokenHasScope,
IsPublic,
)
required_read_scopes = [CoreScopes.MEETINGS_READ, CoreScopes.NODE_BASE_READ]
required_write_scopes = [CoreScopes.NULL]
model = AbstractNode
# This view goes under the _/ namespace
versioning_class = PrivateVersioning
serializer_class = MeetingSubmissionSerializer
view_category = 'meetings'
def get_serializer_context(self):
context = super(BaseMeetingSubmission, self).get_serializer_context()
context['meeting'] = self.get_meeting()
return context
class MeetingSubmissionList(BaseMeetingSubmission, generics.ListAPIView, ListFilterMixin):
view_name = 'meeting-submissions'
ordering = ('-created', ) # default ordering
ordering_fields = ('title', 'meeting_category', 'author_name', 'download_count', 'created', )
# overrides ListFilterMixin
def get_default_queryset(self):
meeting = self.get_meeting()
return self.annotate_queryset_for_filtering_and_sorting(meeting, meeting.submissions)
# overrides ListAPIView
def get_queryset(self):
return self.get_queryset_from_request()
def build_query_from_field(self, field_name, operation):
if field_name == 'author_name':
if operation['op'] != 'eq':
raise InvalidFilterOperator(value=operation['op'], valid_operators=['eq'])
return Q(author_name__icontains=operation['value'])
if field_name == 'meeting_category':
if operation['op'] != 'eq':
raise InvalidFilterOperator(value=operation['op'], valid_operators=['eq'])
return Q(meeting_category__icontains=operation['value'])
return super(MeetingSubmissionList, self).build_query_from_field(field_name, operation)
def annotate_queryset_for_filtering_and_sorting(self, meeting, queryset):
queryset = self.annotate_queryset_with_meeting_category(meeting, queryset)
queryset = self.annotate_queryset_with_author_name(queryset)
queryset = self.annotate_queryset_with_download_count(queryset)
return queryset
def annotate_queryset_with_meeting_category(self, meeting, queryset):
"""
Annotates queryset with meeting_category - if submission1 tag exists, use this,
otherwise assume default submission2 tag
"""
# Setup meeting category subquery (really existence of certain tags)
category_1 = meeting.field_names.get('submission1', 'poster')
category_2 = meeting.field_names.get('submission2', 'talk')
tag_subquery = Tag.objects.filter(
abstractnode_tagged=OuterRef('pk'),
name=category_1,
).values_list('name', flat=True)
queryset = queryset.annotate(cat_one_count=Count(Subquery(tag_subquery))).annotate(
meeting_category=Case(
When(cat_one_count=1, then=Value(category_1)),
default=Value(category_2),
output_field=CharField(),
),
)
return queryset
def annotate_queryset_with_author_name(self, queryset):
"""
Annotates queryset with author_name_category - it is the family_name if it exists, otherwise,
the fullname is used
"""
# Setup author name subquery (really first bibliographic contributor)
contributors = Contributor.objects.filter(
visible=True,
node_id=OuterRef('pk'),
).order_by('_order')
queryset = queryset.annotate(
author_family_name=Subquery(contributors.values(('user__family_name'))[:1]),
author_full_name=Subquery(contributors.values(('user__fullname'))[:1]),
author_id=Subquery(contributors.values(('user__guids___id'))[:1]),
).annotate(
author_name=Case(
When(author_family_name='', then=F('author_full_name')),
default=F('author_family_name'),
output_field=CharField(),
),
)
return queryset
def annotate_queryset_with_download_count(self, queryset):
"""
Annotates queryset with download count of first osfstorage file
NOTE: This is a brittle way to do this. PageCounter _ids are of the form
<file_action>:<node__id>:<file__id>:<sometimes version>.
- Assumes the "download" file action is the only action with that many letters
- Assumes node and file guids are a consistent length
- ENG-122 would get rid of this string matching behavior
"""
pages = PageCounter.objects.annotate(
node_id=Substr('_id', 10, 5),
file_id=Substr('_id', 16),
_id_length=Length('_id'),
).filter(
_id__icontains='download',
node_id=OuterRef('guids___id'),
file_id=OuterRef('file_id'),
).exclude(_id_length__gt=39)
file_subqs = OsfStorageFile.objects.filter(
target_content_type_id=ContentType.objects.get_for_model(AbstractNode),
target_object_id=OuterRef('pk'),
).order_by('created')
queryset = queryset.annotate(
file_id=Subquery(file_subqs.values('_id')[:1]),
).annotate(
download_count=Coalesce(Subquery(pages.values('total')[:1]), Value(0)),
)
return queryset
class MeetingSubmissionDetail(BaseMeetingSubmission, generics.RetrieveAPIView, NodeMixin):
view_name = 'meeting-submission-detail'
serializer_class = MeetingSubmissionSerializer
node_lookup_url_kwarg = 'submission_id'
def get_object(self):
meeting = self.get_meeting()
node = self.get_node()
# Submission must be associated with the Conference
if meeting.endpoint not in node.tags.values_list('name', flat=True):
raise NotFound('This is not a submission to {}.'.format(meeting.name))
return node
| apache-2.0 | -3,653,973,906,636,123,000 | 36.63786 | 104 | 0.663459 | false |
vijayendrabvs/ssl-neutron | neutron/plugins/ml2/driver_api.py | 1 | 21254 | # Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from abc import ABCMeta, abstractmethod, abstractproperty
import six
# The following keys are used in the segment dictionaries passed via
# the driver API. These are defined separately from similar keys in
# neutron.extensions.providernet so that drivers don't need to change
# if/when providernet moves to the core API.
#
ID = 'id'
NETWORK_TYPE = 'network_type'
PHYSICAL_NETWORK = 'physical_network'
SEGMENTATION_ID = 'segmentation_id'
@six.add_metaclass(ABCMeta)
class TypeDriver(object):
"""Define stable abstract interface for ML2 type drivers.
ML2 type drivers each support a specific network_type for provider
and/or tenant network segments. Type drivers must implement this
abstract interface, which defines the API by which the plugin uses
the driver to manage the persistent type-specific resource
allocation state associated with network segments of that type.
Network segments are represented by segment dictionaries using the
NETWORK_TYPE, PHYSICAL_NETWORK, and SEGMENTATION_ID keys defined
above, corresponding to the provider attributes. Future revisions
of the TypeDriver API may add additional segment dictionary
keys. Attributes not applicable for a particular network_type may
either be excluded or stored as None.
"""
@abstractmethod
def get_type(self):
"""Get driver's network type.
:returns network_type value handled by this driver
"""
pass
@abstractmethod
def initialize(self):
"""Perform driver initialization.
Called after all drivers have been loaded and the database has
been initialized. No abstract methods defined below will be
called prior to this method being called.
"""
pass
@abstractmethod
def validate_provider_segment(self, segment):
"""Validate attributes of a provider network segment.
:param segment: segment dictionary using keys defined above
:raises: neutron.common.exceptions.InvalidInput if invalid
Called outside transaction context to validate the provider
attributes for a provider network segment. Raise InvalidInput
if:
- any required attribute is missing
- any prohibited or unrecognized attribute is present
- any attribute value is not valid
The network_type attribute is present in segment, but
need not be validated.
"""
pass
@abstractmethod
def reserve_provider_segment(self, session, segment):
"""Reserve resource associated with a provider network segment.
:param session: database session
:param segment: segment dictionary using keys defined above
Called inside transaction context on session to reserve the
type-specific resource for a provider network segment. The
segment dictionary passed in was returned by a previous
validate_provider_segment() call.
"""
pass
@abstractmethod
def allocate_tenant_segment(self, session):
"""Allocate resource for a new tenant network segment.
:param session: database session
:returns: segment dictionary using keys defined above
Called inside transaction context on session to allocate a new
tenant network, typically from a type-specific resource
pool. If successful, return a segment dictionary describing
the segment. If tenant network segment cannot be allocated
(i.e. tenant networks not supported or resource pool is
exhausted), return None.
"""
pass
@abstractmethod
def release_segment(self, session, segment):
"""Release network segment.
:param session: database session
:param segment: segment dictionary using keys defined above
Called inside transaction context on session to release a
tenant or provider network's type-specific resource. Runtime
errors are not expected, but raising an exception will result
in rollback of the transaction.
"""
pass
@six.add_metaclass(ABCMeta)
class NetworkContext(object):
"""Context passed to MechanismDrivers for changes to network resources.
A NetworkContext instance wraps a network resource. It provides
helper methods for accessing other relevant information. Results
from expensive operations are cached so that other
MechanismDrivers can freely access the same information.
"""
@abstractproperty
def current(self):
"""Return the current state of the network.
Return the current state of the network, as defined by
NeutronPluginBaseV2.create_network and all extensions in the
ml2 plugin.
"""
pass
@abstractproperty
def original(self):
"""Return the original state of the network.
Return the original state of the network, prior to a call to
update_network. Method is only valid within calls to
update_network_precommit and update_network_postcommit.
"""
pass
@abstractproperty
def network_segments(self):
"""Return the segments associated with this network resource."""
pass
@six.add_metaclass(ABCMeta)
class SubnetContext(object):
"""Context passed to MechanismDrivers for changes to subnet resources.
A SubnetContext instance wraps a subnet resource. It provides
helper methods for accessing other relevant information. Results
from expensive operations are cached so that other
MechanismDrivers can freely access the same information.
"""
@abstractproperty
def current(self):
"""Return the current state of the subnet.
Return the current state of the subnet, as defined by
NeutronPluginBaseV2.create_subnet and all extensions in the
ml2 plugin.
"""
pass
@abstractproperty
def original(self):
"""Return the original state of the subnet.
Return the original state of the subnet, prior to a call to
update_subnet. Method is only valid within calls to
update_subnet_precommit and update_subnet_postcommit.
"""
pass
@six.add_metaclass(ABCMeta)
class PortContext(object):
"""Context passed to MechanismDrivers for changes to port resources.
A PortContext instance wraps a port resource. It provides helper
methods for accessing other relevant information. Results from
expensive operations are cached so that other MechanismDrivers can
freely access the same information.
"""
@abstractproperty
def current(self):
"""Return the current state of the port.
Return the current state of the port, as defined by
NeutronPluginBaseV2.create_port and all extensions in the ml2
plugin.
"""
pass
@abstractproperty
def original(self):
"""Return the original state of the port
Return the original state of the port, prior to a call to
update_port. Method is only valid within calls to
update_port_precommit and update_port_postcommit.
"""
pass
@abstractproperty
def network(self):
"""Return the NetworkContext associated with this port."""
pass
@abstractproperty
def bound_segment(self):
"""Return the currently bound segment dictionary."""
pass
@abstractmethod
def host_agents(self, agent_type):
"""Get agents of the specified type on port's host.
:param agent_type: Agent type identifier
:returns: List of agents_db.Agent records
"""
pass
@abstractmethod
def set_binding(self, segment_id, vif_type, vif_details):
"""Set the binding for the port.
:param segment_id: Network segment bound for the port.
:param vif_type: The VIF type for the bound port.
:param vif_details: Dictionary with details for VIF driver.
Called by MechanismDriver.bind_port to indicate success and
specify binding details to use for port. The segment_id must
identify an item in network.network_segments.
"""
pass
@six.add_metaclass(ABCMeta)
class MechanismDriver(object):
"""Define stable abstract interface for ML2 mechanism drivers.
A mechanism driver is called on the creation, update, and deletion
of networks and ports. For every event, there are two methods that
get called - one within the database transaction (method suffix of
_precommit), one right afterwards (method suffix of _postcommit).
Exceptions raised by methods called inside the transaction can
rollback, but should not make any blocking calls (for example,
REST requests to an outside controller). Methods called after
transaction commits can make blocking external calls, though these
will block the entire process. Exceptions raised in calls after
the transaction commits may cause the associated resource to be
deleted.
Because rollback outside of the transaction is not done in the
update network/port case, all data validation must be done within
methods that are part of the database transaction.
"""
@abstractmethod
def initialize(self):
"""Perform driver initialization.
Called after all drivers have been loaded and the database has
been initialized. No abstract methods defined below will be
called prior to this method being called.
"""
pass
def create_network_precommit(self, context):
"""Allocate resources for a new network.
:param context: NetworkContext instance describing the new
network.
Create a new network, allocating resources as necessary in the
database. Called inside transaction context on session. Call
cannot block. Raising an exception will result in a rollback
of the current transaction.
"""
pass
def create_network_postcommit(self, context):
"""Create a network.
:param context: NetworkContext instance describing the new
network.
Called after the transaction commits. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Raising an exception will
cause the deletion of the resource.
"""
pass
def update_network_precommit(self, context):
"""Update resources of a network.
:param context: NetworkContext instance describing the new
state of the network, as well as the original state prior
to the update_network call.
Update values of a network, updating the associated resources
in the database. Called inside transaction context on session.
Raising an exception will result in rollback of the
transaction.
update_network_precommit is called for all changes to the
network state. It is up to the mechanism driver to ignore
state or state changes that it does not know or care about.
"""
pass
def update_network_postcommit(self, context):
"""Update a network.
:param context: NetworkContext instance describing the new
state of the network, as well as the original state prior
to the update_network call.
Called after the transaction commits. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Raising an exception will
cause the deletion of the resource.
update_network_postcommit is called for all changes to the
network state. It is up to the mechanism driver to ignore
state or state changes that it does not know or care about.
"""
pass
def delete_network_precommit(self, context):
"""Delete resources for a network.
:param context: NetworkContext instance describing the current
state of the network, prior to the call to delete it.
Delete network resources previously allocated by this
mechanism driver for a network. Called inside transaction
context on session. Runtime errors are not expected, but
raising an exception will result in rollback of the
transaction.
"""
pass
def delete_network_postcommit(self, context):
"""Delete a network.
:param context: NetworkContext instance describing the current
state of the network, prior to the call to delete it.
Called after the transaction commits. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Runtime errors are not
expected, and will not prevent the resource from being
deleted.
"""
pass
def create_subnet_precommit(self, context):
"""Allocate resources for a new subnet.
:param context: SubnetContext instance describing the new
subnet.
Create a new subnet, allocating resources as necessary in the
database. Called inside transaction context on session. Call
cannot block. Raising an exception will result in a rollback
of the current transaction.
"""
pass
def create_subnet_postcommit(self, context):
"""Create a subnet.
:param context: SubnetContext instance describing the new
subnet.
Called after the transaction commits. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Raising an exception will
cause the deletion of the resource.
"""
pass
def update_subnet_precommit(self, context):
"""Update resources of a subnet.
:param context: SubnetContext instance describing the new
state of the subnet, as well as the original state prior
to the update_subnet call.
Update values of a subnet, updating the associated resources
in the database. Called inside transaction context on session.
Raising an exception will result in rollback of the
transaction.
update_subnet_precommit is called for all changes to the
subnet state. It is up to the mechanism driver to ignore
state or state changes that it does not know or care about.
"""
pass
def update_subnet_postcommit(self, context):
"""Update a subnet.
:param context: SubnetContext instance describing the new
state of the subnet, as well as the original state prior
to the update_subnet call.
Called after the transaction commits. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Raising an exception will
cause the deletion of the resource.
update_subnet_postcommit is called for all changes to the
subnet state. It is up to the mechanism driver to ignore
state or state changes that it does not know or care about.
"""
pass
def delete_subnet_precommit(self, context):
"""Delete resources for a subnet.
:param context: SubnetContext instance describing the current
state of the subnet, prior to the call to delete it.
Delete subnet resources previously allocated by this
mechanism driver for a subnet. Called inside transaction
context on session. Runtime errors are not expected, but
raising an exception will result in rollback of the
transaction.
"""
pass
def delete_subnet_postcommit(self, context):
"""Delete a subnet.
:param context: SubnetContext instance describing the current
state of the subnet, prior to the call to delete it.
Called after the transaction commits. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Runtime errors are not
expected, and will not prevent the resource from being
deleted.
"""
pass
def create_port_precommit(self, context):
"""Allocate resources for a new port.
:param context: PortContext instance describing the port.
Create a new port, allocating resources as necessary in the
database. Called inside transaction context on session. Call
cannot block. Raising an exception will result in a rollback
of the current transaction.
"""
pass
def create_port_postcommit(self, context):
"""Create a port.
:param context: PortContext instance describing the port.
Called after the transaction completes. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Raising an exception will
result in the deletion of the resource.
"""
pass
def update_port_precommit(self, context):
"""Update resources of a port.
:param context: PortContext instance describing the new
state of the port, as well as the original state prior
to the update_port call.
Called inside transaction context on session to complete a
port update as defined by this mechanism driver. Raising an
exception will result in rollback of the transaction.
update_port_precommit is called for all changes to the port
state. It is up to the mechanism driver to ignore state or
state changes that it does not know or care about.
"""
pass
def update_port_postcommit(self, context):
"""Update a port.
:param context: PortContext instance describing the new
state of the port, as well as the original state prior
to the update_port call.
Called after the transaction completes. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Raising an exception will
result in the deletion of the resource.
update_port_postcommit is called for all changes to the port
state. It is up to the mechanism driver to ignore state or
state changes that it does not know or care about.
"""
pass
def delete_port_precommit(self, context):
"""Delete resources of a port.
:param context: PortContext instance describing the current
state of the port, prior to the call to delete it.
Called inside transaction context on session. Runtime errors
are not expected, but raising an exception will result in
rollback of the transaction.
"""
pass
def delete_port_postcommit(self, context):
"""Delete a port.
:param context: PortContext instance describing the current
state of the port, prior to the call to delete it.
Called after the transaction completes. Call can block, though
will block the entire process so care should be taken to not
drastically affect performance. Runtime errors are not
expected, and will not prevent the resource from being
deleted.
"""
pass
def bind_port(self, context):
"""Attempt to bind a port.
:param context: PortContext instance describing the port
Called inside transaction context on session, prior to
create_port_precommit or update_port_precommit, to
attempt to establish a port binding. If the driver is able to
bind the port, it calls context.set_binding with the binding
details.
"""
pass
def validate_port_binding(self, context):
"""Check whether existing port binding is still valid.
:param context: PortContext instance describing the port
:returns: True if binding is valid, otherwise False
Called inside transaction context on session to validate that
the MechanismDriver's existing binding for the port is still
valid.
"""
return False
def unbind_port(self, context):
"""Undo existing port binding.
:param context: PortContext instance describing the port
Called inside transaction context on session to notify the
MechanismDriver that its existing binding for the port is no
longer valid.
"""
pass
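# Illustrative only (not part of the ML2 API itself): the smallest possible
# mechanism driver satisfying this interface overrides just the single
# abstract method, initialize():
#
#     class NoopMechanismDriver(MechanismDriver):
#         def initialize(self):
#             pass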
| apache-2.0 | -3,205,980,288,567,005,000 | 34.781145 | 78 | 0.682177 | false |
duonys/deep-learning-chainer | dlchainer/SdA.py | 1 | 5225 | #-*- coding: utf-8 -*-
from abc import ABCMeta, abstractmethod
import copy
import numpy as np
from sklearn.base import BaseEstimator, ClassifierMixin, RegressorMixin
from sklearn.externals.six import with_metaclass
from chainer import Variable, FunctionSet, optimizers, cuda
import chainer.functions as F
from .dA import dA
from . import utils
class SdAMixin(with_metaclass(ABCMeta, BaseEstimator)):
"""
Stacked Denoising Autoencoder
References:
http://deeplearning.net/tutorial/SdA.html
https://github.com/pfnet/chainer/blob/master/examples/mnist/train_mnist.py
"""
def __init__(self, n_input, n_hiddens, n_output, noise_levels=None, dropout_ratios=None, do_pretrain=True,
batch_size=100, n_epoch_pretrain=20, n_epoch_finetune=20, optimizer=optimizers.Adam(),
activation_func=F.relu, verbose=False, gpu=-1):
self.n_input = n_input
self.n_hiddens = n_hiddens
self.n_output = n_output
self.do_pretrain = do_pretrain
self.batch_size = batch_size
self.n_epoch_pretrain = n_epoch_pretrain
self.n_epoch_finetune = n_epoch_finetune
self.optimizer = optimizer
self.dAs = \
[dA(self.n_input, self.n_hiddens[0],
self._check_var(noise_levels, 0), self._check_var(dropout_ratios, 0), self.batch_size,
self.n_epoch_pretrain, copy.deepcopy(optimizer),
activation_func, verbose, gpu)] + \
[dA(self.n_hiddens[i], self.n_hiddens[i + 1],
self._check_var(noise_levels, i + 1), self._check_var(dropout_ratios, i + 1), self.batch_size,
self.n_epoch_pretrain, copy.deepcopy(optimizer),
activation_func, verbose, gpu) for i in range(len(n_hiddens) - 1)]
self.verbose = verbose
self.gpu = gpu
def _check_var(self, var, index, default_val=0.0):
return var[index] if var is not None else default_val
def fit(self, X, y):
if self.do_pretrain:
self._pretrain(X)
self._finetune(X, y)
def _pretrain(self, X):
for layer, dA in enumerate(self.dAs):
utils.disp('*** pretrain layer: {} ***'.format(layer + 1), self.verbose)
if layer == 0:
layer_input = X
else:
layer_input = self.dAs[layer - 1].encode(Variable(layer_input), train=False).data
dA.fit(layer_input)
def _finetune(self, X, y):
utils.disp('*** finetune ***', self.verbose)
# construct model and setup optimizer
params = {'l{}'.format(layer + 1): dA.encoder for layer, dA in enumerate(self.dAs)}
params.update({'l{}'.format(len(self.dAs) + 1): F.Linear(self.dAs[-1].n_hidden, self.n_output)})
self.model = FunctionSet(**params)
self.optimizer.setup(self.model)
if self.gpu >= 0:
cuda.get_device(self.gpu).use()
self.model.to_gpu()
xp = cuda.cupy if self.gpu >= 0 else np
n = len(X)
for epoch in range(self.n_epoch_finetune):
utils.disp('epoch: {}'.format(epoch + 1), self.verbose)
perm = np.random.permutation(n)
sum_loss = 0
for i in range(0, n, self.batch_size):
X_batch = xp.asarray(X[perm[i: i + self.batch_size]])
y_batch = xp.asarray(y[perm[i: i + self.batch_size]])
self.optimizer.zero_grads()
y_var = self._forward(X_batch)
loss = self._loss_func(y_var, Variable(y_batch))
loss.backward()
self.optimizer.update()
sum_loss += float(loss.data) * len(X_batch)
utils.disp('fine tune mean loss={}'.format(sum_loss / n), self.verbose)
def _forward(self, X, train=True):
X_var = Variable(X)
output = X_var
for dA in self.dAs:
output = dA.encode(output, train)
y_var = self.model['l{}'.format(len(self.dAs) + 1)](output)
return y_var
@abstractmethod
def _loss_func(self, y_var, t_var):
pass
class SdAClassifier(SdAMixin, ClassifierMixin):
"""
References:
http://scikit-learn.org/stable/developers/#rolling-your-own-estimator
"""
def _loss_func(self, y_var, t_var):
return F.softmax_cross_entropy(y_var, t_var)
def fit(self, X, y):
assert X.dtype == np.float32 and y.dtype == np.int32
super().fit(X, y)
def transform(self, X):
return self._forward(X, train=False).data
def predict(self, X):
return np.apply_along_axis(lambda x: np.argmax(x), arr=self.transform(X), axis=1)
class SdARegressor(SdAMixin, RegressorMixin):
"""
References:
http://scikit-learn.org/stable/developers/#rolling-your-own-estimator
"""
def _loss_func(self, y_var, t_var):
y_var = F.reshape(y_var, [len(y_var)])
return F.mean_squared_error(y_var, t_var)
def fit(self, X, y):
assert X.dtype == np.float32 and y.dtype == np.float32
super().fit(X, y)
def transform(self, X):
return self._forward(X, train=False).data
def predict(self, X):
return self.transform(X)
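# Minimal usage sketch (illustrative; assumes X_train/X_test are float32
# feature arrays, e.g. flattened 28x28 MNIST images, and y_train is int32):
#
#     clf = SdAClassifier(n_input=784, n_hiddens=[500, 250], n_output=10,
#                         noise_levels=[0.1, 0.2], dropout_ratios=[0.2, 0.2],
#                         verbose=True)
#     clf.fit(X_train, y_train)
#     predictions = clf.predict(X_test)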
| mit | -6,527,209,594,631,249,000 | 32.280255 | 110 | 0.587368 | false |
SecPi/SecPi | worker/temperature_sensor.py | 1 | 2642 | from tools.sensor import Sensor
import glob
import logging
import os
import threading
import time
class TemperatureSensor(Sensor): #DS18B20 digital temperature sensor
def __init__(self, id, params, worker):
super(TemperatureSensor, self).__init__(id, params, worker)
#self.active = False
try:
self.min = int(params["min"])
self.max = int(params["max"])
self.bouncetime = int(params["bouncetime"])
self.device_id = params["device_id"]
except ValueError as ve: # if one configuration parameter can't be parsed as int
logging.error("TemperatureSensor: Wasn't able to initialize the sensor, please check your configuration: %s" % ve)
self.corrupted = True
return
except KeyError as ke: # if config parameters are missing
logging.error("TemperatureSensor: Wasn't able to initialize the sensor, it seems there is a config parameter missing: %s" % ke)
self.corrupted = True
return
os.system('modprobe w1-gpio')
os.system('modprobe w1-therm')
base_dir = '/sys/bus/w1/devices/'
#device_folder = glob.glob(base_dir + '28*')[0]
self.device_file = base_dir + self.device_id + '/w1_slave'
if not os.path.isfile(self.device_file): # if there is no slave file which contains the temperature
self.corrupted = True
logging.error("TemperatureSensor: Wasn't able to find temperature file at %s" % self.device_file)
return
logging.debug("TemperatureSensor: Sensor initialized")
def activate(self):
if not self.corrupted:
self.stop_thread = False
self.checker_thread = threading.Thread(name="thread-checker-%s" % self.device_id,
target=self.check_temperature)
self.checker_thread.start()
else:
logging.error("TemperatureSensor: Sensor couldn't be activated")
def deactivate(self):
if not self.corrupted:
self.stop_thread = True
else:
logging.error("TemperatureSensor: Sensor couldn't be deactivated")
def check_temperature(self):
while True:
if self.stop_thread: #exit thread when flag is set
return
current_temp = self.read_temp()
if current_temp < self.min or current_temp > self.max:
self.alarm("Temperature is not in valid range: %s" % current_temp)
time.sleep(self.bouncetime)
continue
time.sleep(3)
def read_temp_raw(self):
f = open(self.device_file, 'r')
lines = f.readlines()
f.close()
return lines
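    # The w1_slave file holds two lines: the first ends in 'YES' once the CRC
    # check passed; the second carries the raw reading after 't=' in
    # thousandths of a degree Celsius.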
def read_temp(self):
lines = self.read_temp_raw()
while lines[0].strip()[-3:] != 'YES':
time.sleep(0.2)
            lines = self.read_temp_raw()
equals_pos = lines[1].find('t=')
if equals_pos != -1:
temp_string = lines[1][equals_pos+2:]
temp_c = float(temp_string) / 1000.00
return temp_c
| gpl-3.0 | 729,324,569,803,099,900 | 30.452381 | 130 | 0.696064 | false |
openstates/openstates | openstates/ct/events.py | 1 | 1993 | import datetime
import json
from pupa.scrape import Scraper, Event
import pytz
from .utils import open_csv
class CTEventScraper(Scraper):
_tz = pytz.timezone("US/Eastern")
def __init__(self, *args, **kwargs):
super(CTEventScraper, self).__init__(*args, **kwargs)
def scrape(self):
for (code, name) in self.get_comm_codes():
yield from self.scrape_committee_events(code, name)
def scrape_committee_events(self, code, name):
events_url = (
"http://www.cga.ct.gov/basin/fullcalendar/commevents.php?"
"comm_code={}".format(code)
)
events_data = self.get(events_url).text
events = json.loads(events_data)
DATETIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
for info in events:
if info["title"] is None:
self.warning("Event found with no title; it will be skipped")
continue
elif info["title"].startswith("CANCELLED:"):
self.info(
"Cancelled event found; it will be skipped: {}".format(
info["title"]
)
)
continue
when = datetime.datetime.strptime(info["start"], DATETIME_FORMAT)
# end = datetime.datetime.strptime(info['end'], DATETIME_FORMAT)
where = "{0} {1}".format(info["building"].strip(), info["location"].strip())
# end_time=self._tz.localize(end),
event = Event(
start_date=self._tz.localize(when),
location_name=where,
name=info["title"],
description=info["title"],
)
event.add_source(events_url)
yield event
def get_comm_codes(self):
url = "ftp://ftp.cga.ct.gov/pub/data/committee.csv"
page = self.get(url)
page = open_csv(page)
return [(row["comm_code"].strip(), row["comm_name"].strip()) for row in page]
| gpl-3.0 | -1,077,329,166,467,352,700 | 31.145161 | 88 | 0.537883 | false |
martinjrobins/hobo | pints/tests/test_opt_cmaes.py | 1 | 5620 | #!/usr/bin/env python3
#
# Tests the basic methods of the CMAES optimiser.
#
# This file is part of PINTS.
# Copyright (c) 2017-2018, University of Oxford.
# For licensing information, see the LICENSE file distributed with the PINTS
# software package.
#
import unittest
import numpy as np
import pints
import pints.toy
from shared import CircularBoundaries
from shared import StreamCapture
debug = False
method = pints.CMAES
# Consistent unit testing in Python 2 and 3
try:
unittest.TestCase.assertRaisesRegex
except AttributeError:
unittest.TestCase.assertRaisesRegex = unittest.TestCase.assertRaisesRegexp
class TestCMAES(unittest.TestCase):
"""
Tests the basic methods of the CMAES optimiser.
"""
def setUp(self):
""" Called before every test """
np.random.seed(1)
def problem(self):
""" Returns a test problem, starting point, sigma, and boundaries. """
r = pints.toy.ParabolicError()
x = [0.1, 0.1]
s = 0.1
b = pints.RectangularBoundaries([-1, -1], [1, 1])
return r, x, s, b
def test_unbounded(self):
# Runs an optimisation without boundaries.
r, x, s, b = self.problem()
opt = pints.OptimisationController(r, x, method=method)
opt.set_threshold(1e-3)
opt.set_log_to_screen(debug)
found_parameters, found_solution = opt.run()
self.assertTrue(found_solution < 1e-3)
def test_bounded(self):
# Runs an optimisation with boundaries.
r, x, s, b = self.problem()
# Rectangular boundaries
b = pints.RectangularBoundaries([-1, -1], [1, 1])
opt = pints.OptimisationController(r, x, boundaries=b, method=method)
opt.set_log_to_screen(debug)
found_parameters, found_solution = opt.run()
self.assertTrue(found_solution < 1e-3)
# Circular boundaries
# Start near edge, to increase chance of out-of-bounds occurring.
b = CircularBoundaries([0, 0], 1)
x = [0.99, 0]
opt = pints.OptimisationController(r, x, boundaries=b, method=method)
opt.set_log_to_screen(debug)
found_parameters, found_solution = opt.run()
self.assertTrue(found_solution < 1e-3)
def test_bounded_and_sigma(self):
# Runs an optimisation without boundaries and sigma.
r, x, s, b = self.problem()
opt = pints.OptimisationController(r, x, s, b, method)
opt.set_threshold(1e-3)
opt.set_log_to_screen(debug)
found_parameters, found_solution = opt.run()
self.assertTrue(found_solution < 1e-3)
@unittest.skip('Newer versions of cma no longer trigger this condition')
def test_stopping_on_ill_conditioned_covariance_matrix(self):
# Tests that ill conditioned covariance matrices are detected.
from scipy.integrate import odeint
        # TODO: A quicker test case for this would be great!
def OnePopControlODE(y, t, p):
a, b, c = p
dydt = np.zeros(y.shape)
k = (a - b) / c * (y[0] + y[1])
dydt[0] = a * y[0] - b * y[0] - k * y[0]
dydt[1] = k * y[0] - b * y[1]
return dydt
class Model(pints.ForwardModel):
def simulate(self, parameters, times):
y0 = [2000000, 0]
solution = odeint(
OnePopControlODE, y0, times, args=(parameters,))
return np.sum(np.array(solution), axis=1)
def n_parameters(self):
return 3
model = Model()
times = [0, 0.5, 2, 4, 8, 24]
values = [2e6, 3.9e6, 3.1e7, 3.7e8, 1.6e9, 1.6e9]
problem = pints.SingleOutputProblem(model, times, values)
score = pints.SumOfSquaresError(problem)
x = [3.42, -0.21, 5e6]
opt = pints.OptimisationController(score, x, method=method)
with StreamCapture() as c:
opt.run()
self.assertTrue('Ill-conditioned covariance matrix' in c.text())
def test_ask_tell(self):
# Tests ask-and-tell related error handling.
r, x, s, b = self.problem()
opt = method(x)
# Stop called when not running
self.assertFalse(opt.stop())
# Best position and score called before run
self.assertEqual(list(opt.xbest()), list(x))
self.assertEqual(opt.fbest(), float('inf'))
# Tell before ask
self.assertRaisesRegex(
Exception, r'ask\(\) not called before tell\(\)', opt.tell, 5)
def test_is_default(self):
# Checks this is the default optimiser.
r, x, s, b = self.problem()
opt = pints.OptimisationController(r, x)
self.assertIsInstance(opt.optimiser(), method)
def test_hyper_parameter_interface(self):
# Tests the hyper parameter interface for this optimiser.
r, x, s, b = self.problem()
opt = pints.OptimisationController(r, x, method=method)
m = opt.optimiser()
self.assertEqual(m.n_hyper_parameters(), 1)
n = m.population_size() + 2
m.set_hyper_parameters([n])
self.assertEqual(m.population_size(), n)
self.assertRaisesRegex(
ValueError, 'at least 1', m.set_hyper_parameters, [0])
def test_name(self):
# Test the name() method.
opt = method(np.array([0, 1.01]))
self.assertIn('CMA-ES', opt.name())
if __name__ == '__main__':
print('Add -v for more debug output')
import sys
if '-v' in sys.argv:
debug = True
import logging
logging.basicConfig(level=logging.DEBUG)
unittest.main()
| bsd-3-clause | -920,075,941,382,228,200 | 32.855422 | 78 | 0.602847 | false |
huvermann/CiscoPhoneDirectory | Cisco79xxPhoneDirectory/PhonebookProvider/PhoneBookProviderBase.py | 1 | 5669 | # -*- coding: utf-8 -*-
from flask import request
from configuration import config, ConfFields
from Cisco79xxPhoneDirectory import app
from flask.views import View
class PhoneBookProviderBase(View):
"""Phonebook provider base class."""
Routing = {}
NameFieldKey = "name"
NumberFieldKey = "number"
PageParamName = "page"
def __init__(self, providerKey):
super(PhoneBookProviderBase, self).__init__()
self._configuration = config[providerKey]
self._providerKey = providerKey
self._host = config.get(ConfFields.ServerAddress)
self._port = config.get(ConfFields.ServerPort)
pass
def dispatch_request(self):
return app.response_class(self.xmlPhoneBook(), mimetype='text/xml')
def providerKey(self):
return self._providerKey
def isEnabled(self):
return self._configuration[ConfFields.Enabled]
def title(self):
return self._configuration[ConfFields.Title]
def menuName(self):
return self._configuration[ConfFields.MenuName]
def host(self):
return self._host
def port(self):
return self._port
def getPhoneBook(self):
"""Returns the phonebook array."""
return []
def xmlPhoneBook(self):
"""
Generates the XML in order to display the phone directory.
"""
page = request.args.get(self.PageParamName)
directory = self.getPhoneBook()
        if page is None or len(page) == 0:
return self.xmlPageList(directory)
else:
return self.phoneBookPage(directory, page)
def phoneBookPage(self, directory, page):
"""Creates a phone book page."""
start, end = self._calculateStartEnd(directory, page)
        xml = '<?xml version="1.0" ?>\n'
xml += "<CiscoIPPhoneDirectory>\n"
xml += "\t<Title>{0}</Title>\n".format(self._configuration[ConfFields.Title])
xml += "\t<Prompt>{0}</Prompt>\n".format(self._configuration[ConfFields.Prompt])
for entry in directory[start:end]:
xml += "\t<DirectoryEntry>\n"
xml += "\t\t<Telephone>{0}</Telephone>\n".format(entry[self.NumberFieldKey])
xml += "\t\t<Name>{0}</Name>\n".format(entry[self.NameFieldKey])
xml += "\t</DirectoryEntry>\n"
xml += "</CiscoIPPhoneDirectory>\n"
return xml
def _getPageSize(self):
"""Returns the page size."""
return config.get("pagesize", 10)
def _calculateStartEnd(self, directory, page):
"""Calculates the start and end index of a page."""
        try:
            pageNum = int(page)
        except (TypeError, ValueError):
            pageNum = 0
count = len(directory)
pageSize = self._getPageSize()
start = pageNum * pageSize
if start > count:
start = count
end = start + pageSize
if end > count:
end = count
return start, end
def _getShortListName(self, directory, pageIndex):
"""Creates a short list entry for a page."""
start, end = self._calculateStartEnd(directory, pageIndex)
firstEntry = directory[start][self.NameFieldKey][0:8]
lastEntry = directory[end-1][self.NameFieldKey][0:8]
result = firstEntry + "... - " + lastEntry+"..."
return result
def xmlPageList(self, directory):
"""Creates a page list."""
pageSize = self._getPageSize()
        pageCount = (len(directory) + pageSize - 1) // pageSize  # round up so a partial last page is not dropped
if pageCount > 1:
result = "<CiscoIPPhoneMenu>"
for i in range(pageCount):
shortName = self._getShortListName(directory, i)
result += "<MenuItem>"
result += "<Name>{0}</Name>".format(shortName)
result += "<URL>http://{0}:{1}/{2}?{3}={4}</URL>".format(self.host(),
self.port(),
self.providerKey(),
self.PageParamName,
i)
result += "</MenuItem>"
result += "<Prompt>Select a page!</Prompt>"
result += "</CiscoIPPhoneMenu>"
return result
else:
return self.phoneBookPage(directory, 0)
def responseHandler(self):
app.response_class(self.xmlPhoneBook(), mimetype='text/xml')
@staticmethod
def routeProvider(application, provider, providerKey):
"""Adds the provider to the routing list"""
#conf = config[providerKey]
instance = provider(providerKey)
PhoneBookProviderBase.Routing[providerKey] = instance
        app.add_url_rule("/" + providerKey, view_func=provider.as_view(providerKey, providerKey))  # second argument is forwarded to __init__ of per-request instances
pass
@staticmethod
def generateMenue(config):
"""Generates the main menu xml."""
HOST = config[ConfFields.ServerAddress]
PORT = config[ConfFields.ServerPort]
result = "<CiscoIPPhoneMenu>"
for prov in PhoneBookProviderBase.Routing:
provider = PhoneBookProviderBase.Routing[prov]
if provider.isEnabled():
result += "<MenuItem>"
result += "<URL>http://{0}:{1}/{2}</URL>".format(HOST, PORT, provider.providerKey())
result += "<Name>{0}</Name>".format(provider.menuName())
result += "</MenuItem>"
result += "<Prompt>{0}</Prompt>".format(config[ConfFields.MenuPrompt])
result += "</CiscoIPPhoneMenu>"
return result
| mit | -3,116,200,379,703,463,000 | 34.43125 | 100 | 0.562004 | false |
openthread/silk | silk/tests/openthread/ot_test_child_table.py | 1 | 6689 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import unittest
from silk.config import wpan_constants as wpan
from silk.node.wpan_node import WpanCredentials
from silk.tools import wpan_table_parser
from silk.utils import process_cleanup
import silk.hw.hw_resource as hwr
import silk.node.fifteen_four_dev_board as ffdb
import silk.tests.testcase as testcase
hwr.global_instance()
CHILD_TABLE_AS_VALMAP_ENTRY = (
"Age",
"AverageRssi",
"ExtAddress",
"FullFunction",
"FullNetworkData",
"LastRssi",
"LinkQualityIn",
"NetworkDataVersion",
"RLOC16",
"RxOnWhenIdle",
"SecureDataRequest",
"Timeout",
)
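# Fields expected for every child entry when the child table is read in
# "value map" (AsValMap) form; test04 verifies each appears once per child.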
class TestChildTable(testcase.TestCase):
@classmethod
def hardware_select(cls):
cls.router = ffdb.ThreadDevBoard()
cls.joiner_list = []
while True:
try:
device = ffdb.ThreadDevBoard()
except Exception:
break
else:
cls.joiner_list.append(device)
print(cls.joiner_list)
@classmethod
@testcase.setup_class_decorator
def setUpClass(cls):
# Check and clean up wpantund process if any left over
process_cleanup.ps_cleanup()
cls.hardware_select()
cls.add_test_device(cls.router)
for end_node in cls.joiner_list:
cls.add_test_device(end_node)
for device in cls.device_list:
device.set_logger(cls.logger)
device.set_up()
cls.network_data = WpanCredentials(network_name="SILK-{0:04X}".format(random.randint(0, 0xffff)),
psk="00112233445566778899aabbccdd{0:04x}".format(random.randint(0, 0xffff)),
channel=random.randint(11, 25),
fabric_id="{0:06x}dead".format(random.randint(0, 0xffffff)))
cls.thread_sniffer_init(cls.network_data.channel)
@classmethod
@testcase.teardown_class_decorator
def tearDownClass(cls):
for device in cls.device_list:
device.tear_down()
@testcase.setup_decorator
def setUp(self):
pass
@testcase.teardown_decorator
def tearDown(self):
pass
@testcase.test_method_decorator
def test01_Pairing(self):
self.router.form(self.network_data, "router")
self.router.permit_join(60 * len(self.joiner_list))
self.wait_for_completion(self.device_list)
self.logger.info(self.router.ip6_lla)
self.logger.info(self.router.ip6_thread_ula)
self.network_data.xpanid = self.router.xpanid
self.network_data.panid = self.router.panid
for end_node in self.joiner_list[:-1]:
end_node.join(self.network_data, "sleepy-end-device")
end_node.set_sleep_poll_interval(100)
self.wait_for_completion([end_node])
self.wait_for_completion(self.device_list)
# The last one will join in as end-device
self.joiner_list[-1].join(self.network_data, "end-node")
self.wait_for_completion(self.device_list)
for end_node in self.joiner_list:
self.logger.info(end_node.ip6_lla)
self.logger.info(end_node.ip6_thread_ula)
ret = self.router.wpanctl("get", "status", 2)
print(ret)
for end_node in self.joiner_list:
ret = end_node.wpanctl("get", "status", 2)
print(ret)
@testcase.test_method_decorator
def test02_Verify_ChildTable(self):
child_table = self.router.wpanctl("get", "get " + wpan.WPAN_THREAD_CHILD_TABLE, 2)
child_table = wpan_table_parser.parse_child_table_result(child_table)
print(child_table)
self.assertEqual(len(child_table), len(self.joiner_list))
counter = 0
for i, child in enumerate(self.joiner_list):
ext_addr = child.getprop(wpan.WPAN_EXT_ADDRESS)[1:-1]
for entry in child_table:
if entry.ext_address == ext_addr:
self.assertEqual(int(entry.rloc16, 16), int(child.getprop(wpan.WPAN_THREAD_RLOC16), 16))
self.assertEqual(int(entry.timeout), int(child.getprop(wpan.WPAN_THREAD_CHILD_TIMEOUT)))
if i == len(self.joiner_list) - 1:
self.assertTrue(entry.is_ftd())
self.assertTrue(entry.is_rx_on_when_idle())
else:
self.assertFalse(entry.is_ftd())
self.assertFalse(entry.is_rx_on_when_idle())
counter += 1
missing_entry = len(self.joiner_list) - counter
self.assertEqual(missing_entry, 0, "Missing {} child entry in Child table".format(str(missing_entry)))
@testcase.test_method_decorator
def test03_Verify_ChildTableAddress(self):
child_addr_table = self.router.wpanctl("get", "get " + wpan.WPAN_THREAD_CHILD_TABLE_ADDRESSES, 2)
child_addr_table = wpan_table_parser.parse_child_table_address_result(child_addr_table)
print(child_addr_table)
self.assertEqual(len(child_addr_table), len(self.joiner_list))
counter = 0
for child in self.joiner_list:
ext_addr = child.getprop(wpan.WPAN_EXT_ADDRESS)[1:-1]
for entry in child_addr_table:
if entry.ext_address == ext_addr:
self.assertEqual(int(entry.rloc16, 16), int(child.getprop(wpan.WPAN_THREAD_RLOC16), 16))
counter += 1
missing_entry = len(self.joiner_list) - counter
self.assertEqual(missing_entry, 0, "Missing {} child entry in Child table".format(str(missing_entry)))
@testcase.test_method_decorator
def test04_Verify_ChildTable_AsValMap(self):
child_table = self.router.wpanctl("get", "get " + wpan.WPAN_THREAD_CHILD_TABLE_ASVALMAP, 2)
print(child_table)
total_child_table_entry = len(self.joiner_list)
for item in CHILD_TABLE_AS_VALMAP_ENTRY:
self.assertEqual(child_table.count(item), total_child_table_entry)
if __name__ == "__main__":
unittest.main()
| apache-2.0 | 7,680,028,345,725,509,000 | 33.658031 | 119 | 0.622066 | false |
frink182/stevostat | pir.py | 1 | 1322 | #!/usr/bin/env python
from time import sleep
from time import strftime
import RPi.GPIO as GPIO
import os
import paho.mqtt.publish as publish
import paho.mqtt.client as mqtt
from datetime import datetime
PIR=26
SCREEN_TIMEOUT=300
SCREEN='/sys/class/backlight/rpi_backlight/bl_power'
ON=0
OFF=1
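# Writing 0 to bl_power switches the official touchscreen backlight on;
# writing 1 switches it off (hence ON=0 / OFF=1 above).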
TOPIC="presence/PIR"
GPIO.setmode(GPIO.BCM)
GPIO.setup(PIR, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
def my_callback(channel):
sleep(0.5) # confirm the movement by waiting 0.5 sec
if GPIO.input(PIR): # and check again the input
publishMqtt()
screenOn()
# stop detection for a while
GPIO.remove_event_detect(PIR)
sleep(60)
GPIO.add_event_detect(PIR, GPIO.RISING, callback=my_callback, bouncetime=300)
GPIO.add_event_detect(PIR, GPIO.RISING, callback=my_callback, bouncetime=300)
def publishMqtt():
message = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
publish.single(TOPIC, message, qos=0, retain=True, hostname="slug")
def screenOn():
toggleScreen(ON)
def screenOff():
toggleScreen(OFF)
def toggleScreen(value):
    with open(SCREEN, 'r') as f:
        current_status = int(f.read(1))
if current_status != value:
# print strftime("%d %b %H:%M:%S") + " toggle screen to " + str(value)
os.system("echo " + str(value) + " > " + SCREEN)
# you can continue doing other stuff here
while True:
sleep(60)
| gpl-2.0 | -4,928,873,503,737,569,000 | 23.036364 | 79 | 0.712557 | false |
rane-hs/fabric-py3 | tests/Python26SocketServer.py | 1 | 22074 | """Generic socket server classes.
This module tries to capture the various aspects of defining a server:
For socket-based servers:
- address family:
- AF_INET{,6}: IP (Internet Protocol) sockets (default)
- AF_UNIX: Unix domain sockets
        - others, e.g. AF_DECNET are conceivable (see <socket.h>)
- socket type:
- SOCK_STREAM (reliable stream, e.g. TCP)
- SOCK_DGRAM (datagrams, e.g. UDP)
For request-based servers (including socket-based):
- client address verification before further looking at the request
(This is actually a hook for any processing that needs to look
at the request before anything else, e.g. logging)
- how to handle multiple requests:
- synchronous (one request is handled at a time)
- forking (each request is handled by a new process)
- threading (each request is handled by a new thread)
The classes in this module favor the server type that is simplest to
write: a synchronous TCP/IP server. This is bad class design, but
it saves some typing. (There's also the issue that a deep class hierarchy
slows down method lookups.)
There are five classes in an inheritance diagram, four of which represent
synchronous servers of four types:
+------------+
| BaseServer |
+------------+
|
v
+-----------+ +------------------+
| TCPServer |------->| UnixStreamServer |
+-----------+ +------------------+
|
v
+-----------+ +--------------------+
| UDPServer |------->| UnixDatagramServer |
+-----------+ +--------------------+
Note that UnixDatagramServer derives from UDPServer, not from
UnixStreamServer -- the only difference between an IP and a Unix
stream server is the address family, which is simply repeated in both
unix server classes.
Forking and threading versions of each type of server can be created
using the ForkingMixIn and ThreadingMixIn mix-in classes. For
instance, a threading UDP server class is created as follows:
class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
The Mix-in class must come first, since it overrides a method defined
in UDPServer! Setting the various member variables also changes
the behavior of the underlying server mechanism.
To implement a service, you must derive a class from
BaseRequestHandler and redefine its handle() method. You can then run
various versions of the service by combining one of the server classes
with your request handler class.
The request handler class must be different for datagram or stream
services. This can be hidden by using the request handler
subclasses StreamRequestHandler or DatagramRequestHandler.
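For instance, a minimal echo service might look like this (sketch only;
the host and port are arbitrary):

    class EchoHandler(StreamRequestHandler):
        def handle(self):
            # read one line from the client and send it straight back
            self.wfile.write(self.rfile.readline())

    TCPServer(('', 8000), EchoHandler).serve_forever()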
Of course, you still have to use your head!
For instance, it makes no sense to use a forking server if the service
contains state in memory that can be modified by requests (since the
modifications in the child process would never reach the initial state
kept in the parent process and passed to each child). In this case,
you can use a threading server, but you will probably have to use
locks to avoid two requests that come in nearly simultaneously from
applying conflicting changes to the server state.
On the other hand, if you are building e.g. an HTTP server, where all
data is stored externally (e.g. in the file system), a synchronous
class will essentially render the service "deaf" while one request is
being handled -- which may be for a very long time if a client is slow
to reqd all the data it has requested. Here a threading or forking
server is appropriate.
In some cases, it may be appropriate to process part of a request
synchronously, but to finish processing in a forked child depending on
the request data. This can be implemented by using a synchronous
server and doing an explicit fork in the request handler class
handle() method.
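A sketch of that pattern (illustrative only; error handling omitted):

    class PartlyForkingHandler(StreamRequestHandler):
        def handle(self):
            header = self.rfile.readline()  # synchronous part
            if os.fork() == 0:
                # child process finishes the expensive work
                os._exit(0)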
Another approach to handling multiple simultaneous requests in an
environment that supports neither threads nor fork (or where these are
too expensive or inappropriate for the service) is to maintain an
explicit table of partially finished requests and to use select() to
decide which request to work on next (or whether to handle a new
incoming request). This is particularly important for stream services
where each client can potentially be connected for a long time (if
threads or subprocesses cannot be used).
Future work:
- Standard classes for Sun RPC (which uses either UDP or TCP)
- Standard mix-in classes to implement various authentication
and encryption schemes
- Standard framework for select-based multiplexing
XXX Open problems:
- What to do with out-of-band data?
BaseServer:
- split generic "request" functionality out into BaseServer class.
Copyright (C) 2000 Luke Kenneth Casson Leighton <[email protected]>
example: read entries from a SQL database (requires overriding
get_request() to return a table entry from the database).
entry is processed by a RequestHandlerClass.
"""
# This file copyright (c) 2001-2015 Python Software Foundation; All Rights Reserved
# Author of the BaseServer patch: Luke Kenneth Casson Leighton
# XXX Warning!
# There is a test suite for this module, but it cannot be run by the
# standard regression test.
# To run it manually, run Lib/test/test_socketserver.py.
__version__ = "0.4"
import socket
import select
import sys
import os
try:
import threading
except ImportError:
import dummy_threading as threading
__all__ = ["TCPServer", "UDPServer", "ForkingUDPServer", "ForkingTCPServer",
"ThreadingUDPServer", "ThreadingTCPServer", "BaseRequestHandler",
"StreamRequestHandler", "DatagramRequestHandler",
"ThreadingMixIn", "ForkingMixIn"]
if hasattr(socket, "AF_UNIX"):
__all__.extend(["UnixStreamServer", "UnixDatagramServer",
"ThreadingUnixStreamServer",
"ThreadingUnixDatagramServer"])
class BaseServer:
"""Base class for server classes.
Methods for the caller:
- __init__(server_address, RequestHandlerClass)
- serve_forever(poll_interval=0.5)
- shutdown()
- handle_request() # if you do not use serve_forever()
- fileno() -> int # for select()
Methods that may be overridden:
- server_bind()
- server_activate()
- get_request() -> request, client_address
- handle_timeout()
- verify_request(request, client_address)
- server_close()
- process_request(request, client_address)
- close_request(request)
- handle_error()
Methods for derived classes:
- finish_request(request, client_address)
Class variables that may be overridden by derived classes or
instances:
- timeout
- address_family
- socket_type
- allow_reuse_address
Instance variables:
- RequestHandlerClass
- socket
"""
timeout = None
def __init__(self, server_address, RequestHandlerClass):
"""Constructor. May be extended, do not override."""
self.server_address = server_address
self.RequestHandlerClass = RequestHandlerClass
self.__is_shut_down = threading.Event()
self.__serving = False
def server_activate(self):
"""Called by constructor to activate the server.
May be overridden.
"""
pass
def serve_forever(self, poll_interval=0.5):
"""Handle one request at a time until shutdown.
Polls for shutdown every poll_interval seconds. Ignores
self.timeout. If you need to do periodic tasks, do them in
another thread.
"""
self.__serving = True
self.__is_shut_down.clear()
while self.__serving:
# XXX: Consider using another file descriptor or
# connecting to the socket to wake this up instead of
# polling. Polling reduces our responsiveness to a
# shutdown request and wastes cpu at all other times.
r, w, e = select.select([self], [], [], poll_interval)
if r:
self._handle_request_noblock()
self.__is_shut_down.set()
def shutdown(self):
"""Stops the serve_forever loop.
Blocks until the loop has finished. This must be called while
serve_forever() is running in another thread, or it will
deadlock.
"""
self.__serving = False
self.__is_shut_down.wait()
# The distinction between handling, getting, processing and
# finishing a request is fairly arbitrary. Remember:
#
# - handle_request() is the top-level call. It calls
# select, get_request(), verify_request() and process_request()
# - get_request() is different for stream or datagram sockets
# - process_request() is the place that may fork a new process
# or create a new thread to finish the request
# - finish_request() instantiates the request handler class;
# this constructor will handle the request all by itself
def handle_request(self):
"""Handle one request, possibly blocking.
Respects self.timeout.
"""
# Support people who used socket.settimeout() to escape
# handle_request before self.timeout was available.
timeout = self.socket.gettimeout()
if timeout is None:
timeout = self.timeout
elif self.timeout is not None:
timeout = min(timeout, self.timeout)
fd_sets = select.select([self], [], [], timeout)
if not fd_sets[0]:
self.handle_timeout()
return
self._handle_request_noblock()
def _handle_request_noblock(self):
"""Handle one request, without blocking.
I assume that select.select has returned that the socket is
readable before this function was called, so there should be
no risk of blocking in get_request().
"""
try:
request, client_address = self.get_request()
except socket.error:
return
if self.verify_request(request, client_address):
try:
self.process_request(request, client_address)
except:
self.handle_error(request, client_address)
self.close_request(request)
def handle_timeout(self):
"""Called if no new request arrives within self.timeout.
Overridden by ForkingMixIn.
"""
pass
def verify_request(self, request, client_address):
"""Verify the request. May be overridden.
Return True if we should proceed with this request.
"""
return True
def process_request(self, request, client_address):
"""Call finish_request.
Overridden by ForkingMixIn and ThreadingMixIn.
"""
self.finish_request(request, client_address)
self.close_request(request)
def server_close(self):
"""Called to clean-up the server.
May be overridden.
"""
pass
def finish_request(self, request, client_address):
"""Finish one request by instantiating RequestHandlerClass."""
self.RequestHandlerClass(request, client_address, self)
def close_request(self, request):
"""Called to clean up an individual request."""
pass
def handle_error(self, request, client_address):
"""Handle an error gracefully. May be overridden.
The default is to print a traceback and continue.
"""
        print('-' * 40)
        print('Exception happened during processing of request from %s' % (client_address,))
        import traceback
        traceback.print_exc()  # XXX But this goes to stderr!
        print('-' * 40)
class TCPServer(BaseServer):
"""Base class for various socket-based server classes.
Defaults to synchronous IP stream (i.e., TCP).
Methods for the caller:
- __init__(server_address, RequestHandlerClass, bind_and_activate=True)
- serve_forever(poll_interval=0.5)
- shutdown()
- handle_request() # if you don't use serve_forever()
- fileno() -> int # for select()
Methods that may be overridden:
- server_bind()
- server_activate()
- get_request() -> request, client_address
- handle_timeout()
- verify_request(request, client_address)
- process_request(request, client_address)
- close_request(request)
- handle_error()
Methods for derived classes:
- finish_request(request, client_address)
Class variables that may be overridden by derived classes or
instances:
- timeout
- address_family
- socket_type
- request_queue_size (only for stream sockets)
- allow_reuse_address
Instance variables:
- server_address
- RequestHandlerClass
- socket
"""
address_family = socket.AF_INET
socket_type = socket.SOCK_STREAM
request_queue_size = 5
allow_reuse_address = False
def __init__(self, server_address, RequestHandlerClass,
bind_and_activate=True):
"""Constructor. May be extended, do not override."""
BaseServer.__init__(self, server_address, RequestHandlerClass)
self.socket = socket.socket(self.address_family,
self.socket_type)
if bind_and_activate:
self.server_bind()
self.server_activate()
def server_bind(self):
"""Called by constructor to bind the socket.
May be overridden.
"""
if self.allow_reuse_address:
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind(self.server_address)
self.server_address = self.socket.getsockname()
def server_activate(self):
"""Called by constructor to activate the server.
May be overridden.
"""
self.socket.listen(self.request_queue_size)
def server_close(self):
"""Called to clean-up the server.
May be overridden.
"""
self.socket.close()
def fileno(self):
"""Return socket file number.
Interface required by select().
"""
return self.socket.fileno()
def get_request(self):
"""Get the request and client address from the socket.
May be overridden.
"""
return self.socket.accept()
def close_request(self, request):
"""Called to clean up an individual request."""
request.close()
class UDPServer(TCPServer):
"""UDP server class."""
allow_reuse_address = False
socket_type = socket.SOCK_DGRAM
max_packet_size = 8192
def get_request(self):
data, client_addr = self.socket.recvfrom(self.max_packet_size)
return (data, self.socket), client_addr
def server_activate(self):
# No need to call listen() for UDP.
pass
def close_request(self, request):
# No need to close anything.
pass
class ForkingMixIn:
"""Mix-in class to handle each request in a new process."""
timeout = 300
active_children = None
max_children = 40
def collect_children(self):
"""Internal routine to wait for children that have exited."""
if self.active_children is None:
return
while len(self.active_children) >= self.max_children:
# XXX: This will wait for any child process, not just ones
# spawned by this library. This could confuse other
# libraries that expect to be able to wait for their own
# children.
try:
pid, status = os.waitpid(0, 0)
except os.error:
pid = None
if pid not in self.active_children:
continue
self.active_children.remove(pid)
# XXX: This loop runs more system calls than it ought
# to. There should be a way to put the active_children into a
# process group and then use os.waitpid(-pgid) to wait for any
# of that set, but I couldn't find a way to allocate pgids
# that couldn't collide.
for child in self.active_children:
try:
pid, status = os.waitpid(child, os.WNOHANG)
except os.error:
pid = None
if not pid:
continue
try:
self.active_children.remove(pid)
except ValueError as e:
                raise ValueError('%s. x=%d and list=%r' %
                                 (e, pid, self.active_children))
def handle_timeout(self):
"""Wait for zombies after self.timeout seconds of inactivity.
May be extended, do not override.
"""
self.collect_children()
def process_request(self, request, client_address):
"""Fork a new subprocess to process the request."""
self.collect_children()
pid = os.fork()
if pid:
# Parent process
if self.active_children is None:
self.active_children = []
self.active_children.append(pid)
self.close_request(request)
return
else:
# Child process.
# This must never return, hence os._exit()!
try:
self.finish_request(request, client_address)
os._exit(0)
except:
try:
self.handle_error(request, client_address)
finally:
os._exit(1)
class ThreadingMixIn:
"""Mix-in class to handle each request in a new thread."""
# Decides how threads will act upon termination of the
# main process
daemon_threads = False
def process_request_thread(self, request, client_address):
"""Same as in BaseServer but as a thread.
In addition, exception handling is done here.
"""
try:
self.finish_request(request, client_address)
self.close_request(request)
except:
self.handle_error(request, client_address)
self.close_request(request)
def process_request(self, request, client_address):
"""Start a new thread to process the request."""
t = threading.Thread(target=self.process_request_thread,
args=(request, client_address))
if self.daemon_threads:
            t.daemon = True
t.start()
class ForkingUDPServer(ForkingMixIn, UDPServer):
pass
class ForkingTCPServer(ForkingMixIn, TCPServer):
pass
class ThreadingUDPServer(ThreadingMixIn, UDPServer):
pass
class ThreadingTCPServer(ThreadingMixIn, TCPServer):
pass
if hasattr(socket, 'AF_UNIX'):
class UnixStreamServer(TCPServer):
address_family = socket.AF_UNIX
class UnixDatagramServer(UDPServer):
address_family = socket.AF_UNIX
class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer):
pass
class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer):
pass
class BaseRequestHandler:
"""Base class for request handler classes.
This class is instantiated for each request to be handled. The
constructor sets the instance variables request, client_address
and server, and then calls the handle() method. To implement a
specific service, all you need to do is to derive a class which
defines a handle() method.
The handle() method can find the request as self.request, the
client address as self.client_address, and the server (in case it
needs access to per-server information) as self.server. Since a
separate instance is created for each request, the handle() method
    can define arbitrary other instance variables.
"""
def __init__(self, request, client_address, server):
self.request = request
self.client_address = client_address
self.server = server
try:
self.setup()
self.handle()
self.finish()
finally:
            pass  # Python 3: sys.exc_info() is not assignable, and no cleanup is needed
def setup(self):
pass
def handle(self):
pass
def finish(self):
pass
# The following two classes make it possible to use the same service
# class for stream or datagram servers.
# Each class sets up these instance variables:
# - rfile: a file object from which the request is read
# - wfile: a file object to which the reply is written
# When the handle() method returns, wfile is flushed properly
class StreamRequestHandler(BaseRequestHandler):
"""Define self.rfile and self.wfile for stream sockets."""
# Default buffer sizes for rfile, wfile.
# We default rfile to buffered because otherwise it could be
# really slow for large data (a getc() call per byte); we make
# wfile unbuffered because (a) often after a write() we want to
# read and we need to flush the line; (b) big writes to unbuffered
# files are typically optimized by stdio even when big reads
# aren't.
rbufsize = -1
wbufsize = 0
def setup(self):
self.connection = self.request
self.rfile = self.connection.makefile('rb', self.rbufsize)
self.wfile = self.connection.makefile('wb', self.wbufsize)
def finish(self):
if not self.wfile.closed:
self.wfile.flush()
self.wfile.close()
self.rfile.close()
class DatagramRequestHandler(BaseRequestHandler):
# XXX Regrettably, I cannot get this working on Linux;
# s.recvfrom() doesn't return a meaningful client address.
"""Define self.rfile and self.wfile for datagram sockets."""
    def setup(self):
        # datagram payloads arrive as bytes, so buffer them with BytesIO
        from io import BytesIO
        self.packet, self.socket = self.request
        self.rfile = BytesIO(self.packet)
        self.wfile = BytesIO()
def finish(self):
self.socket.sendto(self.wfile.getvalue(), self.client_address)
| bsd-2-clause | 8,849,255,045,800,621,000 | 30.670014 | 94 | 0.642339 | false |
StefGou/Kijiji-Repost-Headless | kijiji_repost_headless/kijiji_cmd.py | 1 | 4487 | import argparse
import os
import sys
from time import sleep
import kijiji_api
if sys.version_info < (3, 0):
raise Exception("This program requires Python 3.0 or greater")
def main():
    ##Start here
    #Takes: config(user/pass)
    #One of:
    #post adPostingFile
    #folder adFolder
    #repost_folder adFolder
    #show
    #delete adId
    #nuke
    #repost adPostingFile
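    #Example invocations (credentials and file paths are placeholders):
    #  python kijiji_cmd.py -u me -p secret post ad.inf
    #  python kijiji_cmd.py -u me -p secret show
    #  python kijiji_cmd.py folder my_ad_folder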
parser = argparse.ArgumentParser(
description="Post ads on Kijiji")
parser.add_argument('-u', '--username', help='username of your kijiji account')
parser.add_argument('-p', '--password', help='password of your kijiji account')
    subparsers = parser.add_subparsers(help='sub-command help')
postParser = subparsers.add_parser('post', help='post a new ad')
postParser.add_argument('inf_file', type=str, help='.inf file containing posting details')
postParser.set_defaults(function=post_ad)
folderParser = subparsers.add_parser('folder', help='post ad from folder')
folderParser.add_argument('folderName', type=str, help='folder containing ad details')
folderParser.set_defaults(function=post_folder)
repostFolderParser = subparsers.add_parser('repost_folder', help='post ad from folder')
repostFolderParser.add_argument('folderName', type=str, help='folder containing ad details')
repostFolderParser.set_defaults(function=repost_folder)
showParser = subparsers.add_parser('show', help='show currently listed ads')
showParser.set_defaults(function=show_ads)
deleteParser = subparsers.add_parser('delete', help='delete a listed ad')
deleteParser.add_argument('id',type=str, help='id of the ad you wish to delete')
deleteParser.set_defaults(function=delete_ad)
nukeParser = subparsers.add_parser('nuke', help='delete all ads')
nukeParser.set_defaults(function=nuke)
repostParser = subparsers.add_parser('repost', help='repost an existing ad')
repostParser.add_argument('inf_file', type = str,help = '.inf file containing posting details')
repostParser.set_defaults(function=repost_ad)
args = parser.parse_args()
    if hasattr(args, 'function'):
        args.function(args)
    else:
        # no sub-command was given on the command line
        parser.print_help()
#HELPER FUNCTIONS
def get_folder_data(args):
args.inf_file = "item.inf"
cred_file = args.folderName+"/login.inf"
    with open(cred_file, 'r') as f:
        creds = [line.strip() for line in f]
args.username = creds[0]
args.password = creds[1]
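#item.inf files are plain "key=value" lines; the keys this script relies on
#elsewhere are postAdForm.title (the ad title) and imageCsv (a comma-separated
#list of image paths), e.g.:
#  postAdForm.title=My ad title
#  imageCsv=pic1.jpg,pic2.jpg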
def get_inf_details(inf_file):
    with open(inf_file, 'rt') as infFileLines:
        # split on the first '=' only, so values may themselves contain '='
        data = {key: val for line in infFileLines if line.strip()
                for (key, val) in (line.strip().split("=", 1),)}
files = [open(picture, 'rb').read() for picture in data['imageCsv'].split(",")]
return [data, files]
##Actual Functions called from main
def post_folder(args):
get_folder_data(args)
os.chdir(args.folderName)
post_ad(args)
def post_ad(args):
[data, imageFiles] = get_inf_details(args.inf_file)
api = kijiji_api.KijijiApi()
api.login(args.username, args.password)
api.post_ad_using_data(data, imageFiles)
def show_ads(args):
api = kijiji_api.KijijiApi()
api.login(args.username, args.password)
[print("{} '{}'".format(adId, adName)) for adName, adId in api.get_all_ads()]
def delete_ad(args):
api = kijiji_api.KijijiApi()
api.login(args.username, args.password)
api.delete_ad(args.id)
def delete_ad_using_title(name):
api = kijiji_api.KijijiApi()
api.delete_ad_using_title(name)
#Try to delete ad with same name if possible
#post new ad
def repost_ad(args):
api = kijiji_api.KijijiApi()
api.login(args.username, args.password)
delAdName = ""
    with open(args.inf_file, 'rt') as f:
        for line in f:
            key, _, val = line.strip().partition("=")
            if key == 'postAdForm.title':
                delAdName = val
try:
api.delete_ad_using_title(delAdName)
print("Existing ad deleted before reposting")
except kijiji_api.DeleteAdException:
print("Did not find an existing ad with matching title, skipping ad deletion")
pass
# Must wait a bit before posting the same ad even after deleting it, otherwise Kijiji will automatically remove it
sleep(180)
post_ad(args)
def repost_folder(args):
get_folder_data(args)
os.chdir(args.folderName)
repost_ad(args)
def nuke(args):
api = kijiji_api.KijijiApi()
api.login(args.username, args.password)
allAds = api.get_all_ads()
[api.delete_ad(adId) for adName, adId in allAds]
if __name__ == "__main__":
main()
| mit | -5,459,380,662,855,144,000 | 32.485075 | 118 | 0.682416 | false |
Ophiuchus1312/enigma2-master | Navigation.py | 1 | 7217 | from enigma import eServiceCenter, eServiceReference, eTimer, pNavigation, getBestPlayableServiceReference, iPlayableService
from Components.ParentalControl import parentalControl
from Components.config import config
from Tools.BoundFunction import boundFunction
from Tools.StbHardware import setFPWakeuptime, getFPWakeuptime, getFPWasTimerWakeup
from time import time
import RecordTimer
import PowerTimer
import Screens.Standby
import NavigationInstance
import ServiceReference
from Screens.InfoBar import InfoBar, MoviePlayer
from os import path
# TODO: remove pNavigation, eNavigation and rewrite this stuff in python.
class Navigation:
def __init__(self, nextRecordTimerAfterEventActionAuto=False, nextPowerManagerAfterEventActionAuto=False):
if NavigationInstance.instance is not None:
raise NavigationInstance.instance
NavigationInstance.instance = self
self.ServiceHandler = eServiceCenter.getInstance()
import Navigation as Nav
Nav.navcore = self
self.pnav = pNavigation()
self.pnav.m_event.get().append(self.dispatchEvent)
self.pnav.m_record_event.get().append(self.dispatchRecordEvent)
self.event = [ ]
self.record_event = [ ]
self.currentlyPlayingServiceReference = None
self.currentlyPlayingServiceOrGroup = None
self.currentlyPlayingService = None
self.RecordTimer = RecordTimer.RecordTimer()
self.PowerTimer = PowerTimer.PowerTimer()
self.__wasTimerWakeup = False
self.__wasRecTimerWakeup = False
self.__wasPowerTimerWakeup = False
if getFPWasTimerWakeup():
self.__wasTimerWakeup = True
if nextRecordTimerAfterEventActionAuto and abs(self.RecordTimer.getNextRecordingTime() - time()) <= 360:
self.__wasRecTimerWakeup = True
print 'RECTIMER: wakeup to standby detected.'
f = open("/tmp/was_rectimer_wakeup", "w")
f.write('1')
f.close()
# as we woke the box to record, place the box in standby.
self.standbytimer = eTimer()
self.standbytimer.callback.append(self.gotostandby)
self.standbytimer.start(15000, True)
elif nextPowerManagerAfterEventActionAuto:
self.__wasPowerTimerWakeup = True
print 'POWERTIMER: wakeup to standby detected.'
f = open("/tmp/was_powertimer_wakeup", "w")
f.write('1')
f.close()
# as a PowerTimer WakeToStandby was actiond to it.
self.standbytimer = eTimer()
self.standbytimer.callback.append(self.gotostandby)
self.standbytimer.start(15000, True)
def wasTimerWakeup(self):
return self.__wasTimerWakeup
def wasRecTimerWakeup(self):
return self.__wasRecTimerWakeup
def wasPowerTimerWakeup(self):
return self.__wasPowerTimerWakeup
def gotostandby(self):
print 'TIMER: now entering standby'
from Tools import Notifications
Notifications.AddNotification(Screens.Standby.Standby)
def dispatchEvent(self, i):
for x in self.event:
x(i)
if i == iPlayableService.evEnd:
self.currentlyPlayingServiceReference = None
self.currentlyPlayingServiceOrGroup = None
self.currentlyPlayingService = None
def dispatchRecordEvent(self, rec_service, event):
# print "record_event", rec_service, event
for x in self.record_event:
x(rec_service, event)
def playService(self, ref, checkParentalControl = True, forceRestart = False):
oldref = self.currentlyPlayingServiceReference
if ref and oldref and ref == oldref and not forceRestart:
print "ignore request to play already running service(1)"
return 0
print "playing", ref and ref.toString()
if path.exists("/proc/stb/lcd/symbol_signal") and config.lcd.mode.getValue() == '1':
try:
if ref.toString().find('0:0:0:0:0:0:0:0:0') == -1:
signal = 1
else:
signal = 0
f = open("/proc/stb/lcd/symbol_signal", "w")
f.write(str(signal))
f.close()
except:
f = open("/proc/stb/lcd/symbol_signal", "w")
f.write("0")
f.close()
elif path.exists("/proc/stb/lcd/symbol_signal") and config.lcd.mode.getValue() == '0':
f = open("/proc/stb/lcd/symbol_signal", "w")
f.write("0")
f.close()
if ref is None:
self.stopService()
return 0
InfoBarInstance = InfoBar.instance
if not checkParentalControl or parentalControl.isServicePlayable(ref, boundFunction(self.playService, checkParentalControl = False)):
if ref.flags & eServiceReference.isGroup:
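				# a group reference holds alternative services; resolve it to the
				# best playable member before tuning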
if not oldref:
oldref = eServiceReference()
playref = getBestPlayableServiceReference(ref, oldref)
print "playref", playref
if playref and oldref and playref == oldref and not forceRestart:
print "ignore request to play already running service(2)"
return 0
if not playref or (checkParentalControl and not parentalControl.isServicePlayable(playref, boundFunction(self.playService, checkParentalControl = False))):
self.stopService()
return 0
else:
playref = ref
if self.pnav:
self.pnav.stopService()
self.currentlyPlayingServiceReference = playref
self.currentlyPlayingServiceOrGroup = ref
if InfoBarInstance is not None:
InfoBarInstance.servicelist.servicelist.setCurrent(ref)
if self.pnav.playService(playref):
print "Failed to start", playref
self.currentlyPlayingServiceReference = None
self.currentlyPlayingServiceOrGroup = None
return 0
		elif oldref and InfoBarInstance is not None:
InfoBarInstance.servicelist.servicelist.setCurrent(oldref)
return 1
def getCurrentlyPlayingServiceReference(self):
return self.currentlyPlayingServiceReference
def getCurrentlyPlayingServiceOrGroup(self):
return self.currentlyPlayingServiceOrGroup
def isMovieplayerActive(self):
MoviePlayerInstance = MoviePlayer.instance
		if MoviePlayerInstance is not None and self.currentlyPlayingServiceReference and self.currentlyPlayingServiceReference.toString().find('0:0:0:0:0:0:0:0:0') != -1:
from Screens.InfoBarGenerics import setResumePoint
setResumePoint(MoviePlayer.instance.session)
MoviePlayerInstance.close()
def recordService(self, ref, simulate=False):
service = None
if not simulate: print "recording service: %s" % (str(ref))
if isinstance(ref, ServiceReference.ServiceReference):
ref = ref.ref
if ref:
if ref.flags & eServiceReference.isGroup:
ref = getBestPlayableServiceReference(ref, eServiceReference(), simulate)
service = ref and self.pnav and self.pnav.recordService(ref, simulate)
if service is None:
print "record returned non-zero"
return service
def stopRecordService(self, service):
ret = self.pnav and self.pnav.stopRecordService(service)
return ret
def getRecordings(self, simulate=False):
return self.pnav and self.pnav.getRecordings(simulate)
def getCurrentService(self):
if not self.currentlyPlayingService:
self.currentlyPlayingService = self.pnav and self.pnav.getCurrentService()
return self.currentlyPlayingService
def stopService(self):
if self.pnav:
self.pnav.stopService()
self.currentlyPlayingServiceReference = None
self.currentlyPlayingServiceOrGroup = None
if path.exists("/proc/stb/lcd/symbol_signal"):
f = open("/proc/stb/lcd/symbol_signal", "w")
f.write("0")
f.close()
def pause(self, p):
return self.pnav and self.pnav.pause(p)
def shutdown(self):
self.RecordTimer.shutdown()
self.PowerTimer.shutdown()
self.ServiceHandler = None
self.pnav = None
def stopUserServices(self):
self.stopService()
| gpl-2.0 | 6,904,893,535,751,126,000 | 34.033981 | 159 | 0.749758 | false |
john5223/bottle-auth | auth/controllers/user_controller.py | 1 | 2915 |
import logging
logger = logging.getLogger(__name__)
from bottle import route, get, post, delete
from bottle import request, response
def error(code, message):
response.status = code
message['status'] = code
return message
get_user_table = lambda db: db.get_table('users', primary_id='userid', primary_type='String(100)')
@get('/users/<userid>')
def get_user(db, userid):
user_table = get_user_table(db)
user = user_table.find_one(userid=userid)
if not user:
return error(404, {'error': 'Not a valid user'})
else:
group_table = db.get_table('groups')
groups = group_table.distinct('name', userid=userid)
        user['groups'] = sorted(x['name'] for x in groups)
return user
@delete('/users/<userid>')
def delete_user(db, userid):
user_table = get_user_table(db)
user = user_table.find_one(userid=userid)
if not user:
return error(404, {'error': 'userid not found'})
else:
user_table.delete(userid=userid)
return {'status': 200}
@route('/users/<userid>', method=['POST', 'PUT'])
def create_update_user(db, userid):
data = request.json
data_keys = data.keys()
required_fields = ['first_name', 'last_name', 'userid', 'groups']
missing_fields = [x for x in required_fields if x not in data_keys]
extra_fields = [x for x in data_keys if x not in required_fields]
if missing_fields:
return error(400, {'error': 'Missing fields (%s)' % (','.join(missing_fields)) })
if extra_fields:
return error(400, {'error': 'Extra fields (%s)' % (','.join(extra_fields)) })
user_table = get_user_table(db)
existing_user = user_table.find_one(userid=data['userid'])
if request.method == 'POST' and existing_user:
return error(409, {'error': 'User already exists'})
if request.method == 'PUT' and not existing_user:
return error(404, {'error': 'User does not exist'})
#update this user's group membership
userid = data.get('userid')
groups = data.pop('groups')
groups_table = db.get_table('groups')
if request.method == 'POST':
user_insert = user_table.insert(data)
elif request.method == 'PUT':
user_update = user_table.update(data, ['userid'])
for name in groups:
groups_table.upsert(dict(name=name, userid=userid), ['name','userid'])
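    # on PUT, prune memberships that were not in the submitted list; with
    # groups=['a', 'b'] the generated statement looks like:
    #   DELETE FROM groups WHERE userid=:userid AND name NOT IN(:group_name1,:group_name2)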
    if request.method == 'PUT':
        #get rid of any old groups for this user
        if groups:
            params = {}
            for counter, group in enumerate(groups, 1):
                params["group_name" + str(counter)] = group
            where_clause = 'name NOT IN(:' + ",:".join(params.keys()) + ')'  # b/c sqlalchemy can't use a list!?
            params['userid'] = userid
            q = '''DELETE FROM groups WHERE userid=:userid AND ''' + where_clause
        else:
            # an empty group list means the user should end up in no groups at all
            params = {'userid': userid}
            q = '''DELETE FROM groups WHERE userid=:userid'''
        db.executable.execute(q, params)
return {'status': 200, 'user': get_user(db, userid)}
| gpl-2.0 | -8,540,600,454,075,682,000 | 34.120482 | 107 | 0.616467 | false |
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247971765/PyKDE4/kdeui/KPixmapCache.py | 1 | 2878 | # encoding: utf-8
# module PyKDE4.kdeui
# from /usr/lib/python3/dist-packages/PyKDE4/kdeui.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyKDE4.kdecore as __PyKDE4_kdecore
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
import PyQt4.QtSvg as __PyQt4_QtSvg
class KPixmapCache(): # skipped bases: <class 'sip.wrapper'>
# no doc
def cacheLimit(self, *args, **kwargs): # real signature unknown
pass
def deleteCache(self, *args, **kwargs): # real signature unknown
pass
def discard(self, *args, **kwargs): # real signature unknown
pass
def ensureInited(self, *args, **kwargs): # real signature unknown
pass
def find(self, *args, **kwargs): # real signature unknown
pass
def insert(self, *args, **kwargs): # real signature unknown
pass
def isEnabled(self, *args, **kwargs): # real signature unknown
pass
def isValid(self, *args, **kwargs): # real signature unknown
pass
def loadCustomData(self, *args, **kwargs): # real signature unknown
pass
def loadCustomIndexHeader(self, *args, **kwargs): # real signature unknown
pass
def loadFromFile(self, *args, **kwargs): # real signature unknown
pass
def loadFromSvg(self, *args, **kwargs): # real signature unknown
pass
def recreateCacheFiles(self, *args, **kwargs): # real signature unknown
pass
def removeEntries(self, *args, **kwargs): # real signature unknown
pass
def removeEntryStrategy(self, *args, **kwargs): # real signature unknown
pass
def setCacheLimit(self, *args, **kwargs): # real signature unknown
pass
def setRemoveEntryStrategy(self, *args, **kwargs): # real signature unknown
pass
def setTimestamp(self, *args, **kwargs): # real signature unknown
pass
def setUseQPixmapCache(self, *args, **kwargs): # real signature unknown
pass
def setValid(self, *args, **kwargs): # real signature unknown
pass
def size(self, *args, **kwargs): # real signature unknown
pass
def timestamp(self, *args, **kwargs): # real signature unknown
pass
def useQPixmapCache(self, *args, **kwargs): # real signature unknown
pass
def writeCustomData(self, *args, **kwargs): # real signature unknown
pass
def writeCustomIndexHeader(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
RemoveLeastRecentlyUsed = 2
RemoveOldest = 0
RemoveSeldomUsed = 1
RemoveStrategy = None # (!) real value is ''
| gpl-2.0 | -1,374,738,902,669,333,000 | 26.941748 | 101 | 0.646977 | false |
EclipseXuLu/DataHouse | DataHouse/crawler/university_spider.py | 1 | 3941 | import requests
from bs4 import BeautifulSoup
import pandas as pd
university_list = []
class University():
def __init__(self, name='', is_985=False, is_211=False, has_institute=False, location='', orgnization='',
education_level='', education_type='', university_type=''):
self.name = name
self.is_985 = is_985
self.is_211 = is_211
self.has_institute = has_institute
self.location = location
self.orgnization = orgnization
self.education_level = education_level
self.education_type = education_type
self.university_type = university_type
def __str__(self):
return "{ " + str(self.name) + " ;" + str(self.is_985) + " ;" + str(self.is_211) + " ;" + str(
self.has_institute) + " ;" + self.location + " ;" + self.orgnization + " ;" + self.education_level + " ;" \
+ self.education_type + " ;" + self.university_type + " }"
def crawl(page_url):
headers = {
'Host': 'gaokao.chsi.com.cn',
'Upgrade-Insecure-Requests': '1',
'Referer': 'http://gaokao.chsi.com.cn/sch/search--ss-on,searchType-1,option-qg,start-0.dhtml',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
'Chrome/59.0.3071.115 Safari/537.36'
}
response = requests.get(page_url, timeout=20, headers=headers)
if response.status_code == 200:
html_raw = response.text
soup = BeautifulSoup(html_raw, 'html5lib')
for tr in soup.find_all(bgcolor="#E1E1E1")[0].find_all('tr', attrs={'bgcolor': '#FFFFFF'}):
try:
                name = tr.td.a.text.strip()  # university name
                detail_url = 'http://gaokao.chsi.com.cn' + tr.td.a['href']  # detail page URL
                is_985 = True if tr.td.find(class_='a211985 span985') is not None else False  # 985
                is_211 = True if tr.td.find(class_='a211985 span211') is not None else False  # 211
                has_institute = True if tr.td.find(class_='a211985 spanyan') is not None else False  # has graduate school
                location = tr.find_all('td')[1].get_text().strip()  # location
                orgnization = tr.find_all('td')[2].get_text().strip()  # governing body
                education_level = tr.find_all('td')[3].get_text().strip()  # education level
                education_type = tr.find_all('td')[4].get_text().strip()  # education type
                university_type = tr.find_all('td')[5].get_text().strip()  # university type
university = University(name, is_985, is_211, has_institute, location, orgnization, education_level,
education_type, university_type)
print(university)
university_list.append([name, is_985, is_211, has_institute, location, orgnization, education_level,
education_type, university_type])
except:
pass
else:
print('Error!!')
def output(some_list, filepath):
col = [
u'院校名称',
u'985',
u'211',
u'研究生院',
u'所在地',
u'院校隶属',
u'学历层次',
u'办学类型',
u'院校类型']
df = pd.DataFrame(some_list, columns=col)
df.to_excel(filepath, '大学', index=False)
if __name__ == '__main__':
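    # results are offset-paginated 20 rows per page; 2660 is presumably the
    # total row count at the time this script was written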
page_urllist = ['http://gaokao.chsi.com.cn/sch/search--ss-on,searchType-1,option-qg,start-%d.dhtml'
% _ for _ in range(0, 2660, 20)]
# crawl('http://gaokao.chsi.com.cn/sch/search--ss-on,searchType-1,option-qg,start-0.dhtml')
for page_url in page_urllist:
crawl(page_url)
output(university_list, './大学.xlsx')
| mit | -7,579,131,018,589,852,000 | 40.423913 | 119 | 0.558908 | false |