| hexsha | size | ext | lang | max_stars_repo_path | max_stars_repo_name | max_stars_repo_head_hexsha | max_stars_repo_licenses | max_stars_count | max_stars_repo_stars_event_min_datetime | max_stars_repo_stars_event_max_datetime | max_issues_repo_path | max_issues_repo_name | max_issues_repo_head_hexsha | max_issues_repo_licenses | max_issues_count | max_issues_repo_issues_event_min_datetime | max_issues_repo_issues_event_max_datetime | max_forks_repo_path | max_forks_repo_name | max_forks_repo_head_hexsha | max_forks_repo_licenses | max_forks_count | max_forks_repo_forks_event_min_datetime | max_forks_repo_forks_event_max_datetime | content | avg_line_length | max_line_length | alphanum_fraction |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| stringlengths 40..40 | int64 5..2.06M | stringclasses 11 values | stringclasses 1 value | stringlengths 3..251 | stringlengths 4..130 | stringlengths 40..78 | sequencelengths 1..10 | int64 1..191k ⌀ | stringlengths 24..24 ⌀ | stringlengths 24..24 ⌀ | stringlengths 3..251 | stringlengths 4..130 | stringlengths 40..78 | sequencelengths 1..10 | int64 1..116k ⌀ | stringlengths 24..24 ⌀ | stringlengths 24..24 ⌀ | stringlengths 3..251 | stringlengths 4..130 | stringlengths 40..78 | sequencelengths 1..10 | int64 1..105k ⌀ | stringlengths 24..24 ⌀ | stringlengths 24..24 ⌀ | stringlengths 1..1.05M | float64 1..1.02M | int64 3..1.04M | float64 0..1 |
71ecbe30760d8ff6d5c97c8f6cb9ae037d64dc1b | 39,956 | py | Python | sc2/bot_ai.py | Lexa307/PhotonDefender | a08dc652e5c64e3ccb33b7cfa206846dca0575bd | ["MIT"] | 2 | 2019-07-17T13:00:32.000Z | 2019-07-17T13:09:30.000Z | sc2/bot_ai.py | Lexa307/PhotonDefender | a08dc652e5c64e3ccb33b7cfa206846dca0575bd | ["MIT"] | null | null | null | sc2/bot_ai.py | Lexa307/PhotonDefender | a08dc652e5c64e3ccb33b7cfa206846dca0575bd | ["MIT"] | null | null | null |
import itertools
import logging
import math
import random
from collections import Counter
from typing import Any, Dict, List, Optional, Set, Tuple, Union # mypy type checking
from .cache import property_cache_forever, property_cache_once_per_frame
from .data import ActionResult, Alert, Race, Result, Target, race_gas, race_townhalls, race_worker
from .data import ActionResult, Attribute, Race, race_worker, race_townhalls, race_gas, Target, Result
from .game_data import AbilityData, GameData
# imports for mypy and pycharm autocomplete
from .game_state import GameState
from .game_data import GameData, AbilityData
from .ids.ability_id import AbilityId
from .ids.unit_typeid import UnitTypeId
from .ids.upgrade_id import UpgradeId
from .pixel_map import PixelMap
from .position import Point2, Point3
from .unit import Unit
from .units import Units
logger = logging.getLogger(__name__)
def _correct_zerg_supply(self):
""" The client incorrectly rounds zerg supply down instead of up (see
https://github.com/Blizzard/s2client-proto/issues/123), so self.supply_used
and friends return the wrong value when there are an odd number of zerglings
and banelings. This function corrects the bad values. """
# TODO: remove when Blizzard/sc2client-proto#123 gets fixed.
half_supply_units = {
UnitTypeId.ZERGLING,
UnitTypeId.ZERGLINGBURROWED,
UnitTypeId.BANELING,
UnitTypeId.BANELINGBURROWED,
UnitTypeId.BANELINGCOCOON,
}
correction = self.units(half_supply_units).amount % 2
self.supply_used += correction
self.supply_army += correction
self.supply_left -= correction
async def get_available_abilities(
self, units: Union[List[Unit], Units], ignore_resource_requirements=False
) -> List[List[AbilityId]]:
""" Returns available abilities of one or more units. Right know only checks cooldown, energy cost, and whether the ability has been researched.
Example usage:
units_abilities = await self.get_available_abilities(self.units)
or
units_abilities = await self.get_available_abilities([self.units.random]) """
return await self._client.query_available_abilities(units, ignore_resource_requirements)
def can_feed(self, unit_type: UnitTypeId) -> bool:
""" Checks if you have enough free supply to build the unit """
required = self._game_data.units[unit_type.value]._proto.food_required
return required == 0 or self.supply_left >= required
def can_afford(
self, item_id: Union[UnitTypeId, UpgradeId, AbilityId], check_supply_cost: bool = True
) -> "CanAffordWrapper":
"""Tests if the player has enough resources to build a unit or cast an ability."""
enough_supply = True
if isinstance(item_id, UnitTypeId):
unit = self._game_data.units[item_id.value]
cost = self._game_data.calculate_ability_cost(unit.creation_ability)
if check_supply_cost:
enough_supply = self.can_feed(item_id)
elif isinstance(item_id, UpgradeId):
cost = self._game_data.upgrades[item_id.value].cost
else:
cost = self._game_data.calculate_ability_cost(item_id)
return CanAffordWrapper(cost.minerals <= self.minerals, cost.vespene <= self.vespene, enough_supply)
def select_build_worker(self, pos: Union[Unit, Point2, Point3], force: bool = False) -> Optional[Unit]:
"""Select a worker to build a building with."""
workers = (
self.workers.filter(lambda w: (w.is_gathering or w.is_idle) and w.distance_to(pos) < 20) or self.workers
)
if workers:
for worker in workers.sorted_by_distance_to(pos).prefer_idle:
if (
not worker.orders
or len(worker.orders) == 1
and worker.orders[0].ability.id in {AbilityId.MOVE, AbilityId.HARVEST_GATHER}
):
return worker
return workers.random if force else None
def already_pending_upgrade(self, upgrade_type: UpgradeId) -> Union[int, float]:
""" Check if an upgrade is being researched
Return values:
0: not started
0 < x < 1: researching
1: finished
"""
assert isinstance(upgrade_type, UpgradeId)
if upgrade_type in self.state.upgrades:
return 1
level = None
if "LEVEL" in upgrade_type.name:
level = upgrade_type.name[-1]
creationAbilityID = self._game_data.upgrades[upgrade_type.value].research_ability.id
for structure in self.units.filter(lambda unit: unit.is_structure and unit.is_ready):
for order in structure.orders:
if order.ability.id is creationAbilityID:
if level and order.ability.button_name[-1] != level:
return 0
return order.progress
return 0
def already_pending(self, unit_type: Union[UpgradeId, UnitTypeId], all_units: bool = True) -> int:
"""
        Returns the number of buildings or units of the given type already in
        progress, including workers en route to build them. This also includes
        queued orders for workers and the build queues of buildings.
If all_units==True, then build queues of other units (such as Carriers
(Interceptors) or Oracles (Stasis Ward)) are also included.
"""
# TODO / FIXME: SCV building a structure might be counted as two units
if isinstance(unit_type, UpgradeId):
return self.already_pending_upgrade(unit_type)
ability = self._game_data.units[unit_type.value].creation_ability
amount = len(self.units(unit_type).not_ready)
if all_units:
amount += sum([o.ability == ability for u in self.units for o in u.orders])
else:
amount += sum([o.ability == ability for w in self.workers for o in w.orders])
amount += sum([egg.orders[0].ability == ability for egg in self.units(UnitTypeId.EGG)])
return amount
async def build(self, building: UnitTypeId, near: Union[Point2, Point3], max_distance: int=20, unit: Optional[Unit]=None, random_alternative: bool=True, placement_step: int=2):
"""Build a building."""
if isinstance(near, Unit):
near = near.position.to2
elif near is not None:
near = near.to2
else:
return
p = await self.find_placement(building, near.rounded, max_distance, random_alternative, placement_step)
if p is None:
return ActionResult.CantFindPlacementLocation
unit = unit or self.select_build_worker(p)
if unit is None or not self.can_afford(building):
return ActionResult.Error
return await self.do(unit.build(building, p))
def prevent_double_actions(self, action):
# always add actions if queued
if action.queue:
return True
if action.unit.orders:
# action: UnitCommand
# current_action: UnitOrder
current_action = action.unit.orders[0]
if current_action.ability.id != action.ability:
# different action, return true
return True
try:
if current_action.target == action.target.tag:
# same action, remove action if same target unit
return False
except AttributeError:
pass
try:
if action.target.x == current_action.target.x and action.target.y == current_action.target.y:
# same action, remove action if same target position
return False
except AttributeError:
pass
return True
return True
    # For the functions below, make sure you are inside the boundaries of the map size.
def get_terrain_height(self, pos: Union[Point2, Point3, Unit]) -> int:
""" Returns terrain height at a position.
Caution: terrain height is different from a unit's z-coordinate.
"""
assert isinstance(pos, (Point2, Point3, Unit)), f"pos is not of type Point2, Point3 or Unit"
pos = pos.position.to2.rounded
return self._game_info.terrain_height[pos] # returns int
    def get_terrain_z_height(self, pos: Union[Point2, Point3, Unit]) -> float:
""" Returns terrain z-height at a position. """
assert isinstance(pos, (Point2, Point3, Unit)), f"pos is not of type Point2, Point3 or Unit"
pos = pos.position.to2.rounded
return -16 + 32 * self._game_info.terrain_height[pos] / 255
def in_placement_grid(self, pos: Union[Point2, Point3, Unit]) -> bool:
""" Returns True if you can place something at a position.
Remember, buildings usually use 2x2, 3x3 or 5x5 of these grid points.
Caution: some x and y offset might be required, see ramp code:
https://github.com/Dentosal/python-sc2/blob/master/sc2/game_info.py#L17-L18 """
assert isinstance(pos, (Point2, Point3, Unit))
pos = pos.position.to2.rounded
return self._game_info.placement_grid[pos] == 1
def in_pathing_grid(self, pos: Union[Point2, Point3, Unit]) -> bool:
""" Returns True if a unit can pass through a grid point. """
assert isinstance(pos, (Point2, Point3, Unit))
pos = pos.position.to2.rounded
return self._game_info.pathing_grid[pos] == 1
def is_visible(self, pos: Union[Point2, Point3, Unit]) -> bool:
""" Returns True if you have vision on a grid point. """
# more info: https://github.com/Blizzard/s2client-proto/blob/9906df71d6909511907d8419b33acc1a3bd51ec0/s2clientprotocol/spatial.proto#L19
assert isinstance(pos, (Point2, Point3, Unit))
pos = pos.position.to2.rounded
return self.state.visibility[pos] == 2
def has_creep(self, pos: Union[Point2, Point3, Unit]) -> bool:
""" Returns True if there is creep on the grid point. """
assert isinstance(pos, (Point2, Point3, Unit))
pos = pos.position.to2.rounded
return self.state.creep[pos] == 1
def _prepare_start(self, client, player_id, game_info, game_data):
"""Ran until game start to set game and player data."""
self._client: "Client" = client
self._game_info: "GameInfo" = game_info
self._game_data: GameData = game_data
self.player_id: int = player_id
self.race: Race = Race(self._game_info.player_races[self.player_id])
self._units_previous_map: dict = dict()
self._previous_upgrades: Set[UpgradeId] = set()
self.units: Units = Units([])
def _prepare_first_step(self):
"""First step extra preparations. Must not be called before _prepare_step."""
if self.townhalls:
self._game_info.player_start_location = self.townhalls.first.position
self._game_info.map_ramps, self._game_info.vision_blockers = self._game_info._find_ramps_and_vision_blockers()
def _prepare_step(self, state, proto_game_info):
        """Set attributes from the new state before on_step."""
self.state: GameState = state # See game_state.py
# update pathing grid
self._game_info.pathing_grid: PixelMap = PixelMap(
proto_game_info.game_info.start_raw.pathing_grid, in_bits=True, mirrored=False
)
# Required for events
self._units_previous_map: Dict = {unit.tag: unit for unit in self.units}
self.units: Units = state.own_units
self.workers: Units = self.units(race_worker[self.race])
self.townhalls: Units = self.units(race_townhalls[self.race])
self.geysers: Units = self.units(race_gas[self.race])
self.minerals: int = state.common.minerals
self.vespene: int = state.common.vespene
self.supply_army: int = state.common.food_army
self.supply_workers: int = state.common.food_workers # Doesn't include workers in production
self.supply_cap: int = state.common.food_cap
self.supply_used: int = state.common.food_used
self.supply_left: int = self.supply_cap - self.supply_used
if self.race == Race.Zerg:
self.larva_count: int = state.common.larva_count
# Workaround Zerg supply rounding bug
self._correct_zerg_supply()
elif self.race == Race.Protoss:
self.warp_gate_count: int = state.common.warp_gate_count
self.idle_worker_count: int = state.common.idle_worker_count
self.army_count: int = state.common.army_count
# reset cached values
self.cached_known_enemy_structures = None
self.cached_known_enemy_units = None
def on_start(self):
""" Allows initializing the bot when the game data is available. """
def on_end(self, game_result: Result):
""" Triggered at the end of a game. """
class CanAffordWrapper:
def __init__(self, can_afford_minerals, can_afford_vespene, have_enough_supply):
self.can_afford_minerals = can_afford_minerals
self.can_afford_vespene = can_afford_vespene
self.have_enough_supply = have_enough_supply
| 44.150276 | 180 | 0.633922 |
71ecddbb1bd02f445d97d4e77a4a4128c68a4abe | 4,260 | py | Python | src/python/pants/jvm/resolve/lockfile_metadata.py | xyzst/pants | d6a357fe67ee7e8e1aefeae625e107f5609f1717 | ["Apache-2.0"] | null | null | null | src/python/pants/jvm/resolve/lockfile_metadata.py | xyzst/pants | d6a357fe67ee7e8e1aefeae625e107f5609f1717 | ["Apache-2.0"] | 6 | 2022-01-25T15:49:26.000Z | 2022-02-09T11:21:13.000Z | src/python/pants/jvm/resolve/lockfile_metadata.py | thejcannon/pants | 7c24f42cb78cc462b63698cef736eda7a85c40e0 | ["Apache-2.0"] | null | null | null |
# Copyright 2022 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import Any, Iterable, cast
from pants.core.util_rules.lockfile_metadata import (
LockfileMetadata,
LockfileMetadataValidation,
LockfileScope,
_get_metadata,
lockfile_metadata_registrar,
)
from pants.jvm.resolve.common import ArtifactRequirement
from pants.util.ordered_set import FrozenOrderedSet
_jvm_lockfile_metadata = lockfile_metadata_registrar(LockfileScope.JVM)
| 33.809524 | 100 | 0.711268 |
71ece1b40046a77ed95f80492a330f22d42912ee | 1,528 | py | Python | generator/generator.py | GregorKikelj/opendbc | a20ed24ea2593e5d019adf538dc0cecfc7ef8709 | ["MIT"] | 1,059 | 2017-05-31T06:33:27.000Z | 2022-03-31T23:02:29.000Z | generator/generator.py | DIMO-Network/opendbc | 9a1fbe581846f9d0191f142f498ef3f1c35826ea | ["MIT"] | 248 | 2017-07-14T01:45:40.000Z | 2022-03-21T17:55:26.000Z | generator/generator.py | DIMO-Network/opendbc | 9a1fbe581846f9d0191f142f498ef3f1c35826ea | ["MIT"] | 940 | 2017-06-02T16:40:42.000Z | 2022-03-29T16:49:58.000Z |
#!/usr/bin/env python3
import os
import re
cur_path = os.path.dirname(os.path.realpath(__file__))
opendbc_root = os.path.join(cur_path, '../')
include_pattern = re.compile(r'CM_ "IMPORT (.*?)";')
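# The `create_all` function called below was lost in extraction. A minimal
# sketch, inferred from the pieces that survived: walk the generator directory,
# inline any `CM_ "IMPORT <file>";` directives, and write the merged .dbc files
# into the opendbc root (directory-layout details are assumptions).
def read_dbc(founder_path, dbc_file_name):
    with open(os.path.join(founder_path, dbc_file_name)) as f:
        return f.read()

def create_all(output_path):
    for dir_name, _, filenames in os.walk(cur_path):
        for filename in filenames:
            if not filename.endswith('.dbc'):
                continue
            content = read_dbc(dir_name, filename)
            # inline every IMPORT directive found in the file
            for import_name in include_pattern.findall(content):
                content = content.replace('CM_ "IMPORT %s";' % import_name,
                                          read_dbc(dir_name, import_name))
            with open(os.path.join(output_path, filename), 'w') as out:
                out.write(content)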
if __name__ == "__main__":
create_all(opendbc_root)
| 28.296296 | 90 | 0.716623 |
71eed31624e7569b476bc79838cb16831bf90648 | 1,105 | py | Python | customer/admin.py | matheusdemicheli/dogtel | 4eed44c8214fe814c26a6df0125af9b065c81c1c | ["MIT"] | null | null | null | customer/admin.py | matheusdemicheli/dogtel | 4eed44c8214fe814c26a6df0125af9b065c81c1c | ["MIT"] | null | null | null | customer/admin.py | matheusdemicheli/dogtel | 4eed44c8214fe814c26a6df0125af9b065c81c1c | ["MIT"] | null | null | null |
from django.contrib import admin
from django.utils.safestring import mark_safe
from customer.models import Owner, Dog, Breed, SubBreed
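# The ModelAdmin definitions were lost in extraction; minimal hypothetical
# stand-ins so the register() calls below resolve (the originals likely used
# mark_safe to render richer columns, which is not reconstructed here).
class DogAdmin(admin.ModelAdmin):
    pass

class OwnerAdmin(admin.ModelAdmin):
    pass

class BreedAdmin(admin.ModelAdmin):
    pass

class SubBreedAdmin(admin.ModelAdmin):
    pass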
admin.site.register(Dog, DogAdmin)
admin.site.register(Owner, OwnerAdmin)
admin.site.register(Breed, BreedAdmin)
admin.site.register(SubBreed, SubBreedAdmin) | 24.021739 | 71 | 0.650679 |
71eee010313a8cf81a43634e4dc40236254335a2 | 464 | py | Python | kaneda/tasks/rq.py | APSL/kaneda | 739db48588d2237dd7710b16f23921d489182868 | ["MIT"] | 59 | 2016-05-03T20:17:59.000Z | 2019-09-05T18:35:21.000Z | kaneda/tasks/rq.py | APSL/kaneda | 739db48588d2237dd7710b16f23921d489182868 | ["MIT"] | 2 | 2018-07-25T21:51:18.000Z | 2019-07-31T22:51:09.000Z | kaneda/tasks/rq.py | APSL/kaneda | 739db48588d2237dd7710b16f23921d489182868 | ["MIT"] | 11 | 2016-05-20T19:07:46.000Z | 2021-09-10T17:43:58.000Z |
from __future__ import absolute_import
from redis import Redis
from rq.decorators import job
from kaneda.utils import get_backend
backend = get_backend()
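# The @job-decorated task body was stripped in extraction. A minimal sketch of
# an RQ task that forwards a metric report to the configured backend (the queue
# name and argument names are assumptions):
@job('kaneda', connection=Redis())
def report(name, metric, value, tags, id_):
    backend.report(name, metric, value, tags, id_)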
| 23.2 | 75 | 0.724138 |
71ef13879f7d9412c115fe5712bcdcce5a10b758 | 4,067 | py | Python | src/ripper.py | jg-rivera/cert-ripper | 2bab5e02cd2da8e92a1c308640917b6f5ee729cb | ["MIT"] | null | null | null | src/ripper.py | jg-rivera/cert-ripper | 2bab5e02cd2da8e92a1c308640917b6f5ee729cb | ["MIT"] | null | null | null | src/ripper.py | jg-rivera/cert-ripper | 2bab5e02cd2da8e92a1c308640917b6f5ee729cb | ["MIT"] | null | null | null |
from dotenv import load_dotenv
from PyPDF2 import PdfFileReader, PdfFileWriter
import os
import json
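# The CertRipper class body was stripped in extraction. A minimal sketch
# consistent with the constructor arguments used below: split each page of the
# master PDF into its own certificate file (the naming scheme and the handling
# of the JSON points file are assumptions).
class CertRipper:
    def __init__(self, start_page_index=0, master_pdf_path=None,
                 json_points_path=None, ripped_certs_path=None,
                 ripped_cert_file_name=None):
        self.start_page_index = int(start_page_index or 0)
        self.master_pdf_path = master_pdf_path
        self.json_points_path = json_points_path
        self.ripped_certs_path = ripped_certs_path
        self.ripped_cert_file_name = ripped_cert_file_name

    def process(self):
        reader = PdfFileReader(self.master_pdf_path)
        for index in range(self.start_page_index, reader.getNumPages()):
            # write each page out as a standalone single-page PDF
            writer = PdfFileWriter()
            writer.addPage(reader.getPage(index))
            target = os.path.join(self.ripped_certs_path,
                                  f"{self.ripped_cert_file_name}_{index}.pdf")
            with open(target, "wb") as out_file:
                writer.write(out_file)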
if __name__ == "__main__":
load_dotenv()
ripper = CertRipper(
start_page_index=os.getenv("START_PAGE_INDEX"),
master_pdf_path=os.getenv("MASTER_PDF_PATH"),
json_points_path=os.getenv("JSON_POINTS_PATH"),
ripped_certs_path=os.getenv("RIPPED_CERTS_PATH"),
ripped_cert_file_name=os.getenv("RIPPED_CERT_FILE_NAME"),
)
ripper.process()
| 35.060345 | 152 | 0.614212 |
71ef7b545410fc717e2f36b835e68675481e1947 | 2,192 | py | Python | venv/Lib/site-packages/tests/test_111_FieldNumAddCol.py | shehzadulislam/Assignment4 | a9cced70be6ae5d2685027d68032d5849f638301 | ["Apache-2.0"] | null | null | null | venv/Lib/site-packages/tests/test_111_FieldNumAddCol.py | shehzadulislam/Assignment4 | a9cced70be6ae5d2685027d68032d5849f638301 | ["Apache-2.0"] | null | null | null | venv/Lib/site-packages/tests/test_111_FieldNumAddCol.py | shehzadulislam/Assignment4 | a9cced70be6ae5d2685027d68032d5849f638301 | ["Apache-2.0"] | null | null | null |
#
# Licensed Materials - Property of IBM
#
# (c) Copyright IBM Corp. 2007-2008
#
import unittest, sys
import ibm_db
import config
from testfunctions import IbmDbTestFunctions
#__END__
#__LUW_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
#__ZOS_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
#__SYSTEMI_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
#__IDS_EXPECTED__
#False
#int(0)
#int(1)
#False
#False
#False
#int(1)
#False
| 21.92 | 123 | 0.629562 |
71f490ebabe7d689d377fda1a39b6fe3eaf67bee
from cms.toolbar_pool import toolbar_pool
from cms.toolbar_base import CMSToolbar
from cms.utils import get_cms_setting
from cms.utils.permissions import has_page_change_permission
from django.core.urlresolvers import reverse, NoReverseMatch
from django.utils.translation import ugettext_lazy as _
from .models import PageBannerExtension
_banner_change_url = 'admin:djangocms_pagebanner_pagebannerextension_change'
_banner_add_url = 'admin:djangocms_pagebanner_pagebannerextension_add'
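# The toolbar class body was stripped in extraction. A minimal sketch of the
# standard django-cms extension-toolbar pattern that the imports and URL names
# above point at (menu label and edit-mode handling are assumptions):
@toolbar_pool.register
class PageBannerExtensionToolbar(CMSToolbar):
    def populate(self):
        page = get_page_draft(self.request.current_page)
        if not page or not has_page_change_permission(self.request):
            return
        try:
            extension = PageBannerExtension.objects.get(extended_object_id=page.id)
            url = reverse(_banner_change_url, args=(extension.pk,))
        except PageBannerExtension.DoesNotExist:
            url = reverse(_banner_add_url) + '?extended_object=%s' % page.pk
        except NoReverseMatch:
            return
        menu = self.toolbar.get_or_create_menu('page')
        menu.add_modal_item(_('Page banner'), url=url)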
| 41.789474 | 76 | 0.625105 |
71f0694c1e4dfc112a543268e37e44700697d2ed | 7,346 | py | Python | tasks/lm/models/lm.py | etri-edgeai/nn-comp-discblock | 6e00a019c223508797ca91a7d5ffec7917b12c6d | ["Apache-2.0"] | 10 | 2021-11-19T06:24:51.000Z | 2022-02-09T15:44:00.000Z | tasks/lm/models/lm.py | etri-edgeai/nn-comp-discblock | 6e00a019c223508797ca91a7d5ffec7917b12c6d | ["Apache-2.0"] | 9 | 2021-10-01T11:06:27.000Z | 2021-12-23T02:10:52.000Z | tasks/lm/models/lm.py | etri-edgeai/nn-comp-discblock | 6e00a019c223508797ca91a7d5ffec7917b12c6d | ["Apache-2.0"] | 2 | 2021-09-14T04:08:36.000Z | 2021-11-19T06:24:54.000Z |
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
# Temporarily leave PositionalEncoding module here. Will be moved somewhere else.
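# The PositionalEncoding class referred to above was lost in extraction; a
# sketch of the standard PyTorch sinusoidal implementation that this comment
# and the imports match (max_len is an assumption):
class PositionalEncoding(nn.Module):
    def __init__(self, d_model, dropout=0.1, max_len=5000):
        super(PositionalEncoding, self).__init__()
        self.dropout = nn.Dropout(p=dropout)
        # precompute the sin/cos position table once and register it as a buffer
        pe = torch.zeros(max_len, d_model)
        position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
        div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
        pe[:, 0::2] = torch.sin(position * div_term)
        pe[:, 1::2] = torch.cos(position * div_term)
        pe = pe.unsqueeze(0).transpose(0, 1)
        self.register_buffer('pe', pe)

    def forward(self, x):
        # add the positional table to the embeddings, then apply dropout
        x = x + self.pe[:x.size(0), :]
        return self.dropout(x)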
| 40.585635 | 122 | 0.611081 |
71f079a62757b3209f276ed71f4e310438ef3cc4 | 248 | py | Python | fibo.py | Baibhabswain/pythonPrograms | 38380174f22e73b766b98754b00cd78a56b4bf59 | ["MIT"] | 1 | 2019-03-29T04:32:09.000Z | 2019-03-29T04:32:09.000Z | fibo.py | Baibhabswain/pythonPrograms | 38380174f22e73b766b98754b00cd78a56b4bf59 | ["MIT"] | null | null | null | fibo.py | Baibhabswain/pythonPrograms | 38380174f22e73b766b98754b00cd78a56b4bf59 | ["MIT"] | null | null | null |
main() | 17.714286 | 42 | 0.608871 |
71f16750be3a7d0922d774531a82147a9b72ab6b | 44 | py | Python | scrapper/playstation/__init__.py | gghf-service/gghf-api | 9740700d1dd160e90fc949f9c3e652c3483a49aa | ["MIT"] | 1 | 2018-12-10T14:37:11.000Z | 2018-12-10T14:37:11.000Z | scrapper/playstation/__init__.py | tapkain/gghf.api | 9740700d1dd160e90fc949f9c3e652c3483a49aa | ["MIT"] | null | null | null | scrapper/playstation/__init__.py | tapkain/gghf.api | 9740700d1dd160e90fc949f9c3e652c3483a49aa | ["MIT"] | null | null | null |
from scrapper.playstation.spider import main | 44 | 44 | 0.886364 |
71f1b81a3d9a5c16cb1f510f6c706a2a817d94b4 | 2,737 | py | Python | plugins/number.py | motakine/ILAS_slackbot | ddfb34db1cddcb459fef34cfc04c498d6f85d135 | ["MIT"] | null | null | null | plugins/number.py | motakine/ILAS_slackbot | ddfb34db1cddcb459fef34cfc04c498d6f85d135 | ["MIT"] | null | null | null | plugins/number.py | motakine/ILAS_slackbot | ddfb34db1cddcb459fef34cfc04c498d6f85d135 | ["MIT"] | null | null | null |
import slackbot.bot
import random
answer = random.randint(1, 50)
max = 50
def number(num):
    '''Judge a guess in the number-guessing game.
    Args:
        num (int): the number that was guessed.
    Returns:
        str: num > answer  : a 'Too large' message
             num < answer  : a 'Too small' message
             num == answer : 'Correct!' and a new game is started
             out of range  : a 'Can I kick you?' message
        0 is treated as a mysterious number.
        Valid guesses run from 1 to max.
    '''
global answer
global max
    # handle the special case first, then compare against the hidden answer
if num == 0:
return ' is a mysterious number...'
elif num < max + 1:
if num > answer:
return ' is too large. The answer is more small.'
elif num < answer:
return ' is too small. The answer is more large.'
elif num == answer:
answer = random.randint(1, max)
return ' is correct! :tada: Now, start a new game.'
elif max == 1:
return '? Can I kick you? Only 1.'
return '? Can I kick you? 1 to %d.' % max
def number_set(num):
    '''Set the maximum value of the answer.
    Args:
        num (int): the new maximum.
    Returns:
        str: a confirmation message (the maximum defaults to 50).
             num == 1 : a special 'Really?' message.
             num == 0 : treated as a mysterious number.
    '''
global answer
global max
    # update max and draw a new answer
if num == 0:
return 'There is a mysterious number... It is '
elif num == 1:
max = 1
answer = random.randint(1, max)
return '1? Really? Then, the maximum of the answer is '
max = num
answer = random.randint(1, max)
return 'OK. Then, the maximum of the answer is '
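# plugins/number.py imports slackbot.bot, but the decorated message handlers
# were lost in extraction; a minimal sketch of how `number` and `number_set`
# would be wired up (the trigger patterns are assumptions):
@slackbot.bot.respond_to(r'^(\d+)$')
def respond_number(message, num):
    message.reply(num + number(int(num)))

@slackbot.bot.respond_to(r'^set (\d+)$')
def respond_number_set(message, num):
    message.reply(number_set(int(num)) + num)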
| 25.110092 | 93 | 0.588235 |
71f3dc5d5c4a8e087378f18d43a8168ef202c67c | 30,964 | py | Python | pytype/analyze.py | hatal175/pytype | 22150dd56c2a11f3d385a1cbb28eed985df31d72 | ["Apache-2.0"] | null | null | null | pytype/analyze.py | hatal175/pytype | 22150dd56c2a11f3d385a1cbb28eed985df31d72 | ["Apache-2.0"] | null | null | null | pytype/analyze.py | hatal175/pytype | 22150dd56c2a11f3d385a1cbb28eed985df31d72 | ["Apache-2.0"] | null | null | null |
"""Code for checking and inferring types."""
import collections
import logging
import re
import subprocess
from typing import Any, Dict, Union
from pytype import abstract
from pytype import abstract_utils
from pytype import convert_structural
from pytype import debug
from pytype import function
from pytype import metrics
from pytype import output
from pytype import special_builtins
from pytype import state as frame_state
from pytype import vm
from pytype.overlays import typing_overlay
from pytype.pytd import builtins
from pytype.pytd import escape
from pytype.pytd import optimize
from pytype.pytd import pytd
from pytype.pytd import pytd_utils
from pytype.pytd import visitors
from pytype.typegraph import cfg
log = logging.getLogger(__name__)
# Most interpreter functions (including lambdas) need to be analyzed as
# stand-alone functions. The exceptions are comprehensions and generators, which
# have names like "<listcomp>" and "<genexpr>".
_SKIP_FUNCTION_RE = re.compile("<(?!lambda).+>$")
CallRecord = collections.namedtuple(
"CallRecord", ["node", "function", "signatures", "positional_arguments",
"keyword_arguments", "return_value"])
# How deep to follow call chains:
INIT_MAXIMUM_DEPTH = 4 # during module loading
MAXIMUM_DEPTH = 3 # during non-quick analysis
QUICK_CHECK_MAXIMUM_DEPTH = 2 # during quick checking
QUICK_INFER_MAXIMUM_DEPTH = 1 # during quick inference
def check_types(src, filename, errorlog, options, loader,
deep=True, init_maximum_depth=INIT_MAXIMUM_DEPTH,
maximum_depth=None, **kwargs):
"""Verify the Python code."""
tracer = CallTracer(errorlog=errorlog, options=options,
generate_unknowns=False, loader=loader, **kwargs)
loc, defs = tracer.run_program(src, filename, init_maximum_depth)
snapshotter = metrics.get_metric("memory", metrics.Snapshot)
snapshotter.take_snapshot("analyze:check_types:tracer")
if deep:
if maximum_depth is None:
maximum_depth = (
QUICK_CHECK_MAXIMUM_DEPTH if options.quick else MAXIMUM_DEPTH)
tracer.analyze(loc, defs, maximum_depth=maximum_depth)
snapshotter.take_snapshot("analyze:check_types:post")
_maybe_output_debug(options, tracer.program)
def infer_types(src, errorlog, options, loader,
filename=None, deep=True, init_maximum_depth=INIT_MAXIMUM_DEPTH,
show_library_calls=False, maximum_depth=None, tracer_vm=None,
**kwargs):
"""Given Python source return its types.
Args:
src: A string containing Python source code.
errorlog: Where error messages go. Instance of errors.ErrorLog.
options: config.Options object
loader: A load_pytd.Loader instance to load PYI information.
filename: Filename of the program we're parsing.
deep: If True, analyze all functions, even the ones not called by the main
execution flow.
init_maximum_depth: Depth of analysis during module loading.
show_library_calls: If True, call traces are kept in the output.
maximum_depth: Depth of the analysis. Default: unlimited.
tracer_vm: An instance of CallTracer, in case the caller wants to
instantiate and retain the vm used for type inference.
**kwargs: Additional parameters to pass to vm.VirtualMachine
Returns:
A tuple of (ast: TypeDeclUnit, builtins: TypeDeclUnit)
Raises:
AssertionError: In case of a bad parameter combination.
"""
# If the caller has passed in a vm, use that.
if tracer_vm:
assert isinstance(tracer_vm, CallTracer)
tracer = tracer_vm
else:
tracer = CallTracer(errorlog=errorlog, options=options,
generate_unknowns=options.protocols,
store_all_calls=not deep, loader=loader, **kwargs)
loc, defs = tracer.run_program(src, filename, init_maximum_depth)
log.info("===Done running definitions and module-level code===")
snapshotter = metrics.get_metric("memory", metrics.Snapshot)
snapshotter.take_snapshot("analyze:infer_types:tracer")
if deep:
if maximum_depth is None:
if not options.quick:
maximum_depth = MAXIMUM_DEPTH
elif options.analyze_annotated:
# Since there's no point in analyzing annotated functions for inference,
# the presence of this option means that the user wants checking, too.
maximum_depth = QUICK_CHECK_MAXIMUM_DEPTH
else:
maximum_depth = QUICK_INFER_MAXIMUM_DEPTH
tracer.exitpoint = tracer.analyze(loc, defs, maximum_depth)
else:
tracer.exitpoint = loc
snapshotter.take_snapshot("analyze:infer_types:post")
ast = tracer.compute_types(defs)
ast = tracer.loader.resolve_ast(ast)
if tracer.has_unknown_wildcard_imports or any(
a in defs for a in abstract_utils.DYNAMIC_ATTRIBUTE_MARKERS):
if "__getattr__" not in ast:
ast = pytd_utils.Concat(
ast, builtins.GetDefaultAst(options.python_version))
# If merged with other if statement, triggers a ValueError: Unresolved class
# when attempts to load from the protocols file
if options.protocols:
protocols_pytd = tracer.loader.import_name("protocols")
else:
protocols_pytd = None
builtins_pytd = tracer.loader.concat_all()
# Insert type parameters, where appropriate
ast = ast.Visit(visitors.CreateTypeParametersForSignatures())
if options.protocols:
log.info("=========== PyTD to solve =============\n%s",
pytd_utils.Print(ast))
ast = convert_structural.convert_pytd(ast, builtins_pytd, protocols_pytd)
elif not show_library_calls:
log.info("Solving is turned off. Discarding call traces.")
# Rename remaining "~unknown" to "?"
ast = ast.Visit(visitors.RemoveUnknownClasses())
# Remove "~list" etc.:
ast = convert_structural.extract_local(ast)
_maybe_output_debug(options, tracer.program)
return ast, builtins_pytd
def _maybe_output_debug(options, program):
"""Maybe emit debugging output."""
if options.output_cfg or options.output_typegraph:
dot = debug.program_to_dot(program, set([]), bool(options.output_cfg))
svg_file = options.output_cfg or options.output_typegraph
proc = subprocess.Popen(["/usr/bin/dot", "-T", "svg", "-o", svg_file],
stdin=subprocess.PIPE, universal_newlines=True)
(_, stderr) = proc.communicate(dot)
if stderr:
log.info("Failed to create %s: %s", svg_file, stderr)
if options.output_debug:
text = debug.program_to_text(program)
if options.output_debug == "-":
log.info("=========== Program Dump =============\n%s", text)
else:
with options.open_function(options.output_debug, "w") as fi:
fi.write(text)
| 41.61828 | 80 | 0.687993 |
71f490ebabe7d689d377fda1a39b6fe3eaf67bee | 3,979 | py | Python | src/cupcake/post_isoseq_cluster/demux_by_barcode_groups.py | milescsmith/cDNA_Cupcake | 776d841c69fc6d8b3dce95bb9f076546bc0429c0 | ["BSD-3-Clause-Clear"] | null | null | null | src/cupcake/post_isoseq_cluster/demux_by_barcode_groups.py | milescsmith/cDNA_Cupcake | 776d841c69fc6d8b3dce95bb9f076546bc0429c0 | ["BSD-3-Clause-Clear"] | null | null | null | src/cupcake/post_isoseq_cluster/demux_by_barcode_groups.py | milescsmith/cDNA_Cupcake | 776d841c69fc6d8b3dce95bb9f076546bc0429c0 | ["BSD-3-Clause-Clear"] | null | null | null |
#!/usr/bin/env python
__author__ = "[email protected]"
"""
Given a pooled input GFF + demux CSV file, write out per-{barcode group} GFFs
If input fasta/fastq is given, optionally also output per-{barcode group} FASTA/FASTQ
"""
import re
from collections import defaultdict
from csv import DictReader
from typing import Optional
import typer
from Bio import SeqIO
import cupcake.sequence.GFF as GFF
from cupcake import version_callback
from cupcake import cupcake_logger as logger
rex_pbid = re.compile(r"(PB.\d+.\d+)(|\S+)")
app = typer.Typer(name="cupcake.post_isoseq_cluster.demux_by_barcode_groups")
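# `get_type_fafq` is used below but its body was lost in extraction; a minimal
# sketch inferred from its usage with Bio.SeqIO (the extension handling is an
# assumption):
def get_type_fafq(in_filename):
    upper_name = in_filename.upper()
    if upper_name.endswith('.FA') or upper_name.endswith('.FASTA'):
        return 'fasta'
    elif upper_name.endswith('.FQ') or upper_name.endswith('.FASTQ'):
        return 'fastq'
    else:
        raise Exception(f"Unrecognized file suffix for {in_filename}! Must end with .fasta or .fastq!")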
def regroup_gff(
pooled_gff, demux_count_file, output_prefix, out_group_dict, in_fafq=None
):
"""
    :param pooled_gff: pooled GFF file
:param demux_count_file: comma-delimited per-barcode count file
:param output_prefix: output prefix for GFF
:param out_group_dict: dict of barcode name --> group to be long in (ex: {'EM1':'EM', 'EM2':'EM'})
:param in_fafq: optional fasta/fastq that was input to SAM
"""
if in_fafq is not None:
type_fafq = get_type_fafq(in_fafq)
in_tissue = defaultdict(
lambda: set()
) # pbid --> list of tissue it is in (EM, END, R)
for r in DictReader(open(demux_count_file), delimiter=","):
for k, v in r.items():
if k != "id" and int(v) > 0:
in_tissue[r["id"]].add(k)
# in_tissue = dict(in_tissue)
handles = {}
handles_fafq = {}
for g in out_group_dict.values():
handles[g] = open(f"{output_prefix}_{g}_only.gff", "w")
if in_fafq is not None:
handles_fafq[g] = open(f"{output_prefix}_{g}_only.{type_fafq}", "w")
if in_fafq is not None:
fafq_dict = SeqIO.to_dict(SeqIO.parse(open(in_fafq), type_fafq))
fafq_dict_keys = list(fafq_dict.keys())
for k in fafq_dict_keys:
m = rex_pbid.match(k)
if m is not None:
fafq_dict[m.group(1)] = fafq_dict[k]
reader = GFF.collapseGFFReader(pooled_gff)
for r in reader:
groups_to_write_in = set()
pbid = r.seqid
if pbid not in in_tissue:
logger.info(
f"WARNING: {pbid} does not belong to any group indicated by outgroup_dict"
)
for tissue in in_tissue[pbid]:
groups_to_write_in.add(out_group_dict[tissue])
for g in groups_to_write_in:
GFF.write_collapseGFF_format(handles[g], r)
if in_fafq is not None:
SeqIO.write(fafq_dict[pbid], handles_fafq[g], type_fafq)
if __name__ == "__main__":
typer.run(main)
| 32.349593 | 111 | 0.6381 |
71f49ed361f20a67913d6d3671205fa5c226035f | 5,168 | py | Python | vll/data/circle_dataset.py | paulhfu/3dcv-students | f8d42c985cf33903170733b0c8f6a2199099553c | ["MIT"] | 4 | 2020-04-21T21:40:13.000Z | 2022-02-13T18:18:13.000Z | vll/data/circle_dataset.py | paulhfu/3dcv-students | f8d42c985cf33903170733b0c8f6a2199099553c | ["MIT"] | 1 | 2022-02-03T11:24:07.000Z | 2022-02-03T11:24:07.000Z | vll/data/circle_dataset.py | paulhfu/3dcv-students | f8d42c985cf33903170733b0c8f6a2199099553c | ["MIT"] | 8 | 2020-04-22T10:24:27.000Z | 2022-01-13T16:52.000Z |
import random
import numpy as np
import math
from skimage.draw import line, line_aa, circle, set_color, circle_perimeter_aa
from skimage.io import imsave
from skimage.util import random_noise
maxSlope = 10 # restrict the maximum slope of generated lines for stability
minLength = 20 # restrict the minimum length of line segments
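# The dataset classes themselves were stripped in extraction. A minimal sketch
# of how the imported skimage helpers combine to render one noisy circle image
# (the function name, image size, and radius range are assumptions):
def draw_random_circle(size=64):
    img = np.zeros((size, size))
    radius = random.randint(5, size // 4)
    # keep the whole circle inside the image bounds
    cy = random.randint(radius + 1, size - radius - 2)
    cx = random.randint(radius + 1, size - radius - 2)
    rr, cc, val = circle_perimeter_aa(cy, cx, radius)
    img[rr, cc] = val
    # corrupt the clean drawing with random noise, as a training set would
    img = random_noise(img, mode='gaussian')
    return img, (cy, cx, radius)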
| 31.13253 | 147 | 0.663893 |
71f5039371a3b37776d4da5587717221d15a60a1 | 5,276 | py | Python | VAE/reduced_model/nesm_generator.py | youngmg1995/NES-Music-Maker | aeda10a541cfd439cfa46c45e63411e0d98e41c1 | ["MIT"] | 3 | 2020-06-26T22:02:35.000Z | 2021-11-20T19:24:33.000Z | VAE/reduced_model/nesm_generator.py | youngmg1995/NES-Music-Maker | aeda10a541cfd439cfa46c45e63411e0d98e41c1 | ["MIT"] | null | null | null | VAE/reduced_model/nesm_generator.py | youngmg1995/NES-Music-Maker | aeda10a541cfd439cfa46c45e63411e0d98e41c1 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 1 17:14:19 2020
@author: Mitchell
nesm_generator.py
~~~~~~~~~~~~~~~~~
This file serves as a script for using our pre-trained VAE model to generate
brand new NES music soundtracks. NOTE - using the reduced model we only
generate the first melodic voice for each track rather than each of the four
voices present in an NESM track. To do so we first reconstruct our model using
the file VAE class defined in `VAE.py` and the same parameters used in
`model_training`. Then we use functions from the file `generation_utils` to
have our trained model create entirely new and original NES music.
"""
# Imports
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# NOTE - nesmdb folder manually added to environment libraries
from dataset_utils import load_training
from VAE import VAE
from generation_utils import generate_seprsco, latent_SVD, get_latent_vecs,\
plot_track, filter_tracks
import nesmdb
from nesmdb.vgm.vgm_to_wav import save_vgmwav
import tensorflow as tf
import numpy as np
import os, json
### Load Mappings
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Parameters for shape of dataset (note these are also used for model def.)
measures = 8
measure_len = 96
# load data
training_foldername = '../../nesmdb24_seprsco/train/'
train_save_filename = 'transformed_dataset.json'
dataset , labels2int_map , int2labels_map = \
load_training(training_foldername, train_save_filename,
measures = measures, measure_len = measure_len)
### Reinitiate Model
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
### Model Parameters
latent_dim = 124
input_dim = len(int2labels_map) - 1
dropout = .1
maxnorm = None
vae_b1 , vae_b2 = .02 , .1
print('Reinitiating VAE Model')
# Build Model
model = VAE(latent_dim, input_dim, measures, measure_len, dropout,
maxnorm, vae_b1 , vae_b2)
# Reload Saved Weights
checkpoint_dir = './training_checkpoints'
checkpoint_prefix = os.path.join(checkpoint_dir, "model_ckpt")
model.load_weights(checkpoint_prefix)
model.build(tf.TensorShape([None, measures, measure_len, ]))
# Print Summary of Model
model.summary()
### Sample Latent Variable Distributions
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Here we use SVD to more effectively sample from the orthogonal components
# of our latent space
# Parameters for sampling
num_songs = 10
print('Generating Latent Samples to Generate {} New Tracks'.format(num_songs))
# Grab distributions of dataset over latent space
# Have to run in batches due to size of the dataset
batch_size = 300
latent_vecs = get_latent_vecs(model, dataset, batch_size)
# Sample from normal distribution
rand_vecs = np.random.normal(0.0, 1.0, (num_songs, latent_dim))
# perform SVD
plot_eigenvalues = True
sample_vecs = latent_SVD(latent_vecs, rand_vecs, plot_eigenvalues)
### Generate New Tracks
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Create new seprsco tracks using our model and the random samples
# Seprsco files can later be converted to valid NES music format
# Parameters for track generation (specifically filtering)
p_min = .5
print('Generating New Tracks from Latent Samples')
# Decode samples using VAE
decoded_tracks = model.decoder(sample_vecs)
# Plot first decoded track
print("Example Model Generated Track")
plot_track(decoded_tracks[0])
# Filter Track
decoded_tracks = filter_tracks(decoded_tracks, p_min)
# Plot first filtered track
print("Example Filtered Track")
plot_track(decoded_tracks[0])
# Convert tracks to seprsco format
print('Converting Model Output to Seprsco')
seprsco_tracks = generate_seprsco(decoded_tracks, int2labels_map)
### Convert to WAV
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Convert seprsco tracks to WAV files so we can listen!!!
print('Converting Seprsco to WAV Audio')
wav_tracks = []
for track in seprsco_tracks:
wav = nesmdb.convert.seprsco_to_wav(track)
wav_tracks.append(wav)
### Save WAV Files
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Save our wav tracks to appropriate files (be sure not to overwrite existing)
# Also save latent variables so we can reproduce songs we like
# Save WAV tracks
save_wav = False
if save_wav:
print('Saving Generated WAV Audio Tracks')
wav_folder = 'model_gen_files/'
for i in range(len(wav_tracks)):
wav_file = wav_folder+'VAE_NESM_{}.wav'.format(i)
save_vgmwav(wav_file, wav_tracks[i])
# Save Latent Variables
save_latent_var = False
if save_latent_var:
print('Saving Latent Variables for Generated Tracks')
latent_filename = os.path.join(wav_folder, "latent_variables.json")
with open(latent_filename, 'w') as f:
json.dump({
'VAE_NESM_{}.wav'.format(i): sample_vecs[i].tolist()
for i in range(sample_vecs.shape[0])
}, f)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#----------------------------------END FILE------------------------------------
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ | 32.567901 | 79 | 0.638931 |
71f546859f563e461fa98070b804316bbbaa69c8 | 1,030 | py | Python | anlogger/logger.py | anttin/anlogger | dfa7be7ba2f4651507b188f986c10bab9bd7460e | ["MIT"] | null | null | null | anlogger/logger.py | anttin/anlogger | dfa7be7ba2f4651507b188f986c10bab9bd7460e | ["MIT"] | null | null | null | anlogger/logger.py | anttin/anlogger | dfa7be7ba2f4651507b188f986c10bab9bd7460e | ["MIT"] | null | null | null |
import logging
import logging.handlers
import os
| 28.611111 | 94 | 0.659223 |
71f5e7d6ce9c05a9fca443446dc43b2633e1dc3d | 133 | py | Python | Python/hello_world-theopaid.py | saurabhcommand/Hello-world | 647bad9da901a52d455f05ecc37c6823c22dc77e | ["MIT"] | 1,428 | 2018-10-03T15:15:17.000Z | 2019-03-31T18:38:36.000Z | Python/hello_world-theopaid.py | saurabhcommand/Hello-world | 647bad9da901a52d455f05ecc37c6823c22dc77e | ["MIT"] | 1,162 | 2018-10-03T15:05:49.000Z | 2018-10-18T14:17:52.000Z | Python/hello_world-theopaid.py | saurabhcommand/Hello-world | 647bad9da901a52d455f05ecc37c6823c22dc77e | ["MIT"] | 3,909 | 2018-10-03T15:07:19.000Z | 2019-03-31T18:39:08.000Z |
#Author Theodosis Paidakis
print("Hello World")
hello_list = ["Hello World"]
print(hello_list[0])
for i in hello_list:
print(i) | 16.625 | 28 | 0.721805 |
71f6399c6d0b985a134ae6927664662c261371d5 | 330 | py | Python | question_answering/stubs.py | uliang/NaturalLanguageQueryingSystem | d18b4ae429362ba311d6f26debbcfe391b810458 | ["MIT"] | null | null | null | question_answering/stubs.py | uliang/NaturalLanguageQueryingSystem | d18b4ae429362ba311d6f26debbcfe391b810458 | ["MIT"] | null | null | null | question_answering/stubs.py | uliang/NaturalLanguageQueryingSystem | d18b4ae429362ba311d6f26debbcfe391b810458 | ["MIT"] | null | null | null |
from collections import namedtuple
from unittest.mock import MagicMock
_fake_ext = namedtuple('_', ['qtype', 'kb_ident'])
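# The stub objects themselves were stripped in extraction; a minimal sketch of
# how the two imports above would combine (attribute values are placeholders):
fake_parser = MagicMock()
fake_parser.parse.return_value = _fake_ext(qtype='definition', kb_ident='Q1')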
| 23.571429 | 51 | 0.654545 |
71f66963cc795d3d06ec835c6cc0e9a8392f9d65 | 729 | py | Python | lib/env/trade/BaseTradeStrategy.py | devas123/Bitcoin-Trader-RL | 097cb0ba7428b2c4f997bdb0425a6153c23f9c83 | ["MIT"] | null | null | null | lib/env/trade/BaseTradeStrategy.py | devas123/Bitcoin-Trader-RL | 097cb0ba7428b2c4f997bdb0425a6153c23f9c83 | ["MIT"] | null | null | null | lib/env/trade/BaseTradeStrategy.py | devas123/Bitcoin-Trader-RL | 097cb0ba7428b2c4f997bdb0425a6153c23f9c83 | ["MIT"] | null | null | null |
from abc import ABCMeta, abstractmethod
from typing import Tuple, Callable
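# The abstract base class body was stripped in extraction; a minimal sketch
# consistent with the imports and the file name (the method name and signature
# are assumptions):
class BaseTradeStrategy(metaclass=ABCMeta):
    def __init__(self, commission: float, initial_balance: float):
        self.commission = commission
        self.initial_balance = initial_balance

    @abstractmethod
    def trade(self, action: int, current_price: float,
              balance: float) -> Tuple[float, float]:
        raise NotImplementedError()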
| 30.375 | 90 | 0.577503 |
71f75308f26d63f94b10ee2cdd22fde8e48a6afe | 304 | py | Python | 4day/Book04_1.py | jsjang93/joony | 62f7a325094c887212b894932263bf84500e0f03 | ["MIT"] | null | null | null | 4day/Book04_1.py | jsjang93/joony | 62f7a325094c887212b894932263bf84500e0f03 | ["MIT"] | null | null | null | 4day/Book04_1.py | jsjang93/joony | 62f7a325094c887212b894932263bf84500e0f03 | ["MIT"] | null | null | null |
# Book04_1.py
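# The Book class definition (and its non-ASCII default category string) was
# lost in extraction; a minimal hypothetical stand-in so the statements below run:
class Book:
    category = 'Programming'  # placeholder for the stripped original value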
b1 = Book(); print(b1.category)
b2 = b1; print(b2.category)
print(Book.category)
Book.category = ''
print(b2.category); print(b1.category) ; print(Book.category)
b2.category = 'IT'
print(b2.category); print(b1.category) ; print(Book.category) | 23.384615 | 61 | 0.700658 |
71f7f60857c1f64d4455062df57530ef2f07c039 | 4,146 | py | Python | wrappers/python/virgil_crypto_lib/foundation/kdf1.py | odidev/virgil-crypto-c | 3d5d5cb19fdcf81eab08cdc63647f040117ecbd8 | ["BSD-3-Clause"] | 26 | 2018-12-17T13:45:25.000Z | 2022-01-16T20:00:04.000Z | wrappers/python/virgil_crypto_lib/foundation/kdf1.py | odidev/virgil-crypto-c | 3d5d5cb19fdcf81eab08cdc63647f040117ecbd8 | ["BSD-3-Clause"] | 4 | 2019-01-03T12:08:52.000Z | 2021-12-02T05:21:13.000Z | wrappers/python/virgil_crypto_lib/foundation/kdf1.py | odidev/virgil-crypto-c | 3d5d5cb19fdcf81eab08cdc63647f040117ecbd8 | ["BSD-3-Clause"] | 8 | 2019-01-24T08:22:06.000Z | 2022-02-07T11:37:00.000Z |
# Copyright (C) 2015-2021 Virgil Security, Inc.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# (1) Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# (2) Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# (3) Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Lead Maintainer: Virgil Security Inc. <[email protected]>
from ctypes import *
from ._c_bridge import VscfKdf1
from ._c_bridge import VscfImplTag
from ._c_bridge import VscfStatus
from virgil_crypto_lib.common._c_bridge import Data
from virgil_crypto_lib.common._c_bridge import Buffer
from .alg import Alg
from .kdf import Kdf
| 37.017857 | 117 | 0.7137 |
71f886304dcb6c6099a9de6e408e657ec12b3bde | 497 | py | Python | mysite/zoo/tests.py | leixiayang/django-python | 8faa84867af5645d3d3d8e67fe8020be4dc68551 | ["Apache-2.0"] | 54 | 2015-07-13T14:23:01.000Z | 2021-08-05T10:51:00.000Z | mysite/zoo/tests.py | leixiayang/django-python | 8faa84867af5645d3d3d8e67fe8020be4dc68551 | ["Apache-2.0"] | 32 | 2015-07-16T08:58:00.000Z | 2020-04-30T09:41:57.000Z | mysite/zoo/tests.py | leixiayang/django-python | 8faa84867af5645d3d3d8e67fe8020be4dc68551 | ["Apache-2.0"] | 31 | 2015-07-13T15:32:01.000Z | 2022-02-19T17:19:51.000Z |
#!/usr/bin/env python
# encoding: utf-8
from django.test import TestCase
from zoo import models
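# The test case bodies were stripped in extraction; a minimal hypothetical
# sketch of the shape such Django tests take (test names are assumptions):
class ZooTestCase(TestCase):
    def test_models_importable(self):
        # smoke test: the models module loaded without error
        self.assertIsNotNone(models)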
| 18.407407 | 44 | 0.593561 |
71f8c70583f9e40977155faf528bc04d31d42d94 | 123 | py | Python | EX025.py | gjaosdij/PythonProject | ae27990efa93462b632f165d13c08c7fd93beb38 | ["MIT"] | null | null | null | EX025.py | gjaosdij/PythonProject | ae27990efa93462b632f165d13c08c7fd93beb38 | ["MIT"] | null | null | null | EX025.py | gjaosdij/PythonProject | ae27990efa93462b632f165d13c08c7fd93beb38 | ["MIT"] | null | null | null |
print('Digite seu nome completo: ')
nome = input().strip().upper()
print('Seu nome tem "Silva"?')
print('SILVA' in nome)
| 17.571429 | 35 | 0.658537 |
71f9440eb1c326307f7552af69580f96b76f02f9 | 3,283 | py | Python | configs/_base_/datasets/stvqa_dataset.py | linxi1158/iMIX | af87a17275f02c94932bb2e29f132a84db812002 | ["Apache-2.0"] | 23 | 2021-06-26T08:45:19.000Z | 2022-03-02T02:13:33.000Z | configs/_base_/datasets/stvqa_dataset.py | XChuanLee/iMIX | 99898de97ef8b45462ca1d6bf2542e423a73d769 | ["Apache-2.0"] | null | null | null | configs/_base_/datasets/stvqa_dataset.py | XChuanLee/iMIX | 99898de97ef8b45462ca1d6bf2542e423a73d769 | ["Apache-2.0"] | 9 | 2021-06-10T02:36:20.000Z | 2021-11-09T02:18:16.000Z |
dataset_type = 'STVQADATASET'
data_root = '/home/datasets/mix_data/iMIX/'
feature_path = 'data/datasets/stvqa/defaults/features/'
ocr_feature_path = 'data/datasets/stvqa/defaults/ocr_features/'
annotation_path = 'data/datasets/stvqa/defaults/annotations/'
vocab_path = 'data/datasets/stvqa/defaults/extras/vocabs/'
train_datasets = ['train']
test_datasets = ['val']
reader_train_cfg = dict(
type='STVQAREADER',
card='default',
mix_features=dict(
train=data_root + feature_path + 'detectron.lmdb',
val=data_root + feature_path + 'detectron.lmdb',
test=data_root + feature_path + 'detectron.lmdb',
),
mix_ocr_features=dict(
train=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
val=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
test=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
),
mix_annotations=dict(
train=data_root + annotation_path + 'imdb_subtrain.npy',
val=data_root + annotation_path + 'imdb_subval.npy',
test=data_root + annotation_path + 'imdb_test_task3.npy',
),
datasets=train_datasets)
reader_test_cfg = dict(
type='STVQAREADER',
card='default',
mix_features=dict(
train=data_root + feature_path + 'detectron.lmdb',
val=data_root + feature_path + 'detectron.lmdb',
test=data_root + feature_path + 'detectron.lmdb',
),
mix_ocr_features=dict(
train=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
val=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
test=data_root + ocr_feature_path + 'ocr_en_frcn_features.lmdb',
),
mix_annotations=dict(
train=data_root + annotation_path + 'imdb_subtrain.npy',
val=data_root + annotation_path + 'imdb_subval.npy',
test=data_root + annotation_path + 'imdb_test_task3.npy',
),
datasets=train_datasets)
info_cpler_cfg = dict(
type='STVQAInfoCpler',
glove_weights=dict(
glove6b50d=data_root + 'glove/glove.6B.50d.txt.pt',
glove6b100d=data_root + 'glove/glove.6B.100d.txt.pt',
glove6b200d=data_root + 'glove/glove.6B.200d.txt.pt',
glove6b300d=data_root + 'glove/glove.6B.300d.txt.pt',
),
fasttext_weights=dict(
wiki300d1m=data_root + 'fasttext/wiki-news-300d-1M.vec',
wiki300d1msub=data_root + 'fasttext/wiki-news-300d-1M-subword.vec',
wiki_bin=data_root + 'fasttext/wiki.en.bin',
),
tokenizer='/home/datasets/VQA/bert/' + 'bert-base-uncased-vocab.txt',
mix_vocab=dict(
answers_st_5k=data_root + vocab_path + 'fixed_answer_vocab_stvqa_5k.txt',
vocabulary_100k=data_root + vocab_path + 'vocabulary_100k.txt',
),
max_seg_lenth=20,
max_ocr_lenth=10,
word_mask_ratio=0.0,
vocab_name='vocabulary_100k',
vocab_answer_name='answers_st_5k',
glove_name='glove6b300d',
fasttext_name='wiki_bin',
if_bert=True,
)
train_data = dict(
samples_per_gpu=16,
workers_per_gpu=1,
data=dict(type=dataset_type, reader=reader_train_cfg, info_cpler=info_cpler_cfg, limit_nums=800))
test_data = dict(
samples_per_gpu=16,
workers_per_gpu=1,
data=dict(type=dataset_type, reader=reader_test_cfg, info_cpler=info_cpler_cfg),
)
| 36.88764 | 101 | 0.696924 |
71f98ce850b9a0d28247d4a6575715ee5f7a82c8 | 2,955 | py | Python | src/rpocore/migrations/0007_auto_20160927_1517.py | 2martens/rpo-website | 14990920722c537810aecd2b97f5af6bbdd1b5ec | ["MIT"] | null | null | null | src/rpocore/migrations/0007_auto_20160927_1517.py | 2martens/rpo-website | 14990920722c537810aecd2b97f5af6bbdd1b5ec | ["MIT"] | null | null | null | src/rpocore/migrations/0007_auto_20160927_1517.py | 2martens/rpo-website | 14990920722c537810aecd2b97f5af6bbdd1b5ec | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-27 13:17
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import mezzanine.core.fields
| 43.455882 | 186 | 0.626396 |
71fa3b7f37795303ef477432b9138d2cfeb1171c | 320 | py | Python | code/Line.py | manno-xx/FutureLearnRobotBuggy | d5f0172597ad88d6a8b883b0b16d425a76edfb0b | ["MIT"] | null | null | null | code/Line.py | manno-xx/FutureLearnRobotBuggy | d5f0172597ad88d6a8b883b0b16d425a76edfb0b | ["MIT"] | null | null | null | code/Line.py | manno-xx/FutureLearnRobotBuggy | d5f0172597ad88d6a8b883b0b16d425a76edfb0b | ["MIT"] | null | null | null |
#LineSensor test
from gpiozero import LineSensor
from time import sleep
from signal import pause
sensor = LineSensor(14)
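# The callback functions were lost in extraction; minimal stand-ins so the
# assignments below have something to point at:
def lineDetected():
    print("Line detected")

def noLineDetected():
    print("No line detected")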
sensor.when_line = lineDetected
sensor.when_no_line = noLineDetected
pause()
sensor.close()
| 14.545455 | 36 | 0.759375 |
71fa640b3932ba3d1df289310da43d75dd4a2089 | 4,239 | py | Python | litex_boards/platforms/myminieye_runber.py | chmousset/litex-boards | c081177d77f37a4ea6cff150d42a69bd6f0abbc2 | ["BSD-2-Clause"] | null | null | null | litex_boards/platforms/myminieye_runber.py | chmousset/litex-boards | c081177d77f37a4ea6cff150d42a69bd6f0abbc2 | ["BSD-2-Clause"] | null | null | null | litex_boards/platforms/myminieye_runber.py | chmousset/litex-boards | c081177d77f37a4ea6cff150d42a69bd6f0abbc2 | ["BSD-2-Clause"] | null | null | null |
#
# This file is part of LiteX-Boards.
#
# Copyright (c) 2021 Gwenhael Goavec-Merou <[email protected]>
# SPDX-License-Identifier: BSD-2-Clause
from migen import *
from litex.build.generic_platform import *
from litex.build.gowin.platform import GowinPlatform
from litex.build.openfpgaloader import OpenFPGALoader
# IOs ----------------------------------------------------------------------------------------------
_io = [
# Clk / Rst
("clk12", 0, Pins("4"), IOStandard("LVCMOS33")),
# Leds
("user_led", 0, Pins("23"), IOStandard("LVCMOS33")),
("user_led", 1, Pins("24"), IOStandard("LVCMOS33")),
("user_led", 2, Pins("25"), IOStandard("LVCMOS33")),
("user_led", 3, Pins("26"), IOStandard("LVCMOS33")),
("user_led", 4, Pins("27"), IOStandard("LVCMOS33")),
("user_led", 5, Pins("28"), IOStandard("LVCMOS33")),
("user_led", 6, Pins("29"), IOStandard("LVCMOS33")),
("user_led", 7, Pins("30"), IOStandard("LVCMOS33")),
# RGB led, active-low
("rgb_led", 0,
Subsignal("r", Pins("112")),
Subsignal("g", Pins("114")),
Subsignal("b", Pins("113")),
IOStandard("LVCMOS33"),
),
("rgb_led", 1,
Subsignal("r", Pins("106")),
Subsignal("g", Pins("111")),
Subsignal("b", Pins("110")),
IOStandard("LVCMOS33"),
),
("rgb_led", 2,
Subsignal("r", Pins("101")),
Subsignal("g", Pins("104")),
Subsignal("b", Pins("102")),
IOStandard("LVCMOS33"),
),
("rgb_led", 3,
Subsignal("r", Pins("98")),
Subsignal("g", Pins("100")),
Subsignal("b", Pins("99")),
IOStandard("LVCMOS33"),
),
# Switches
("user_sw", 0, Pins("75"), IOStandard("LVCMOS33")),
("user_sw", 1, Pins("76"), IOStandard("LVCMOS33")),
("user_sw", 2, Pins("78"), IOStandard("LVCMOS33")),
("user_sw", 3, Pins("79"), IOStandard("LVCMOS33")),
("user_sw", 4, Pins("80"), IOStandard("LVCMOS33")),
("user_sw", 5, Pins("81"), IOStandard("LVCMOS33")),
("user_sw", 6, Pins("82"), IOStandard("LVCMOS33")),
("user_sw", 7, Pins("83"), IOStandard("LVCMOS33")),
# Buttons.
("user_btn", 0, Pins("58"), IOStandard("LVCMOS33")),
("user_btn", 1, Pins("59"), IOStandard("LVCMOS33")),
("user_btn", 2, Pins("60"), IOStandard("LVCMOS33")),
("user_btn", 3, Pins("61"), IOStandard("LVCMOS33")),
("user_btn", 4, Pins("62"), IOStandard("LVCMOS33")),
("user_btn", 5, Pins("63"), IOStandard("LVCMOS33")),
("user_btn", 6, Pins("64"), IOStandard("LVCMOS33")),
("user_btn", 7, Pins("65"), IOStandard("LVCMOS33")),
# Serial.
# FT232H has only one interface -> use (arbitrary) two pins from J2 to
# connect an external USB<->serial adapter
("serial", 0,
Subsignal("tx", Pins("116")), # J2.17
Subsignal("rx", Pins("115")), # J2.18
IOStandard("LVCMOS33")
),
# Seven Segment
("seven_seg_dig", 0, Pins("137"), IOStandard("LVCMOS33")),
("seven_seg_dig", 1, Pins("140"), IOStandard("LVCMOS33")),
("seven_seg_dig", 2, Pins("141"), IOStandard("LVCMOS33")),
("seven_seg_dig", 3, Pins("7"), IOStandard("LVCMOS33")),
("seven_seg", 0, Pins("138 142 9 11 12 139 8 10"), IOStandard("LVCMOS33")),
]
# Connectors ---------------------------------------------------------------------------------------
_connectors = [
["J1", "- 38 39 40 41 42 43 44 66 67 68 69 70 71 72 96 95 94 93 -"],
["J2", "- 136 135 134 133 132 131 130 129 128 123 122 121 120 119 118 117 116 115 -"],
]
# Platform -----------------------------------------------------------------------------------------
| 36.543103 | 118 | 0.550366 |
71fafdbd17b589475cd452d3837ecd482b296f0c | 8,468 | py | Python | combo/search/discrete/policy.py | zhangkunliang/BayesOptimization | 6d78c9e9f96239b0dbb85650a0d878e9410158ec | ["MIT"] | 139 | 2016-02-18T02:31:04.000Z | 2022-02-18T10:38:06.000Z | combo/search/discrete/policy.py | zhangkunliang/BayesOptimization | 6d78c9e9f96239b0dbb85650a0d878e9410158ec | ["MIT"] | 8 | 2016-04-18T08:10:44.000Z | 2020-12-30T08:49:33.000Z | combo/search/discrete/policy.py | zhangkunliang/BayesOptimization | 6d78c9e9f96239b0dbb85650a0d878e9410158ec | ["MIT"] | 50 | 2016-05-21T01:17:23.000Z | 2022-02-18T01:27:41.000Z |
import numpy as np
import copy
import combo.misc
import cPickle as pickle
from results import history
from .. import utility
from ...variable import variable
from ..call_simulator import call_simulator
from ... import predictor
from ...gp import predictor as gp_predictor
from ...blm import predictor as blm_predictor
import combo.search.score
MAX_SEACH = int(20000)
| 32.694981 | 78 | 0.594473 |
71fafe5ef80b5403322b57c793f98a2f2f5a763c | 213 | py | Python | venv/lib/python3.9/site-packages/py2app/bootstrap/disable_linecache.py | dequeb/asmbattle | 27e8b209de5787836e288a2f2f9b7644ce07563e | ["MIT"] | 193 | 2020-01-15T09:34:20.000Z | 2022-03-18T19:14:16.000Z | .eggs/py2app-0.21-py3.7.egg/py2app/bootstrap/disable_linecache.py | mdanisurrahmanrony/Covid-Doctor | 1e0e854d01325c1a18dd52f33064aed4c21b8161 | ["Apache-2.0"] | 185 | 2020-01-15T08:38:27.000Z | 2022-03-27T17:29:29.000Z | .eggs/py2app-0.21-py3.7.egg/py2app/bootstrap/disable_linecache.py | mdanisurrahmanrony/Covid-Doctor | 1e0e854d01325c1a18dd52f33064aed4c21b8161 | ["Apache-2.0"] | 23 | 2020-01-24T14:47:18.000Z | 2022-02-22T17:19:47.000Z |
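# The function body was stripped in extraction. A sketch of what the py2app
# bootstrap does here: replace linecache.getline so tracebacks in a bundled
# app don't try to read source files (exact original wording may differ):
def _disable_linecache():
    import linecache
    def fake_getline(*args, **kwargs):
        return ''
    linecache.orig_getline = linecache.getline
    linecache.getline = fake_getline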
_disable_linecache()
| 17.75 | 46 | 0.704225 |
71fb4a0f65f1788e4523139873b94749e767304c | 693 | py | Python | source.py | s403o/tw_bot | fd26ebc86d4c7d1be1ae654f26f5ca74c2566a03 | [
"MIT"
] | null | null | null | source.py | s403o/tw_bot | fd26ebc86d4c7d1be1ae654f26f5ca74c2566a03 | [
"MIT"
] | null | null | null | source.py | s403o/tw_bot | fd26ebc86d4c7d1be1ae654f26f5ca74c2566a03 | [
"MIT"
] | null | null | null | import requests
from bs4 import BeautifulSoup as bs
import os
#source
url = '' # the source you want the bot take images from
#down page
page = requests.get(url)
html = bs(page.text, 'html.parser')
#locate
image_loc = html.findAll('img')
#create folder for located imgs
if not os.path.exists('imgs'):
os.makedirs('imgs')
#open the new folder
os.chdir('imgs')
image0 = 0 #img name
#get images
for image in image_loc:
try:
url = image['src']
source = requests.get(url)
if source.status_code == 200:
            with open('img-' + str(image0) + '.jpg', 'wb') as mkimg:  # 'wb': write image bytes
                mkimg.write(source.content)  # reuse the response already fetched above
                image0 += 1
    except Exception:
        pass  # skip images whose src is missing or fails to download
| 19.25 | 63 | 0.65368 |
71fc1181a50c5c3f0fee92d2187d798fa7535036 | 2,403 | py | Python | lib/appController.py | QIAOANGeo/BZB_ydzw | 8c11e9797cca31d1fab26be7eb0a71666cfac15f | [
"MIT"
] | 2 | 2019-12-06T14:49:34.000Z | 2021-06-10T15:57:59.000Z | lib/appController.py | QIAOANGeo/BZB_ydzw | 8c11e9797cca31d1fab26be7eb0a71666cfac15f | [
"MIT"
] | null | null | null | lib/appController.py | QIAOANGeo/BZB_ydzw | 8c11e9797cca31d1fab26be7eb0a71666cfac15f | [
"MIT"
] | null | null | null | '''
1. Start the appium server
   (via subprocess)
   1.1
   1.2
2. Create the driver
'''
from lib.tools import Tool
import subprocess
from lib.path import SYSTEMPATH, ERRORPATH
import time
from appium import webdriver
import queue
# python queue
driver_queue = queue.Queue()
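# NOTE: the Controller class body was not captured in this row; the __main__
# block below relies on at least this interface (hypothetical sketch only):
class Controller(object):
    def start_server(self):
        """Launch the appium server in a subprocess."""
        pass
    def test_server(self):
        """Return True once the appium server is up and responding."""
        return True
    def start_driver(self):
        """Create the appium webdriver and put it on driver_queue."""
        pass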
if __name__ == '__main__':
controller = Controller()
controller.start_server()
if controller.test_server():
controller.start_driver() | 30.417722 | 109 | 0.553891 |
71fcdf4c5cb6b34edec79a08f7d295031300d28a | 2,226 | py | Python | pondSizes.py | passionzhan/LeetCode | c4d33b64b9da15ca7a9b0d41e645d86a697694fe | [
"MIT"
] | 1 | 2019-08-29T01:12:47.000Z | 2019-08-29T01:12:47.000Z | pondSizes.py | passionzhan/LeetCode | c4d33b64b9da15ca7a9b0d41e645d86a697694fe | [
"MIT"
] | null | null | null | pondSizes.py | passionzhan/LeetCode | c4d33b64b9da15ca7a9b0d41e645d86a697694fe | [
"MIT"
] | null | null | null | # -*- encoding: utf-8 -*-
'''
@project : LeetCode
@File : pondSizes.py
@Contact : [email protected]
@Desc    : Pond Sizes (LCCI 16.19). You have an integer matrix `land` in which
           each value is the height above sea level at that point; a value of
           0 marks water. Water cells connected vertically, horizontally, or
           diagonally form a pond, whose size is the number of connected water
           cells. Compute the sizes of all ponds in the matrix, returned in
           ascending order.
           Example input:
           [
             [0,2,1,0],
             [0,1,0,1],
             [1,1,0,1],
             [0,1,0,1]
           ]
           Example output: [1,2,4]
           Constraints:
           0 < len(land) <= 1000
           0 < len(land[i]) <= 1000
           Source: LeetCode
           https://leetcode-cn.com/problems/pond-sizes-lcci
@Modify Time      @Author    @Version    @Description
------------ ------- -------- -----------
2020-03-07 zhan 1.0 None
'''
from typing import List
from collections import deque
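# NOTE: the Solution class was not captured in this row. The implementation
# below is a hypothetical reconstruction - a BFS flood fill consistent with
# the deque import above - not necessarily the author's original code:
class Solution:
    def pondSizes(self, land: List[List[int]]) -> List[int]:
        m, n = len(land), len(land[0])
        sizes = []
        for i in range(m):
            for j in range(n):
                if land[i][j] != 0:
                    continue
                # BFS over the 8-connected region of water cells
                size = 0
                queue = deque([(i, j)])
                land[i][j] = -1  # mark visited
                while queue:
                    x, y = queue.popleft()
                    size += 1
                    for dx in (-1, 0, 1):
                        for dy in (-1, 0, 1):
                            nx, ny = x + dx, y + dy
                            if 0 <= nx < m and 0 <= ny < n and land[nx][ny] == 0:
                                land[nx][ny] = -1
                                queue.append((nx, ny))
                sizes.append(size)
        return sorted(sizes)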
if __name__ == '__main__':
a = [
[0,2,1,0],
[0,1,0,1],
[1,1,0,1],
[0,1,0,1]
]
ans = Solution().pondSizes(a)
print(ans)
| 25.011236 | 124 | 0.444295 |
71fdcd33231ded5dc2f9d6d67f26d46eb50eca3d | 5,990 | py | Python | geolucidate/functions.py | kurtraschke/geolucidate | 827195a90d972fa5efce5a03bdbe53d8395d94ba | [
"MIT"
] | 3 | 2015-09-17T01:01:53.000Z | 2019-09-10T14:30:43.000Z | geolucidate/functions.py | kurtraschke/geolucidate | 827195a90d972fa5efce5a03bdbe53d8395d94ba | [
"MIT"
] | null | null | null | geolucidate/functions.py | kurtraschke/geolucidate | 827195a90d972fa5efce5a03bdbe53d8395d94ba | [
"MIT"
] | 5 | 2018-09-11T21:54:36.000Z | 2020-06-25T19:05:45.000Z | # -*- coding: utf-8 -*-
from decimal import Decimal, setcontext, ExtendedContext
from geolucidate.links.google import google_maps_link
from geolucidate.links.tools import MapLink
from geolucidate.parser import parser_re
setcontext(ExtendedContext)
def _cleanup(parts):
"""
Normalize up the parts matched by :obj:`parser.parser_re` to
degrees, minutes, and seconds.
>>> _cleanup({'latdir': 'south', 'longdir': 'west',
... 'latdeg':'60','latmin':'30',
... 'longdeg':'50','longmin':'40'})
['S', '60', '30', '00', 'W', '50', '40', '00']
>>> _cleanup({'latdir': 'south', 'longdir': 'west',
... 'latdeg':'60','latmin':'30', 'latdecsec':'.50',
... 'longdeg':'50','longmin':'40','longdecsec':'.90'})
['S', '60', '30.50', '00', 'W', '50', '40.90', '00']
"""
latdir = (parts['latdir'] or parts['latdir2']).upper()[0]
longdir = (parts['longdir'] or parts['longdir2']).upper()[0]
latdeg = parts.get('latdeg')
longdeg = parts.get('longdeg')
latmin = parts.get('latmin', '00') or '00'
longmin = parts.get('longmin', '00') or '00'
latdecsec = parts.get('latdecsec', '')
longdecsec = parts.get('longdecsec', '')
if (latdecsec and longdecsec):
latmin += latdecsec
longmin += longdecsec
latsec = '00'
longsec = '00'
else:
latsec = parts.get('latsec', '') or '00'
longsec = parts.get('longsec', '') or '00'
return [latdir, latdeg, latmin, latsec, longdir, longdeg, longmin, longsec]
def _convert(latdir, latdeg, latmin, latsec,
longdir, longdeg, longmin, longsec):
"""
Convert normalized degrees, minutes, and seconds to decimal degrees.
Quantize the converted value based on the input precision and
return a 2-tuple of strings.
>>> _convert('S','50','30','30','W','50','30','30')
('-50.508333', '-50.508333')
>>> _convert('N','50','27','55','W','127','27','65')
('50.459167', '-127.460833')
"""
if (latsec != '00' or longsec != '00'):
precision = Decimal('0.000001')
elif (latmin != '00' or longmin != '00'):
precision = Decimal('0.001')
else:
precision = Decimal('1')
latitude = Decimal(latdeg)
latmin = Decimal(latmin)
latsec = Decimal(latsec)
longitude = Decimal(longdeg)
longmin = Decimal(longmin)
longsec = Decimal(longsec)
if latsec > 59 or longsec > 59:
# Assume that 'seconds' greater than 59 are actually a decimal
# fraction of minutes
latitude += (latmin +
(latsec / Decimal('100'))) / Decimal('60')
longitude += (longmin +
(longsec / Decimal('100'))) / Decimal('60')
else:
latitude += (latmin +
(latsec / Decimal('60'))) / Decimal('60')
longitude += (longmin +
(longsec / Decimal('60'))) / Decimal('60')
if latdir == 'S':
latitude *= Decimal('-1')
if longdir == 'W':
longitude *= Decimal('-1')
lat_str = str(latitude.quantize(precision))
long_str = str(longitude.quantize(precision))
return (lat_str, long_str)
def replace(string, sub_function=google_maps_link()):
"""
Replace detected coordinates with a map link, using the given substitution
function.
The substitution function will be passed a :class:`~.MapLink` instance, and
should return a string which will be substituted by :func:`re.sub` in place
of the detected coordinates.
>>> replace("58147N/07720W")
'<a href="http://maps.google.com/maps?q=58.235278%2C-77.333333+%2858147N%2F07720W%29&ll=58.235278%2C-77.333333&t=h" title="58147N/07720W (58.235278, -77.333333)">58147N/07720W</a>'
>>> replace("5814N/07720W", google_maps_link('satellite'))
'<a href="http://maps.google.com/maps?q=58.233%2C-77.333+%285814N%2F07720W%29&ll=58.233%2C-77.333&t=k" title="5814N/07720W (58.233, -77.333)">5814N/07720W</a>'
>>> from geolucidate.links.bing import bing_maps_link
>>> replace("58N/077W", bing_maps_link('map'))
'<a href="http://bing.com/maps/default.aspx?style=r&cp=58~-77&sp=Point.58_-77_58N%2F077W&v=2" title="58N/077W (58, -77)">58N/077W</a>'
"""
return parser_re.sub(do_replace, string)
def get_replacements(string, sub_function=google_maps_link()):
"""
Return a dict whose keys are instances of :class:`re.Match` and
whose values are the corresponding replacements. Use
:func:`get_replacements` when the replacement cannot be performed
through ordinary string substitution by :func:`re.sub`, as in
:func:`replace`.
>>> get_replacements("4630 NORTH 5705 WEST 58147N/07720W")
... #doctest: +ELLIPSIS
{<re.Match object...>: '<a href="..." title="...">4630 NORTH 5705 WEST</a>', <re.Match object...>: '<a href="..." title="...">58147N/07720W</a>'}
>>> test_string = "4630 NORTH 5705 WEST 58147N/07720W"
>>> replacements = get_replacements(test_string)
>>> offset = 0
>>> out = bytearray(test_string, encoding="ascii", errors="replace")
>>> for (match, link) in replacements.items():
... start = match.start() + offset
... end = match.end() + offset
... out[start:end] = bytearray(link, encoding="ascii", errors="replace")
... offset += (len(link) - len(match.group()))
>>> out.decode(encoding="ascii") == replace(test_string)
True
"""
substitutions = {}
matches = parser_re.finditer(string)
for match in matches:
(latitude, longitude) = _convert(*_cleanup(match.groupdict()))
substitutions[match] = sub_function(MapLink(match.group(),
latitude, longitude))
return substitutions
| 35.235294 | 184 | 0.602337 |
71fe29113947026b758491d3c116f1982f10cf36 | 715 | py | Python | setup.py | rluzuriaga/pokedex | e5c18c410994d5fb589bc3dceaba71f85268edfb | [
"MIT"
] | 30 | 2020-08-15T01:16:17.000Z | 2022-03-12T17:51:17.000Z | setup.py | rluzuriaga/pokedex | e5c18c410994d5fb589bc3dceaba71f85268edfb | [
"MIT"
] | 86 | 2020-08-12T17:20:55.000Z | 2022-03-28T18:19:28.000Z | setup.py | rluzuriaga/pokedex | e5c18c410994d5fb589bc3dceaba71f85268edfb | [
"MIT"
] | 38 | 2020-08-18T00:09:15.000Z | 2022-03-01T07:47:41.000Z | from setuptools import setup, find_packages
setup(
name='Pokedex',
version='0.1',
zip_safe=False,
packages=find_packages(),
package_data={
'pokedex': ['data/csv/*.csv']
},
install_requires=[
'SQLAlchemy>=1.0,<2.0',
'whoosh>=2.5,<2.7',
'markdown==2.4.1',
'construct==2.5.3',
'six>=1.9.0',
],
entry_points={
'console_scripts': [
'pokedex = pokedex.main:setuptools_entry',
],
},
classifiers=[
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.7",
]
)
| 23.833333 | 54 | 0.524476 |
71feb582dac7753ba6da1c6d33e4df9c56ad4249 | 17,900 | py | Python | lingvo/tasks/asr/encoder.py | j-luo93/lingvo | 7398974078391362f0c1b027164a8f33f88cf86b | [
"Apache-2.0"
] | 4 | 2019-01-08T02:59:38.000Z | 2022-02-18T11:31:37.000Z | lingvo/tasks/asr/encoder.py | j-luo93/lingvo | 7398974078391362f0c1b027164a8f33f88cf86b | [
"Apache-2.0"
] | null | null | null | lingvo/tasks/asr/encoder.py | j-luo93/lingvo | 7398974078391362f0c1b027164a8f33f88cf86b | [
"Apache-2.0"
] | 1 | 2019-07-02T14:09:42.000Z | 2019-07-02T14:09:42.000Z | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Encoders for the speech model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from six.moves import range
from six.moves import zip
import tensorflow as tf
from tensorflow.python.ops import inplace_ops
from lingvo.core import base_encoder
from lingvo.core import base_layer
from lingvo.core import layers
from lingvo.core import plot
from lingvo.core import py_utils
from lingvo.core import rnn_cell
from lingvo.core import rnn_layers
from lingvo.core import summary_utils
from lingvo.core import model_helper
ConvLSTMBlock = collections.namedtuple('ConvLSTMBlock', ('rnn', 'cnn'))
def FProp(self, theta, batch, state0=None):
"""Encodes source as represented by 'inputs' and 'paddings'.
Args:
theta: A NestedMap object containing weights' values of this
layer and its children layers.
batch: A NestedMap with fields:
src_inputs - The inputs tensor. It is expected to be of shape [batch,
time, feature_dim, channels].
paddings - The paddings tensor. It is expected to be of shape [batch,
time].
state0: Recurrent input state. Not supported/ignored by this encoder.
Returns:
(outputs, out_paddings, state1) tuple. Outputs is of the shape
[time, batch, depth], and out_paddings is of the shape [time, batch]
"""
p = self.params
inputs, paddings = batch.src_inputs, batch.paddings
with tf.name_scope(p.name):
# Add a few extra padded timesteps at the end. This is for ensuring the
# correctness of the conv-layers at the edges.
if p.pad_steps > 0:
# inplace_update() is not supported by TPU for now. Since we have done
# padding on the input_generator, we may avoid this additional padding.
assert not py_utils.use_tpu()
inputs_pad = tf.zeros(
inplace_ops.inplace_update(tf.shape(inputs), 1, p.pad_steps),
inputs.dtype)
paddings_pad = tf.ones(
inplace_ops.inplace_update(tf.shape(paddings), 1, p.pad_steps),
paddings.dtype)
inputs = tf.concat([inputs, inputs_pad], 1, name='inputs')
paddings = tf.concat([paddings, paddings_pad], 1)
def ReshapeForPlot(tensor, padding, name):
"""Transposes and flattens channels to [batch, dim, seq_len] shape."""
# Flatten any dimensions beyond the third into the third.
batch_size = tf.shape(tensor)[0]
max_len = tf.shape(tensor)[1]
plot_tensor = tf.reshape(tensor, [batch_size, max_len, -1])
plot_tensor = tf.transpose(plot_tensor, [0, 2, 1], name=name)
return (plot_tensor, summary_utils.SequenceLength(padding))
plots = [
ReshapeForPlot(
tf.transpose(inputs, [0, 1, 3, 2]), paddings, 'inputs')
]
conv_out = inputs
out_padding = paddings
for i, conv_layer in enumerate(self.conv):
conv_out, out_padding = conv_layer.FProp(theta.conv[i], conv_out,
out_padding)
plots.append(
ReshapeForPlot(
tf.transpose(conv_out, [0, 1, 3, 2]), out_padding,
'conv_%d_out' % i))
# Now the conv-lstm part.
conv_lstm_out = conv_out
conv_lstm_out_padding = out_padding
for i, (rnn, cnn) in enumerate(
zip(self.conv_lstm_rnn, self.conv_lstm_cnn)):
conv_lstm_in = conv_lstm_out
# Move time dimension to be the first.
conv_lstm_in = TransposeFirstTwoDims(conv_lstm_in)
conv_lstm_in = tf.expand_dims(conv_lstm_in, 2)
conv_lstm_in_padding = tf.expand_dims(
tf.transpose(conv_lstm_out_padding), 2)
lstm_out = rnn.FProp(theta.conv_lstm_rnn[i], conv_lstm_in,
conv_lstm_in_padding)
# Move time dimension to be the second.
cnn_in = TransposeFirstTwoDims(lstm_out)
cnn_in = tf.squeeze(cnn_in, 2)
cnn_in_padding = conv_lstm_out_padding
cnn_out, cnn_out_padding = cnn.FProp(theta.conv_lstm_cnn[i], cnn_in,
cnn_in_padding)
conv_lstm_out, conv_lstm_out_padding = cnn_out, cnn_out_padding
plots.append(
ReshapeForPlot(conv_lstm_out, conv_lstm_out_padding,
'conv_lstm_%d_out' % i))
# Need to do a reshape before starting the rnn layers.
conv_lstm_out = py_utils.HasRank(conv_lstm_out, 4)
conv_lstm_out_shape = tf.shape(conv_lstm_out)
new_shape = tf.concat([conv_lstm_out_shape[:2], [-1]], 0)
conv_lstm_out = tf.reshape(conv_lstm_out, new_shape)
if self._first_lstm_input_dim_pad:
conv_lstm_out = tf.pad(
conv_lstm_out,
[[0, 0], [0, 0], [0, self._first_lstm_input_dim_pad]])
conv_lstm_out = py_utils.HasShape(conv_lstm_out,
[-1, -1, self._first_lstm_input_dim])
# Transpose to move the time dimension to be the first.
rnn_in = tf.transpose(conv_lstm_out, [1, 0, 2])
rnn_padding = tf.expand_dims(tf.transpose(conv_lstm_out_padding), 2)
# rnn_in is of shape [time, batch, depth]
# rnn_padding is of shape [time, batch, 1]
# Now the rnn layers.
num_skips = 0
for i in range(p.num_lstm_layers):
rnn_out = self.rnn[i].FProp(theta.rnn[i], rnn_in, rnn_padding)
residual_index = i - p.residual_start + 1
if p.residual_start > 0 and residual_index >= 0:
if residual_index % p.residual_stride == 0:
residual_in = rnn_in
if residual_index % p.residual_stride == p.residual_stride - 1:
# Highway skip connection.
if p.highway_skip:
rnn_out = self.highway_skip[num_skips].FProp(
theta.highway_skip[num_skips], residual_in, rnn_out)
num_skips += 1
else:
# Residual skip connection.
rnn_out += py_utils.HasShape(residual_in, tf.shape(rnn_out))
if p.project_lstm_output and (i < p.num_lstm_layers - 1):
# Projection layers.
rnn_out = self.proj[i].FProp(theta.proj[i], rnn_out, rnn_padding)
if i == p.num_lstm_layers - 1:
rnn_out *= (1.0 - rnn_padding)
plots.append(
ReshapeForPlot(
tf.transpose(rnn_out, [1, 0, 2]),
tf.transpose(rnn_padding, [1, 0, 2]), 'rnn_%d_out' % i))
rnn_in = rnn_out
final_out = rnn_in
if self.cluster.add_summary:
fig = plot.MatplotlibFigureSummary(
'encoder_example', figsize=(8, len(plots) * 3.5))
# Order layers from bottom to top.
plots.reverse()
for tensor, seq_len in plots:
fig.AddSubplot(
[tensor, seq_len],
summary_utils.TrimPaddingAndPlotSequence,
title=tensor.name,
xlabel='Time')
fig.Finalize()
rnn_padding = tf.squeeze(rnn_padding, [2])
return final_out, rnn_padding, py_utils.NestedMap()
| 42.517815 | 80 | 0.655587 |
71feffbb5e24e7f37afedbf05e8ccd4bc8d2a4ea | 1,898 | py | Python | pos_neg_graph/graph_ratio.py | Yudabin/Review_Project | b924199d6845defeb4cd243a99426070c014d8d8 | [
"MIT"
] | null | null | null | pos_neg_graph/graph_ratio.py | Yudabin/Review_Project | b924199d6845defeb4cd243a99426070c014d8d8 | [
"MIT"
] | null | null | null | pos_neg_graph/graph_ratio.py | Yudabin/Review_Project | b924199d6845defeb4cd243a99426070c014d8d8 | [
"MIT"
] | 1 | 2020-11-10T00:54:45.000Z | 2020-11-10T00:54:45.000Z | import matplotlib.font_manager as fm
import matplotlib.pyplot as plt
import numpy as np
font_location = './wordcloud_file/malgun.ttf' # For Windows
font_name = fm.FontProperties(fname=font_location).get_name()
plt.rc('font', family=font_name) | 42.177778 | 105 | 0.553741 |
71ff26bb500f7231ddf77573e218582db161237c | 143 | py | Python | blog/views.py | kbilak/City-portal | f567764f3c4ae0287a178acf77f060dde0424f26 | [
"MIT"
] | null | null | null | blog/views.py | kbilak/City-portal | f567764f3c4ae0287a178acf77f060dde0424f26 | [
"MIT"
] | null | null | null | blog/views.py | kbilak/City-portal | f567764f3c4ae0287a178acf77f060dde0424f26 | [
"MIT"
] | null | null | null | from django.shortcuts import render
from django.views.generic import TemplateView | 28.6 | 45 | 0.797203 |
71ffb4e7bdba22327963714071779d2e8b20d391 | 3,727 | py | Python | my_area/views.py | Vincent-Juma/area_master | 3ea1dd1039053fb4de6326deb967383d09d7145b | [
"MIT"
] | 1 | 2021-05-28T14:16:54.000Z | 2021-05-28T14:16:54.000Z | my_area/views.py | Vincent-Juma/area_master | 3ea1dd1039053fb4de6326deb967383d09d7145b | [
"MIT"
] | null | null | null | my_area/views.py | Vincent-Juma/area_master | 3ea1dd1039053fb4de6326deb967383d09d7145b | [
"MIT"
] | 1 | 2021-04-13T09:14:07.000Z | 2021-04-13T09:14:07.000Z | from django.shortcuts import render
from .forms import *
from django.shortcuts import redirect,get_object_or_404
from django.contrib.auth.decorators import login_required
from . models import *
from django.views import generic
def myloc_details(request,myloc_id):
activities=Activity.objects.filter(myloc=myloc_id)
posts=Post.objects.filter(myloc=myloc_id)
myloc=Myloc.objects.get(pk=myloc_id)
return render(request,'details.html',{'myloc':myloc,'activities':activities,'posts':posts})
| 37.646465 | 96 | 0.676952 |
9c013815ab64fbbe61aec8c1d395c39f802c887c | 361 | py | Python | 2020/day_01/__main__.py | d02d33pak/Advent-Of-Code | 765b0302c256ad61864095a537a3f6379901b1c2 | [
"MIT"
] | null | null | null | 2020/day_01/__main__.py | d02d33pak/Advent-Of-Code | 765b0302c256ad61864095a537a3f6379901b1c2 | [
"MIT"
] | null | null | null | 2020/day_01/__main__.py | d02d33pak/Advent-Of-Code | 765b0302c256ad61864095a537a3f6379901b1c2 | [
"MIT"
] | null | null | null | """
Day 1 Main Module
"""
from day01 import parse_input, part1, part2
if __name__ == "__main__":
    # trying out the new walrus [:=] operator in python
if (part := int(input("Enter Part: "))) == 1:
print(part1(parse_input("input.txt")))
elif part == 2:
print(part2(parse_input("input.txt")))
else:
print("Wrong choice [1|2]")
| 24.066667 | 51 | 0.595568 |
9c01b072850e72696bc89430b3763e9313e105ec | 4,406 | py | Python | quiz_app/settings.py | ignasgri/Django_Quiz | c98969d4181350eaaf8883f3930d0e800c240a44 | [
"MIT"
] | null | null | null | quiz_app/settings.py | ignasgri/Django_Quiz | c98969d4181350eaaf8883f3930d0e800c240a44 | [
"MIT"
] | 6 | 2020-06-05T19:26:57.000Z | 2022-03-11T23:33:08.000Z | quiz_app/settings.py | ignasgri/Django_Quiz | c98969d4181350eaaf8883f3930d0e800c240a44 | [
"MIT"
] | null | null | null | """
Django settings for quiz_app project.
Generated by 'django-admin startproject' using Django 2.1.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
SITE_ID = 1
import os
import dj_database_url
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = os.environ.get('DEBUG', False)
ALLOWED_HOSTS = ['ignas-quiz.herokuapp.com','localhost','127.0.0.1']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.sites',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'storages',
'quiz',
'multichoice',
'true_false',
'essay',
]
MIDDLEWARE = [  # Django 2.x reads MIDDLEWARE; the old MIDDLEWARE_CLASSES setting is ignored
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'quiz_app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.media',
],
},
},
]
WSGI_APPLICATION = 'quiz_app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
# }
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
# STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS= (
os.path.join(BASE_DIR, "static"),
)
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
DATABASES = {'default': dj_database_url.parse(os.environ.get('DATABASE_URL')) }
AWS_HEADERS = { # see http://developer.yahoo.com/performance/rules.html#expires
'Expires': 'Thu, 31 Dec 2099 20:00:00 GMT',
'Cache-Control': 'max-age=94608000',
}
AWS_STORAGE_BUCKET_NAME = os.environ.get("AWS_STORAGE_BUCKET_NAME")
AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
AWS_S3_HOST = 's3-eu-west-1.amazonaws.com'
AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
STATICFILES_LOCATION = 'static'
STATICFILES_STORAGE = 'custom_storages.StaticStorage'
STATIC_URL = "https://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, STATICFILES_LOCATION)
MEDIAFILES_LOCATION = 'media'
MEDIA_URL = "https://%s/%s/" % (AWS_S3_CUSTOM_DOMAIN, MEDIAFILES_LOCATION)
DEFAULT_FILE_STORAGE = 'custom_storages.MediaStorage' | 27.886076 | 91 | 0.70631 |
9c01e95549f9ef77973f439bfde72aea99322f8e | 20,840 | py | Python | scripts/data/topple_dataset.py | davrempe/predicting-physical-dynamics | b0abb385a7ac491e25d1df0b9a9a943621fc2d37 | [
"MIT"
] | 16 | 2020-02-29T06:44:16.000Z | 2022-02-20T13:05:12.000Z | scripts/data/topple_dataset.py | davrempe/predicting-physical-dynamics | b0abb385a7ac491e25d1df0b9a9a943621fc2d37 | [
"MIT"
] | 6 | 2020-02-13T08:09:28.000Z | 2022-02-09T23:35:55.000Z | scripts/data/topple_dataset.py | davrempe/predicting-physical-dynamics | b0abb385a7ac491e25d1df0b9a9a943621fc2d37 | [
"MIT"
] | 4 | 2020-04-22T09:46:55.000Z | 2021-04-15T06:17:48.000Z | import numpy as np
import pickle
from os.path import exists, realpath
import sys
import math
from topple_data_loader import ToppleData, ToppleDataLoader
import transforms3d
if __name__=='__main__':
# norm_info = ToppleNormalizationInfo()
# norm_info.load_from('../../data/sim/normalization_info/cube_train.pkl')
# norm_info.print_out()
topple_data = ToppleDataset(roots=['./data/sim/Cube/Cube30k_ObjSplit/Cube30kVal'], norm_info_file='./data/sim/normalization_info/cube_30k.pkl', \
batch_size=5, num_steps=10, shuffle=True, num_pts=None, perturb_pts=0.01)
count = 0
while topple_data.has_next_batch():
batch = topple_data.next_batch(random_window=True, focus_toppling=False)
count += 1
# print(batch.lin_vel[0])
# print(batch.toppled[0])
# print(batch.delta_rot_split[0])
# print(batch.delta_rot[0])
# print(batch.topple_label[0])
# print(batch.pos)
# print(batch.body_friction)
# print(batch.delta_quat[0])
# print(np.degrees(2*np.arccos(batch.delta_quat[0, :, 0])))
print('Total num batches: ' + str(count))
topple_data.reset()
count = 0
while topple_data.has_next_batch():
batch = topple_data.next_batch()
count += 1
print(batch.size)
print('Total num batches: ' + str(count))
| 45.010799 | 149 | 0.613964 |
9c0252cbabe1a0b566b1ac4670f0fdedec520c7a | 370 | py | Python | Part1/AverageAccuracy.py | efkandurakli/Graduation-Project1 | fd2cba89929da2cef49ec67214b54c310b57ce01 | [
"MIT"
] | 1 | 2019-12-18T08:16:55.000Z | 2019-12-18T08:16:55.000Z | Part1/AverageAccuracy.py | efkandurakli/Graduation-Project1 | fd2cba89929da2cef49ec67214b54c310b57ce01 | [
"MIT"
] | null | null | null | Part1/AverageAccuracy.py | efkandurakli/Graduation-Project1 | fd2cba89929da2cef49ec67214b54c310b57ce01 | [
"MIT"
] | null | null | null | import numpy as np
from operator import truediv
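# NOTE: the metric functions themselves were not captured in this row; average
# accuracy from a confusion matrix is commonly computed as below (hypothetical
# sketch consistent with the truediv import above):
def AA_andEachClassAccuracy(confusion_matrix):
    list_diag = np.diag(confusion_matrix)            # per-class correct counts
    list_raw_sum = np.sum(confusion_matrix, axis=1)  # per-class sample totals
    each_acc = np.nan_to_num(truediv(list_diag, list_raw_sum))
    average_acc = np.mean(each_acc)
    return each_acc, average_acc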
| 37 | 63 | 0.751351 |
9c029add7c4d9a1a7902d4f882039de120a387b3 | 13,193 | py | Python | scripts/sct_apply_transfo.py | YangHee-Min/spinalcordtoolbox | 38ca15aa99b03ca99b7885ddc98adf2755adc43d | [
"MIT"
] | null | null | null | scripts/sct_apply_transfo.py | YangHee-Min/spinalcordtoolbox | 38ca15aa99b03ca99b7885ddc98adf2755adc43d | [
"MIT"
] | null | null | null | scripts/sct_apply_transfo.py | YangHee-Min/spinalcordtoolbox | 38ca15aa99b03ca99b7885ddc98adf2755adc43d | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#########################################################################################
#
# Apply transformations. This function is a wrapper for sct_WarpImageMultiTransform
#
# ---------------------------------------------------------------------------------------
# Copyright (c) 2014 Polytechnique Montreal <www.neuro.polymtl.ca>
# Authors: Julien Cohen-Adad, Olivier Comtois
# Modified: 2014-07-20
#
# About the license: see the file LICENSE.TXT
#########################################################################################
# TODO: display message at the end
# TODO: interpolation methods
from __future__ import division, absolute_import
import sys, io, os, time, functools
from msct_parser import Parser
import sct_utils as sct
import sct_convert
import sct_image
import spinalcordtoolbox.image as msct_image
from sct_crop_image import ImageCropper
# PARSER
# ==========================================================================================
def get_parser():
# parser initialisation
parser = Parser(__file__)
parser.usage.set_description('Apply transformations. This function is a wrapper for antsApplyTransforms (ANTs).')
parser.add_option(name="-i",
type_value="file",
description="input image",
mandatory=True,
example="t2.nii.gz")
parser.add_option(name="-d",
type_value="file",
description="destination image",
mandatory=True,
example="out.nii.gz")
parser.add_option(name="-w",
type_value=[[','], "file"],
description="Transformation, which can be a warping field (nifti image) or an affine transformation matrix (text file).",
mandatory=True,
example="warp1.nii.gz,warp2.nii.gz")
parser.add_option(name="-crop",
type_value="multiple_choice",
description="Crop Reference. 0 : no reference. 1 : sets background to 0. 2 : use normal background",
mandatory=False,
default_value='0',
example=['0', '1', '2'])
parser.add_option(name="-c",
type_value=None,
description="Crop Reference. 0 : no reference. 1 : sets background to 0. 2 : use normal background",
mandatory=False,
deprecated_by='-crop')
parser.add_option(name="-o",
type_value="file_output",
description="registered source.",
mandatory=False,
default_value='',
example="dest.nii.gz")
parser.add_option(name="-x",
type_value="multiple_choice",
description="interpolation method",
mandatory=False,
default_value='spline',
example=['nn', 'linear', 'spline'])
parser.add_option(name="-r",
type_value="multiple_choice",
description="""Remove temporary files.""",
mandatory=False,
default_value='1',
example=['0', '1'])
parser.add_option(name="-v",
type_value="multiple_choice",
description="""Verbose.""",
mandatory=False,
default_value='1',
example=['0', '1', '2'])
return parser
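# NOTE: the Param and Transform class bodies were not captured in this row;
# main() below relies on at least the interfaces sketched here (hypothetical):
class Param(object):
    pass
class Transform(object):
    def __init__(self, input_filename, fname_dest, warp, crop=0, output_filename='',
                 interp='spline', remove_temp_files=1, verbose=1):
        self.input_filename = input_filename
        self.fname_dest = fname_dest
        self.warp = warp
        self.crop = crop
        self.output_filename = output_filename
        self.interp = interp
        self.remove_temp_files = remove_temp_files
        self.verbose = verbose
    def apply(self):
        raise NotImplementedError  # the original wraps ANTs' antsApplyTransforms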
# MAIN
# ==========================================================================================
def main(args=None):
# check user arguments
if not args:
args = sys.argv[1:]
# Get parser info
parser = get_parser()
arguments = parser.parse(args)
input_filename = arguments["-i"]
fname_dest = arguments["-d"]
warp_filename = arguments["-w"]
transform = Transform(input_filename=input_filename, fname_dest=fname_dest, warp=warp_filename)
if "-crop" in arguments:
transform.crop = arguments["-crop"]
if "-o" in arguments:
transform.output_filename = arguments["-o"]
if "-x" in arguments:
transform.interp = arguments["-x"]
if "-r" in arguments:
transform.remove_temp_files = int(arguments["-r"])
transform.verbose = int(arguments.get('-v'))
sct.init_sct(log_level=transform.verbose, update=True) # Update log level
transform.apply()
# START PROGRAM
# ==========================================================================================
if __name__ == "__main__":
sct.init_sct()
# # initialize parameters
param = Param()
# call main function
main()
| 42.973941 | 175 | 0.560373 |
9c04726d3c874dda1b944aacaee1f8285db82b6a | 3,132 | py | Python | tests/plugins/test_plugin_base.py | vurankar/mongo-connector | 202aa28743855643fddd77d3e66bf1a640df3ed6 | [
"Apache-2.0"
] | 1 | 2019-08-24T21:06:00.000Z | 2019-08-24T21:06:00.000Z | tests/plugins/test_plugin_base.py | vurankar/mongo-connector | 202aa28743855643fddd77d3e66bf1a640df3ed6 | [
"Apache-2.0"
] | 13 | 2017-08-07T04:36:25.000Z | 2021-02-08T17:37:27.000Z | tests/plugins/test_plugin_base.py | vurankar/mongo-connector | 202aa28743855643fddd77d3e66bf1a640df3ed6 | [
"Apache-2.0"
] | 4 | 2018-10-22T17:30:46.000Z | 2020-07-07T21:24:48.000Z | # Copyright 2013-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests methods in plugin_base.py
"""
import copy
import sys
sys.path[0:0] = [""]
from mongo_connector.plugins.plugin_base import PluginBase
from tests import unittest
from tests.plugins.helpers import (BAD_PLUGIN_CONFIGS, get_test_namespace)
if __name__ == '__main__':
unittest.main()
| 27 | 74 | 0.623244 |
9c048e3e8bdc9b4b95cc9e0528a68aa1fd3efcf5 | 10,365 | py | Python | address/models.py | PerchLive/django-address | edab73847ba95d4f7a71993bcd55ea6bf300693e | [
"BSD-3-Clause"
] | null | null | null | address/models.py | PerchLive/django-address | edab73847ba95d4f7a71993bcd55ea6bf300693e | [
"BSD-3-Clause"
] | null | null | null | address/models.py | PerchLive/django-address | edab73847ba95d4f7a71993bcd55ea6bf300693e | [
"BSD-3-Clause"
] | null | null | null | import logging
import sys
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models.fields.related import ForeignObject
from django.utils.encoding import python_2_unicode_compatible
try:
from django.db.models.fields.related_descriptors import ForwardManyToOneDescriptor
except ImportError:
from django.db.models.fields.related import ReverseSingleRelatedObjectDescriptor as ForwardManyToOneDescriptor
logger = logging.getLogger(__name__)
if sys.version > '3':
long = int
basestring = (str, bytes)
unicode = str
__all__ = ['Country', 'State', 'Locality', 'Address', 'AddressField']
##
# Convert a dictionary to an address.
##
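# (to_python itself was not captured in this row; a minimal hypothetical sketch
#  of the behaviour described above, relying on the Address model declared
#  later in this module)
def to_python(value):
    if value is None or isinstance(value, Address):
        return value
    if isinstance(value, dict):
        # simplified: the real implementation also decomposes the country,
        # state, and locality components
        return Address.objects.create(raw=value.get('raw', ''))
    return value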
##
# A country.
##
##
# A state. Google refers to this as `administration_level_1`.
##
##
# A locality (suburb).
##
##
# An address. If for any reason we are unable to find a matching
# decomposed address we will store the raw address string in `raw`.
##
class AddressDescriptor(ForwardManyToOneDescriptor):
def __set__(self, inst, value):
super(AddressDescriptor, self).__set__(inst, to_python(value))
##
# A field for addresses in other models.
##
| 31.409091 | 117 | 0.625663 |
9c0520151db9426f768e41a165d1e59bc2354107 | 198 | py | Python | src/eavatar.ava/pod/mods/tasks/__init__.py | eavatar/ava | 4f09c5417b7187dd919b7edabb8c516d8efc0696 | [
"BSD-3-Clause"
] | null | null | null | src/eavatar.ava/pod/mods/tasks/__init__.py | eavatar/ava | 4f09c5417b7187dd919b7edabb8c516d8efc0696 | [
"BSD-3-Clause"
] | null | null | null | src/eavatar.ava/pod/mods/tasks/__init__.py | eavatar/ava | 4f09c5417b7187dd919b7edabb8c516d8efc0696 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Modules for exposing functions that can be run as tasks.
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
| 28.285714 | 57 | 0.651515 |
9c07713f7b2c917072be6181205756050fc2c5cb | 7,715 | py | Python | addons/hr_payroll_account/models/hr_payroll_account.py | jjiege/odoo | fd5b8ad387c1881f349d125cbd56433f4d49398f | [
"MIT"
] | null | null | null | addons/hr_payroll_account/models/hr_payroll_account.py | jjiege/odoo | fd5b8ad387c1881f349d125cbd56433f4d49398f | [
"MIT"
] | null | null | null | addons/hr_payroll_account/models/hr_payroll_account.py | jjiege/odoo | fd5b8ad387c1881f349d125cbd56433f4d49398f | [
"MIT"
] | null | null | null | #-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
from odoo.exceptions import UserError
from odoo.tools import float_compare, float_is_zero
| 46.757576 | 142 | 0.583279 |
9c08054f6ca4be429f3f9506fd1f8ea55ac7ad8b | 3,048 | py | Python | ml_datasets/utils.py | abkoesdw/ml-datasets | c8c7b85ba8ed9c0ea233b4092d499d5022952011 | [
"MIT"
] | 1 | 2020-07-05T04:58:07.000Z | 2020-07-05T04:58:07.000Z | ml_datasets/utils.py | abkoesdw/ml-datasets | c8c7b85ba8ed9c0ea233b4092d499d5022952011 | [
"MIT"
] | null | null | null | ml_datasets/utils.py | abkoesdw/ml-datasets | c8c7b85ba8ed9c0ea233b4092d499d5022952011 | [
"MIT"
] | null | null | null | import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import sys
import numpy as np
from matplotlib.colors import LinearSegmentedColormap
from matplotlib.colors import BoundaryNorm
| 27.709091 | 86 | 0.597113 |
9c09326eb5f4c01f6725f6f04b0ab3e2c2184c2f | 4,794 | py | Python | Simulator/simulator.py | MasterRadule/DefenceFirst | d3c3a652357ac433213c38fa6134780e286f6cf2 | [
"MIT"
] | null | null | null | Simulator/simulator.py | MasterRadule/DefenceFirst | d3c3a652357ac433213c38fa6134780e286f6cf2 | [
"MIT"
] | null | null | null | Simulator/simulator.py | MasterRadule/DefenceFirst | d3c3a652357ac433213c38fa6134780e286f6cf2 | [
"MIT"
] | 2 | 2020-08-02T10:47:17.000Z | 2021-08-31T06:00:44.000Z | import logging
import os
import random
from abc import ABC, abstractmethod
from random import randint
from time import sleep, strftime
HOSTNAME = ['defence-first.rs', 'defence-first.de', 'defence-first.ru']
HOSTIP = ['78.218.236.218', '87.236.11.212', '54.147.165.86']
SOURCEIP = ['163.189.141.53', '204.164.10.7', '213.166.160.236', '123.197.235.233', '77.28.21.14']
USERNAMES = ['user1', 'user2', 'user3', 'user4', 'user5']
FACILITY = ['KERN', 'USER', 'MAIL', 'DAEMON', 'AUTH', 'SYSLOG', 'LPR', 'NEWS',
'UUCP', 'CLOCK_DAEMON', 'AUTHPRIV', 'FTP', 'NTP', 'LOGAUDIT', 'LOGALERT',
'CRON', 'LOCAL0', 'LOCAL1', 'LOCAL2', 'LOCAL3', 'LOCAL4', 'LOCAL5', 'LOCAL6', 'LOCAL7']
SEVERITY = ['DEBUG', 'INFORMATIONAL', 'NOTICE', 'WARNING', 'ERROR', 'CRITICAL', 'ALERT', 'EMERGENCY']
FORMAT = '%(asctime)s %(hostname)s-Application-%(hostip)s-%(sourceip)s %(severity)s-%(facility)s %(' \
'message)s '
RESOURCES = ['index.html', 'document.xml', 'dashboard.html']
LOGS_PATH = 'logs'
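# NOTE: the log-generating state classes (including Context) were not captured
# in this row; __main__ below needs at least this interface (hypothetical):
class Context:
    def run(self):
        pass  # emit simulated syslog records built from the constants above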
if __name__ == '__main__':
sm = Context()
sm.run()
| 32.612245 | 118 | 0.569462 |
9c097a6d565de2831c6dc5b3639c640259afd08c | 50 | py | Python | bayes_race/pp/__init__.py | DaniMarts/bayesrace | 3d0d2b26dac2e33ad7e38513304cfb259abe351c | [
"MIT"
] | 23 | 2020-03-27T03:28:04.000Z | 2022-02-24T11:21:18.000Z | bayes_race/pp/__init__.py | DaniMarts/bayesrace | 3d0d2b26dac2e33ad7e38513304cfb259abe351c | [
"MIT"
] | 1 | 2021-07-08T22:02:15.000Z | 2021-07-08T22:02:15.000Z | bayes_race/pp/__init__.py | DaniMarts/bayesrace | 3d0d2b26dac2e33ad7e38513304cfb259abe351c | [
"MIT"
] | 17 | 2020-10-27T06:09:34.000Z | 2022-03-23T05:28:23.000Z | from bayes_race.pp.pure_pursuit import purePursuit | 50 | 50 | 0.9 |
9c0a0b0b086b2b7d8551997a7e4a8ba952ff7a5b | 793 | py | Python | pysteam/evaluator/vector_space_error_eval.py | utiasASRL/pysteam | c0c8809ee2a5e1dab5ce7f9e5ff9de91138ce68b | [
"BSD-3-Clause"
] | 5 | 2021-10-23T00:35:20.000Z | 2022-03-22T02:32:43.000Z | pysteam/evaluator/vector_space_error_eval.py | utiasASRL/pysteam | c0c8809ee2a5e1dab5ce7f9e5ff9de91138ce68b | [
"BSD-3-Clause"
] | null | null | null | pysteam/evaluator/vector_space_error_eval.py | utiasASRL/pysteam | c0c8809ee2a5e1dab5ce7f9e5ff9de91138ce68b | [
"BSD-3-Clause"
] | 1 | 2022-02-04T21:49:48.000Z | 2022-02-04T21:49:48.000Z | from typing import Optional
import numpy as np
from . import Evaluator
from ..state import VectorSpaceStateVar
| 24.78125 | 79 | 0.711223 |
9c0a5c02779cee26231b6b416177aadae209d132 | 83 | py | Python | torch_geometric/nn/unpool/__init__.py | mwussow/pytorch_geometric | 01c68f9b58c94d9efd1f6e39b9c85177aae521bb | [
"MIT"
] | 13 | 2019-11-07T02:57:41.000Z | 2021-12-28T08:19:56.000Z | torch_geometric/nn/unpool/__init__.py | mwussow/pytorch_geometric | 01c68f9b58c94d9efd1f6e39b9c85177aae521bb | [
"MIT"
] | 3 | 2019-10-30T20:20:27.000Z | 2022-03-12T22:56:11.000Z | torch_geometric/nn/unpool/__init__.py | mwussow/pytorch_geometric | 01c68f9b58c94d9efd1f6e39b9c85177aae521bb | [
"MIT"
] | 3 | 2020-10-19T02:53:20.000Z | 2022-01-31T04:31:02.000Z | from .knn_interpolate import knn_interpolate
__all__ = [
'knn_interpolate',
]
| 13.833333 | 44 | 0.746988 |
9c0adaf30cf08d06f29b5570721ed08bc9df2c6a | 937 | py | Python | bc4py/bip32/utils.py | namuyan/bc4py | 6484d356096261d0d57e9e1f5ffeae1f9a9865f3 | [
"MIT"
] | 12 | 2018-09-19T14:02:09.000Z | 2020-01-27T16:20:14.000Z | bc4py/bip32/utils.py | namuyan/bc4py | 6484d356096261d0d57e9e1f5ffeae1f9a9865f3 | [
"MIT"
] | 1 | 2019-09-09T23:58:47.000Z | 2019-09-16T09:33:20.000Z | bc4py/bip32/utils.py | namuyan/bc4py | 6484d356096261d0d57e9e1f5ffeae1f9a9865f3 | [
"MIT"
] | 6 | 2018-11-13T17:20:14.000Z | 2020-02-15T11:46:52.000Z | from bc4py_extension import PyAddress
import hashlib
def is_address(ck: PyAddress, hrp, ver):
"""check bech32 format and version"""
try:
if ck.hrp != hrp:
return False
if ck.version != ver:
return False
except ValueError:
return False
return True
def get_address(pk, hrp, ver) -> PyAddress:
"""get address from public key"""
identifier = hashlib.new('ripemd160', hashlib.sha256(pk).digest()).digest()
return PyAddress.from_param(hrp, ver, identifier)
def convert_address(ck: PyAddress, hrp, ver) -> PyAddress:
"""convert address's version"""
return PyAddress.from_param(hrp, ver, ck.identifier())
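# NOTE: dummy_address appears in __all__ below but its body was not captured in
# this row; a hypothetical sketch consistent with the helpers above:
def dummy_address(identifier: bytes, hrp='dummy', ver=0) -> PyAddress:
    """derive a placeholder address from a fixed 20-byte identifier"""
    assert len(identifier) == 20
    return PyAddress.from_param(hrp, ver, identifier)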
__all__ = [
"is_address",
"get_address",
"convert_address",
"dummy_address",
]
| 24.025641 | 79 | 0.662753 |
9c0b22579bc28f35e8719b18a2963cb1c1518847 | 2,687 | py | Python | cubi_tk/snappy/kickoff.py | LaborBerlin/cubi-tk | 4aa5306c547c38eb41d5623ff6e4bace828f85b1 | [
"MIT"
] | null | null | null | cubi_tk/snappy/kickoff.py | LaborBerlin/cubi-tk | 4aa5306c547c38eb41d5623ff6e4bace828f85b1 | [
"MIT"
] | null | null | null | cubi_tk/snappy/kickoff.py | LaborBerlin/cubi-tk | 4aa5306c547c38eb41d5623ff6e4bace828f85b1 | [
"MIT"
] | null | null | null | """``cubi-tk snappy kickoff``: kickoff SNAPPY pipeline."""
import argparse
import os
import subprocess
import typing
from logzero import logger
from toposort import toposort
from . import common
from cubi_tk.exceptions import ParseOutputException
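# NOTE: the run() function referenced below (default=run) was not captured in
# this row; hypothetical stub following cubi-tk's (args, parser, subparser)
# command pattern:
def run(args, parser, subparser) -> typing.Optional[int]:
    """Kick off the SNAPPY pipeline steps in dependency order (body elided)."""
    return None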
def setup_argparse(parser: argparse.ArgumentParser) -> None:
"""Setup argument parser for ``cubi-tk snappy pull-sheet``."""
parser.add_argument("--hidden-cmd", dest="snappy_cmd", default=run, help=argparse.SUPPRESS)
parser.add_argument(
"--dry-run",
"-n",
default=False,
action="store_true",
help="Perform dry-run, do not do anything.",
)
parser.add_argument(
"--timeout", default=10, type=int, help="Number of seconds to wait for commands."
)
parser.add_argument(
"path",
nargs="?",
help="Path into SNAPPY directory (below a directory containing .snappy_pipeline).",
)
| 34.448718 | 97 | 0.633048 |
9c0b572f391a67c770410e50b8bf0631101d5372 | 4,152 | py | Python | tests/test_autotuner.py | RajatRasal/devito | 162abb6b318e77eaa4e8f719047327c45782056f | [
"MIT"
] | null | null | null | tests/test_autotuner.py | RajatRasal/devito | 162abb6b318e77eaa4e8f719047327c45782056f | [
"MIT"
] | null | null | null | tests/test_autotuner.py | RajatRasal/devito | 162abb6b318e77eaa4e8f719047327c45782056f | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from functools import reduce
from operator import mul
try:
from StringIO import StringIO
except ImportError:
# Python3 compatibility
from io import StringIO
import pytest
from conftest import skipif_yask
import numpy as np
from devito import Grid, Function, TimeFunction, Eq, Operator, configuration, silencio
from devito.logger import logger, logging
| 35.793103 | 87 | 0.67317 |
9c0c673d58dcba5d4585b62a8e7fbc1916ed2edb | 2,683 | py | Python | projects/CharGrid/data/bizcard2coco.py | timctho/detectron2-chargrid | 547479c88ad7d1de2348377706167a84d024a622 | [
"Apache-2.0"
] | 3 | 2020-03-15T18:33:21.000Z | 2020-03-28T18:06:45.000Z | projects/CharGrid/data/bizcard2coco.py | timctho/detectron2-chargrid | 547479c88ad7d1de2348377706167a84d024a622 | [
"Apache-2.0"
] | 2 | 2021-09-08T01:46:39.000Z | 2022-01-13T02:22:56.000Z | projects/CharGrid/data/bizcard2coco.py | timctho/detectron2-chargrid | 547479c88ad7d1de2348377706167a84d024a622 | [
"Apache-2.0"
] | null | null | null | from data.data_reader import BIZCARD_LABEL_MAP, BizcardDataParser
import argparse
from pathlib import Path
import os
import json
import cv2
import numpy as np
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--img_dir', type=str)
parser.add_argument('--gt_dir', type=str)
parser.add_argument('--data_list', type=str)
parser.add_argument('--out_dir', type=str)
parser.add_argument('--out_name', type=str)
args = parser.parse_args()
if not Path(args.out_dir).exists():
Path(args.out_dir).mkdir()
convert_bizcard_to_coco_format(
Path(args.img_dir),
Path(args.gt_dir),
args.data_list,
args.out_dir,
args.out_name)
| 31.197674 | 120 | 0.566157 |
9c0cd3c1e4e41a88f41a644df51b7c36f341d915 | 643 | py | Python | deckz/cli/run.py | m09/deckz | 0f97ef2a43c2c714ac18173a4fe3266cccba31e2 | [
"Apache-2.0"
] | null | null | null | deckz/cli/run.py | m09/deckz | 0f97ef2a43c2c714ac18173a4fe3266cccba31e2 | [
"Apache-2.0"
] | 41 | 2020-04-06T13:49:18.000Z | 2020-12-24T11:14:47.000Z | deckz/cli/run.py | m09/deckz | 0f97ef2a43c2c714ac18173a4fe3266cccba31e2 | [
"Apache-2.0"
] | null | null | null | from pathlib import Path
from typing import List, Optional
from typer import Argument
from deckz.cli import app
from deckz.paths import Paths
from deckz.running import run as running_run
| 22.964286 | 50 | 0.676516 |
9c0e837a9fbf8aa155047aa8574f5db7dc5ea5ad | 248 | py | Python | postgresqleu/confreg/templatetags/miscutil.py | dlangille/pgeu-system | 3f1910010063bab118e94a55ed757b23f1d36bf5 | [
"MIT"
] | null | null | null | postgresqleu/confreg/templatetags/miscutil.py | dlangille/pgeu-system | 3f1910010063bab118e94a55ed757b23f1d36bf5 | [
"MIT"
] | null | null | null | postgresqleu/confreg/templatetags/miscutil.py | dlangille/pgeu-system | 3f1910010063bab118e94a55ed757b23f1d36bf5 | [
"MIT"
] | null | null | null | from django import template
register = template.Library()
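# NOTE: the individual filters registered on this library were not captured in
# this row; a registered filter follows this pattern (hypothetical example):
@register.filter(name='isboolean')
def isboolean(value):
    return isinstance(value, bool)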
| 17.714286 | 36 | 0.754032 |
9c0e950fb4a4ebdf55176f8dd2da092d38504b70 | 2,171 | py | Python | chat.py | rchampa/chat-server | 34b5897e90b580754ad95b36bf7f23ac9baf3175 | [
"MIT"
] | null | null | null | chat.py | rchampa/chat-server | 34b5897e90b580754ad95b36bf7f23ac9baf3175 | [
"MIT"
] | null | null | null | chat.py | rchampa/chat-server | 34b5897e90b580754ad95b36bf7f23ac9baf3175 | [
"MIT"
] | null | null | null | import asyncio
import contextvars
import aioredis
import uvloop
from aioredis import Redis
from fastapi import FastAPI
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.staticfiles import StaticFiles
from RLog import rprint
from routers import apirest, websockets
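# NOTE: the CustomHeaderMiddleware class was not captured in this row; a
# minimal sketch consistent with the BaseHTTPMiddleware import and the
# add_middleware() call below (hypothetical; the header name is illustrative):
class CustomHeaderMiddleware(BaseHTTPMiddleware):
    async def dispatch(self, request, call_next):
        response = await call_next(request)
        response.headers['X-Custom'] = 'chat-server'
        return response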
REDIS_HOST = 'redis'
REDIS_PORT = 6379
PORT = 9080
HOST = "0.0.0.0"
cvar_redis = contextvars.ContextVar('redis', default=None)
# uvloop is written in Cython and is built on top of libuv http://magic.io/blog/uvloop-blazing-fast-python-networking/
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
app = FastAPI()
app.mount("/static", StaticFiles(directory="static"), name="static")
app.add_middleware(CustomHeaderMiddleware)
app.include_router(apirest.router)
app.include_router(websockets.router)
if __name__ == "__main__":
import uvicorn
rprint("Starting app")
rprint(dir(app))
rprint(app.url_path_for('websocket_endpoint'))
uvicorn.run('chat:app', host=HOST, port=PORT, log_level='info', reload=True)#, uds='uvicorn.sock')
| 31.463768 | 119 | 0.720405 |
9c0eb1c40d85566bde4854bf69d4592341ad2835 | 1,009 | py | Python | cli.py | abel-bernabeu/facecompressor | 9322f4e3d3f2787dc9dec2fad6b3f1995d052077 | [
"BSD-3-Clause"
] | 2 | 2020-10-20T09:35:56.000Z | 2021-04-27T11:27:47.000Z | cli.py | abel-bernabeu/facecompressor | 9322f4e3d3f2787dc9dec2fad6b3f1995d052077 | [
"BSD-3-Clause"
] | null | null | null | cli.py | abel-bernabeu/facecompressor | 9322f4e3d3f2787dc9dec2fad6b3f1995d052077 | [
"BSD-3-Clause"
] | null | null | null | import argparse
import autoencoder
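# NOTE: the two argument helpers used below were not captured in this row;
# reconstructed from their call sites (hypothetical bodies):
def addTrainablesArg(parser):
    parser.add_argument('--model', dest='model', help='Trained model file name', required=True)
def addExchangeArg(parser):
    parser.add_argument('--exchange', dest='exchange', help='Exchanged tensors file name', required=True)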
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(dest="action")
encode_parser = subparsers.add_parser('encode')
addTrainablesArg(encode_parser)
encode_parser.add_argument('--input', dest='input', help='Input image file name', required=True)
addExchangeArg(encode_parser)
decode_parser = subparsers.add_parser('decode')
addTrainablesArg(decode_parser)
addExchangeArg(decode_parser)
decode_parser.add_argument('--output', dest='output', help='Output image file name', required=True)
opts = parser.parse_args()
if opts.action == 'encode':
autoencoder.encode(opts.model, opts.input, opts.exchange)
elif opts.action == 'decode':
autoencoder.decode(opts.model, opts.exchange, opts.output)
| 31.53125 | 102 | 0.769078 |
9c0f0c2835497cfafc1c97305175f1c3c60456a9 | 6,995 | py | Python | lib/bridgedb/email/request.py | liudonghua123/bridgedb | 94dd10673f9e6650e8a00e162f348e64f7a1ecab | [
"BSD-3-Clause-Clear"
] | null | null | null | lib/bridgedb/email/request.py | liudonghua123/bridgedb | 94dd10673f9e6650e8a00e162f348e64f7a1ecab | [
"BSD-3-Clause-Clear"
] | null | null | null | lib/bridgedb/email/request.py | liudonghua123/bridgedb | 94dd10673f9e6650e8a00e162f348e64f7a1ecab | [
"BSD-3-Clause-Clear"
] | null | null | null | # -*- coding: utf-8; test-case-name: bridgedb.test.test_email_request; -*-
#_____________________________________________________________________________
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :authors: Nick Mathewson <[email protected]>
# Isis Lovecruft <[email protected]> 0xA3ADB67A2CDB8B35
# Matthew Finkel <[email protected]>
# please also see AUTHORS file
# :copyright: (c) 2007-2015, The Tor Project, Inc.
# (c) 2013-2015, Isis Lovecruft
# :license: see LICENSE for licensing information
#_____________________________________________________________________________
"""
.. py:module:: bridgedb.email.request
:synopsis: Classes for parsing and storing information about requests for
bridges which are sent to the email distributor.
bridgedb.email.request
======================
Classes for parsing and storing information about requests for bridges
which are sent to the email distributor.
::
bridgedb.email.request
| |_ determineBridgeRequestOptions - Figure out which filters to apply, or
| offer help.
|_ EmailBridgeRequest - A request for bridges which was received through
the email distributor.
..
"""
from __future__ import print_function
from __future__ import unicode_literals
import logging
import re
from bridgedb import bridgerequest
from bridgedb.Dist import EmailRequestedHelp
from bridgedb.Dist import EmailRequestedKey
#: A regular expression for matching the Pluggable Transport method TYPE in
#: emailed requests for Pluggable Transports.
TRANSPORT_REGEXP = ".*transport ([a-z][_a-z0-9]*)"
TRANSPORT_PATTERN = re.compile(TRANSPORT_REGEXP)
#: A regular expression that matches country codes in requests for unblocked
#: bridges.
UNBLOCKED_REGEXP = ".*unblocked ([a-z]{2,4})"
UNBLOCKED_PATTERN = re.compile(UNBLOCKED_REGEXP)
def determineBridgeRequestOptions(lines):
"""Figure out which :class:`Bridges.BridgeFilter`s to apply, or offer help.
.. note:: If any ``'transport TYPE'`` was requested, or bridges not
blocked in a specific CC (``'unblocked CC'``), then the ``TYPE``
and/or ``CC`` will *always* be stored as a *lowercase* string.
:param list lines: A list of lines from an email, including the headers.
:raises EmailRequestedHelp: if the client requested help.
:raises EmailRequestedKey: if the client requested our GnuPG key.
:rtype: :class:`EmailBridgeRequest`
:returns: A :class:`~bridgerequst.BridgeRequest` with all of the requested
parameters set. The returned ``BridgeRequest`` will have already had
its filters generated via :meth:`~EmailBridgeRequest.generateFilters`.
"""
request = EmailBridgeRequest()
skippedHeaders = False
for line in lines:
line = line.strip().lower()
# Ignore all lines before the first empty line:
if not line: skippedHeaders = True
if not skippedHeaders: continue
if ("help" in line) or ("halp" in line):
raise EmailRequestedHelp("Client requested help.")
if "get" in line:
request.isValid(True)
logging.debug("Email request was valid.")
if "key" in line:
request.wantsKey(True)
raise EmailRequestedKey("Email requested a copy of our GnuPG key.")
if "ipv6" in line:
request.withIPv6()
if "transport" in line:
request.withPluggableTransportType(line)
if "unblocked" in line:
request.withoutBlockInCountry(line)
logging.debug("Generating hashring filters for request.")
request.generateFilters()
return request
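# Usage sketch (hypothetical): once the blank line ending the headers has been
# seen, a body line such as "get transport obfs4" marks the request valid and
# generates an obfs4 filter:
#
#     request = determineBridgeRequestOptions(
#         ['From: [email protected]', '', 'get transport obfs4'])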
| 37.406417 | 79 | 0.666905 |
9c0f818ceccab853bd37e280f6cc450a37f8fe46 | 267 | bzl | Python | test/com/facebook/buck/skylark/parser/testdata/rule_with_wrong_types/attr_value_type/subdir/foo.bzl | Unknoob/buck | 2dfc734354b326f2f66896dde7746a11965d5a13 | [
"Apache-2.0"
] | 8,027 | 2015-01-02T05:31:44.000Z | 2022-03-31T07:08:09.000Z | test/com/facebook/buck/skylark/parser/testdata/rule_with_wrong_types/attr_value_type/subdir/foo.bzl | Unknoob/buck | 2dfc734354b326f2f66896dde7746a11965d5a13 | [
"Apache-2.0"
] | 2,355 | 2015-01-01T15:30:53.000Z | 2022-03-30T20:21:16.000Z | test/com/facebook/buck/skylark/parser/testdata/rule_with_wrong_types/attr_value_type/subdir/foo.bzl | Unknoob/buck | 2dfc734354b326f2f66896dde7746a11965d5a13 | [
"Apache-2.0"
] | 1,280 | 2015-01-09T03:29:04.000Z | 2022-03-30T15:14:14.000Z | """ Module docstring """
def _impl(_ctx):
""" Function docstring """
pass
some_rule = rule(
attrs = {
"attr1": attr.int(
default = 2,
mandatory = False,
),
"attr2": 5,
},
implementation = _impl,
)
| 15.705882 | 30 | 0.468165 |
9c100a9a9e5db785c7efc1726ba5b0b98ff396a7 | 2,469 | py | Python | src/printReport.py | griimx/Summer-2016 | 08bf0a68a0e12ee81318409f68448adaf75983fe | [
"MIT"
] | null | null | null | src/printReport.py | griimx/Summer-2016 | 08bf0a68a0e12ee81318409f68448adaf75983fe | [
"MIT"
] | null | null | null | src/printReport.py | griimx/Summer-2016 | 08bf0a68a0e12ee81318409f68448adaf75983fe | [
"MIT"
] | null | null | null | from __future__ import print_function
from connection import *
from jinja2 import Environment, FileSystemLoader
import webbrowser
# self.entry_text(self.entry_name, result['firstName']+" "+result['lastName'] )
# self.entry_text(self.entry_EmpID, result['empID'])
# self.entry_text(self.entry_EmpName, result['firstName']+" "+result['lastName'])
# self.entry_text(self.entry_personalno, result['empID'])
# self.entry_text(self.entry_address,result['address'] )
# self.entry_text(self.entry_pin, result['pin'])
# self.entry_text(self.entry_state, result['state'])
# self.entry_text(self.entry_adhar, result['adharID'])
# self.entry_text(self.entry_pan, result['panID'])
# self.entry_text(self.entry_designation, result['designation'])
# self.entry_text(self.entry_unit, result['unit'])
# self.entry_text(self.entry_emailid, result['email'])
# self.entry_text(self.entry_mobile, result['mobile'])
# self.entry_text(self.entry_department, result['depName'])
# self.entry_text(self.entry_ifsc, result['IFSC'])
# self.entry_text(self.enrtry_acno, result['ACNo'])
# self.entry_text(self.entry_branch, result['BranchAdd'])
| 37.409091 | 86 | 0.654111 |
9c104172c7871ed658426c42c033c011c356f2f0 | 2,250 | py | Python | packages/pyre/schemata/Container.py | avalentino/pyre | 7e1f0287eb7eba1c6d1ef385e5160079283ac363 | [
"BSD-3-Clause"
] | 25 | 2018-04-23T01:45:39.000Z | 2021-12-10T06:01:23.000Z | packages/pyre/schemata/Container.py | avalentino/pyre | 7e1f0287eb7eba1c6d1ef385e5160079283ac363 | [
"BSD-3-Clause"
] | 53 | 2018-05-31T04:55:00.000Z | 2021-10-07T21:41:32.000Z | packages/pyre/schemata/Container.py | avalentino/pyre | 7e1f0287eb7eba1c6d1ef385e5160079283ac363 | [
"BSD-3-Clause"
] | 12 | 2018-04-23T22:50:40.000Z | 2022-02-20T17:27:23.000Z | # -*- coding: utf-8 -*-
#
# michael a.g. aïvázis
# orthologue
# (c) 1998-2021 all rights reserved
#
# superclass
from .Schema import Schema
# declaration
# meta-methods
def __init__(self, default=object, schema=Schema(), **kwds):
# adjust the default; carefully, so we don't all end up using the same global container
# checking for {None} is not appropriate here; the user may want {None} as the default
# value; we need a way to know that {default} was not supplied: use a TYPE (in this
# case object) as the marker
default = self.container() if default is object else default
# chain up with my default
super().__init__(default=default, **kwds)
# save my schema
self.schema = schema
# all done
return
# end of file
| 28.481013 | 95 | 0.597333 |
9c108597606416225c709da3b768b53eee32eb1f | 98,110 | py | Python | electronicparsers/exciting/parser.py | nomad-coe/electronic-parsers | defb47be6ac22b2e48d4fb9204c85390a3c2f328 | [
"Apache-2.0"
] | null | null | null | electronicparsers/exciting/parser.py | nomad-coe/electronic-parsers | defb47be6ac22b2e48d4fb9204c85390a3c2f328 | [
"Apache-2.0"
] | null | null | null | electronicparsers/exciting/parser.py | nomad-coe/electronic-parsers | defb47be6ac22b2e48d4fb9204c85390a3c2f328 | [
"Apache-2.0"
] | null | null | null | #
# Copyright The NOMAD Authors.
#
# This file is part of NOMAD.
# See https://nomad-lab.eu for further info.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import numpy as np
import os
import re
import logging
from nomad.units import ureg
from nomad.parsing.file_parser import TextParser, Quantity, XMLParser, DataTextParser
from nomad.datamodel.metainfo.simulation.run import Run, Program
from nomad.datamodel.metainfo.simulation.method import (
Method, DFT, Electronic, Smearing, XCFunctional, Functional,
GW as GWMethod, Scf, BasisSet
)
from nomad.datamodel.metainfo.simulation.system import (
System, Atoms
)
from nomad.datamodel.metainfo.simulation.calculation import (
Calculation, Dos, DosValues, BandStructure, BandEnergies, Energy, EnergyEntry, Charges,
Forces, ForcesEntry, ScfIteration, BandGap
)
from nomad.datamodel.metainfo.workflow import Workflow, GeometryOptimization
from .metainfo.exciting import x_exciting_section_MT_charge_atom, x_exciting_section_MT_moment_atom,\
x_exciting_section_spin, x_exciting_section_fermi_surface,\
x_exciting_section_atoms_group
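
# signed decimal with an optional E-notation exponent, e.g. "-1.23E+04"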
re_float = r'[-+]?\d+\.\d*(?:[Ee][-+]\d+)?'
class DOSXMLParser(XMLParser):
def _get_dos(self, diagram):
dos = np.array(
[point.attrib.get(self._dos_key) for point in diagram], dtype=float)
return dos
def parse(self, key):
if self._results is None:
self._results = dict()
if 'total' in key:
if not self.total_dos:
return
res = np.zeros((self.number_of_spin_channels, self.number_of_dos))
for i in range(len(self.total_dos)):
spin = self.total_dos[i].attrib.get(self._nspin_key, i)
res[i] = self._get_dos(self._total_dos[i])
if self.energy_unit is not None:
res = res * (1 / self.energy_unit)
elif 'partial' in key:
if not self.partial_dos:
return
res = np.zeros((
self.number_of_lm, self.number_of_spin_channels, self.number_of_atoms, self.number_of_dos))
for i in range(len(self.partial_dos)):
spin = self.partial_dos[i].attrib.get(self._nspin_key, None)
if spin is None:
spin = (i % (self.number_of_spin_channels * self.number_of_lm)) // self.number_of_lm
else:
spin = int(spin) - 1
val_l = self.partial_dos[i].attrib.get(self._l_key, None)
val_m = self.partial_dos[i].attrib.get(self._m_key, None)
if val_l is None or val_m is None:
lm = i % self.number_of_lm
else:
lm = int(val_l) ** 2 + int(val_m) + int(val_l)
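                    # l**2 + l + m flattens (l, m), with m in [-l, l], into a
                    # contiguous index: l**2 entries precede shell l, and
                    # m + l indexes within the shell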
atom = i // (self.number_of_lm * self.number_of_spin_channels)
res[lm][spin][atom] = self._get_dos(self.partial_dos[i])
if self.energy_unit is not None:
res = res * (1 / self.energy_unit)
elif key == 'energies':
return self.energies
else:
res = None
self._results[key] = res
class ExcitingFermiSurfaceBxsfParser(TextParser):
    pass  # quantity definitions omitted


class ExcitingEigenvalueParser(TextParser):
    pass  # quantity definitions omitted


class ExcitingGWOutParser(TextParser):
    pass  # quantity definitions omitted


class ExcitingInfoParser(TextParser):
    pass  # quantity definitions omitted
class ExcitingParser:
def __init__(self):
self.info_parser = ExcitingInfoParser()
self.dos_parser = DOSXMLParser(energy_unit=ureg.hartree)
self.bandstructure_parser = BandstructureXMLParser(energy_unit=ureg.hartree)
self.eigval_parser = ExcitingEigenvalueParser()
self.fermisurf_parser = ExcitingFermiSurfaceBxsfParser()
self.evalqp_parser = ExcitingEvalqpParser()
self.dos_out_parser = DataTextParser()
self.bandstructure_dat_parser = BandstructureDatParser(energy_unit=ureg.hartree)
self.band_out_parser = BandOutParser(energy_unit=ureg.hartree)
self.info_gw_parser = GWInfoParser()
self.input_xml_parser = XMLParser()
self.data_xs_parser = DataTextParser()
self.data_clathrate_parser = DataTextParser(dtype=str)
# different names for different versions of exciting
self._energy_keys_mapping = {
'energy_total': ['Total energy', 'total energy'],
'x_exciting_fermi_energy': ['Fermi energy', 'Fermi'],
'energy_kinetic_electronic': ['Kinetic energy', 'electronic kinetic'],
'energy_coulomb': ['Coulomb energy', 'Coulomb'],
'x_exciting_coulomb_energy': ['Coulomb energy', 'Coulomb'],
'energy_exchange': ['Exchange energy', 'exchange'],
'x_exciting_exchange_energy': ['Exchange energy', 'exchange'],
'energy_correlation': ['Correlation energy', 'correlation'],
'x_exciting_correlation_energy': ['Correlation energy', 'correlation'],
'energy_sum_eigenvalues': ['Sum of eigenvalues', 'sum of eigenvalues'],
'x_exciting_effective_potential_energy': ['Effective potential energy'],
'x_exciting_coulomb_potential_energy': ['Coulomb potential energy', 'Coulomb potential'],
'energy_xc_potential': ['xc potential energy', 'xc potential'],
'energy_electrostatic': ['Hartree energy', 'Hartree'],
'x_exciting_hartree_energy': ['Hartree energy', 'Hartree'],
'x_exciting_electron_nuclear_energy': ['Electron-nuclear energy', 'electron-nuclear '],
'x_exciting_nuclear_nuclear_energy': ['Nuclear-nuclear energy', 'nuclear-nuclear'],
'x_exciting_madelung_energy': ['Madelung energy', 'Madelung'],
'x_exciting_core_electron_kinetic_energy': ['Core-electron kinetic energy', 'core electron kinetic'],
'x_exciting_dft_d2_dispersion_correction': ['DFT-D2 dispersion correction']
}
self._electron_charge_keys_mapping = {
'x_exciting_core_charge': ['core'],
'x_exciting_core_leakage': ['core leakage'],
'x_exciting_valence_charge': ['valence'],
'x_exciting_interstitial_charge': ['interstitial'],
'x_exciting_total_MT_charge': ['total charge in muffin-tins', 'total in muffin-tins'],
'charge_total': ['total charge'],
'x_exciting_section_MT_charge_atom': ['atom_resolved']
}
self._moment_keys_mapping = {
'x_exciting_interstitial_moment': ['interstitial'],
'x_exciting_total_MT_moment': ['total moment in muffin-tins'],
'x_exciting_total_moment': ['total moment'],
'x_exciting_section_MT_moment_atom': ['atom_resolved']
}
def file_exists(self, filename):
"""Checks if a the given filename exists and is accessible in the same
folder where the mainfile is stored.
"""
mainfile = os.path.basename(self.info_parser.mainfile)
        # str.strip removes a character *set* from both ends; the intent here
        # is to drop the literal 'INFO.OUT' portion of the basename
        suffix = mainfile.replace('INFO.OUT', '')
target = filename.rsplit('.', 1)
filepath = '%s%s' % (target[0], suffix)
if target[1:]:
filepath = '%s.%s' % (filepath, target[1])
filepath = os.path.join(self.info_parser.maindir, filepath)
if os.path.isfile(filepath) and os.access(filepath, os.F_OK):
return True
return False
| 42.768091 | 147 | 0.593742 |
9c10d305e4f704dfba7fd0b5306c365d4671b49e | 41,012 | py | Python | services/storage/client-sdk/python/simcore_service_storage_sdk/api/users_api.py | KZzizzle/osparc-simcore | 981bc8d193f3f5d507e3225f857e0308c339e163 | [
"MIT"
] | null | null | null | services/storage/client-sdk/python/simcore_service_storage_sdk/api/users_api.py | KZzizzle/osparc-simcore | 981bc8d193f3f5d507e3225f857e0308c339e163 | [
"MIT"
] | 17 | 2020-10-15T16:06:05.000Z | 2022-03-21T18:48:21.000Z | services/storage/client-sdk/python/simcore_service_storage_sdk/api/users_api.py | Surfict/osparc-simcore | 1e0b89574ec17ecb089674f9e5daa83d624430c8 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
simcore-service-storage API
API definition for simcore-service-storage service # noqa: E501
OpenAPI spec version: 0.1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from simcore_service_storage_sdk.api_client import ApiClient
| 41.552178 | 127 | 0.624768 |
9c10fde5b8d2158f51d8de9c32ce3619970e9b19 | 304 | py | Python | reservation_management/migrations/0021_delete_greenpass.py | mattiolato98/reservation-ninja | 0e50b218dd9d90f134868bade2ec2934283c12b5 | [
"MIT"
] | 1 | 2022-03-10T11:34:14.000Z | 2022-03-10T11:34:14.000Z | reservation_management/migrations/0021_delete_greenpass.py | mattiolato98/reservation-ninja | 0e50b218dd9d90f134868bade2ec2934283c12b5 | [
"MIT"
] | null | null | null | reservation_management/migrations/0021_delete_greenpass.py | mattiolato98/reservation-ninja | 0e50b218dd9d90f134868bade2ec2934283c12b5 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.7 on 2021-10-22 14:23
from django.db import migrations
| 17.882353 | 53 | 0.615132 |
9c120e73dce5cc77395baa84f359e42ae9ad94b0 | 3,878 | py | Python | demos/iaf_pop_demo.py | bionet/ted.python | 1698a7f792db23123003ae4e2d39b4c18f25f347 | [
"BSD-3-Clause"
] | 4 | 2015-12-07T14:02:36.000Z | 2018-11-27T22:07:38.000Z | demos/iaf_pop_demo.py | bionet/ted.python | 1698a7f792db23123003ae4e2d39b4c18f25f347 | [
"BSD-3-Clause"
] | null | null | null | demos/iaf_pop_demo.py | bionet/ted.python | 1698a7f792db23123003ae4e2d39b4c18f25f347 | [
"BSD-3-Clause"
] | 2 | 2016-10-06T06:01:05.000Z | 2021-01-22T18:41:18.000Z | #!/usr/bin/env python
"""
Demos of encoding and decoding algorithms using populations of
IAF neurons.
"""
# Copyright (c) 2009-2015, Lev Givon
# All rights reserved.
# Distributed under the terms of the BSD license:
# http://www.opensource.org/licenses/bsd-license
import sys
import numpy as np
# Set matplotlib backend so that plots can be generated without a
# display:
import matplotlib
matplotlib.use('AGG')
from bionet.utils.misc import func_timer
import bionet.utils.band_limited as bl
import bionet.utils.plotting as pl
import bionet.ted.iaf as iaf
# For determining output plot file names:
output_name = 'iaf_pop_demo_'
output_count = 0
output_ext = '.png'
# Define algorithm parameters and input signal:
dur = 0.1
dt = 1e-6
f = 32
bw = 2*np.pi*f
t = np.arange(0, dur, dt)
np.random.seed(0)
noise_power = None
if noise_power is None:
fig_title = 'IAF Input Signal with No Noise'
else:
fig_title = 'IAF Input Signal with %d dB of Noise' % noise_power
print fig_title
u = func_timer(bl.gen_band_limited)(dur, dt, f, noise_power)
pl.plot_signal(t, u, fig_title,
output_name + str(output_count) + output_ext)
# Test leaky IAF algorithms:
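# (In the time-encoding model exercised here, a leaky IAF neuron integrates
# C dv/dt = b + u(t) - v/R and emits a spike, resetting v, whenever v reaches
# the threshold d; the "ideal" IAF tested further below is the R -> infinity
# limit.)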
b1 = 3.5 # bias
d1 = 0.7 # threshold
R1 = 10.0 # resistance
C1 = 0.01 # capacitance
try:
iaf.iaf_recoverable(u, bw, b1, d1, R1, C1)
except ValueError:  # reconstruction condition not satisfied
sys.exit()
b2 = 3.4 # bias
d2 = 0.8 # threshold
R2 = 9.0 # resistance
C2 = 0.01 # capacitance
try:
iaf.iaf_recoverable(u, bw, b2, d2, R2, C2)
except ValueError:  # reconstruction condition not satisfied
sys.exit()
b_list = np.array([b1, b2])
d_list = np.array([d1, d2])
R_list = np.array([R1, R2])
C_list = np.array([C1, C2])
output_count += 1
fig_title = 'Signal Encoded Using Leaky IAF Encoder'
print fig_title
s_list = func_timer(iaf.iaf_encode_pop)([u, u], dt, b_list, d_list, R_list, C_list)
pl.plot_encoded(t, u, s_list[0], fig_title + ' #1',
output_name + str(output_count) + output_ext)
output_count += 1
pl.plot_encoded(t, u, s_list[1], fig_title + ' #2',
output_name + str(output_count) + output_ext)
output_count += 1
fig_title = 'Signal Decoded Using Leaky IAF Population Decoder'
print fig_title
u_rec = func_timer(iaf.iaf_decode_pop)(s_list, dur, dt, bw,
b_list, d_list, R_list,
C_list)
pl.plot_compare(t, u, u_rec, fig_title,
output_name + str(output_count) + output_ext)
# Test ideal IAF algorithms:
b1 = 3.5 # bias
d1 = 0.7 # threshold
R1 = np.inf # resistance
C1 = 0.01 # capacitance
try:
iaf.iaf_recoverable(u, bw, b1, d1, R1, C1)
except ValueError:  # reconstruction condition not satisfied
sys.exit()
b2 = 3.4 # bias
d2 = 0.8 # threshold
R2 = np.inf # resistance
C2 = 0.01 # capacitance
try:
iaf.iaf_recoverable(u, bw, b2, d2, R2, C2)
except ValueError:  # reconstruction condition not satisfied
sys.exit()
b_list = [b1, b2]
d_list = [d1, d2]
R_list = [R1, R2]
C_list = [C1, C2]
output_count += 1
fig_title = 'Signal Encoded Using Ideal IAF Encoder'
print fig_title
s_list = func_timer(iaf.iaf_encode_pop)([u, u], dt, b_list, d_list, R_list, C_list)
pl.plot_encoded(t, u, s_list[0], fig_title + ' #1',
output_name + str(output_count) + output_ext)
output_count += 1
pl.plot_encoded(t, u, s_list[1], fig_title + ' #2',
output_name + str(output_count) + output_ext)
output_count += 1
fig_title = 'Signal Decoded Using Ideal IAF Population Decoder'
print fig_title
u_rec = func_timer(iaf.iaf_decode_pop)(s_list, dur, dt, bw,
b_list, d_list, R_list,
C_list)
pl.plot_compare(t, u, u_rec, fig_title,
output_name + str(output_count) + output_ext)
| 27.309859 | 83 | 0.65936 |
9c12bc71f759e734d137bff21cddbea3b1e6369f | 90 | py | Python | regtests/calling/function_expression.py | bpmbank/PythonJS | 591a80afd8233fb715493591db2b68f1748558d9 | [
"BSD-3-Clause"
] | 319 | 2015-01-02T11:34:16.000Z | 2022-03-25T00:43:33.000Z | regtests/calling/function_expression.py | bpmbank/PythonJS | 591a80afd8233fb715493591db2b68f1748558d9 | [
"BSD-3-Clause"
] | 10 | 2015-02-03T02:33:09.000Z | 2021-11-09T21:41:00.000Z | regtests/calling/function_expression.py | bpmbank/PythonJS | 591a80afd8233fb715493591db2b68f1748558d9 | [
"BSD-3-Clause"
] | 61 | 2015-01-02T12:01:56.000Z | 2021-12-08T07:16:16.000Z | """func expr"""
F = function( x,y ):
return x+y
def main():
TestError( F(1,2) == 3 )
| 10 | 25 | 0.522222 |
9c13630030f6d62b875010ab48a5f1a305094328 | 1,266 | py | Python | nadmin/plugins/sortable.py | A425/django-xadmin-1.8 | 9ab06192311b22ec654778935ce3e3c5ffd39a00 | [
"MIT"
] | 1 | 2015-10-10T08:04:26.000Z | 2015-10-10T08:04:26.000Z | nadmin/plugins/sortable.py | A425/django-xadmin-1.8 | 9ab06192311b22ec654778935ce3e3c5ffd39a00 | [
"MIT"
] | 1 | 2016-03-25T01:41:36.000Z | 2016-03-25T01:41:36.000Z | nadmin/plugins/sortable.py | A425/django-xadmin-1.8 | 9ab06192311b22ec654778935ce3e3c5ffd39a00 | [
"MIT"
] | null | null | null | #coding:utf-8
from nadmin.sites import site
from nadmin.views import BaseAdminPlugin, ListAdminView
SORTBY_VAR = '_sort_by'
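
# A minimal sketch of the plugin class, assuming xadmin-style hooks (the
# field list and the activation test are assumptions):
class SortablePlugin(BaseAdminPlugin):

    sortable_fields = ['sort']

    # only activate the plugin when the admin view declares sortable fields
    def init_request(self, *args, **kwargs):
        return bool(self.sortable_fields)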
site.register_plugin(SortablePlugin, ListAdminView)
| 34.216216 | 107 | 0.611374 |
9c13b63f316f27bf2445b7a4c746d0ccd26b4b27 | 2,644 | py | Python | batch-tmp.py | texastribune/donations | 45a75e528564b5fd502319ed7d512ca91bda7f37 | [
"MIT"
] | 6 | 2019-11-16T23:23:11.000Z | 2022-02-13T00:53:45.000Z | batch-tmp.py | texastribune/donations | 45a75e528564b5fd502319ed7d512ca91bda7f37 | [
"MIT"
] | 519 | 2018-11-20T22:22:16.000Z | 2022-03-31T11:11:32.000Z | batch-tmp.py | texastribune/donations | 45a75e528564b5fd502319ed7d512ca91bda7f37 | [
"MIT"
] | 6 | 2019-02-13T05:25:56.000Z | 2020-08-19T14:41:14.000Z | import logging
from config import ACCOUNTING_MAIL_RECIPIENT, LOG_LEVEL, REDIS_URL, TIMEZONE
from datetime import datetime, timedelta
from pytz import timezone
import celery
import redis
from charges import amount_to_charge, charge, ChargeException
from npsp import Opportunity
from util import send_email
zone = timezone(TIMEZONE)
log_level = logging.getLevelName(LOG_LEVEL)
root = logging.getLogger()
root.setLevel(log_level)
# TODO stop sending this email and just rely on Sentry and logs?
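
# charge_cards is a sketch of what the imports suggest; the Opportunity
# query, its arguments, and the send_email signature are assumptions:
def charge_cards():
    today = datetime.now(tz=zone).strftime("%Y-%m-%d")
    opportunities = Opportunity.list(begin=today, end=today)  # assumed API
    for opportunity in opportunities:
        try:
            amount = amount_to_charge(opportunity)
            logging.info("Charging %s for %s", amount, opportunity)
            charge(opportunity)
        except ChargeException:
            logging.exception("Failed to charge %s", opportunity)
            send_email(
                ACCOUNTING_MAIL_RECIPIENT,  # assumed recipient argument
                "Charge failure",
                str(opportunity),
            )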
if __name__ == "__main__":
charge_cards()
| 22.793103 | 92 | 0.642209 |
9c1453b1473bf17ef5373079c50724a0067a38a2 | 3,311 | py | Python | rotkehlchen/tests/integration/test_blockchain.py | coblee/rotki | d675f5c2d0df5176337b7b10038524ee74923482 | [
"BSD-3-Clause"
] | null | null | null | rotkehlchen/tests/integration/test_blockchain.py | coblee/rotki | d675f5c2d0df5176337b7b10038524ee74923482 | [
"BSD-3-Clause"
] | 3 | 2021-01-28T21:30:46.000Z | 2022-03-25T19:17:00.000Z | rotkehlchen/tests/integration/test_blockchain.py | coblee/rotki | d675f5c2d0df5176337b7b10038524ee74923482 | [
"BSD-3-Clause"
] | null | null | null | import operator
import os
from unittest.mock import patch
import pytest
import requests
from rotkehlchen.chain.ethereum.manager import NodeName
from rotkehlchen.constants.assets import A_BTC
from rotkehlchen.tests.utils.blockchain import mock_etherscan_query
from rotkehlchen.typing import SupportedBlockchain
| 36.788889 | 119 | 0.735125 |
9c1460432a4091cc49024232356f533f6d3b650a | 366 | py | Python | __init__.py | LaptopBiologist/ReferenceAnalyzer | 109f1f58ee2b0173e7285156091ba2b11459ff85 | [
"MIT"
] | null | null | null | __init__.py | LaptopBiologist/ReferenceAnalyzer | 109f1f58ee2b0173e7285156091ba2b11459ff85 | [
"MIT"
] | null | null | null | __init__.py | LaptopBiologist/ReferenceAnalyzer | 109f1f58ee2b0173e7285156091ba2b11459ff85 | [
"MIT"
] | null | null | null | #-------------------------------------------------------------------------------
# Name: module1
# Purpose:
#
# Author: I am
#
# Created: 02/11/2017
# Copyright: (c) I am 2017
# Licence: <your licence>
#-------------------------------------------------------------------------------
if __name__ == '__main__':
main()
| 21.529412 | 80 | 0.300546 |
9c16016ffd51a0a5e8e9512b1d5a109ac8fa3665 | 2,405 | py | Python | app/__init__.py | jimmybutton/moviedb | 61028ac4db7f58a671ab3a1c2afd3bfb53372773 | [
"MIT"
] | null | null | null | app/__init__.py | jimmybutton/moviedb | 61028ac4db7f58a671ab3a1c2afd3bfb53372773 | [
"MIT"
] | null | null | null | app/__init__.py | jimmybutton/moviedb | 61028ac4db7f58a671ab3a1c2afd3bfb53372773 | [
"MIT"
] | null | null | null | from flask import Flask
from config import Config
from sqlalchemy import MetaData
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_login import LoginManager
from flask_moment import Moment
from flask_misaka import Misaka
from flask_bootstrap import Bootstrap
import os
import logging
from logging.handlers import RotatingFileHandler
from elasticsearch import Elasticsearch
convention = {
"ix": 'ix_%(column_0_label)s',
"uq": "uq_%(table_name)s_%(column_0_name)s",
"ck": "ck_%(table_name)s_%(constraint_name)s",
"fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
"pk": "pk_%(table_name)s"
}
metadata = MetaData(naming_convention=convention)
db = SQLAlchemy(metadata=metadata)
migrate = Migrate()
login = LoginManager()
login.login_view = "auth.login"
moment = Moment()
md = Misaka()
bootstrap = Bootstrap()
| 28.630952 | 85 | 0.690644 |
9c161b198ce5d788684f6856cc66f4bdfc78c217 | 7,252 | py | Python | optimize.py | AranKomat/Sequential-Alpha-Zero | 21f78dc95e70b68b5fd18eb33d1ea2d5b5a853d4 | [
"Apache-2.0"
] | 7 | 2021-04-01T09:52:02.000Z | 2021-06-09T11:57:55.000Z | optimize.py | AranKomat/Alpha-Transformer | 21f78dc95e70b68b5fd18eb33d1ea2d5b5a853d4 | [
"Apache-2.0"
] | null | null | null | optimize.py | AranKomat/Alpha-Transformer | 21f78dc95e70b68b5fd18eb33d1ea2d5b5a853d4 | [
"Apache-2.0"
] | null | null | null | import numpy as np
import random
from time import time, sleep
import h5py
import torch
import torch.nn as nn
import torch.optim as optimizer
import glob
import os
#from scipy.stats import rankdata
from lstm import Model, initialize
from Optim import ScheduledOptim
# import _pickle as cPickle
# np.set_printoptions(threshold=np.nan)
| 37.968586 | 119 | 0.535714 |
9c16fad5499d60d29e8503364a688806e916a7fc | 2,031 | py | Python | src/bin_expr.py | Command-Master/MCCC | a49440bfd8542002aee35d41bee093dc8b51d781 | [
"MIT"
] | 6 | 2021-01-15T03:49:01.000Z | 2021-11-02T10:43:22.000Z | src/bin_expr.py | Command-Master/MCCC | a49440bfd8542002aee35d41bee093dc8b51d781 | [
"MIT"
] | null | null | null | src/bin_expr.py | Command-Master/MCCC | a49440bfd8542002aee35d41bee093dc8b51d781 | [
"MIT"
] | null | null | null | from c_int import Int
from casting import cast
from globals_consts import NAMESPACE
from temps import used_temps, get_temp, get_temp_func
| 39.823529 | 109 | 0.557361 |
9c17411640986aa0b93f332bd22849aaf0fdf53b | 3,080 | py | Python | tools/mkcodelet.py | bobmittmann/yard-ice | 3b27f94279d806d3a222de60adccf934994ed168 | [
"MIT"
] | 2 | 2019-04-08T19:00:23.000Z | 2019-11-30T23:42:58.000Z | tools/mkcodelet.py | bobmittmann/yard-ice | 3b27f94279d806d3a222de60adccf934994ed168 | [
"MIT"
] | null | null | null | tools/mkcodelet.py | bobmittmann/yard-ice | 3b27f94279d806d3a222de60adccf934994ed168 | [
"MIT"
] | 2 | 2016-02-12T14:12:41.000Z | 2019-09-18T14:50:29.000Z | #!/usr/bin/python
from struct import *
from getopt import *
import sys
import os
import re
if __name__ == "__main__":
main()
| 21.538462 | 80 | 0.566558 |
9c179ba2d16aa2d479920de1be09d5ac3e265384 | 1,186 | py | Python | utest/x3270/test_screenshot.py | MichaelSeeburger/Robot-Framework-Mainframe-3270-Library | 76b589d58c55a39f96c027a8ae28c41fa37ed445 | [
"MIT"
] | 3 | 2018-10-02T14:32:06.000Z | 2018-10-02T14:33:32.000Z | utest/x3270/test_screenshot.py | MichaelSeeburger/Robot-Framework-Mainframe-3270-Library | 76b589d58c55a39f96c027a8ae28c41fa37ed445 | [
"MIT"
] | null | null | null | utest/x3270/test_screenshot.py | MichaelSeeburger/Robot-Framework-Mainframe-3270-Library | 76b589d58c55a39f96c027a8ae28c41fa37ed445 | [
"MIT"
] | null | null | null | import os
from pytest_mock import MockerFixture
from robot.api import logger
from Mainframe3270.x3270 import x3270
| 28.238095 | 88 | 0.725126 |
9c17a59c22e1b7744bde1f37891a9b3e7d5581e6 | 35,752 | py | Python | splat/photometry.py | brackham/splat | 5ee0da82f19017e900ee83af94609dbe9f8a0ea4 | [
"MIT"
] | null | null | null | splat/photometry.py | brackham/splat | 5ee0da82f19017e900ee83af94609dbe9f8a0ea4 | [
"MIT"
] | null | null | null | splat/photometry.py | brackham/splat | 5ee0da82f19017e900ee83af94609dbe9f8a0ea4 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import print_function, division
"""
.. note::
These are the spectrophotometry functions for SPLAT
"""
# imports - internal
import copy
import os
# imports - external
import numpy
from astropy import units as u # standard units
from astropy import constants as const # physical constants in SI units
import matplotlib.patches as patches
import matplotlib.pyplot as plt
from scipy.integrate import trapz # for numerical integration
from scipy.interpolate import interp1d
# splat functions and constants
from .initialize import *
from .utilities import *
#####################################################
############### SPECTROPHOTOMETRY ###############
#####################################################
# this function has been obseleted
def filterProfile(filt,**kwargs):
'''
:Purpose: Retrieve the filter profile for a SPLAT filter. Returns two arrays: the filter wavelength and filter transmission curve.
:param filter: String giving the name of one of the predefined filters listed in splat.FILTERS.keys() (required)
:param filterFolder: folder containing the filter transmission files (optional, default = splat.FILTER_FOLDER)
:Example:
>>> import splat
>>> import splat.photometry as spphot
>>> sp = splat.getSpectrum(shortname='1507-1627')[0]
>>> sp.fluxCalibrate('2MASS J',14.5)
>>> spphot.filterMag(sp,'MKO J')
(14.345894376898123, 0.027596454828421831)
'''
# keyword parameters
filterFolder = kwargs.get('filterFolder',SPLAT_PATH+FILTER_FOLDER)
if not os.path.exists(filterFolder):
filterFolder = SPLAT_URL+FILTER_FOLDER
# check that requested filter is in list
f0 = checkFilterName(filt, verbose=True)
if f0 == False: raise ValueError
filt = f0
# read in filter
fwave,ftrans = numpy.genfromtxt(os.path.normpath(filterFolder+FILTERS[filt]['file']), comments='#', unpack=True, missing_values = ('NaN','nan'), filling_values = (numpy.nan))
# print(type(fwave),type(ftrans),isinstance(fwave,numpy.ndarray),isinstance(ftrans,numpy.ndarray),not isinstance(fwave,numpy.ndarray) or not isinstance(ftrans,numpy.ndarray))
if not isinstance(fwave,numpy.ndarray) or not isinstance(ftrans,numpy.ndarray):
raise ValueError('\nProblem reading in {}'.format(filterFolder+FILTERS[filt]['file']))
fwave = fwave[~numpy.isnan(ftrans)]*u.micron
ftrans = ftrans[~numpy.isnan(ftrans)]
return fwave,ftrans
def filterMag(sp,filt,*args,**kwargs):
'''
:Purpose:
Determine the photometric magnitude of a source based on its
spectrum. Spectral fluxes are convolved with the filter profile specified by
the ``filter`` input. By default this filter is also
convolved with a model of Vega to extract Vega magnitudes,
but the user can also specify AB magnitudes, photon flux or energy flux.
:Required Parameters:
**sp**: Spectrum class object, which should contain wave, flux and noise array elements.
**filter**: String giving name of filter, which can either be one of the predefined filters listed in splat.FILTERS.keys() or a custom filter name
:Optional Parameters:
**custom** = None: A 2 x N vector array specifying the wavelengths and transmissions for a custom filter
**notch** = None: A 2 element array that specifies the lower and upper wavelengths for a notch filter (100% transmission within, 0% transmission without)
**vega** = True: compute Vega magnitudes (may be set by filter)
**ab** = False: compute AB magnitudes (may be set by filter)
**energy** = False: compute energy flux
**photon** = False: compute photon flux
**filterFolder** = splat.FILTER_FOLDER: folder containing the filter transmission files
**vegaFile** = 'vega_kurucz.txt': name of file containing Vega flux file, must be within ``filterFolder``
**nsamples** = 100: number of samples to use in Monte Carlo error estimation
**info** = False: List the predefined filter names available
**verbose** = True: List the predefined filter names available
:Example:
>>> import splat
>>> import splat.photometry as spphot
>>> sp = splat.getSpectrum(shortname='1507-1627')[0]
>>> sp.fluxCalibrate('2MASS J',14.5)
>>> spphot.filterMag(sp,'MKO J')
(14.345894376898123, 0.027596454828421831)
'''
# keyword parameters
filterFolder = kwargs.get('filterFolder',SPLAT_PATH+FILTER_FOLDER)
if not os.path.exists(filterFolder):
filterFolder = SPLAT_URL+FILTER_FOLDER
vegaFile = kwargs.get('vegaFile',VEGAFILE)
info = kwargs.get('info',False)
custom = kwargs.get('custom',False)
notch = kwargs.get('notch',False)
vega = kwargs.get('vega',True)
ab = kwargs.get('ab',not vega)
rsr = kwargs.get('rsr',False)
nsamples = kwargs.get('nsamples',100)
verbose = kwargs.get('verbose',False)
# check that requested filter is in list
if isinstance(custom,bool) and isinstance(notch,bool):
f0 = checkFilterName(filt,verbose=True)
if f0 == False:
return numpy.nan, numpy.nan
filt = f0
# reset filter calculation methods based on filter design
if 'ab' in FILTERS[filt]['method']:
ab = kwargs.get('ab',True)
vega = not ab
if 'vega' in FILTERS[filt]['method']:
vega = kwargs.get('vega',True)
ab = not vega
rsr = FILTERS[filt]['rsr']
# other possibilities
photons = kwargs.get('photons',False)
photons = kwargs.get('photon',photons)
energy = kwargs.get('energy',False)
energy = kwargs.get('flux',energy)
if (photons or energy):
vega = False
ab = False
if photons: energy = False
if energy: photons = False
# Read in filter
if isinstance(custom,bool) and isinstance(notch,bool):
fwave,ftrans = filterProfile(filt,**kwargs)
# notch filter
elif isinstance(custom,bool) and isinstance(notch,list):
dn = (notch[1]-notch[0])/1000
fwave = numpy.arange(notch[0]-5.*dn,notch[1]+5.*dn,dn)
ftrans = numpy.zeros(len(fwave))
ftrans[numpy.where(numpy.logical_and(fwave >= notch[0],fwave <= notch[1]))] = 1.
# custom filter
else:
fwave,ftrans = custom[0],custom[1]
# units
if isinstance(fwave,u.quantity.Quantity) == True:
fwave = fwave.to(u.micron)
else:
fwave = fwave*u.micron
# check that spectrum and filter cover the same wavelength ranges
if numpy.nanmax(fwave) < numpy.nanmin(sp.wave) or numpy.nanmin(fwave) > numpy.nanmax(sp.wave):
if verbose==True: print('\nWarning: no overlap between spectrum for {} and filter {}'.format(sp.name,filt))
return numpy.nan, numpy.nan
if numpy.nanmin(fwave) < numpy.nanmin(sp.wave) or numpy.nanmax(fwave) > numpy.nanmax(sp.wave):
if verbose==True: print('\nWarning: spectrum for {} does not span full filter profile for {}'.format(sp.name,filt))
# interpolate spectrum onto filter wavelength function
wgood = numpy.where(~numpy.isnan(sp.noise))
if len(sp.wave[wgood]) > 0:
d = interp1d(sp.wave[wgood].value,sp.flux[wgood].value,bounds_error=False,fill_value=0.)
n = interp1d(sp.wave[wgood].value,sp.noise[wgood].value,bounds_error=False,fill_value=0)
# catch for models
else:
if verbose==True: print('\nWarning: data values in range of filter {} have no uncertainties'.format(filt))
d = interp1d(sp.wave.value,sp.flux.value,bounds_error=False,fill_value=0.)
n = interp1d(sp.wave.value,sp.flux.value*1.e-9,bounds_error=False,fill_value=0.)
result = []
if (vega):
# Read in Vega spectrum
vwave,vflux = numpy.genfromtxt(os.path.normpath(filterFolder+vegaFile), comments='#', unpack=True, \
missing_values = ('NaN','nan'), filling_values = (numpy.nan))
vwave = vwave[~numpy.isnan(vflux)]*u.micron
vflux = vflux[~numpy.isnan(vflux)]*(u.erg/(u.cm**2 * u.s * u.micron))
vflux.to(sp.flux_unit,equivalencies=u.spectral_density(vwave))
# interpolate Vega onto filter wavelength function
v = interp1d(vwave.value,vflux.value,bounds_error=False,fill_value=0.)
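        # Vega magnitude: m = -2.5 log10( int T(l) f(l) dl / int T(l) f_Vega(l) dl );
        # for RSR (photon-counting) profiles each integrand carries an extra
        # factor of wavelength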
if rsr:
val = -2.5*numpy.log10(trapz(ftrans*fwave.value*d(fwave.value),fwave.value)/trapz(ftrans*fwave.value*v(fwave.value),fwave.value))
else:
val = -2.5*numpy.log10(trapz(ftrans*d(fwave.value),fwave.value)/trapz(ftrans*v(fwave.value),fwave.value))
for i in numpy.arange(nsamples):
# result.append(-2.5*numpy.log10(trapz(ftrans*numpy.random.normal(d(fwave),n(fwave))*sp.flux_unit,fwave)/trapz(ftrans*v(fwave)*sp.flux_unit,fwave)))
if rsr:
result.append(-2.5*numpy.log10(trapz(ftrans*fwave.value*(d(fwave.value)+numpy.random.normal(0,1.)*n(fwave.value)),fwave.value)/trapz(ftrans*fwave.value*v(fwave.value),fwave.value)))
else:
result.append(-2.5*numpy.log10(trapz(ftrans*(d(fwave.value)+numpy.random.normal(0,1.)*n(fwave.value)),fwave.value)/trapz(ftrans*v(fwave.value),fwave.value)))
outunit = 1.
elif (ab):
nu = sp.wave.to('Hz',equivalencies=u.spectral())
fnu = sp.flux.to('Jy',equivalencies=u.spectral_density(sp.wave))
noisenu = sp.noise.to('Jy',equivalencies=u.spectral_density(sp.wave))
filtnu = fwave.to('Hz',equivalencies=u.spectral())
fconst = 3631*u.jansky
d = interp1d(nu.value,fnu.value,bounds_error=False,fill_value=0.)
n = interp1d(nu.value,noisenu.value,bounds_error=False,fill_value=0.)
b = trapz((ftrans/filtnu.value)*fconst.value,filtnu.value)
val = -2.5*numpy.log10(trapz(ftrans*d(filtnu.value)/filtnu.value,filtnu.value)/b)
for i in numpy.arange(nsamples):
a = trapz(ftrans*(d(filtnu.value)+numpy.random.normal(0,1)*n(filtnu.value))/filtnu.value,filtnu.value)
result.append(-2.5*numpy.log10(a/b))
outunit = 1.
elif (energy):
outunit = u.erg/u.s/u.cm**2
if rsr:
a = trapz(ftrans*fwave.value*d(fwave.value),fwave.value)*sp.wave.unit*sp.flux.unit
b = trapz(ftrans*fwave.value,fwave.value)*sp.wave.unit
c = trapz(ftrans*fwave.value*fwave.value,fwave.value)*sp.wave.unit*sp.wave.unit
val = (a/b * c/b).to(outunit).value
else:
a = trapz(ftrans*d(fwave.value),fwave.value)*sp.wave.unit*sp.flux.unit
b = trapz(ftrans,fwave.value)*sp.wave.unit
c = trapz(ftrans*fwave.value,fwave.value)*sp.wave.unit*sp.wave.unit
val = (a/b * c/b).to(outunit).value
for i in numpy.arange(nsamples):
if rsr:
result.append((trapz(ftrans*fwave.value*(d(fwave.value)+numpy.random.normal(0,1.)*n(fwave.value)),fwave.value)*sp.wave.unit*sp.flux.unit).to(outunit).value)
else:
result.append((trapz(ftrans*(d(fwave.value)+numpy.random.normal(0,1.)*n(fwave.value)),fwave.value)*sp.wave.unit*sp.flux.unit).to(outunit).value)
elif (photons):
outunit = 1./u.s/u.cm**2
convert = const.h.to('erg s')*const.c.to('micron/s')
val = (trapz(ftrans*fwave.value*convert.value*d(fwave.value),fwave.value)*sp.wave.unit*sp.flux.unit*convert.unit).to(outunit).value
for i in numpy.arange(nsamples):
result.append((trapz(ftrans*fwave.value*convert.value*(d(fwave.value)+numpy.random.normal(0,1.)*n(fwave.value)),fwave.value)*sp.wave.unit*sp.flux.unit*convert.unit).to(outunit).value)
else:
raise NameError('\nfilterMag not given a correct physical quantity (vega, ab, energy, photons) to compute photometry\n\n')
# val = numpy.nanmean(result)*outunit
err = numpy.nanstd(result)
if len(sp.wave[wgood]) == 0:
err = 0.
return val*outunit,err*outunit
def filterInfo(*args,**kwargs):
'''
:Purpose: Prints out the current list of filters in the SPLAT reference library.
'''
verbose = kwargs.get('verbose',True)
if len(args) > 0:
fname = list(args)
elif kwargs.get('filter',False) != False:
fname = kwargs['filter']
else:
fname = sorted(list(FILTERS.keys()))
if isinstance(fname,list) == False:
fname = [fname]
output = {}
for k in fname:
f = checkFilterName(k)
if f != False:
output[f] = {}
output[f]['description'] = FILTERS[f]['description']
output[f]['zeropoint'] = FILTERS[f]['zeropoint']
fwave,ftrans = filterProfile(f,**kwargs)
try:
fwave = fwave.to(u.micron)
except:
fwave = fwave*u.micron
fw = fwave[numpy.where(ftrans > 0.01*numpy.nanmax(ftrans))]
ft = ftrans[numpy.where(ftrans > 0.01*numpy.nanmax(ftrans))]
fw05 = fwave[numpy.where(ftrans > 0.5*numpy.nanmax(ftrans))]
output[f]['lambda_mean'] = trapz(ft*fw,fw)/trapz(ft,fw)
output[f]['lambda_pivot'] = numpy.sqrt(trapz(fw*ft,fw)/trapz(ft/fw,fw))
output[f]['lambda_central'] = 0.5*(numpy.max(fw)+numpy.min(fw))
output[f]['lambda_fwhm'] = numpy.max(fw05)-numpy.min(fw05)
output[f]['lambda_min'] = numpy.min(fw)
output[f]['lambda_max'] = numpy.max(fw)
if verbose ==True:
print(f.replace('_',' ')+': '+output[f]['zeropoint'])
print('Zeropoint = {} Jy'.format(output[f]['zeropoint']))
print('Central wavelength: = {:.3f}'.format(output[f]['lambda_central']))
print('Mean wavelength: = {:.3f}'.format(output[f]['lambda_mean']))
print('Pivot point: = {:.3f}'.format(output[f]['lambda_pivot']))
print('FWHM = {:.3f}'.format(output[f]['lambda_fwhm']))
print('Wavelength range = {:.3f} to {:.3f}\n'.format(output[f]['lambda_min'],output[f]['lambda_max']))
else:
if verbose ==True: print(' Filter {} not in SPLAT filter list'.format(k))
kys = list(output.keys())
if len(kys) == 1: return output[kys[0]]
else: return output
def filterProperties(filt,**kwargs):
'''
:Purpose: Returns a dictionary containing key parameters for a particular filter.
:param filter: name of filter, must be one of the specifed filters given by splat.FILTERS.keys()
:type filter: required
:param verbose: print out information about filter to screen
:type verbose: optional, default = True
:Example:
>>> import splat
>>> data = splat.filterProperties('2MASS J')
Filter 2MASS J: 2MASS J-band
Zeropoint = 1594.0 Jy
Pivot point: = 1.252 micron
FWHM = 0.323 micron
Wavelength range = 1.066 to 1.442 micron
>>> data = splat.filterProperties('2MASS X')
Filter 2MASS X not among the available filters:
2MASS H: 2MASS H-band
2MASS J: 2MASS J-band
2MASS KS: 2MASS Ks-band
BESSEL I: Bessel I-band
FOURSTAR H: FOURSTAR H-band
FOURSTAR H LONG: FOURSTAR H long
FOURSTAR H SHORT: FOURSTAR H short
...
'''
filterFolder = kwargs.get('filterFolder',SPLAT_PATH+FILTER_FOLDER)
if not os.path.exists(filterFolder):
filterFolder = SPLAT_URL+FILTER_FOLDER
# check that requested filter is in list
filt = checkFilterName(filt)
if filt == False: return None
report = {}
report['name'] = filt
report['description'] = FILTERS[filt]['description']
report['zeropoint'] = FILTERS[filt]['zeropoint']
report['method'] = FILTERS[filt]['method']
report['rsr'] = FILTERS[filt]['rsr']
fwave,ftrans = filterProfile(filt,**kwargs)
try:
fwave = fwave.to(u.micron)
except:
fwave = fwave*u.micron
fw = fwave[numpy.where(ftrans > 0.01*numpy.nanmax(ftrans))]
ft = ftrans[numpy.where(ftrans > 0.01*numpy.nanmax(ftrans))]
fw05 = fwave[numpy.where(ftrans > 0.5*numpy.nanmax(ftrans))]
# print(trapz(ft,fw))
# print(trapz(fw*ft,fw))
report['lambda_mean'] = trapz(ft*fw,fw)/trapz(ft,fw)
report['lambda_pivot'] = numpy.sqrt(trapz(fw*ft,fw)/trapz(ft/fw,fw))
report['lambda_central'] = 0.5*(numpy.max(fw)+numpy.min(fw))
report['lambda_fwhm'] = numpy.max(fw05)-numpy.min(fw05)
report['lambda_min'] = numpy.min(fw)
report['lambda_max'] = numpy.max(fw)
report['wave'] = fwave
report['transmission'] = ftrans
# report values out
if kwargs.get('verbose',False):
print('\nFilter '+filt+': '+report['description'])
print('Zeropoint = {} Jy'.format(report['zeropoint']))
print('Pivot point: = {:.3f}'.format(report['lambda_pivot']))
print('FWHM = {:.3f}'.format(report['lambda_fwhm']))
print('Wavelength range = {:.3f} to {:.3f}\n'.format(report['lambda_min'],report['lambda_max']))
return report
def magToFlux(mag,filt,**kwargs):
'''
:Purpose: Converts a magnitude into an energy, and vice versa.
:param mag: magnitude on whatever system is defined for the filter or provided (required)
:param filter: name of filter, must be one of the specifed filters given by splat.FILTERS.keys() (required)
:param reverse: convert energy into magnitude instead (optional, default = False)
:param ab: magnitude is on the AB system (optional, default = filter preference)
:param vega: magnitude is on the Vega system (optional, default = filter preference)
    :param rsr: use the relative spectral response (photon-counting) version of the filter profile (optional, default = filter preference)
:param units: units for energy as an astropy.units variable; if this conversion does not work, the conversion is ignored (optional, default = erg/cm2/s)
:param verbose: print out information about filter to screen (optional, default = False)
WARNING: THIS CODE IS ONLY PARTIALLY COMPLETE
'''
# keyword parameters
filterFolder = kwargs.get('filterFolder',SPLAT_PATH+FILTER_FOLDER)
if not os.path.exists(filterFolder):
filterFolder = SPLAT_URL+FILTER_FOLDER
vegaFile = kwargs.get('vegaFile','vega_kurucz.txt')
vega = kwargs.get('vega',True)
ab = kwargs.get('ab',not vega)
rsr = kwargs.get('rsr',False)
nsamples = kwargs.get('nsamples',100)
custom = kwargs.get('custom',False)
notch = kwargs.get('notch',False)
base_unit = u.erg/(u.cm**2 * u.s)
return_unit = kwargs.get('unit',base_unit)
e_mag = kwargs.get('uncertainty',0.)
e_mag = kwargs.get('unc',e_mag)
e_mag = kwargs.get('e_mag',e_mag)
if not isinstance(mag,u.quantity.Quantity): mag=mag*u.s/u.s
if not isinstance(e_mag,u.quantity.Quantity): e_mag=e_mag*mag.unit
# check that requested filter is in list
filt = checkFilterName(filt)
if filt == False: return numpy.nan, numpy.nan
# reset filter calculation methods based on filter design
if 'ab' in FILTERS[filt]['method']:
ab = kwargs.get('ab',True)
vega = not ab
if 'vega' in FILTERS[filt]['method']:
vega = kwargs.get('vega',True)
ab = not vega
if 'rsr' in FILTERS[filt]['method']:
rsr = kwargs.get('rsr',True)
# Read in filter
if isinstance(custom,bool) and isinstance(notch,bool):
fwave,ftrans = filterProfile(filt,**kwargs)
# notch filter
elif isinstance(custom,bool) and isinstance(notch,list):
dn = (notch[1]-notch[0])/1000
fwave = numpy.arange(notch[0]-5.*dn,notch[1]+5.*dn,dn)*u.micron
ftrans = numpy.zeros(len(fwave))
ftrans[numpy.where(numpy.logical_and(fwave >= notch[0],fwave <= notch[1]))] = 1.
# custom filter
else:
fwave,ftrans = custom[0],custom[1]
if isinstance(fwave,u.quantity.Quantity) == False: fwave=fwave*u.micron
if isinstance(ftrans,u.quantity.Quantity) == True: ftrans=ftrans.value
fwave = fwave[~numpy.isnan(ftrans)]
ftrans = ftrans[~numpy.isnan(ftrans)]
result = []
err = 0.
# magnitude -> energy
if kwargs.get('reverse',False) == False:
if vega == True:
# Read in Vega spectrum
vwave,vflux = numpy.genfromtxt(os.path.normpath(filterFolder+vegaFile), comments='#', unpack=True, \
missing_values = ('NaN','nan'), filling_values = (numpy.nan))
vwave = vwave[~numpy.isnan(vflux)]*u.micron
vflux = vflux[~numpy.isnan(vflux)]*(u.erg/(u.cm**2 * u.s * u.micron))
# interpolate Vega onto filter wavelength function
v = interp1d(vwave.value,vflux.value,bounds_error=False,fill_value=0.)
if rsr: fact = trapz(ftrans*fwave.value*v(fwave.value),fwave.value)
else: fact = trapz(ftrans*v(fwave.value),fwave.value)
val = 10.**(-0.4*mag.value)*fact*u.erg/(u.cm**2 * u.s)
# calculate uncertainty
if e_mag.value > 0.:
for i in numpy.arange(nsamples): result.append(10.**(-0.4*(mag.value+numpy.random.normal(0,1.)*e_mag.value))*fact)
err = (numpy.nanstd(result))*u.erg/(u.cm**2 * u.s)
else: err = 0.*u.erg/(u.cm**2 * u.s)
elif ab == True:
fconst = 3631*u.jansky
ftrans = (ftrans*fconst).to(u.erg/(u.cm**2 * u.s * u.micron),equivalencies=u.spectral_density(fwave))
if rsr: fact = trapz(ftrans.value*fwave.value,fwave.value)
else: fact = trapz(ftrans.value,fwave.value)
val = (10.**(-0.4*mag.value)*fact)*u.erg/(u.cm**2 * u.s)
# calculate uncertainty
if e_mag.value > 0.:
for i in numpy.arange(nsamples): result.append(10.**(-0.4*(mag.value+numpy.random.normal(0,1.)*e_mag.value))*fact)
err = (numpy.nanstd(result))*u.erg/(u.cm**2 * u.s)
else: err = 0.*u.erg/(u.cm**2 * u.s)
else:
raise ValueError('\nmagToFlux needs vega or ab method specified')
# convert to desired energy units
# try:
val.to(return_unit)
err.to(return_unit)
# except:
# print('\nWarning: unit {} is not an energy flux unit'.format(return_unit))
try:
val.to(base_unit)
err.to(base_unit)
except:
            print('\nWarning: cannot convert result to energy flux unit {}'.format(base_unit))
return numpy.nan, numpy.nan
return val, err
# energy -> magnitude
# THIS NEEDS TO BE COMPLETED
else:
print('passed')
pass
# check that input is an energy flux
# try:
# mag.to(base_unit)
# e_mag.to(base_unit)
# except:
# raise ValueError('\nInput quantity unit {} is not a flux unit'.format(mag.unit))
def visualizeFilter(filters,verbose=True,xra=[],yra=[0,1.2],**kwargs):
'''
:Purpose: Plots a filter profile or set of filter profiles, optionally on top of a spectrum
WARNING: THIS CODE IS CURRENTLY UNDER DEVELOPMENT, BUGS MAY BE COMMON
'''
filt = copy.deepcopy(filters)
wave_unit = kwargs.get('wave_unit',DEFAULT_WAVE_UNIT)
# single filter name
if isinstance(filt,str):
filt = [filt]
if isinstance(filt,list):
# list of filter names
if isinstance(filt[0],str):
for f in filt:
fc = checkFilterName(f)
filt.remove(f)
if fc == False:
if verbose==True: print('Removed filter {}: not included in SPLAT'.format(f))
else:
filt.insert(len(filt),fc)
if len(filt) == 0:
raise ValueError('Did not recognize any of the input filters {}'.format(filters))
# prep parameters
fwave,ftrans = filterProfile(f,**kwargs)
if isUnit(fwave): wave_unit = kwargs.get('wave_unit',fwave.unit)
xl = kwargs.get('xlabel','Wavelength ({})'.format(wave_unit))
yl = kwargs.get('ylabel','Transmission Curve')
legend = []
fig = plt.figure(figsize=kwargs.get('figsize',[5,4]))
for i,f in enumerate(filt):
fwave,ftrans = filterProfile(f,**kwargs)
if isUnit(fwave): fwave.to(wave_unit)
else: fwave = fwave*wave_unit
if kwargs.get('normalize',False): ftrans = ftrans/numpy.nanmax(ftrans)
plt.plot(fwave,ftrans)
if len(xra) == 0: xra = [numpy.nanmin(fwave.value),numpy.nanmax(fwave.value)]
xra = [numpy.nanmin([xra[0],numpy.nanmin(fwave.value)]),numpy.nanmax([xra[1],numpy.nanmax(fwave.value)])]
yra = [yra[0],numpy.nanmax([yra[1],numpy.nanmax(ftrans)])]
legend.append(FILTERS[f]['description'])
if FILTERS[f]['rsr'] == True: yl = kwargs.get('ylabel','Transmission Curve')
# list of notch ranges
if isinstance(filt[0],int) or isinstance(filt[0],float):
filt = [filt]
# list of notch ranges
if isinstance(filt[0],list):
xl = kwargs.get('xlabel','Wavelength ({})'.format(wave_unit))
yl = kwargs.get('ylabel','Transmission Curve')
legend = []
fig = plt.figure(figsize=kwargs.get('figsize',[5,4]))
for i,f in enumerate(filt):
fwave,ftrans = numpy.linspace(f[0],f[1],1000)*wave_unit,numpy.ones(1000)
plt.plot(fwave,ftrans)
if len(xra) == 0: xra = [numpy.nanmin(fwave.value),numpy.nanmax(fwave.value)]
xra = [numpy.nanmin([xra[0],numpy.nanmin(fwave.value)]),numpy.nanmax([xra[1],numpy.nanmax(fwave.value)])]
yra = [yra[0],numpy.nanmax([yra[1],numpy.nanmax(ftrans)])]
legend.append('Filter {}'.format(i+1))
else:
raise ValueError('Could not parse input {}'.format(filt))
# add a comparison spectrum
sp = kwargs.get('spectrum',None)
sp = kwargs.get('comparison',sp)
if isinstance(sp,splat.core.Spectrum) == True:
print(xra)
sp.normalize(xra)
sp.scale(numpy.nanmax(ftrans)*kwargs.get('comparison_scale',0.8))
plt.plot(sp.wave,sp.flux,color=kwargs.get('comparison_color','k'),alpha=kwargs.get('comparison_alpha',0.5))
legend.append(sp.name)
yra = [yra[0],yra[1]*1.1]
# finish up
plt.xlim(xra)
plt.ylim(yra)
plt.xlabel(xl)
plt.ylabel(yl)
plt.legend(legend)
# save if desired
file = kwargs.get('file','')
file = kwargs.get('filename',file)
file = kwargs.get('output',file)
if file != '': plt.savefig(file)
return fig
#########################################
######## SED FITTING TOOLS #########
### WARNING: THESE ARE EXPERIMENTAL!! ###
#########################################
# plan:
def modelMagnitudes(verbose=True):
'''
this will be a code that calculates a set of magnitudes for a model set's SED models
saves to file that could be uploaded
pre-save some model magnitudes
'''
pass
def interpolateMagnitudes(verbose=True):
'''
produces an interpolated value for a grid set of model magnitudes
'''
pass
def compareMagnitudes(mags1,mags2,unc=None,unc2=None,ignore=[],verbose=True):
'''
this code compares a set of magnitudes using one of several statistics
'''
chi = 0.
dm,em = [],[]
for f in list(mags1.keys()):
if f in list(mags2.keys()) and f in list(unc.keys()) and f not in ignore:
dm.append(mags1[f]-mags2[f])
em.append(unc[f])
# find best scale factor
dm = numpy.array(dm)
em = numpy.array(em)
    offset = numpy.sum(dm/em**2) / numpy.sum(1./em**2)
dmo = numpy.array([m-offset for m in dm])
return numpy.sum((dmo/em)**2), offset
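
# Note on the offset in compareMagnitudes: minimising
# chi^2(o) = sum_i ((dm_i - o) / em_i)^2 over the scale offset o gives
# d(chi^2)/do = -2 sum_i (dm_i - o) / em_i^2 = 0, i.e. o is the
# inverse-variance weighted mean sum(dm/em^2) / sum(1/em^2).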
def SEDFitGrid(verbose=True):
'''
this code will compare a set of magnitudes to a grid of model magnitudes and choose the
closest match based on various statistics
'''
pass
def SEDFitMCMC(verbose=True):
'''
this code will conduct a comparison of a set of magnitudes to model magnitudes using an
MCMC wrapper, and choose best/average/distribution of parameters
'''
pass
def SEDFitAmoeba(verbose=True):
'''
this code will conduct a comparison of a set of magnitudes to model magnitudes using an
Amoeba (Nelder-Mead) wrapper, and choose the closest match
'''
pass
def SEDVisualize(verbose=True):
'''
Visualizes magnitudes on SED scale (flux = lam x F_lam), with option of also comparing to SED spectrum
'''
pass
#####################################################
############### MAGNITUDE CLASS ###############
#####################################################
| 39.857302 | 197 | 0.623881 |
9c17deaa4c71e1a4a4c819492abf40341d7c1817 | 629 | py | Python | helpers/time_utils.py | mandalorian-101/badger-system | 2b0ee9bd77a2cc6f875b9b984ae4dfe713bbc55c | [
"MIT"
] | null | null | null | helpers/time_utils.py | mandalorian-101/badger-system | 2b0ee9bd77a2cc6f875b9b984ae4dfe713bbc55c | [
"MIT"
] | null | null | null | helpers/time_utils.py | mandalorian-101/badger-system | 2b0ee9bd77a2cc6f875b9b984ae4dfe713bbc55c | [
"MIT"
] | null | null | null | import datetime
ONE_MINUTE = 60
ONE_HOUR = 3600
ONE_DAY = 24 * ONE_HOUR
ONE_YEAR = 1 * 365 * ONE_DAY
| 18.5 | 87 | 0.702703 |
9c18aa829131bc05a668cd4d7a72da450336ed4f | 4,766 | py | Python | example_scripts/profile_validation/plot_validation_gridded_data.py | British-Oceanographic-Data-Centre/NEMO-ENTRUST | 41ed278e56428404ab8ec41d74a9a3a761e308ae | [
"MIT"
] | null | null | null | example_scripts/profile_validation/plot_validation_gridded_data.py | British-Oceanographic-Data-Centre/NEMO-ENTRUST | 41ed278e56428404ab8ec41d74a9a3a761e308ae | [
"MIT"
] | null | null | null | example_scripts/profile_validation/plot_validation_gridded_data.py | British-Oceanographic-Data-Centre/NEMO-ENTRUST | 41ed278e56428404ab8ec41d74a9a3a761e308ae | [
"MIT"
] | null | null | null | """
Plot up surface or bottom (or any fixed level) errors from a profile object
with no z_dim (vertical dimension). Provide an array of netcdf files and
mess with the options to get a figure you like.
You can define how many rows and columns the plot will have. This script will
plot the provided list of netcdf datasets from left to right and top to bottom.
A colorbar will be placed right of the figure.
"""
import xarray as xr
import matplotlib.pyplot as plt
import numpy as np
import sys
sys.path.append("/Users/dbyrne/code/COAsT")
import coast
import pandas as pd
#%% File settings
run_name = "test"
# List of analysis output files. Profiles from each will be plotted
# on each axis of the plot
fn_list = [
"~/transfer/test_grid.nc",
"~/transfer/test_grid.nc",
]
# Filename for the output
fn_out = "/Users/dbyrne/transfer/surface_gridded_errors_{0}.png".format(run_name)
#%% General Plot Settings
var_name = "abs_diff_temperature" # Variable name in analysis file to plot
# If you used var modified to make gridded data
# then this is where to select season etc.
save_plot = False
# Masking out grid cells that don't contain many points
min_points_in_average = 5
name_of_count_variable = "grid_N"
# Subplot axes settings
n_r = 2 # Number of subplot rows
n_c = 2 # Number of subplot columns
figsize = (10, 5) # Figure size
lonbounds = [-15, 9.5] # Longitude bounds
latbounds = [45, 64] # Latitude bounds
subplot_padding = 0.5 # Amount of vertical and horizontal padding between plots
fig_pad = (0.075, 0.075, 0.1, 0.1)  # Figure padding (left, bottom, right, top)
# Leave some space on right for colorbar
# Scatter opts
marker_size = 3 # Marker size
cmap = "bwr" # Colormap for normal points
clim = (-1, 1) # Color limits for normal points
discrete_cmap = True # Discretize colormap
cmap_levels = 14
# Labels and Titles
fig_title = "SST Errors" # Whole figure title
title_fontsize = 13 # Fontsize of title
title_fontweight = "bold" # Fontweight to use for title
dataset_names = ["CO9p0", "CO9p0", "CO9p0"] # Names to use for labelling plots
subtitle_fontsize = 11 # Fontsize for dataset subtitles
subtitle_fontweight = "normal" # Fontweight for dataset subtitles
# PLOT SEASONS. Make sure n_r = 2 and n_c = 2
# If this option is true, only the first dataset will be plotted, with seasonal
# variables on each subplot. The season_suffixes will be added to var_name
# for each subplot panel.
plot_seasons = True
season_suffixes = ["DJF", "MAM", "JJA", "SON"]
#%% Read and plot data
# Read all datasets into list
ds_list = [xr.open_dataset(dd) for dd in fn_list]
n_ds = len(ds_list)
n_ax = n_r * n_c
# Create plot and flatten axis array
f, a = coast.plot_util.create_geo_subplots(lonbounds, latbounds, n_r, n_c, figsize=figsize)
a_flat = a.flatten()
# Discretize colormap if requested
if discrete_cmap:
cmap = plt.cm.get_cmap(cmap, cmap_levels)
# Determine if we will extend the colormap or not
extend_cbar = []
# Loop over dataset
for ii in range(n_ax):
ur_index = np.unravel_index(ii, (n_r, n_c))
# Select season if required
if plot_seasons:
ds = ds_list[0]
var_ii = var_name + "_{0}".format(season_suffixes[ii])
N_var = "{0}_{1}".format(name_of_count_variable, season_suffixes[ii])
a_flat[ii].text(0.05, 1.02, season_suffixes[ii], transform=a_flat[ii].transAxes, fontweight="bold")
else:
ds = ds_list[ii]
var_ii = var_name
a_flat[ii].set_title(dataset_names[ii], fontsize=subtitle_fontsize, fontweight=subtitle_fontweight)
N_var = name_of_count_variable
data = ds[var_ii].values
count_var = ds[N_var]
data[count_var < min_points_in_average] = np.nan
# Scatter and set title
pc = a_flat[ii].pcolormesh(
ds.longitude,
ds.latitude,
data,
cmap=cmap,
vmin=clim[0],
vmax=clim[1],
)
# Will we extend the colorbar for this dataset?
extend_cbar.append(coast.plot_util.determine_colorbar_extension(data, clim[0], clim[1]))
# Set Figure title
f.suptitle(fig_title, fontsize=title_fontsize, fontweight=title_fontweight)
# Set tight figure layout
f.tight_layout(w_pad=subplot_padding, h_pad=subplot_padding)
f.subplots_adjust(left=(fig_pad[0]), bottom=(fig_pad[1]), right=(1 - fig_pad[2]), top=(1 - fig_pad[3]))
# Handle colorbar -- will we extend it?
if "both" in extend_cbar:
extend = "both"
elif "max" in extend_cbar and "min" in extend_cbar:
extend = "both"
elif "max" in extend_cbar:
extend = "max"
elif "min" in extend_cbar:
extend = "min"
else:
extend = "neither"
cbar_ax = f.add_axes([(1 - fig_pad[2] + fig_pad[2] * 0.15), 0.15, 0.025, 0.7])
f.colorbar(pc, cax=cbar_ax, extend=extend)
# Save plot maybe
if save_plot:
f.savefig(fn_out)
| 31.562914 | 107 | 0.712547 |
9c191035667faa5283a1d949656c67ee58df9705 | 500 | py | Python | feature-engineering/samples/statistical_features.py | jeury301/text-classifier | d86f658ef3368e4a3f6fd74328fa862e2881ac3b | [
"MIT"
] | null | null | null | feature-engineering/samples/statistical_features.py | jeury301/text-classifier | d86f658ef3368e4a3f6fd74328fa862e2881ac3b | [
"MIT"
] | null | null | null | feature-engineering/samples/statistical_features.py | jeury301/text-classifier | d86f658ef3368e4a3f6fd74328fa862e2881ac3b | [
"MIT"
] | null | null | null | from sklearn.feature_extraction.text import TfidfVectorizer
def compute_tf_idf(corpus):
"""Computing term frequency (tf) - inverse document frequency (idf).
:param corpus: List of documents.
:returns: tf-idf of corpus.
"""
return TfidfVectorizer().fit_transform(corpus)
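
# For reference, sklearn's default weighting (smooth_idf=True, norm='l2') for
# term t in document d is tf(t, d) * (1 + ln((1 + n) / (1 + df(t)))), with each
# row then L2-normalised; n is the corpus size and df(t) the document frequency.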
if __name__ == '__main__':
sample_corpus = [
'This is sample document.',
'another random document.',
'third sample document text'
]
print(compute_tf_idf(sample_corpus))
| 26.315789 | 72 | 0.682 |
9c194afa3b23b40f44a756d8271ecc2b2b439fa6 | 18,197 | py | Python | Gds/src/fprime_gds/executables/tcpserver.py | hunterpaulson/fprime | 70560897b56dc3037dc966c99751b708b1cc8a05 | [
"Apache-2.0"
] | null | null | null | Gds/src/fprime_gds/executables/tcpserver.py | hunterpaulson/fprime | 70560897b56dc3037dc966c99751b708b1cc8a05 | [
"Apache-2.0"
] | null | null | null | Gds/src/fprime_gds/executables/tcpserver.py | hunterpaulson/fprime | 70560897b56dc3037dc966c99751b708b1cc8a05 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
from __future__ import print_function
import socket
import threading
try:
import socketserver
except ImportError:
import SocketServer as socketserver
import time
import os
import signal
import sys
import struct
import errno
from fprime.constants import DATA_ENCODING
from optparse import OptionParser
__version__ = 0.1
__date__ = "2015-04-03"
__updated__ = "2016-04-07"
# Universal server id global
SERVER = None
LOCK = None
shutdown_event = threading.Event()
FSW_clients = []
GUI_clients = []
FSW_ids = []
GUI_ids = []
def main(argv=None):
global SERVER, LOCK
program_name = os.path.basename(sys.argv[0])
program_license = "Copyright 2015 user_name (California Institute of Technology) \
ALL RIGHTS RESERVED. U.S. Government Sponsorship acknowledged."
program_version = "v0.1"
program_build_date = "%s" % __updated__
program_version_string = "%%prog %s (%s)" % (program_version, program_build_date)
program_longdesc = (
"""""" # optional - give further explanation about what the program does
)
if argv is None:
argv = sys.argv[1:]
try:
parser = OptionParser(
version=program_version_string,
epilog=program_longdesc,
description=program_license,
)
parser.add_option(
"-p",
"--port",
dest="port",
action="store",
type="int",
help="Set threaded tcp socket server port [default: %default]",
default=50007,
)
parser.add_option(
"-i",
"--host",
dest="host",
action="store",
type="string",
help="Set threaded tcp socket server ip [default: %default]",
default="127.0.0.1",
)
# process options
(opts, args) = parser.parse_args(argv)
HOST = opts.host
PORT = opts.port
        # allow_reuse_address must be set on the class before instantiation:
        # TCPServer.__init__ binds the socket, so assigning the flag on the
        # instance afterwards comes too late to let the server restart
        # immediately on the same address
        ThreadedTCPServer.allow_reuse_address = True
        server = ThreadedTCPServer((HOST, PORT), ThreadedTCPRequestHandler)
        udp_server = ThreadedUDPServer((HOST, PORT), ThreadedUDPRequestHandler)
SERVER = server
LOCK = server.lock_obj
ip, port = server.server_address
print("TCP Socket Server listening on host addr %s, port %s" % (HOST, PORT))
# Start a thread with the server -- that thread will then start one
# more thread for each request
server_thread = threading.Thread(target=server.serve_forever)
udp_server_thread = threading.Thread(target=udp_server.serve_forever)
signal.signal(signal.SIGINT, signal_handler)
server_thread.daemon = False
server_thread.start()
udp_server_thread.daemon = False
udp_server_thread.start()
while not shutdown_event.is_set():
server_thread.join(timeout=5.0)
udp_server_thread.join(timeout=5.0)
print("shutdown from main thread")
SERVER.shutdown()
SERVER.server_close()
udp_server.shutdown()
udp_server.server_close()
time.sleep(1)
except Exception as e:
indent = len(program_name) * " "
sys.stderr.write(program_name + ": " + repr(e) + "\n")
sys.stderr.write(indent + " for help use --help\n")
return 2
if __name__ == "__main__":
sys.exit(main())
| 32.093474 | 129 | 0.555531 |
9c1abecce40e385182d4e13996f277a150f1a3f4 | 453 | py | Python | btb_manager_telegram/__init__.py | haivle/BTB-manager-telegram | c0f71c5a98a3d128ad03578930932737dc580ed1 | [
"MIT"
] | 3 | 2021-09-24T10:49:23.000Z | 2021-11-18T13:38:17.000Z | btb_manager_telegram/__init__.py | haivle/BTB-manager-telegram | c0f71c5a98a3d128ad03578930932737dc580ed1 | [
"MIT"
] | 1 | 2021-09-01T14:40:35.000Z | 2021-09-01T14:40:35.000Z | btb_manager_telegram/__init__.py | haivle/BTB-manager-telegram | c0f71c5a98a3d128ad03578930932737dc580ed1 | [
"MIT"
] | 2 | 2021-11-03T17:57:07.000Z | 2022-02-01T11:55:54.000Z | import logging
import sched
import time
(
MENU,
EDIT_COIN_LIST,
EDIT_USER_CONFIG,
DELETE_DB,
UPDATE_TG,
UPDATE_BTB,
PANIC_BUTTON,
CUSTOM_SCRIPT,
) = range(8)
BOUGHT, BUYING, SOLD, SELLING = range(4)
logging.basicConfig(
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", level=logging.INFO
)
logger = logging.getLogger("btb_manager_telegram_logger")
scheduler = sched.scheduler(time.time, time.sleep)
| 18.875 | 85 | 0.697572 |
9c1b5e47507c017924313ed766676fc6ec9af4a7 | 316 | py | Python | tests/test_data/lazy_mod.py | brettcannon/modutil | a34794ffee9b6217a9ced41baddab09b4f034cbb | [
"BSD-3-Clause"
] | 17 | 2018-04-21T01:15:52.000Z | 2021-01-16T23:58:51.000Z | tests/test_data/lazy_mod.py | brettcannon/modutil | a34794ffee9b6217a9ced41baddab09b4f034cbb | [
"BSD-3-Clause"
] | 9 | 2018-04-20T23:29:07.000Z | 2019-07-25T17:21:29.000Z | tests/test_data/lazy_mod.py | brettcannon/modutil | a34794ffee9b6217a9ced41baddab09b4f034cbb | [
"BSD-3-Clause"
] | 3 | 2020-02-27T18:10:01.000Z | 2021-01-05T07:22:19.000Z | import modutil
mod, __getattr__ = modutil.lazy_import(__name__,
['tests.test_data.A', '.B', '.C as still_C'])
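# Usage sketch: attribute access triggers the deferred import, e.g.
# ``lazy_mod.still_C`` imports tests.test_data.C on first access
# (behavior per modutil.lazy_import; module names taken from the list above).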
| 17.555556 | 84 | 0.607595 |
9c1b6d5a187986e544d8284aa99d48f0a66a5c3a | 10,390 | py | Python | test.py | xiaohuaibaoguigui/EllSeg | ff56b255f8e650856aec9af23792e105897eba5c | [
"MIT"
] | 1 | 2021-05-26T05:45:42.000Z | 2021-05-26T05:45:42.000Z | test.py | xiaohuaibaoguigui/EllSeg | ff56b255f8e650856aec9af23792e105897eba5c | [
"MIT"
] | null | null | null | test.py | xiaohuaibaoguigui/EllSeg | ff56b255f8e650856aec9af23792e105897eba5c | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
import tqdm
import torch
import pickle
import resource
import numpy as np
import matplotlib.pyplot as plt
from args import parse_args
from modelSummary import model_dict
from pytorchtools import load_from_file
from torch.utils.data import DataLoader
from helperfunctions import mypause, stackall_Dict
from loss import get_seg2ptLoss
from utils import get_nparams, get_predictions
from utils import getSeg_metrics, getPoint_metric, generateImageGrid, unnormPts
sys.path.append(os.path.abspath(os.path.join(os.getcwd(), os.pardir)))
rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
resource.setrlimit(resource.RLIMIT_NOFILE, (2048*10, rlimit[1]))
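# Raise the soft limit on open file descriptors; multi-worker DataLoaders can
# otherwise exhaust the default.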
#%%
if __name__ == '__main__':
args = parse_args()
    device = torch.device("cuda")
torch.cuda.manual_seed(12)
if torch.cuda.device_count() > 1:
print('Moving to a multiGPU setup.')
args.useMultiGPU = True
else:
args.useMultiGPU = False
    torch.backends.cudnn.deterministic = False
if args.model not in model_dict:
print("Model not found.")
print("valid models are: {}".format(list(model_dict.keys())))
exit(1)
LOGDIR = os.path.join(os.getcwd(), 'logs', args.model, args.expname)
path2model = os.path.join(LOGDIR, 'weights')
path2checkpoint = os.path.join(LOGDIR, 'checkpoints')
path2writer = os.path.join(LOGDIR, 'TB.lock')
path2op = os.path.join(os.getcwd(), 'op', str(args.curObj))
os.makedirs(LOGDIR, exist_ok=True)
os.makedirs(path2model, exist_ok=True)
os.makedirs(path2checkpoint, exist_ok=True)
os.makedirs(path2writer, exist_ok=True)
os.makedirs(path2op, exist_ok=True)
model = model_dict[args.model]
netDict = load_from_file([args.loadfile,
os.path.join(path2checkpoint, 'checkpoint.pt')])
startEp = netDict['epoch'] if 'epoch' in netDict.keys() else 0
if 'state_dict' in netDict.keys():
model.load_state_dict(netDict['state_dict'])
print('Parameters: {}'.format(get_nparams(model)))
model = model if not args.useMultiGPU else torch.nn.DataParallel(model)
model = model.to(device).to(args.prec)
    with open(os.path.join('curObjects',
                           'baseline',
                           'cond_'+str(args.curObj)+'.pkl'), 'rb') as f:
        _, _, testObj = pickle.load(f)
testObj.path2data = os.path.join(args.path2data, 'Datasets', 'All')
testObj.augFlag = False
testloader = DataLoader(testObj,
batch_size=args.batchsize,
shuffle=False,
num_workers=args.workers,
drop_last=False)
if args.disp:
fig, axs = plt.subplots(nrows=1, ncols=1)
#%%
accLoss = 0.0
imCounter = 0
ious = []
dists_pupil_latent = []
dists_pupil_seg = []
dists_iris_latent = []
dists_iris_seg = []
model.eval()
opDict = {'id':[], 'archNum': [], 'archName': [], 'code': [],
'scores':{'iou':[], 'lat_dst':[], 'seg_dst':[]},
'pred':{'pup_latent_c':[],
'pup_seg_c':[],
'iri_latent_c':[],
'iri_seg_c':[],
'mask':[]},
'gt':{'pup_c':[], 'mask':[]}}
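    # opDict accumulates per-image ids, latent codes, predicted centers and
    # scores; it is pickled below (op/<curObj>/opDict.pkl) for offline analysis.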
with torch.no_grad():
for bt, batchdata in enumerate(tqdm.tqdm(testloader)):
img, labels, spatialWeights, distMap, pupil_center, iris_center, elNorm, cond, imInfo = batchdata
out_tup = model(img.to(device).to(args.prec),
labels.to(device).long(),
pupil_center.to(device).to(args.prec),
elNorm.to(device).to(args.prec),
spatialWeights.to(device).to(args.prec),
distMap.to(device).to(args.prec),
cond.to(device).to(args.prec),
imInfo[:, 2].to(device).to(torch.long),
0.5)
output, elOut, latent, loss = out_tup
latent_pupil_center = elOut[:, 0:2].detach().cpu().numpy()
latent_iris_center = elOut[:, 5:7].detach().cpu().numpy()
_, seg_pupil_center = get_seg2ptLoss(output[:, 2, ...].cpu(), pupil_center, temperature=4)
_, seg_iris_center = get_seg2ptLoss(-output[:, 0, ...].cpu(), iris_center, temperature=4)
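            # The pupil channel (2) is used directly, while the background
            # channel (0) is negated so its soft-argmax lands on the iris
            # region (channel layout assumed: 0=background, 1=iris, 2=pupil).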
loss = loss if args.useMultiGPU else loss.mean()
accLoss += loss.detach().cpu().item()
predict = get_predictions(output)
iou, iou_bySample = getSeg_metrics(labels.numpy(),
predict.numpy(),
cond[:, 1].numpy())[1:]
latent_pupil_dist, latent_pupil_dist_bySample = getPoint_metric(pupil_center.numpy(),
latent_pupil_center,
cond[:,0].numpy(),
img.shape[2:],
True) # Unnormalizes the points
seg_pupil_dist, seg_pupil_dist_bySample = getPoint_metric(pupil_center.numpy(),
seg_pupil_center,
cond[:,1].numpy(),
img.shape[2:],
True) # Unnormalizes the points
latent_iris_dist, latent_iris_dist_bySample = getPoint_metric(iris_center.numpy(),
latent_iris_center,
cond[:,1].numpy(),
img.shape[2:],
True) # Unnormalizes the points
seg_iris_dist, seg_iris_dist_bySample = getPoint_metric(iris_center.numpy(),
seg_iris_center,
cond[:,1].numpy(),
img.shape[2:],
True) # Unnormalizes the points
dists_pupil_latent.append(latent_pupil_dist)
dists_iris_latent.append(latent_iris_dist)
dists_pupil_seg.append(seg_pupil_dist)
dists_iris_seg.append(seg_iris_dist)
ious.append(iou)
pup_latent_c = unnormPts(latent_pupil_center,
img.shape[2:])
pup_seg_c = unnormPts(seg_pupil_center,
img.shape[2:])
iri_latent_c = unnormPts(latent_iris_center,
img.shape[2:])
iri_seg_c = unnormPts(seg_iris_center,
img.shape[2:])
dispI = generateImageGrid(img.numpy().squeeze(),
predict.numpy(),
elOut.detach().cpu().numpy().reshape(-1, 2, 5),
pup_seg_c,
cond.numpy(),
override=True,
heatmaps=False)
for i in range(0, img.shape[0]):
archNum = testObj.imList[imCounter, 1]
opDict['id'].append(testObj.imList[imCounter, 0])
opDict['code'].append(latent[i,...].detach().cpu().numpy())
opDict['archNum'].append(archNum)
opDict['archName'].append(testObj.arch[archNum])
opDict['pred']['pup_latent_c'].append(pup_latent_c[i, :])
opDict['pred']['pup_seg_c'].append(pup_seg_c[i, :])
opDict['pred']['iri_latent_c'].append(iri_latent_c[i, :])
opDict['pred']['iri_seg_c'].append(iri_seg_c[i, :])
if args.test_save_op_masks:
opDict['pred']['mask'].append(predict[i,...].numpy().astype(np.uint8))
opDict['scores']['iou'].append(iou_bySample[i, ...])
opDict['scores']['lat_dst'].append(latent_pupil_dist_bySample[i, ...])
opDict['scores']['seg_dst'].append(seg_pupil_dist_bySample[i, ...])
opDict['gt']['pup_c'].append(pupil_center[i,...].numpy())
if args.test_save_op_masks:
opDict['gt']['mask'].append(labels[i,...].numpy().astype(np.uint8))
                imCounter += 1
if args.disp:
if bt == 0:
h_im = plt.imshow(dispI.permute(1, 2, 0))
plt.pause(0.01)
else:
h_im.set_data(dispI.permute(1, 2, 0))
mypause(0.01)
opDict = stackall_Dict(opDict)
ious = np.stack(ious, axis=0)
ious = np.nanmean(ious, axis=0)
print('mIoU: {}. IoUs: {}'.format(np.mean(ious), ious))
print('Latent space PUPIL dist. Med: {}, STD: {}'.format(np.nanmedian(dists_pupil_latent),
np.nanstd(dists_pupil_latent)))
print('Segmentation PUPIL dist. Med: {}, STD: {}'.format(np.nanmedian(dists_pupil_seg),
np.nanstd(dists_pupil_seg)))
print('Latent space IRIS dist. Med: {}, STD: {}'.format(np.nanmedian(dists_iris_latent),
np.nanstd(dists_iris_latent)))
print('Segmentation IRIS dist. Med: {}, STD: {}'.format(np.nanmedian(dists_iris_seg),
np.nanstd(dists_iris_seg)))
print('--- Saving output directory ---')
    with open(os.path.join(path2op, 'opDict.pkl'), 'wb') as f:
        pickle.dump(opDict, f)
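    # Typical invocation (hypothetical flags; see args.py for the full list):
    #   python test.py --model <name> --expname <run> --curObj 0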
| 42.933884 | 109 | 0.487777 |
9c1c20ef193d7e2a2b62ae78d7b0e1c3d0bfeffb | 21,072 | py | Python | tests/test_util.py | meskio/tuf | 09c3ceb993d40f7339bbbaf4eae617f95b972708 | [
"MIT"
] | 1 | 2015-02-16T22:53:00.000Z | 2015-02-16T22:53:00.000Z | tests/test_util.py | meskio/tuf | 09c3ceb993d40f7339bbbaf4eae617f95b972708 | [
"MIT"
] | null | null | null | tests/test_util.py | meskio/tuf | 09c3ceb993d40f7339bbbaf4eae617f95b972708 | [
"MIT"
] | 1 | 2019-09-12T02:32:54.000Z | 2019-09-12T02:32:54.000Z | #!/usr/bin/env python
"""
<Program Name>
test_util.py
<Author>
Konstantin Andrianov.
<Started>
February 1, 2013.
<Copyright>
See LICENSE for licensing information.
<Purpose>
Unit test for 'util.py'
"""
# Help with Python 3 compatibility, where the print statement is a function, an
# implicit relative import is invalid, and the '/' operator performs true
# division. Example: print 'hello world' raises a 'SyntaxError' exception.
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import gzip
import shutil
import logging
import tempfile
import unittest
import tuf
import tuf.log
import tuf.hash
import tuf.util
import tuf.unittest_toolbox as unittest_toolbox
import tuf._vendor.six as six
logger = logging.getLogger('tuf.test_util')
# Run unit test.
if __name__ == '__main__':
unittest.main()
| 36.968421 | 96 | 0.689683 |
9c1cb579481d40405b2d799f908616a3f2aa3b49 | 2,769 | py | Python | background/forms.py | BFlameSwift/AirplaneReservationSystem | bbabb0e258c72eb50fcbbf7ade437e38a39e6f02 | [
"MIT"
] | 3 | 2021-06-19T09:40:13.000Z | 2021-06-19T17:09:54.000Z | background/forms.py | BFlameSwift/AirplaneReservationSystem | bbabb0e258c72eb50fcbbf7ade437e38a39e6f02 | [
"MIT"
] | null | null | null | background/forms.py | BFlameSwift/AirplaneReservationSystem | bbabb0e258c72eb50fcbbf7ade437e38a39e6f02 | [
"MIT"
] | 1 | 2021-12-05T14:51:51.000Z | 2021-12-05T14:51:51.000Z |
from django import forms | 60.195652 | 129 | 0.679307 |
9c1e4d053b5156945879fda6f1eb646b81a07f71 | 8,282 | py | Python | cams/propressing/data_rotate.py | boliqq07/cam3d | 8b66681166a8ce0ef3304309385c1b899f1d2bb9 | [
"BSD-3-Clause"
] | 1 | 2020-11-23T08:20:38.000Z | 2020-11-23T08:20:38.000Z | cams/propressing/data_rotate.py | boliqq07/cam3d | 8b66681166a8ce0ef3304309385c1b899f1d2bb9 | [
"BSD-3-Clause"
] | null | null | null | cams/propressing/data_rotate.py | boliqq07/cam3d | 8b66681166a8ce0ef3304309385c1b899f1d2bb9 | [
"BSD-3-Clause"
] | null | null | null | from functools import lru_cache
from math import cos, sin
import scipy
from scipy.ndimage import affine_transform
import numpy as np
def rotation_axis_by_angle(data, angles=(90, 90, 90), times=(2, 2, 2)):
"""
    Get the true data matrix (Cartesian coordinates) from a relative data
    matrix (non-Cartesian coordinates).
    Parameters
    ----------
    data: np.ndarray
        data with shape (nx, ny, nz).
    angles: tuple
        angles of the x, y, z axes:
        the z angle is the angle between the x and y axes,
        the y angle is the angle between the x and z axes,
        the x angle is the angle between the y and z axes.
    times: tuple
        expansion factor for each dimension of the matrix.
"""
matrix = get_matrix(angles=angles, inverse=True)
return rotation_axis_by_matrix(data, matrix, times=times)
def rotation_axis_by_matrix(data, matrix, times=(2, 2, 2)):
"""
    Get the true data matrix (Cartesian coordinates) from a relative data
    matrix (non-Cartesian coordinates).
    Parameters
    ----------
    data: np.ndarray
        data with shape (nx, ny, nz).
    matrix: np.ndarray
        transformation matrix; see also ``get_matrix``.
    times: tuple
        expansion factor for each dimension of the matrix.
"""
dims_old = data.shape
dims = tuple([int(i * j) for i, j in zip(dims_old, times)])
n_data = np.zeros(dims)
d0s = int((dims[0] - dims_old[0]) / 2)
d1s = int((dims[1] - dims_old[1]) / 2)
d2s = int((dims[2] - dims_old[2]) / 2)
n_data[d0s:d0s + dims_old[0], d1s:d1s + dims_old[1], d2s:d2s + dims_old[2]] = data
coords = np.meshgrid(range(dims[0]), range(dims[1]), range(dims[2]), indexing="ij")
xy_coords = np.vstack([coords[0].reshape(-1), coords[1].reshape(-1), coords[2].reshape(-1)])
# apply the transformation matrix
# please note: the coordinates are not homogeneous.
# for the 3D case, I've added code for homogeneous coordinates, you might want to look at that
# please also note: rotation is always around the origin:
# since I want the origin to be in the image center, I had to substract dim/2, rotate, then add it again
dims2 = np.array([i / 2 for i in dims])
dims2 = dims2.reshape(-1, 1)
xy_coords = np.dot(matrix, xy_coords - dims2) + dims2
#
# # undo the stacking and reshaping
x = xy_coords[0, :]
y = xy_coords[1, :]
z = xy_coords[2, :]
x = x.reshape(dims, order="A")
y = y.reshape(dims, order="A")
z = z.reshape(dims, order="A")
new_coords = [x, y, z]
# use map_coordinates to sample values for the new image
new_img = scipy.ndimage.map_coordinates(n_data, new_coords, order=2)
return new_img
def _coords(points, angles=(90, 90, 90), times=(2, 2, 2)):
"""
Parameters
----------
    points: np.ndarray
        key points with shape (n_sample, 3), given as fractions of the shape.
    angles: tuple
        angles of the x, y, z axes:
        the z angle is the angle between the x and y axes,
        the y angle is the angle between the x and z axes,
        the x angle is the angle between the y and z axes.
    times: tuple
        expansion factor for each dimension of the matrix.
"""
dims_old = [1, 1, 1]
matrix = get_matrix(angles=angles)
times = np.array(list(times))
times = times.reshape((-1, 1))
dims_old = np.array(dims_old)
dims_old = dims_old.reshape(-1, 1)
dims2 = dims_old / 2
points = points.T * dims_old
xy_coords = np.dot(matrix, points - dims2) + dims2
xy_coords = xy_coords + (times / 2 - 0.5)
return xy_coords
def rote_index(points, data, angles=(90, 90, 90), times=(2, 2, 2), data_init=True, return_type="float"):
"""
Parameters
----------
    points: np.ndarray
        key points with shape (n_sample, 3), given as fractions of the shape.
    data: np.ndarray or tuple
        the data array, or its shape.
    data_init: bool
        True if ``data`` is the initial (relative-coordinate) matrix, False if
        it is already in Cartesian coordinates (see ``rotation_axis_by_angle``).
    angles: tuple
        angles of the x, y, z axes:
        the z angle is the angle between the x and y axes,
        the y angle is the angle between the x and z axes,
        the x angle is the angle between the y and z axes.
    times: tuple
        expansion factor for each dimension of the matrix.
    return_type: str
        one of "float", "int" or "percent":
        for "float" or "int", return the new index;
        for "percent", return the new fractional position.
"""
data_shape = data.shape if isinstance(data, np.ndarray) else data
if data_init:
times_np = np.array([1,1,1])
else:
times_np = np.array(times)
dims = data_shape
dims = np.array(dims).reshape((-1, 1))
xy_coords = _coords(points, angles=angles, times=times)
if return_type == "percent":
return xy_coords
if return_type == "float":
return (dims * xy_coords/times_np).T
else:
        return np.round((dims * xy_coords / times_np).T).astype(int)  # note: np.round rounds ties to the nearest even integer
def rote_value(points, data, angles=(90, 90, 90), times=(2, 2, 2), method="in", data_type="td"):
"""
Parameters
----------
    points: np.ndarray
        key points with shape (n_sample, 3), given as fractions of the shape.
    data: np.ndarray
        the data array.
    angles: tuple
        angles of the x, y, z axes:
        the z angle is the angle between the x and y axes,
        the y angle is the angle between the x and z axes,
        the x angle is the angle between the y and z axes.
    times: tuple
        expansion factor for each dimension of the matrix.
    data_type: str
        if "init", ``data`` is the initial relative-coordinate data (e.g.
        ELFCAR, CHGCAR) and is converted first; if "td", ``data`` is already
        the true (Cartesian) matrix. See ``rotation_axis_by_angle``.
    method: str
        if "near", return the nearest grid site's value; otherwise (including
        the default "in"), return the interpolated value.
"""
if data_type == "td":
new_data = data
else:
new_data = rotation_axis_by_angle(data, angles=angles, times=times)
if method == "near":
ind = rote_index(points, data, angles=angles, times=times, return_type="int")
new_value = np.array([new_data[tuple(i)] for i in ind.T])
return new_value
else:
ind = rote_index(points, data, angles=angles, times=times, return_type="float")
new_value = scipy.ndimage.map_coordinates(new_data, ind, order=2)
return new_value
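# Usage sketch (hypothetical values; relies on the memoized ``get_matrix``
# helper defined elsewhere in this module):
#   data = np.random.rand(10, 10, 10)                      # fractional grid
#   cart = rotation_axis_by_angle(data, angles=(90, 90, 120), times=(2, 2, 2))
#   pts = np.array([[0.5, 0.5, 0.5]])                      # fractional points
#   vals = rote_value(pts, cart, angles=(90, 90, 120), data_type="td")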
| 32.225681 | 108 | 0.584762 |
9c1f7437cc31d152ad0f3a65db16fb0d7effff6e | 596 | py | Python | playground/conversions/parser/lola2dot.py | flange/esp | 78925925daf876e4936ca7af046b4f884e8a4233 | [
"MIT"
] | null | null | null | playground/conversions/parser/lola2dot.py | flange/esp | 78925925daf876e4936ca7af046b4f884e8a4233 | [
"MIT"
] | null | null | null | playground/conversions/parser/lola2dot.py | flange/esp | 78925925daf876e4936ca7af046b4f884e8a4233 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import sys
#lolafile = open("ex-small.graph", "r")
source = 0
target = 0
lowlink = 0
trans = "bla"
print("digraph {")
with open(sys.argv[1]) as lolafile:
for line in lolafile:
if len(line) == 1:
continue
linelist = line.split(" ")
if "STATE" in linelist:
source = linelist[1]
lowlink = linelist[3].rstrip()
if "->" in linelist:
trans = linelist[0]
target = linelist[2].rstrip()
print(''' {} -> {} [label="{}", lowlink="{}"];'''.format(source, target, trans, lowlink))
print("}")
| 17.028571 | 98 | 0.540268 |
9c1f830b50d1855accf6647797b5fa3fed845091 | 3,098 | py | Python | engkor/views.py | takeshixx/dprkdict | 7f436eb99a855ae8037b2219fc97944f5c000f68 | [
"MIT"
] | 10 | 2017-09-25T09:30:02.000Z | 2021-12-10T13:38:55.000Z | engkor/views.py | takeshixx/dprkdict | 7f436eb99a855ae8037b2219fc97944f5c000f68 | [
"MIT"
] | null | null | null | engkor/views.py | takeshixx/dprkdict | 7f436eb99a855ae8037b2219fc97944f5c000f68 | [
"MIT"
] | null | null | null | import re
import urllib.parse
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse, JsonResponse
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from .models import Definition
RE_HANGUL = re.compile(r'[(]*[\uAC00-\uD7AF]+[\uAC00-\uD7AF (),;]*', re.IGNORECASE)
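# Matches runs of Hangul syllables (U+AC00-U+D7AF), optionally with leading
# parentheses and trailing spaces/punctuation; presumably used by the (elided)
# views to split Korean spans out of mixed-language definition text.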
| 35.204545 | 83 | 0.548741 |
9c2000bb0df619412795ffbe35ee177921174a1f | 1,867 | py | Python | appdaemon/apps/toggle_switch/toggle_switch.py | Mithras/ha | d37f8673eed27a85f76c97ee3e924d2ddc033ee5 | [
"MIT"
] | 3 | 2019-10-27T06:10:26.000Z | 2020-07-21T01:27:11.000Z | appdaemon/apps/toggle_switch/toggle_switch.py | Mithras/ha | d37f8673eed27a85f76c97ee3e924d2ddc033ee5 | [
"MIT"
] | null | null | null | appdaemon/apps/toggle_switch/toggle_switch.py | Mithras/ha | d37f8673eed27a85f76c97ee3e924d2ddc033ee5 | [
"MIT"
] | null | null | null | import globals
| 38.895833 | 121 | 0.619175 |
9c20a7fb2067e672343540ce82ccf23d80253a6c | 336 | py | Python | templates_deepdive_app_bagofwords/udf/dd_extract_features.py | charlieccarey/rdoc | 2e857f29e128f893706d042d583eec698c0bc56a | [
"CC-BY-4.0"
] | null | null | null | templates_deepdive_app_bagofwords/udf/dd_extract_features.py | charlieccarey/rdoc | 2e857f29e128f893706d042d583eec698c0bc56a | [
"CC-BY-4.0"
] | 5 | 2016-05-07T04:42:06.000Z | 2018-04-19T01:08:38.000Z | templates_deepdive_app_bagofwords/udf/dd_extract_features.py | charlieccarey/rdoc | 2e857f29e128f893706d042d583eec698c0bc56a | [
"CC-BY-4.0"
] | null | null | null | #!/usr/bin/env python
from __future__ import print_function
'''
1\taaaa~^~bbbb~^~cccc
2\tdddd~^~EEEE~^~ffff
'''
import sys
ARR_DELIM = '~^~'
for row in sys.stdin:
row = row.strip()
sent_id, lemmas = row.split('\t')
lemmas = lemmas.split(ARR_DELIM)
for lemma in lemmas:
print('{}\t{}'.format(sent_id, lemma))
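# Example: `printf '1\taaaa~^~bbbb\n' | python dd_extract_features.py` emits
# one (sentence_id, lemma) row per line: "1\taaaa" then "1\tbbbb".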
| 17.684211 | 46 | 0.625 |
9c2147f6458e9854c24fb91bf25b8791fe2188ff | 528 | py | Python | src/supplier/templates/supplier/urls.py | vandana0608/Pharmacy-Managament | f99bdec11c24027a432858daa19247a21cecc092 | [
"bzip2-1.0.6"
] | null | null | null | src/supplier/templates/supplier/urls.py | vandana0608/Pharmacy-Managament | f99bdec11c24027a432858daa19247a21cecc092 | [
"bzip2-1.0.6"
] | null | null | null | src/supplier/templates/supplier/urls.py | vandana0608/Pharmacy-Managament | f99bdec11c24027a432858daa19247a21cecc092 | [
"bzip2-1.0.6"
] | null | null | null | from django.urls import path
from . import views
urlpatterns = [
path('', views.SupplierList.as_view(), name='supplier_list'),
path('view/<int:pk>', views.SupplierView.as_view(), name='supplier_view'),
path('new', views.SupplierCreate.as_view(), name='supplier_new'),
path('edit/<int:pk>', views.SupplierUpdate.as_view(), name='supplier_edit'),
path('delete/<int:pk>', views.SupplierDelete.as_view(), name='supplier_delete'),
] | 44 | 84 | 0.69697 |
9c22036370e0f940a80ab34156b825acd98d5b1a | 205 | py | Python | web_scraper/extract/common.py | rarc41/web_scraper_pro | f297c785617c6b1617ced8f29ad11afec31f2968 | [
"MIT"
] | null | null | null | web_scraper/extract/common.py | rarc41/web_scraper_pro | f297c785617c6b1617ced8f29ad11afec31f2968 | [
"MIT"
] | null | null | null | web_scraper/extract/common.py | rarc41/web_scraper_pro | f297c785617c6b1617ced8f29ad11afec31f2968 | [
"MIT"
] | null | null | null | import yaml
__config = None
| 15.769231 | 48 | 0.585366 |
9c2218dead1aba52c67ce0344013c8ad1bd43a62 | 109 | py | Python | engine/config/constant.py | infiniteloop98/lazies-cmd | 46ac58f9b31942c6fa63b7ffa8d409e3a6b4df26 | [
"MIT"
] | 1 | 2022-02-19T08:39:17.000Z | 2022-02-19T08:39:17.000Z | engine/config/constant.py | infiniteloop98/lazies-cmd | 46ac58f9b31942c6fa63b7ffa8d409e3a6b4df26 | [
"MIT"
] | null | null | null | engine/config/constant.py | infiniteloop98/lazies-cmd | 46ac58f9b31942c6fa63b7ffa8d409e3a6b4df26 | [
"MIT"
] | null | null | null | APP_PROFILE_DIRECTORY_NAME = 'lazies-cmd'
DOSKEY_FILE_NAME = 'doskey.bat'
AUTO_RUN_REGISTRY_NAME = 'AutoRun'
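# Assumed meanings (from the names): the app's per-user profile directory, the
# doskey macro batch file, and the Windows AutoRun registry value that loads it.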
| 27.25 | 41 | 0.816514 |
9c231907fc5c90a542b71605f474b278cba43d2d | 777 | py | Python | sequence/get_seqs_from_list.py | fanglu01/cDNA_Cupcake | 60f56dc291661a2b84e40b64d469fba658889c34 | [
"BSD-3-Clause-Clear"
] | 1 | 2018-09-21T06:20:50.000Z | 2018-09-21T06:20:50.000Z | sequence/get_seqs_from_list.py | fanglu01/cDNA_Cupcake | 60f56dc291661a2b84e40b64d469fba658889c34 | [
"BSD-3-Clause-Clear"
] | null | null | null | sequence/get_seqs_from_list.py | fanglu01/cDNA_Cupcake | 60f56dc291661a2b84e40b64d469fba658889c34 | [
"BSD-3-Clause-Clear"
] | null | null | null | #!/usr/bin/env python
import os, sys
from Bio import SeqIO
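# The helper this script calls is elided here; a minimal sketch of the assumed
# behavior (write FASTA records whose IDs appear in the list file to stdout):
def get_seqs_from_list(fasta_filename, list_filename):
    # hypothetical reconstruction -- IDs are matched exactly, one per line
    seqs = set(line.strip() for line in open(list_filename))
    for r in SeqIO.parse(open(fasta_filename), 'fasta'):
        if r.id in seqs:
            print(">{0}\n{1}".format(r.description, r.seq))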
if __name__ == "__main__":
from argparse import ArgumentParser
    parser = ArgumentParser(description="Get sequences from a fasta file given a list of sequence IDs")
parser.add_argument("fasta_filename", help="Input fasta filename to extract sequences from")
parser.add_argument("list_filename", help="List of sequence IDs to extract")
args = parser.parse_args()
get_seqs_from_list(args.fasta_filename, args.list_filename)
| 37 | 96 | 0.693694 |
9c232026bb42fe3062506f2b3ba59f07439cb07f | 7,482 | py | Python | ppos_dex_data.py | cusma/pposdex | 31b834ffcb1a43958ccc57b444c7b9337a5623c9 | [
"MIT"
] | 10 | 2021-01-06T20:09:17.000Z | 2022-01-07T09:38:02.000Z | ppos_dex_data.py | cusma/pposdex | 31b834ffcb1a43958ccc57b444c7b9337a5623c9 | [
"MIT"
] | null | null | null | ppos_dex_data.py | cusma/pposdex | 31b834ffcb1a43958ccc57b444c7b9337a5623c9 | [
"MIT"
] | 1 | 2021-07-17T09:47:18.000Z | 2021-07-17T09:47:18.000Z |
import time
import json
import base64
import msgpack
from schema import Schema, And, Optional
from datetime import datetime
from algosdk import mnemonic
from algosdk.account import address_from_private_key
from algosdk.error import *
from algosdk.future.transaction import PaymentTxn
from inequality_indexes import *
from algo_query import *
def wait_for_confirmation(algod_client, transaction_id, timeout):
"""Wait until the transaction is confirmed or rejected, or until 'timeout'
number of rounds have passed.
Args:
algod_client (AlgodClient): Algod Client
transaction_id (str): the transaction to wait for
timeout (int): maximum number of rounds to wait
Returns:
(dict): pending transaction information, or throws an error if the
transaction is not confirmed or rejected in the next timeout rounds
"""
start_round = algod_client.status()["last-round"] + 1
current_round = start_round
while current_round < start_round + timeout:
algod_client.status_after_block(current_round)
try:
pending_txn = algod_client.pending_transaction_info(transaction_id)
except Exception:
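            # txid not yet known to the node (or lookup failed): give up and
            # return None; callers must treat None as "not confirmed"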
return
if pending_txn.get("confirmed-round", 0) > 0:
return pending_txn
elif pending_txn["pool-error"]:
raise Exception(
'pool error: {}'.format(pending_txn["pool-error"]))
current_round += 1
raise Exception(
        'pending tx not found in timeout rounds, timeout value = {}'.format(
timeout))
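# Usage sketch (assumes an algosdk AlgodClient and a signed transaction):
#   txid = algod_client.send_transaction(signed_txn)
#   info = wait_for_confirmation(algod_client, txid, timeout=10)
#   print("confirmed in round", info["confirmed-round"])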
| 36.320388 | 79 | 0.625234 |
9c2374c54ba5c88122db3264039ec597c5b837e3 | 1,530 | py | Python | src/test/python/programmingtheiot/part01/unit/system/SystemMemUtilTaskTest.py | Zhengrui-Liu/FireAlarmingSysCDA | 26db6375a21ee9bdccba3d137e30d2e63ad6395c | [
"MIT"
] | null | null | null | src/test/python/programmingtheiot/part01/unit/system/SystemMemUtilTaskTest.py | Zhengrui-Liu/FireAlarmingSysCDA | 26db6375a21ee9bdccba3d137e30d2e63ad6395c | [
"MIT"
] | null | null | null | src/test/python/programmingtheiot/part01/unit/system/SystemMemUtilTaskTest.py | Zhengrui-Liu/FireAlarmingSysCDA | 26db6375a21ee9bdccba3d137e30d2e63ad6395c | [
"MIT"
] | null | null | null | #####
#
# This class is part of the Programming the Internet of Things
# project, and is available via the MIT License, which can be
# found in the LICENSE file at the top level of this repository.
#
# Copyright (c) 2020 by Andrew D. King
#
import logging
import unittest
from programmingtheiot.cda.system.SystemMemUtilTask import SystemMemUtilTask
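# The test class body is elided here; a minimal sketch of the assumed shape
# (method and task API names are hypothetical, following the repo's task style):
class SystemMemUtilTaskTest(unittest.TestCase):
    def testGetTelemetryValue(self):
        task = SystemMemUtilTask()
        val = task.getTelemetryValue()
        logging.info("Memory utilization: %s", str(val))
        self.assertGreaterEqual(val, 0.0)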
if __name__ == "__main__":
unittest.main()
| 26.842105 | 105 | 0.739869 |