ext (stringclasses, 9 values) | sha (stringlengths, 40) | content (stringlengths, 3–1.04M)
---|---|---|
py | 1a47bdadebdc84026712e30533b08820250cc1b0 | """Define constants."""
import logging
LOGGER = logging.getLogger(__package__)
# Possible data points:
DATA_POINT_24HOURRAIN = "24hourrain"
DATA_POINT_24HOURRAININ = "24hourrainin"
DATA_POINT_BAROMABS = "baromabs"
DATA_POINT_BAROMABSIN = "baromabsin"
DATA_POINT_BAROMREL = "baromrel"
DATA_POINT_BAROMRELIN = "baromrelin"
DATA_POINT_CO2 = "co2"
DATA_POINT_DAILYRAIN = "dailyrain"
DATA_POINT_DAILYRAININ = "dailyrainin"
DATA_POINT_DEWPOINT = "dewpoint"
DATA_POINT_EVENTRAIN = "eventrain"
DATA_POINT_EVENTRAININ = "eventrainin"
DATA_POINT_FEELSLIKE = "feelslike"
DATA_POINT_HEATINDEX = "heatindex"
DATA_POINT_HOURLYRAIN = "hourlyrain"
DATA_POINT_HOURLYRAININ = "hourlyrainin"
DATA_POINT_HUMIDITY = "humidity"
DATA_POINT_HUMIDITY1 = "humidity1"
DATA_POINT_HUMIDITY10 = "humidity10"
DATA_POINT_HUMIDITY2 = "humidity2"
DATA_POINT_HUMIDITY3 = "humidity3"
DATA_POINT_HUMIDITY4 = "humidity4"
DATA_POINT_HUMIDITY5 = "humidity5"
DATA_POINT_HUMIDITY6 = "humidity6"
DATA_POINT_HUMIDITY7 = "humidity7"
DATA_POINT_HUMIDITY8 = "humidity8"
DATA_POINT_HUMIDITY9 = "humidity9"
DATA_POINT_HUMIDITYIN = "humidityin"
DATA_POINT_LASTRAIN = "lastrain"
DATA_POINT_MAXDAILYGUST = "maxdailygust"
DATA_POINT_MONTHLYRAIN = "monthlyrain"
DATA_POINT_MONTHLYRAININ = "monthlyrainin"
DATA_POINT_PM25 = "pm25"
DATA_POINT_PM25_24H = "pm25_24h"
DATA_POINT_RAINRATE = "rainrate"
DATA_POINT_RAINRATEIN = "rainratein"
DATA_POINT_SOILMOISTURE1 = "soilmoisture1"
DATA_POINT_SOILMOISTURE10 = "soilmoisture10"
DATA_POINT_SOILMOISTURE2 = "soilmoisture2"
DATA_POINT_SOILMOISTURE3 = "soilmoisture3"
DATA_POINT_SOILMOISTURE4 = "soilmoisture4"
DATA_POINT_SOILMOISTURE5 = "soilmoisture5"
DATA_POINT_SOILMOISTURE6 = "soilmoisture6"
DATA_POINT_SOILMOISTURE7 = "soilmoisture7"
DATA_POINT_SOILMOISTURE8 = "soilmoisture8"
DATA_POINT_SOILMOISTURE9 = "soilmoisture9"
DATA_POINT_SOILTEMP1 = "soiltemp1"
DATA_POINT_SOILTEMP10 = "soiltemp10"
DATA_POINT_SOILTEMP10F = "soiltemp10f"
DATA_POINT_SOILTEMP1F = "soiltemp1f"
DATA_POINT_SOILTEMP2 = "soiltemp2"
DATA_POINT_SOILTEMP2F = "soiltemp2f"
DATA_POINT_SOILTEMP3 = "soiltemp3"
DATA_POINT_SOILTEMP3F = "soiltemp3f"
DATA_POINT_SOILTEMP4 = "soiltemp4"
DATA_POINT_SOILTEMP4F = "soiltemp4f"
DATA_POINT_SOILTEMP5 = "soiltemp5"
DATA_POINT_SOILTEMP5F = "soiltemp5f"
DATA_POINT_SOILTEMP6 = "soiltemp6"
DATA_POINT_SOILTEMP6F = "soiltemp6f"
DATA_POINT_SOILTEMP7 = "soiltemp7"
DATA_POINT_SOILTEMP7F = "soiltemp7f"
DATA_POINT_SOILTEMP8 = "soiltemp8"
DATA_POINT_SOILTEMP8F = "soiltemp8f"
DATA_POINT_SOILTEMP9 = "soiltemp9"
DATA_POINT_SOILTEMP9F = "soiltemp9f"
DATA_POINT_SOLARRADIATION = "solarradiation"
DATA_POINT_TEMP = "temp"
DATA_POINT_TEMP1 = "temp1"
DATA_POINT_TEMP10 = "temp10"
DATA_POINT_TEMP10F = "temp10f"
DATA_POINT_TEMP1F = "temp1f"
DATA_POINT_TEMP2 = "temp2"
DATA_POINT_TEMP2F = "temp2f"
DATA_POINT_TEMP3 = "temp3"
DATA_POINT_TEMP3F = "temp3f"
DATA_POINT_TEMP4 = "temp4"
DATA_POINT_TEMP4F = "temp4f"
DATA_POINT_TEMP5 = "temp5"
DATA_POINT_TEMP5F = "temp5f"
DATA_POINT_TEMP6 = "temp6"
DATA_POINT_TEMP6F = "temp6f"
DATA_POINT_TEMP7 = "temp7"
DATA_POINT_TEMP7F = "temp7f"
DATA_POINT_TEMP8 = "temp8"
DATA_POINT_TEMP8F = "temp8f"
DATA_POINT_TEMP9 = "temp9"
DATA_POINT_TEMP9F = "temp9f"
DATA_POINT_TEMPF = "tempf"
DATA_POINT_TEMPIN = "tempin"
DATA_POINT_TEMPINF = "tempinf"
DATA_POINT_TOTALRAIN = "totalrain"
DATA_POINT_TOTALRAININ = "totalrainin"
DATA_POINT_UV = "uv"
DATA_POINT_WEEKLYRAIN = "weeklyrain"
DATA_POINT_WEEKLYRAININ = "weeklyrainin"
DATA_POINT_WINDCHILL = "windchill"
DATA_POINT_WINDDIR = "winddir"
DATA_POINT_WINDGUST = "windgust"
DATA_POINT_WINDGUSTMPH = "windgustmph"
DATA_POINT_WINDSPDMPH_AVG10M = "windspdmph_avg10m"
DATA_POINT_WINDSPDMPH_AVG2M = "windspdmph_avg2m"
DATA_POINT_WINDSPD_AVG10M = "windspd_avg10m"
DATA_POINT_WINDSPD_AVG2M = "windspd_avg2m"
DATA_POINT_WINDSPEED = "windspeed"
DATA_POINT_WINDSPEEDMPH = "windspeedmph"
DATA_POINT_YEARLYRAIN = "yearlyrain"
DATA_POINT_YEARLYRAININ = "yearlyrainin"
# Unit systems:
UNIT_SYSTEM_IMPERIAL = "imperial"
UNIT_SYSTEM_METRIC = "metric"
|
py | 1a47bdd4053aca36705ed4d58e60c21aacfb2da2 |
# coding: utf-8
def write_info(amr):
#import fortranformat as ff
#nout = amr.nout
aexp = amr.aexp
h0 = amr.h0 * 1e-2
rhoc = 1.88e-29
boxlen = 1.0
f = open("info_" + str(nout).zfill(5) + ".txt", 'w')
for name, val in zip(["ncpu", "ndim", "levelmin", "levelmax", "ngridmax", "nstep_coarse"],
[amr.ncpu, amr.ndim, levelmin, amr.nlevelmax, amr.ngridmax, amr.nstep_coarse]):
f.write("{:<12s}={:11d} \n".format(name, val))
f.write("\n")
#lineformat = ff.FortranRecordWriter('(1E23.15)')
scale_d = amr.Om * rhoc * h0**2 / aexp**3
scale_t = aexp**2 / (h0*1e5/3.08e24)
scale_l = aexp* amr.boxlen * 3.08e24/(h0)
for name, val in zip(["boxlen", "time", "aexp", "H0", "omega_m", "omega_l", "omega_k", "omega_b",
"unit_l", "unit_d", "unit_t"],
[boxlen, amr.t, aexp, h0, amr.Om, amr.Ol, amr.Ok, amr.Ob, scale_l, scale_d, scale_t]):
f.write("{:<12s}={:.15E} \n".format(name,val))
f.write("\n")
f.write("ordering type=" + ah.ordering[0].decode("UTF-8"))
f.write("\n DOMAIN ind_min ind_max \n")
for i in range(amr.ncpu):
f.write("{:8d} {:.15E} {:.15E}\n".format(i+1, amr.bound_key[i],amr.bound_key[i+1]))
f.close()
"""
This can generate the 'header' of an info file.
But it is not trivial to read 128-bit floating-point (QUADHILBERT) numbers from raw binary in Python.
Instead, I used a Fortran program to read amr.00001 and output the Hilbert keys in the info format.
"""
wdir = "./"
from pyram import load
nouts = range(113, 120)
for nout in nouts:
ah = load.sim.AmrHeader()
snout = str(nout).zfill(5)
ah._read_amr_header(open(wdir + "output_"+snout+"/amr_"+snout+".out00001", 'rb'), skip_header=False)
levelmin = 8 # From other info file
write_info(ah)
|
py | 1a47bf53200ad2ade8f998a405261c686b6d894d | from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from payu.enumerators import Currency, MessagePol, StatePol
from payulatam.settings import payulatam_settings as settings
from payulatam.signals import invalid_notification_received, payment_was_approved, payment_was_declined, \
payment_was_expired, payment_was_flagged, valid_notification_received
from payulatam.utils import get_signature
CURRENCY = tuple(map(lambda x: (x.value, x.value), Currency))
class AbstractAdministrativeSegment(models.Model):
administrative_fee = models.DecimalField(max_digits=64, decimal_places=2, default=0)
administrative_fee_tax = models.DecimalField(max_digits=64, decimal_places=2, default=0)
administrative_fee_base = models.DecimalField(max_digits=64, decimal_places=2, default=0)
class Meta:
abstract = True
class AbstractBankSegment(models.Model):
bank_id = models.CharField(max_length=255)
bank_referenced_name = models.CharField(max_length=100)
error_code_bank = models.CharField(max_length=255)
error_message_bank = models.CharField(max_length=255)
class Meta:
abstract = True
class AbstractBillingSegment(models.Model):
billing_address = models.TextField()
billing_city = models.CharField(max_length=255)
billing_country = models.CharField(max_length=2)
class Meta:
abstract = True
class AbstractCreditCardSegment(models.Model):
cc_number = models.CharField(max_length=100)
cc_holder = models.CharField(max_length=100)
franchise = models.CharField(max_length=100)
installments_number = models.IntegerField()
class Meta:
abstract = True
class AbstractPolSegment(models.Model):
response_code_pol = models.CharField(max_length=255)
response_message_pol = models.CharField(max_length=255)
state_pol = models.CharField(max_length=32)
reference_pol = models.CharField(max_length=255)
commision_pol = models.DecimalField(max_digits=64, decimal_places=2, default=0)
commision_pol_currency = models.CharField(max_length=3)
class Meta:
abstract = True
@property
def is_state_approved(self):
return self.get_state() == StatePol.APPROVED
@property
def is_state_declined(self):
return self.get_state() == StatePol.DECLINED
@property
def is_state_expired(self):
return self.get_state() == StatePol.EXPIRED
@property
def is_approved(self):
"""
Transaction approved
Returns:
"""
return self.get_response_message() == MessagePol.APPROVED
@property
def is_payment_network_rejected(self):
"""
Transaction rejected by financial institution
Returns:
"""
return self.get_response_message() == MessagePol.PAYMENT_NETWORK_REJECTED
@property
def is_entity_declined(self):
"""
Transaction rejected by the bank
Returns:
"""
return self.get_response_message() == MessagePol.ENTITY_DECLINED
@property
def is_insufficient_funds(self):
"""
Insufficient funds
Returns:
"""
return self.get_response_message() == MessagePol.INSUFFICIENT_FUNDS
@property
def is_invalid_card(self):
"""
Invalid card
Returns:
"""
return self.get_response_message() == MessagePol.INVALID_CARD
@property
def is_contact_the_entity(self):
"""
Contact the financial institution
Returns:
"""
return self.get_response_message() == MessagePol.CONTACT_THE_ENTITY
@property
def is_bank_account_activation_error(self):
"""
Automatic debit is not allowed
Returns:
"""
return self.get_response_message() == MessagePol.BANK_ACCOUNT_ACTIVATION_ERROR
@property
def is_bank_account_not_authorized_for_automatic_debit(self):
"""
Automatic debit is not allowed
Returns:
"""
return self.get_response_message() == MessagePol.BANK_ACCOUNT_NOT_AUTHORIZED_FOR_AUTOMATIC_DEBIT
@property
def is_invalid_agency_bank_account(self):
"""
Automatic debit is not allowed
Returns:
"""
return self.get_response_message() == MessagePol.INVALID_AGENCY_BANK_ACCOUNT
@property
def is_invalid_bank_account(self):
"""
Automatic debit is not allowed
Returns:
"""
return self.get_response_message() == MessagePol.INVALID_BANK_ACCOUNT
@property
def is_invalid_invalid_bank(self):
"""
Automatic debit is not allowed
Returns:
"""
return self.get_response_message() == MessagePol.INVALID_BANK
@property
def is_expired_card(self):
"""
Expired card
Returns:
"""
return self.get_response_message() == MessagePol.EXPIRED_CARD
@property
def is_restricted_card(self):
"""
Restricted card
Returns:
"""
return self.get_response_message() == MessagePol.RESTRICTED_CARD
@property
def is_invalid_expiration_date_or_security_code(self):
"""
Invalid expiration date or security code
Returns:
"""
return self.get_response_message() == MessagePol.INVALID_EXPIRATION_DATE_OR_SECURITY_CODE
@property
def is_repeat_transaction(self):
"""
Retry payment
Returns:
"""
return self.get_response_message() == MessagePol.REPEAT_TRANSACTION
@property
def is_invalid_transaction(self):
"""
Invalid transaction
Returns:
"""
return self.get_response_message() == MessagePol.INVALID_TRANSACTION
@property
def is_exceeded_amount(self):
"""
The value exceeds the maximum allowed by the entity
Returns:
"""
return self.get_response_message() == MessagePol.EXCEEDED_AMOUNT
@property
def is_abandoned_transaction(self):
"""
Transaction abandoned by the payer
Returns:
"""
return self.get_response_message() == MessagePol.ABANDONED_TRANSACTION
@property
def is_credit_card_not_authorized_for_internet_transaction(self):
"""
Card not authorized to buy online
Returns:
"""
return self.get_response_message() == MessagePol.CREDIT_CARD_NOT_AUTHORIZED_FOR_INTERNET_TRANSACTIONS
@property
def is_antifraud_rejected(self):
"""
Transaction refused because of suspected fraud
Returns:
"""
return self.get_response_message() == MessagePol.ANTIFRAUD_REJECTED
@property
def is_digital_certificate_not_found(self):
"""
Digital certificate not found
Returns:
"""
return self.get_response_message() == MessagePol.DIGITAL_CERTIFICATE_NOT_FOUND
@property
def is_bank_unreachable(self):
"""
Error trying to communicate with the bank
Returns:
"""
return self.get_response_message() == MessagePol.BANK_UNREACHABLE
@property
def is_payment_network_no_connection(self):
"""
Unable to communicate with the financial institution
Returns:
"""
return self.get_response_message() == MessagePol.PAYMENT_NETWORK_NO_CONNECTION
@property
def is_payment_network_no_response(self):
"""
No response was received from the financial institution
Returns:
"""
return self.get_response_message() == MessagePol.PAYMENT_NETWORK_NO_RESPONSE
@property
def is_entity_messaging_error(self):
"""
Error communicating with the financial institution
Returns:
"""
return self.get_response_message() == MessagePol.ENTITY_MESSAGING_ERROR
@property
def is_not_accepted_transaction(self):
"""
Transaction not permitted
Returns:
"""
return self.get_response_message() == MessagePol.NOT_ACCEPTED_TRANSACTION
@property
def is_internal_payment_provider_error(self):
"""
Error
Returns:
"""
return self.get_response_message() == MessagePol.INTERNAL_PAYMENT_PROVIDER_ERROR
@property
def is_inactive_payment_provider(self):
"""
Error
Returns:
"""
return self.get_response_message() == MessagePol.INACTIVE_PAYMENT_PROVIDER
@property
def is_error(self):
"""
Error
Returns:
"""
return self.get_response_message() == MessagePol.ERROR
@property
def is_error_converting_transactions_amounts(self):
"""
Error
Returns:
"""
return self.get_response_message() == MessagePol.ERROR_CONVERTING_TRANSACTION_AMOUNTS
@property
def is_fix_not_required(self):
"""
Error
Returns:
"""
return self.get_response_message() == MessagePol.FIX_NOT_REQUIRED
@property
def is_automatically_fixed_and_success_reversal(self):
"""
Error
Returns:
"""
return self.get_response_message() == MessagePol.AUTOMATICALLY_FIXED_AND_SUCCESS_REVERSAL
@property
def is_automatically_fixed_and_unsuccess_reversal(self):
"""
Error
Returns:
"""
return self.get_response_message() == MessagePol.AUTOMATICALLY_FIXED_AND_UNSUCCESS_REVERSAL
@property
def is_automatic_fixed_not_supported(self):
"""
Error
Returns:
"""
return self.get_response_message() == MessagePol.AUTOMATIC_FIXED_NOT_SUPPORTED
@property
def is_not_fixed_for_error_state(self):
"""
Error
Returns:
"""
return self.get_response_message() == MessagePol.NOT_FIXED_FOR_ERROR_STATE
@property
def is_error_fixing_and_reversing(self):
"""
Error
Returns:
"""
return self.get_response_message() == MessagePol.ERROR_FIXING_AND_REVERSING
@property
def is_error_fixing_incomplete_data(self):
"""
Error
Returns:
"""
return self.get_response_message() == MessagePol.ERROR_FIXING_INCOMPLETE_DATA
@property
def is_payment_network_bad_response(self):
"""
Error
Returns:
"""
return self.get_response_message() == MessagePol.PAYMENT_NETWORK_BAD_RESPONSE
@property
def is_expired_transaction(self):
"""
Expired transaction
Returns:
"""
return self.get_response_message() == MessagePol.EXPIRED_TRANSACTION
def get_state(self):
try:
return StatePol(self.state_pol)
except ValueError:
return self.state_pol
def get_state_name(self):
state = self.get_state()
return state.name if isinstance(state, StatePol) else state
def get_response_message(self):
try:
return MessagePol(self.response_message_pol)
except ValueError:
return self.response_message_pol
class AbstractPSESegment(models.Model):
cus = models.CharField(max_length=64)
pse_bank = models.CharField(max_length=255)
pse_reference1 = models.CharField(max_length=255)
pse_reference3 = models.CharField(max_length=255)
pse_reference2 = models.CharField(max_length=255)
class Meta:
abstract = True
class AbstractShippingSegment(models.Model):
shipping_address = models.TextField()
shipping_city = models.CharField(max_length=255)
shipping_country = models.CharField(max_length=2)
class Meta:
abstract = True
class AbstractTransactionSegment(models.Model):
transaction_id = models.CharField(max_length=36, db_index=True)
transaction_date = models.DateTimeField()
transaction_bank_id = models.CharField(max_length=255)
class Meta:
abstract = True
def __str__(self):
return self.transaction_id
class AbstractValueSegment(models.Model):
value = models.DecimalField(max_digits=64, decimal_places=2, default=0)
additional_value = models.DecimalField(max_digits=64, decimal_places=2, default=0)
tax = models.DecimalField(max_digits=64, decimal_places=2, default=0)
exchange_rate = models.DecimalField(max_digits=64, decimal_places=2, default=0)
currency = models.CharField(max_length=3, choices=CURRENCY)
class Meta:
abstract = True
class AbstractFlagSegment(models.Model):
DUPLICATE_TRANSACTION = '1001'
INVALID_SIGN = '1002'
FLAG_CODES = (
(DUPLICATE_TRANSACTION, 'Duplicate Transaction'),
(INVALID_SIGN, 'Invalid Sign'),
)
flag = models.BooleanField(default=False)
flag_code = models.CharField(max_length=4, choices=FLAG_CODES)
flag_info = models.CharField(max_length=100)
class Meta:
abstract = True
@property
def is_flagged(self):
return self.flag
def save(self, *args, **kwargs):
exists = PaymentNotification.objects.filter(transaction_id=self.transaction_id).exists()
if not self.id and exists:
self.flag = True
self.flag_code = self.DUPLICATE_TRANSACTION
self.flag_info = 'Duplicate transaction_id. ({})'.format(self.transaction_id)
super().save(*args, **kwargs)
class AbstractPaymentNotification(AbstractAdministrativeSegment,
AbstractBankSegment,
AbstractBillingSegment,
AbstractCreditCardSegment,
AbstractPolSegment,
AbstractPSESegment,
AbstractShippingSegment,
AbstractTransactionSegment,
AbstractValueSegment,
AbstractFlagSegment,
):
payment_method = models.IntegerField()
payment_method_id = models.IntegerField()
payment_method_type = models.IntegerField()
payment_method_name = models.CharField(max_length=255)
payment_request_state = models.CharField(max_length=32)
class Meta:
abstract = True
class PaymentNotification(AbstractPaymentNotification):
reference_sale = models.CharField(max_length=255)
description = models.TextField()
risk = models.DecimalField(max_digits=64, decimal_places=2, default=0, blank=True, null=True)
sign = models.CharField(max_length=255)
email_buyer = models.CharField(max_length=255)
phone = models.CharField(max_length=20)
office_phone = models.CharField(max_length=20)
merchant_id = models.IntegerField()
customer_number = models.IntegerField(blank=True, null=True)
nickname_seller = models.CharField(max_length=150)
nickname_buyer = models.CharField(max_length=150)
antifraud_merchant_id = models.CharField(max_length=100)
airline_code = models.CharField(max_length=4)
authorization_code = models.CharField(max_length=12)
extra1 = models.CharField(max_length=255)
extra2 = models.CharField(max_length=255)
extra3 = models.CharField(max_length=255)
attempts = models.IntegerField()
ip = models.CharField(max_length=39)
date = models.DateTimeField()
test = models.BooleanField()
raw = models.TextField()
date_modified = models.DateTimeField(auto_now=True)
date_created = models.DateTimeField(auto_now_add=True)
class Meta:
db_table = 'payu_payment_notification'
def save(self, *args, **kwargs):
if not self.id:
            # If the second decimal of the value parameter is zero (e.g. 150.00), the
            # value used to generate the signature must carry a single decimal: 150.0.
            # If the second decimal of the value parameter is non-zero (e.g. 150.26), the
            # value keeps both decimals: 150.26.
            value = str(self.value)
            decimals = value.split('.')[-1] if '.' in value else ''
            if len(decimals) >= 2 and decimals[1] == '0':
                value = '{}.{}'.format(value.split('.')[0], decimals[0])
sign = get_signature(settings.API_KEY, self.merchant_id, self.reference_sale, value, self.currency,
self.state_pol)
if self.sign != sign:
self.flag = True
self.flag_code = AbstractFlagSegment.INVALID_SIGN
self.flag_info = 'Invalid sign. ({})'.format(self.sign)
super().save(*args, **kwargs)
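# (Added, illustrative note; not part of the original module.) The normalization in
# PaymentNotification.save() mirrors PayU's documented signing rule, e.g.:
#   value = 150.00 -> signed as "150.0"   (second decimal is zero)
#   value = 150.26 -> signed as "150.26"  (second decimal is non-zero)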
@receiver(post_save, sender=PaymentNotification)
def payment_notification_save(sender, instance, created, **kwargs):
if created:
if instance.is_flagged:
invalid_notification_received.send(sender=PaymentNotification, instance=instance)
payment_was_flagged.send(sender=PaymentNotification, instance=instance)
return
else:
valid_notification_received.send(sender=PaymentNotification, instance=instance)
if instance.is_state_approved:
payment_was_approved.send(sender=PaymentNotification, instance=instance)
elif instance.is_state_declined:
payment_was_declined.send(sender=PaymentNotification, instance=instance)
elif instance.is_state_expired:
payment_was_expired.send(sender=PaymentNotification, instance=instance)
else:
# TODO raise error
pass
|
py | 1a47c0ae67a6699236b40138f58ff89a54eb324c | from scipy.stats import genpareto, norm
import numpy as np
import gym
from gym import spaces
from gym.utils import seeding
def flip(edge, np_random):
return 1 if np_random.uniform() < edge else -1
class KellyCoinflipEnv(gym.Env):
"""The Kelly coinflip game is a simple gambling introduced by Haghani & Dewey 2016's
'Rational Decision-Making Under Uncertainty: Observed Betting Patterns on a Biased
Coin' (https://papers.ssrn.com/sol3/papers.cfm?abstract_id=2856963), to test human
decision-making in a setting like that of the stock market: positive expected value
but highly stochastic; they found many subjects performed badly, often going broke,
even though optimal play would reach the maximum with ~95% probability. In the
coinflip game, the player starts with $25.00 to gamble over 300 rounds; each round,
they can bet anywhere up to their net worth (in penny increments), and then a coin is
flipped; with P=0.6, the player wins twice what they bet, otherwise, they lose it.
$250 is the maximum players are allowed to have. At the end of the 300 rounds, they
keep whatever they have. The human subjects earned an average of $91; a simple use of
the Kelly criterion (https://en.wikipedia.org/wiki/Kelly_criterion), giving a
strategy of betting 20% until the cap is hit, would earn $240; a decision tree
analysis shows that optimal play earns $246 (https://www.gwern.net/Coin-flip).
The game short-circuits when either wealth = $0 (since one can never recover) or
wealth = cap (trivial optimal play: one simply bets nothing thereafter).
In this implementation, we default to the paper settings of $25, 60% odds, wealth cap
of $250, and 300 rounds. To specify the action space in advance, we multiply the
wealth cap (in dollars) by 100 (to allow for all penny bets); should one attempt to
bet more money than one has, it is rounded down to one's net worth. (Alternately, a
mistaken bet could end the episode immediately; it's not clear to me which version
would be better.) For a harder version which randomizes the 3 key parameters, see the
Generalized Kelly coinflip game."""
metadata = {"render.modes": ["human"]}
def __init__(self, initial_wealth=25.0, edge=0.6, max_wealth=250.0, max_rounds=300):
self.action_space = spaces.Discrete(int(max_wealth * 100)) # betting in penny
# increments
self.observation_space = spaces.Tuple(
(
spaces.Box(0, max_wealth, [1], dtype=np.float32), # (w,b)
spaces.Discrete(max_rounds + 1),
)
)
self.reward_range = (0, max_wealth)
self.edge = edge
self.wealth = initial_wealth
self.initial_wealth = initial_wealth
self.max_rounds = max_rounds
self.max_wealth = max_wealth
self.np_random = None
self.rounds = None
self.seed()
self.reset()
def seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
def step(self, action):
bet_in_dollars = min(
action / 100.0, self.wealth
) # action = desired bet in pennies
self.rounds -= 1
coinflip = flip(self.edge, self.np_random)
self.wealth = min(self.max_wealth, self.wealth + coinflip * bet_in_dollars)
done = self.wealth < 0.01 or self.wealth == self.max_wealth or not self.rounds
reward = self.wealth if done else 0.0
return self._get_obs(), reward, done, {}
def _get_obs(self):
return np.array([self.wealth]), self.rounds
def reset(self):
self.rounds = self.max_rounds
self.wealth = self.initial_wealth
return self._get_obs()
def render(self, mode="human"):
print("Current wealth: ", self.wealth, "; Rounds left: ", self.rounds)
class KellyCoinflipGeneralizedEnv(gym.Env):
"""The Generalized Kelly coinflip game is an extension by ArthurB & Gwern Branwen
which expands the Kelly coinflip game MDP into a POMDP, where the 3 key parameters
(edge, maximum wealth, and number of rounds) are unknown random variables drawn
    from 3 distributions: a Beta(7,3) for the coinflip edge 0-1, a N(300,25) for the total
number of rounds, and a Pareto(5,200) for the wealth cap. These distributions are
chosen to be conjugate & easily updatable, to allow for inference (other choices
like the geometric for number of rounds wouldn't make observations informative),
and to loosely reflect what a human might expect in the original Kelly coinflip
game given that the number of rounds wasn't strictly fixed and they weren't told
the wealth cap until they neared it. With these particular distributions, the
entire history of the game can be summarized into a few sufficient statistics of
rounds-elapsed/wins/losses/max-wealth-ever-reached, from which the Bayes-optimal
decision can (in theory) be made; to avoid all agents having to tediously track
those sufficient statistics manually in the same way, the observation space is
augmented from wealth/rounds-left (rounds-left is deleted because it is a hidden
variable) to current-wealth/rounds-elapsed/wins/losses/maximum-observed-wealth.
The simple Kelly coinflip game can easily be solved by calculating decision trees,
but the Generalized Kelly coinflip game may be intractable (although the analysis
for the edge case alone suggests that the Bayes-optimal value may be very close to
what one would calculate using a decision tree for any specific case), and
represents a good challenge for RL agents."""
metadata = {"render.modes": ["human"]}
def __init__(
self,
initial_wealth=25.0,
edge_prior_alpha=7,
edge_prior_beta=3,
max_wealth_alpha=5.0,
max_wealth_m=200.0,
max_rounds_mean=300.0,
max_rounds_sd=25.0,
reseed=True,
clip_distributions=False,
):
# clip_distributions=True asserts that state and action space are not modified at reset()
# store the hyper-parameters for passing back into __init__() during resets so
# the same hyper-parameters govern the next game's parameters, as the user
# expects:
# TODO: this is boilerplate, is there any more elegant way to do this?
self.initial_wealth = float(initial_wealth)
self.edge_prior_alpha = edge_prior_alpha
self.edge_prior_beta = edge_prior_beta
self.max_wealth_alpha = max_wealth_alpha
self.max_wealth_m = max_wealth_m
self.max_rounds_mean = max_rounds_mean
self.max_rounds_sd = max_rounds_sd
self.clip_distributions = clip_distributions
if reseed or not hasattr(self, "np_random"):
self.seed()
# draw this game's set of parameters:
edge = self.np_random.beta(edge_prior_alpha, edge_prior_beta)
if self.clip_distributions:
# (clip/resample some parameters to be able to fix obs/action space sizes/bounds)
max_wealth_bound = round(
genpareto.ppf(0.85, max_wealth_alpha, max_wealth_m)
)
max_wealth = max_wealth_bound + 1.0
while max_wealth > max_wealth_bound:
max_wealth = round(
genpareto.rvs(
max_wealth_alpha, max_wealth_m, random_state=self.np_random
)
)
max_rounds_bound = int(
round(norm.ppf(0.99, max_rounds_mean, max_rounds_sd))
)
max_rounds = max_rounds_bound + 1
while max_rounds > max_rounds_bound:
max_rounds = int(
round(self.np_random.normal(max_rounds_mean, max_rounds_sd))
)
else:
max_wealth = round(
genpareto.rvs(
max_wealth_alpha, max_wealth_m, random_state=self.np_random
)
)
max_wealth_bound = max_wealth
max_rounds = int(
round(self.np_random.normal(max_rounds_mean, max_rounds_sd))
)
max_rounds_bound = max_rounds
# add an additional global variable which is the sufficient statistic for the
# Pareto distribution on wealth cap; alpha doesn't update, but x_m does, and
# simply is the highest wealth count we've seen to date:
self.max_ever_wealth = float(self.initial_wealth)
# for the coinflip edge, it is total wins/losses:
self.wins = 0
self.losses = 0
# for the number of rounds, we need to remember how many rounds we've played:
self.rounds_elapsed = 0
# the rest proceeds as before:
self.action_space = spaces.Discrete(int(max_wealth_bound * 100))
self.observation_space = spaces.Tuple(
(
spaces.Box(
0, max_wealth_bound, shape=[1], dtype=np.float32
), # current wealth
spaces.Discrete(max_rounds_bound + 1), # rounds elapsed
spaces.Discrete(max_rounds_bound + 1), # wins
spaces.Discrete(max_rounds_bound + 1), # losses
spaces.Box(0, max_wealth_bound, [1], dtype=np.float32),
)
) # maximum observed wealth
self.reward_range = (0, max_wealth)
self.edge = edge
self.wealth = self.initial_wealth
self.max_rounds = max_rounds
self.rounds = self.max_rounds
self.max_wealth = max_wealth
def seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
def step(self, action):
bet_in_dollars = min(action / 100.0, self.wealth)
self.rounds -= 1
coinflip = flip(self.edge, self.np_random)
self.wealth = min(self.max_wealth, self.wealth + coinflip * bet_in_dollars)
self.rounds_elapsed += 1
        if coinflip > 0:
self.max_ever_wealth = max(self.wealth, self.max_ever_wealth)
self.wins += 1
else:
self.losses += 1
done = self.wealth < 0.01 or self.wealth == self.max_wealth or not self.rounds
reward = self.wealth if done else 0.0
return self._get_obs(), reward, done, {}
def _get_obs(self):
return (
np.array([float(self.wealth)]),
self.rounds_elapsed,
self.wins,
self.losses,
np.array([float(self.max_ever_wealth)]),
)
def reset(self):
# re-init everything to draw new parameters etc, but preserve the RNG for
# reproducibility and pass in the same hyper-parameters as originally specified:
self.__init__(
initial_wealth=self.initial_wealth,
edge_prior_alpha=self.edge_prior_alpha,
edge_prior_beta=self.edge_prior_beta,
max_wealth_alpha=self.max_wealth_alpha,
max_wealth_m=self.max_wealth_m,
max_rounds_mean=self.max_rounds_mean,
max_rounds_sd=self.max_rounds_sd,
reseed=False,
clip_distributions=self.clip_distributions,
)
return self._get_obs()
def render(self, mode="human"):
print(
"Current wealth: ",
self.wealth,
"; Rounds left: ",
self.rounds,
"; True edge: ",
self.edge,
"; True max wealth: ",
self.max_wealth,
"; True stopping time: ",
self.max_rounds,
"; Rounds left: ",
self.max_rounds - self.rounds_elapsed,
)
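# --- Added, illustrative usage sketch (not part of the original module). ---
# Plays the fixed-fraction ("bet 20% of current wealth") strategy discussed in the
# KellyCoinflipEnv docstring for a few episodes and returns the final rewards.
# The function and variable names below are made up for the example.
def _demo_kelly_fraction_policy(episodes=5, fraction=0.2):
    final_rewards = []
    for _ in range(episodes):
        env = KellyCoinflipEnv()
        obs = env.reset()
        done = False
        reward = 0.0
        while not done:
            wealth = float(obs[0][0])              # observation = (np.array([wealth]), rounds)
            action = int(wealth * fraction * 100)  # actions are bets expressed in pennies
            obs, reward, done, _ = env.step(action)
        final_rewards.append(reward)
    return final_rewards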
|
py | 1a47c23049be3cfed20c264e42e63afcec7049d0 | """
Compute the mass transfer rate between two stars in a binary:
$$ \frac{ \dot{P} }{ P } = 3 \dot{M_1} \frac{ M_1 - M_2 }{ M_1 M_2 } $$
P is the period of the binary, M_1 and M_2 are the masses, and dM_1 / dt is the
mass transfer rate.
Copyright 2012, Casey W. Stark. See LICENSE.txt for more information.
"""
# Import the Quantity class
from dimensionful import Quantity, Msun, s, day, yr
# Supply the period, change in period observed over some time, and the masses.
P = Quantity(2.49, day)
dP = Quantity(20, s)
dt = Quantity(100, yr)
M1 = Quantity(2.9, Msun)
M2 = Quantity(1.4, Msun)
# Calculate it and convert to Msun / yr
Mdot = dP * M1 * M2 / (3 * P * dt * (M1 - M2))
Mdot.convert_to(Msun / yr)
# Report
print ""
print "The mass transfer rate is %s." % Mdot
print ""
# prints "The mass transfer rate is 8.38745930223e-07 Msun/yr."
|
py | 1a47c2c1495f581bd041c8cd47c4f70bf498e4e4 | #!/usr/bin/env python
"""
Simple application that logs on to the APIC and displays all
of the Tenants.
Leverages the DevNet Sandbox - APIC Simulator Always On
Information at https://developer.cisco.com/site/devnet/sandbox/available-labs/data-center/index.gsp
Code sample based off the ACI-Toolkit Code sample
https://github.com/datacenter/acitoolkit/blob/master/samples/aci-show-tenants.py
"""
import sys
import acitoolkit.acitoolkit as ACI
# Credentials and information for the DevNet ACI Simulator Always-On Sandbox
APIC_URL = "https://sandboxapicdc.cisco.com/"
APIC_USER = "admin"
APIC_PASSWORD = "C1sco12345"
def main():
"""
Main execution routine
:return: None
"""
# Login to APIC
session = ACI.Session(APIC_URL, APIC_USER, APIC_PASSWORD)
resp = session.login()
if not resp.ok:
print('%% Could not login to APIC')
sys.exit(0)
# Download all of the tenants
print("TENANT")
print("------")
tenants = ACI.Tenant.get(session)
for tenant in tenants:
print(tenant.name)
if __name__ == '__main__':
main()
|
py | 1a47c38409901555d7ff5de943ec7f9b97b2feee | #!/usr/bin/env python
import cv2
import datautils.structures.mp
import montage
from .... import log
from .. import utils
logger = log.get_logger(__name__)
#logger.addHandler(logging.StreamHandler())
#logger.setLevel(logging.DEBUG)
class NormSerf(datautils.structures.mp.TimedSerf):
def setup(self, config, grab_buffers, norm_buffers, bg_buffer):
logger.debug(
"NormSerf[%s] setup: %s, %s, %s, %s",
self, config, grab_buffers, norm_buffers, bg_buffer)
self.config = config
self.image_size = config['crop'][:2]
self.grab_buffers = grab_buffers
self.norm_buffers = norm_buffers
self.bg_buffer = bg_buffer
self.setup_buffers()
if 'log_serfs' in config:
utils.log_serf_to_directory(self, config['log_serfs'])
def set_config(self, config):
logger.debug("NormSerf[%s] set_config: %s", self, config)
self.config = config
def setup_buffers(self):
logger.debug("NormSerf[%s] setup_buffers", self)
h, w, s = self.config['crop']
self.grabs = [
montage.io.Image(utils.buffer_as_array(b, 'u2', (h, w, s)))
for b in self.grab_buffers]
self.norms = [
utils.buffer_as_array(b, 'f4', (h, w)) for b in self.norm_buffers]
self.bg = montage.io.Image(
utils.buffer_as_array(self.bg_buffer, 'f4', (h, w)))
def normalize_grab(self, buffer_index):
logger.debug("NormSerf[%s] normalize_grab: %s", self, buffer_index)
# tests on camera node show cv2 is faster (7 ms vs 12 ms)
cv2.multiply(
self.grabs[buffer_index], self.bg,
self.norms[buffer_index], dtype=cv2.CV_32F)
#self.norms[buffer_index][:, :] = self.grabs[buffer_index] * self.bg
self.send('norm', buffer_index)
class NormLord(datautils.structures.mp.Lord):
def __init__(self, config, buffers):
logger.debug(
"NormLord[%s] __init__: %s, %s", self, config, buffers)
datautils.structures.mp.Lord.__init__(self)
self.config = config
self.buffers = buffers
def start(self, wait=True):
logger.debug("NormLord[%s] start", self)
datautils.structures.mp.Lord.start(
self, NormSerf, (
self.config, self.buffers.grab_buffers,
self.buffers.norm_buffers, self.buffers.bg_buffer), wait=wait)
def set_config(self, config):
logger.debug("NormLord[%s] set_config: %s", self, config)
self.send('set_config', config)
def normalize_grab(self, index):
logger.debug("NormLord[%s] normalize_grab: %s", self, index)
self.buffers.lock_grab(index)
self.send('normalize_grab', index)
def norm(self, index):
logger.debug("NormLord[%s] norm: %s", self, index)
self.buffers.unlock_grab(index)
|
py | 1a47c3a0a1d36b7deff64032fe6cde83f4cef94e | #!/usr/bin/env python
from operator import itemgetter
import sys
current_year = 0
max_temp = 0
temp = 0
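# (Added note) A typical local smoke test for this reducer, assuming a companion
# mapper.py that emits tab-separated "year<TAB>temperature" lines:
#   cat input.txt | ./mapper.py | sort | python reducer.py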
# input comes from STDIN
for line in sys.stdin:
# remove leading and trailing whitespace
line = line.strip()
# parse the input we got from mapper.py
year, temp = line.split('\t', 1)
# convert count (currently a string) to int
try:
year = int(year)
temp = int(temp)
except ValueError:
# count was not a number, so silently
# ignore/discard this line
continue
    # this IF-switch only works because Hadoop sorts map output
    # by key (here: year) before it is passed to the reducer
if current_year == year:
max_temp = max(max_temp, temp)
else:
if current_year != 0:
# write result to STDOUT
print('%d\t%d' % (current_year, max_temp))
max_temp = temp
current_year = year
# do not forget to output the last year if needed!
if current_year == year:
    print('%d\t%d' % (current_year, max_temp))
|
py | 1a47c46f2936f41a42d93016550c2de4549809c1 | # GNU MediaGoblin -- federated, autonomous media hosting
# Copyright (C) 2011, 2012 MediaGoblin contributors. See AUTHORS.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
This module contains some Mixin classes for the db objects.
A bunch of functions on the db objects are really more like
"utility functions": They could live outside the classes
and be called "by hand" passing the appropiate reference.
They usually only use the public API of the object and
rarely use database related stuff.
These functions now live here and get "mixed in" into the
real objects.
"""
import uuid
import re
from datetime import datetime
from pytz import UTC
from werkzeug.utils import cached_property
from mediagoblin.media_types import FileTypeNotSupported
from mediagoblin.tools import common, licenses
from mediagoblin.tools.pluginapi import hook_handle
from mediagoblin.tools.text import cleaned_markdown_conversion
from mediagoblin.tools.url import slugify
from mediagoblin.tools.translate import pass_to_ugettext as _
class CommentingMixin:
"""
Mixin that gives classes methods to get and add the comments on/to it
This assumes the model has a "comments" class which is a ForeignKey to the
Collection model. This will hold a Collection of comments which are
associated to this model. It also assumes the model has an "actor"
ForeignKey which points to the creator/publisher/etc. of the model.
NB: This is NOT the mixin for the Comment Model, this is for
other models which support commenting.
"""
def get_comment_link(self):
# Import here to avoid cyclic imports
from mediagoblin.db.models import Comment, GenericModelReference
gmr = GenericModelReference.query.filter_by(
obj_pk=self.id,
model_type=self.__tablename__
).first()
if gmr is None:
return None
link = Comment.query.filter_by(comment_id=gmr.id).first()
return link
def get_reply_to(self):
link = self.get_comment_link()
if link is None or link.target_id is None:
return None
return link.target()
def soft_delete(self, *args, **kwargs):
link = self.get_comment_link()
if link is not None:
link.delete()
super().soft_delete(*args, **kwargs)
class GeneratePublicIDMixin:
"""
    Mixin that ensures the public_id field is populated.
    The public_id is the ID that is used in the API; this must be globally
    unique and dereferenceable. This will be the URL for the API view of the
    object. It's used in several places: not only is it given out via the API,
    it's also vital information stored on the `Graveyard.public_id` field when
    a soft-deletion occurs. This is needed to follow the spec, which says we
    have to be able to provide a shell of an object and return a 410 (rather
    than a 404) when a deleted object is requested.
    This requires the urlgen from the request object (`request.urlgen`) to be
    provided, as the ID is a URL.
def get_public_id(self, urlgen):
# Verify that the class this is on actually has a public_id field...
if "public_id" not in self.__table__.columns.keys():
raise Exception("Model has no public_id field")
# Great! the model has a public id, if it's None, let's create one!
if self.public_id is None:
# We need the internal ID for this so ensure we've been saved.
self.save(commit=False)
# Create the URL
self.public_id = urlgen(
"mediagoblin.api.object",
object_type=self.object_type,
id=str(uuid.uuid4()),
qualified=True
)
self.save()
return self.public_id
class UserMixin:
object_type = "person"
@property
def bio_html(self):
return cleaned_markdown_conversion(self.bio)
def url_for_self(self, urlgen, **kwargs):
"""Generate a URL for this User's home page."""
return urlgen('mediagoblin.user_pages.user_home',
user=self.username, **kwargs)
class GenerateSlugMixin:
"""
Mixin to add a generate_slug method to objects.
Depends on:
- self.slug
- self.title
- self.check_slug_used(new_slug)
"""
def generate_slug(self):
"""
Generate a unique slug for this object.
This one does not *force* slugs, but usually it will probably result
in a niceish one.
The end *result* of the algorithm will result in these resolutions for
these situations:
- If we have a slug, make sure it's clean and sanitized, and if it's
unique, we'll use that.
- If we have a title, slugify it, and if it's unique, we'll use that.
- If we can't get any sort of thing that looks like it'll be a useful
slug out of a title or an existing slug, bail, and don't set the
slug at all. Don't try to create something just because. Make
sure we have a reasonable basis for a slug first.
- If we have a reasonable basis for a slug (either based on existing
slug or slugified title) but it's not unique, first try appending
the entry's id, if that exists
- If that doesn't result in something unique, tack on some randomly
generated bits until it's unique. That'll be a little bit of junk,
but at least it has the basis of a nice slug.
"""
#Is already a slug assigned? Check if it is valid
if self.slug:
slug = slugify(self.slug)
# otherwise, try to use the title.
elif self.title:
# assign slug based on title
slug = slugify(self.title)
else:
# We don't have any information to set a slug
return
# We don't want any empty string slugs
if slug == "":
return
# Otherwise, let's see if this is unique.
if self.check_slug_used(slug):
# It looks like it's being used... lame.
# Can we just append the object's id to the end?
if self.id:
slug_with_id = "{}-{}".format(slug, self.id)
if not self.check_slug_used(slug_with_id):
self.slug = slug_with_id
return # success!
# okay, still no success;
# let's whack junk on there till it's unique.
slug += '-' + uuid.uuid4().hex[:4]
# keep going if necessary!
while self.check_slug_used(slug):
slug += uuid.uuid4().hex[:4]
# self.check_slug_used(slug) must be False now so we have a slug that
# we can use now.
self.slug = slug
class MediaEntryMixin(GenerateSlugMixin, GeneratePublicIDMixin):
def check_slug_used(self, slug):
# import this here due to a cyclic import issue
# (db.models -> db.mixin -> db.util -> db.models)
from mediagoblin.db.util import check_media_slug_used
return check_media_slug_used(self.actor, slug, self.id)
@property
def object_type(self):
""" Converts media_type to pump-like type - don't use internally """
return self.media_type.split(".")[-1]
@property
def description_html(self):
"""
Rendered version of the description, run through
Markdown and cleaned with our cleaning tool.
"""
return cleaned_markdown_conversion(self.description)
def get_display_media(self):
"""Find the best media for display.
We try checking self.media_manager.fetching_order if it exists to
pull down the order.
Returns:
(media_size, media_path)
or, if not found, None.
"""
fetch_order = self.media_manager.media_fetch_order
# No fetching order found? well, give up!
if not fetch_order:
return None
media_sizes = self.media_files.keys()
for media_size in fetch_order:
if media_size in media_sizes:
return media_size, self.media_files[media_size]
def get_all_media(self):
"""
        Returns all available qualities of a media (except original)
"""
fetch_order = self.media_manager.media_fetch_order
# No fetching order found? well, give up!
if not fetch_order:
return None
media_sizes = self.media_files.keys()
all_media_path = []
for media_size in fetch_order:
if media_size in media_sizes and media_size != 'original':
file_metadata = self.get_file_metadata(media_size)
size = file_metadata['medium_size']
if media_size != 'webm_video':
all_media_path.append((media_size[5:], size,
self.media_files[media_size]))
else:
all_media_path.append(('default', size,
self.media_files[media_size]))
return all_media_path
def main_mediafile(self):
pass
@property
def slug_or_id(self):
if self.slug:
return self.slug
else:
return 'id:%s' % self.id
def url_for_self(self, urlgen, **extra_args):
"""
Generate an appropriate url for ourselves
Use a slug if we have one, else use our 'id'.
"""
uploader = self.get_actor
return urlgen(
'mediagoblin.user_pages.media_home',
user=uploader.username,
media=self.slug_or_id,
**extra_args)
@property
def thumb_url(self):
"""Return the thumbnail URL (for usage in templates)
Will return either the real thumbnail or a default fallback icon."""
# TODO: implement generic fallback in case MEDIA_MANAGER does
# not specify one?
if 'thumb' in self.media_files:
thumb_url = self._app.public_store.file_url(
self.media_files['thumb'])
else:
# No thumbnail in media available. Get the media's
# MEDIA_MANAGER for the fallback icon and return static URL
# Raises FileTypeNotSupported in case no such manager is enabled
manager = self.media_manager
thumb_url = self._app.staticdirector(manager['default_thumb'])
return thumb_url
@property
def original_url(self):
""" Returns the URL for the original image
will return self.thumb_url if original url doesn't exist"""
if "original" not in self.media_files:
return self.thumb_url
return self._app.public_store.file_url(
self.media_files["original"]
)
@property
def icon_url(self):
'''Return the icon URL (for usage in templates) if it exists'''
try:
return self._app.staticdirector(
self.media_manager['type_icon'])
except AttributeError:
return None
@cached_property
def media_manager(self):
"""Returns the MEDIA_MANAGER of the media's media_type
Raises FileTypeNotSupported in case no such manager is enabled
"""
manager = hook_handle(('media_manager', self.media_type))
if manager:
return manager(self)
# Not found? Then raise an error
raise FileTypeNotSupported(
"MediaManager not in enabled types. Check media_type plugins are"
" enabled in config?")
def get_fail_exception(self):
"""
Get the exception that's appropriate for this error
"""
if self.fail_error:
try:
return common.import_component(self.fail_error)
except ImportError:
# TODO(breton): fail_error should give some hint about why it
# failed. fail_error is used as a path to import().
# Unfortunately, I didn't know about that and put general error
# message there. Maybe it's for the best, because for admin,
# we could show even some raw python things. Anyway, this
# should be properly resolved. Now we are in a freeze, that's
# why I simply catch ImportError.
return None
def get_license_data(self):
"""Return license dict for requested license"""
return licenses.get_license_by_url(self.license or "")
def exif_display_iter(self):
if not self.media_data:
return
exif_all = self.media_data.get("exif_all")
for key in exif_all:
label = re.sub('(.)([A-Z][a-z]+)', r'\1 \2', key)
yield label.replace('EXIF', '').replace('Image', ''), exif_all[key]
def exif_display_data_short(self):
"""Display a very short practical version of exif info"""
if not self.media_data:
return
exif_all = self.media_data.get("exif_all")
exif_short = {}
if 'Image DateTimeOriginal' in exif_all:
# format date taken
takendate = datetime.strptime(
exif_all['Image DateTimeOriginal']['printable'],
'%Y:%m:%d %H:%M:%S').date()
taken = takendate.strftime('%B %d %Y')
exif_short.update({'Date Taken': taken})
aperture = None
if 'EXIF FNumber' in exif_all:
fnum = str(exif_all['EXIF FNumber']['printable']).split('/')
# calculate aperture
if len(fnum) == 2:
aperture = "f/%.1f" % (float(fnum[0])/float(fnum[1]))
elif fnum[0] != 'None':
aperture = "f/%s" % (fnum[0])
if aperture:
exif_short.update({'Aperture': aperture})
short_keys = [
('Camera', 'Image Model', None),
('Exposure', 'EXIF ExposureTime', lambda x: '%s sec' % x),
('ISO Speed', 'EXIF ISOSpeedRatings', None),
('Focal Length', 'EXIF FocalLength', lambda x: '%s mm' % x)]
for label, key, fmt_func in short_keys:
try:
val = fmt_func(exif_all[key]['printable']) if fmt_func \
else exif_all[key]['printable']
exif_short.update({label: val})
except KeyError:
pass
return exif_short
class TextCommentMixin(GeneratePublicIDMixin):
object_type = "comment"
@property
def content_html(self):
"""
the actual html-rendered version of the comment displayed.
Run through Markdown and the HTML cleaner.
"""
return cleaned_markdown_conversion(self.content)
def __unicode__(self):
return '<{klass} #{id} {actor} "{comment}">'.format(
klass=self.__class__.__name__,
id=self.id,
actor=self.get_actor,
comment=self.content)
def __repr__(self):
return '<{klass} #{id} {actor} "{comment}">'.format(
klass=self.__class__.__name__,
id=self.id,
actor=self.get_actor,
comment=self.content)
class CollectionMixin(GenerateSlugMixin, GeneratePublicIDMixin):
object_type = "collection"
def check_slug_used(self, slug):
# import this here due to a cyclic import issue
# (db.models -> db.mixin -> db.util -> db.models)
from mediagoblin.db.util import check_collection_slug_used
return check_collection_slug_used(self.actor, slug, self.id)
@property
def description_html(self):
"""
Rendered version of the description, run through
Markdown and cleaned with our cleaning tool.
"""
return cleaned_markdown_conversion(self.description)
@property
def slug_or_id(self):
return (self.slug or self.id)
def url_for_self(self, urlgen, **extra_args):
"""
Generate an appropriate url for ourselves
Use a slug if we have one, else use our 'id'.
"""
creator = self.get_actor
return urlgen(
'mediagoblin.user_pages.user_collection',
user=creator.username,
collection=self.slug_or_id,
**extra_args)
def add_to_collection(self, obj, content=None, commit=True):
""" Adds an object to the collection """
# It's here to prevent cyclic imports
from mediagoblin.db.models import CollectionItem
# Need the ID of this collection for this so check we've got one.
self.save(commit=False)
# Create the CollectionItem
item = CollectionItem()
item.collection = self.id
item.get_object = obj
if content is not None:
item.note = content
self.num_items = self.num_items + 1
# Save both!
self.save(commit=commit)
item.save(commit=commit)
return item
class CollectionItemMixin:
@property
def note_html(self):
"""
the actual html-rendered version of the note displayed.
Run through Markdown and the HTML cleaner.
"""
return cleaned_markdown_conversion(self.note)
class ActivityMixin(GeneratePublicIDMixin):
object_type = "activity"
VALID_VERBS = ["add", "author", "create", "delete", "dislike", "favorite",
"follow", "like", "post", "share", "unfavorite", "unfollow",
"unlike", "unshare", "update", "tag"]
def get_url(self, request):
return request.urlgen(
"mediagoblin.user_pages.activity_view",
username=self.get_actor.username,
id=self.id,
qualified=True
)
def generate_content(self):
""" Produces a HTML content for object """
        # Some of these have "simple" and "targetted" versions. If self.target is
        # set, it will pick the targetted one. If there is no targetted version,
        # the target information won't be added to the content.
verb_to_content = {
"add": {
"simple" : _("{username} added {object}"),
"targetted": _("{username} added {object} to {target}"),
},
"author": {"simple": _("{username} authored {object}")},
"create": {"simple": _("{username} created {object}")},
"delete": {"simple": _("{username} deleted {object}")},
"dislike": {"simple": _("{username} disliked {object}")},
"favorite": {"simple": _("{username} favorited {object}")},
"follow": {"simple": _("{username} followed {object}")},
"like": {"simple": _("{username} liked {object}")},
"post": {
"simple": _("{username} posted {object}"),
"targetted": _("{username} posted {object} to {target}"),
},
"share": {"simple": _("{username} shared {object}")},
"unfavorite": {"simple": _("{username} unfavorited {object}")},
"unfollow": {"simple": _("{username} stopped following {object}")},
"unlike": {"simple": _("{username} unliked {object}")},
"unshare": {"simple": _("{username} unshared {object}")},
"update": {"simple": _("{username} updated {object}")},
"tag": {"simple": _("{username} tagged {object}")},
}
object_map = {
"image": _("an image"),
"comment": _("a comment"),
"collection": _("a collection"),
"video": _("a video"),
"audio": _("audio"),
"person": _("a person"),
}
obj = self.object()
target = None if self.target_id is None else self.target()
actor = self.get_actor
content = verb_to_content.get(self.verb, None)
if content is None or self.object is None:
return
# Decide what to fill the object with
if hasattr(obj, "title") and obj.title.strip(" "):
object_value = obj.title
elif obj.object_type in object_map:
object_value = object_map[obj.object_type]
else:
object_value = _("an object")
# Do we want to add a target (indirect object) to content?
if target is not None and "targetted" in content:
if hasattr(target, "title") and target.title.strip(" "):
target_value = target.title
elif target.object_type in object_map:
target_value = object_map[target.object_type]
else:
target_value = _("an object")
self.content = content["targetted"].format(
username=actor.username,
object=object_value,
target=target_value
)
else:
self.content = content["simple"].format(
username=actor.username,
object=object_value
)
return self.content
def serialize(self, request):
href = request.urlgen(
"mediagoblin.api.object",
object_type=self.object_type,
id=self.id,
qualified=True
)
published = UTC.localize(self.published)
updated = UTC.localize(self.updated)
obj = {
"id": href,
"actor": self.get_actor.serialize(request),
"verb": self.verb,
"published": published.isoformat(),
"updated": updated.isoformat(),
"content": self.content,
"url": self.get_url(request),
"object": self.object().serialize(request),
"objectType": self.object_type,
"links": {
"self": {
"href": href,
},
},
}
if self.generator:
obj["generator"] = self.get_generator.serialize(request)
if self.title:
obj["title"] = self.title
if self.target_id is not None:
obj["target"] = self.target().serialize(request)
return obj
    def unserialize(self, data):
"""
        Takes the given data and sets it on this activity.
        Several pieces of data are not written, for security
        reasons. For example, changing the author or id of an activity.
if "verb" in data:
self.verb = data["verb"]
if "title" in data:
self.title = data["title"]
if "content" in data:
self.content = data["content"]
|
py | 1a47c54ac7be71c746281b92da35c9a70ba5e52c | import requests
import re
import pytesseract
from PIL import Image
def getPage(baseUrl):
r = requests.get(baseUrl)
if r.status_code != 200:
print("Page does not seem to be online. Could you double check it?")
return r.text
def searchHackWords(content):
comp = re.compile('h[a4]ck[e3]d', re.IGNORECASE)
res = comp.findall(content)
if bool(res):
return res
return None
def checkTextDefacement(baseUrl):
content = getPage(baseUrl)
res = searchHackWords(content)
return res
def checkImgDefacement(baseUrl):
im = Image.open("/home/valle/Downloads/4.jpg")
text = pytesseract.image_to_string(im)
res = searchHackWords(text)
return res
def checkDefacement(baseUrl):
resTxt = checkTextDefacement(baseUrl)
resImg = checkImgDefacement(baseUrl)
if resTxt or resImg:
print("### Possibly hacked ###")
print("Matched terms:")
if resTxt:
for t in resTxt:
print("\t"+t)
if resImg:
for t in resImg:
print("\t"+t)
else:
print("This page seems to be clean")
|
py | 1a47c63b95aa7ca28d4be38c8712fea7ad3dcb61 | # -*- coding: utf-8 -*-
"""
Created on Thu Mar 15 14:00:36 2018
@author: Eric
"""
import glob
import random
import pandas as pd
import numpy as np
def more_work(n, user):
all_txt = glob.glob("*.txt")
all_pmc_files = set()
# Gets all of the files that we have done into a set
for txt in all_txt:
file_data = np.genfromtxt(txt, dtype = int)
for pmc_file in file_data:
all_pmc_files.add(pmc_file)
# Gets all of the PMC files
pmcs = glob.glob("..\\..\\annotation_test\\*.html")
# Determines which PMC files we haven't done, and creates a list of N of them
to_do = []
for file in pmcs:
loc = '..\\..\\annotation_test\\PMC'
file_num = int(file.replace(loc, "").replace(".html", "")) # trim the file to have just the number
if not(file_num in all_pmc_files):
to_do.append(file_num)
random.shuffle(to_do)
to_do = to_do[:n]
for i in range(len(to_do)):
print(to_do[i])
loc = ".//ordering_list_" + user + ".txt"
done = np.loadtxt(loc, dtype = int)
new_list = np.append(done, to_do)
np.savetxt(loc, new_list)
|
py | 1a47c68b9aef7c023d626b33af061e139ae447ea | from collections import OrderedDict
class ParseError(ValueError):
pass
class WpaSupplicantConf:
"""This class parses a wpa_supplicant configuration file, allows
manipulation of the configured networks and then writing out of
the updated file.
WARNING: Although care has been taken to preserve ordering,
comments will be lost for any wpa_supplicant.conf which is
round-tripped through this class.
"""
def __init__(self, lines):
self._fields = OrderedDict()
self._networks = OrderedDict()
network = None
for line in lines:
line = line.strip()
if not line or line.startswith('#'):
continue
if line == "}":
if network is None:
raise ParseError("unxpected '}'")
ssid = network.pop('ssid', None)
if ssid is None:
raise ParseError('missing "ssid" for network')
self._networks[dequote(ssid)] = network
network = None
continue
parts = [x.strip() for x in line.split('=', 1)]
if len(parts) != 2:
raise ParseError("invalid line: %{!r}".format(line))
left, right = parts
if right == '{':
if left != 'network':
raise ParseError('unsupported section: "{}"'.format(left))
if network is not None:
raise ParseError("can't nest networks")
network = OrderedDict()
else:
if network is None:
self._fields[left] = right
else:
network[left] = right
def fields(self):
return self._fields
def networks(self):
return self._networks
def add_network(self, ssid, **attrs):
self._networks[ssid] = attrs
def remove_network(self, ssid):
self._networks.pop(ssid, None)
def write(self, f):
for name, value in self._fields.items():
f.write("{}={}\n".format(name, value))
for ssid, info in self._networks.items():
f.write("\nnetwork={\n")
f.write(' ssid="{}"\n'.format(ssid))
for name, value in info.items():
f.write(" {}={}\n".format(name, value))
f.write("}\n")
def dequote(v):
if len(v) < 2:
return v
if v.startswith('"') and v.endswith('"'):
return v[1:-1]
return v
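# Minimal usage sketch, not part of the original module: parse an existing
# wpa_supplicant.conf, add a network and write the result back out. The paths
# and credentials below are hypothetical placeholders.
if __name__ == "__main__":
    with open("/etc/wpa_supplicant/wpa_supplicant.conf") as f:
        conf = WpaSupplicantConf(f)
    # Values are written verbatim, so string settings such as psk need their own quotes
    conf.add_network("ExampleSSID", psk='"example-passphrase"', key_mgmt="WPA-PSK")
    with open("/tmp/wpa_supplicant.conf.new", "w") as out:
        conf.write(out)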
|
py | 1a47c70f5cdea9badc1580d45b8a33299c1f3be6 | import logging
import socket
log = logging.getLogger(__name__)
POLICY = (
'<cross-domain-policy><allow-access-from domain="*" '
'to-ports="*" /></cross-domain-policy>\0'
)
POLICYREQUEST = "<policy-file-request/>"
def client_handle(sock, address):
log.info("%s:%s: Connection accepted." % address)
sock.settimeout(3)
try:
input_data = sock.recv(128)
if input_data.startswith(POLICYREQUEST):
sock.sendall(POLICY)
log.info("%s:%s: Policy sent. Closing connection." % address)
else:
log.info("%s:%s: Wrong payload. Closing connection." % address)
except socket.timeout:
log.info("%s:%s: Timeout" % address)
sock.close()
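# Hedged usage sketch, not part of the original module: client_handle expects a
# (socket, address) pair, so it can be plugged into a connection-per-greenlet
# server such as gevent's StreamServer. The gevent dependency and port 843 (the
# conventional Flash policy port) are assumptions, and the module's str-based
# socket handling assumes a Python 2 era runtime.
if __name__ == "__main__":
    from gevent.server import StreamServer  # assumed dependency
    logging.basicConfig(level=logging.INFO)
    StreamServer(("0.0.0.0", 843), client_handle).serve_forever()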
|
py | 1a47c8318194d3c5f54d15981f64e514e12cb302 | import os
import subprocess
import logging
log = logging.getLogger('grocer-utils')
log.setLevel(logging.INFO)
ch = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
log.addHandler(ch)
def foodcritic(fc_bin, path, fc_strict=False):
"""
Execute foodcritic
:rtype : tuple
:param fc_bin: path to food critic binary
    :param path: dir path to execute FC on
:param fc_strict: bool. true if foodcritic should fail if any of the checks do not pass
:return: tpl. output, errors, returncode
"""
if fc_strict:
cmd = '{0} -f any {1}'.format(fc_bin, path)
else:
cmd = '{0} {1}'.format(fc_bin, path)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, shell=True)
output, errors = p.communicate()
return output, errors, p.returncode
def rubocop(rubocop_bin, path):
"""
Execute rubocop
:rtype : tuple
    :param rubocop_bin: path to rubocop binary
    :param path: dir path to execute rubocop on
:return: tpl. output, errors, returncode
"""
cmd = '{0} {1}'.format(rubocop_bin, path)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, shell=True)
output, errors = p.communicate()
return output, errors, p.returncode
def berks(berks_bin, path, action='update'):
"""
Execute various berks commands
:rtype : tuple
:param berks_bin: path to berks bin
:param path: path to change directory to before running berks commands (berks is a dir context aware tool)
:param action: berks action to run, e.g. berks install
:return: tpl. output, errors, returncode
"""
cmd = 'cd {0} && {1} {2}'.format(path, berks_bin, action)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, shell=True)
output, errors = p.communicate()
return output, errors, p.returncode
def ruby_syntax(ruby_bin, path):
"""
Check ruby syntax using ruby interpreter -c flag
:rtype : tuple
:param ruby_bin: path to ruby bin
:param path: file path to ruby code to check
:return: tpl. output, errors, returncode
"""
cmd = '{0} -c {1}'.format(ruby_bin, path)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, shell=True)
output, errors = p.communicate()
return output, errors, p.returncode
def chefspec(chefspec_bin, path):
raise NotImplementedError
def get_file_types(dir_path):
"""
Get the files in a directory based on type
:rtype : tuple
:param dir_path: str. path to directory to search
:return: 4-part tuple. ruby_files, json_files, md_files, other_type
"""
ruby_files = []
json_files = []
md_files = []
other_type = []
for root, dirs, files in os.walk(dir_path):
if "git" in root:
pass
else:
for _file in files:
if _file[-3:] == '.rb':
ruby_files.append(os.path.join(root,_file))
elif _file[-5:] == '.json':
json_files.append(os.path.join(root,_file))
elif _file[-3:] == '.md':
md_files.append(os.path.join(root,_file))
else:
other_type.append(_file)
return ruby_files, json_files, md_files, other_type
def rspec_test(rspec_bin, path):
"""
    Execute rspec tests
:param rspec_bin: path to rspec bin
:param path: dir path to recipe dir root
:return: tpl. output, errors, returncode
"""
path = os.path.join(path,'test/integration/default')
if not os.path.isdir(path):
return "No rspec tests found in {0}".format(path), None, 0
cmd = '{0} -c {1}/*'.format(rspec_bin, path)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, shell=True)
output, errors = p.communicate()
return output, errors, p.returncode
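# Illustrative usage sketch, not part of the original module: run foodcritic and
# rubocop against a cookbook and report failures. The binary names and cookbook
# path are hypothetical placeholders.
if __name__ == "__main__":
    cookbook = "/tmp/my-cookbook"
    checks = (("foodcritic", foodcritic("foodcritic", cookbook, fc_strict=True)),
              ("rubocop", rubocop("rubocop", cookbook)))
    for name, (out, err, rc) in checks:
        if rc != 0:
            log.info("%s failed (exit %s):\n%s\n%s", name, rc, out, err)
        else:
            log.info("%s passed", name)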
|
py | 1a47c894d0761a2fe0dda8278f5bc66297d90bea | """
Characters
Characters are (by default) Objects set up to be puppeted by Players.
They are what you "see" in game. The Character class in this module
is set up to be the "default" character type created by the default
creation commands.
"""
from evennia.objects.objects import DefaultCharacter
from typeclasses.mixins import MsgMixins, ObjectMixins, NameMixins
from typeclasses.wearable.mixins import UseEquipmentMixins
from world.msgs.messagehandler import MessageHandler
from world.msgs.languagehandler import LanguageHandler
from evennia.utils.utils import lazy_property, variable_from_module
import time
from world.stats_and_skills import do_dice_check
class Character(UseEquipmentMixins, NameMixins, MsgMixins, ObjectMixins, DefaultCharacter):
"""
The Character defaults to reimplementing some of base Object's hook methods with the
following functionality:
at_basetype_setup - always assigns the DefaultCmdSet to this object type
(important!)sets locks so character cannot be picked up
and its commands only be called by itself, not anyone else.
(to change things, use at_object_creation() instead).
at_after_move - Launches the "look" command after every move.
at_post_unpuppet(player) - when Player disconnects from the Character, we
store the current location in the pre_logout_location Attribute and
move it to a None-location so the "unpuppeted" character
object does not need to stay on grid. Echoes "Player has disconnected"
to the room.
at_pre_puppet - Just before Player re-connects, retrieves the character's
pre_logout_location Attribute and move it back on the grid.
at_post_puppet - Echoes "PlayerName has entered the game" to the room.
"""
def at_object_creation(self):
"""
Called once, when this object is first created.
"""
# setting up custom attributes for ArxMUSH
# BriefMode is for toggling brief descriptions from rooms
self.db.briefmode = False
self.db.gender = "Female"
self.db.age = 20
self.db.concept = "None"
self.db.fealty = "None"
self.db.marital_status = "single"
self.db.family = "None"
self.db.dice_string = "Default Dicestring"
self.db.health_status = "alive"
self.db.sleep_status = "awake"
self.db.skills = {}
self.db.abilities = {}
self.at_init()
self.locks.add("delete:perm(Immortals);tell:all()")
@property
def is_character(self):
return True
@lazy_property
def messages(self):
return MessageHandler(self)
@lazy_property
def languages(self):
return LanguageHandler(self)
def at_after_move(self, source_location, **kwargs):
"""
Hook for after movement. Look around, with brief determining how much detail we get.
:param source_location: Room
:return:
"""
table = self.db.sitting_at_table
if table and source_location != self.location:
table.leave(self)
if self.db.briefmode:
string = ""
# handle cases of self.location being None or not a Room object
try:
string = "{c%s{n" % self.location.name
string += self.location.return_contents(self, show_places=False)
string += self.location.event_string()
except AttributeError:
pass
self.msg(string)
else:
self.msg(self.at_look(self.location))
if self.ndb.waypoint:
traversed = self.ndb.traversed or []
try:
traversed.append(source_location.id)
except AttributeError:
pass
self.ndb.traversed = list(set(traversed))
if self.location == self.ndb.waypoint:
self.msg("You have reached your destination.")
self.ndb.waypoint = None
self.ndb.traversed = []
return
dirs = self.get_directions(self.ndb.waypoint)
if dirs:
self.msg("You sense your destination lies through the %s." % dirs)
else:
self.msg("You've lost track of how to get to your destination.")
self.ndb.waypoint = None
self.ndb.traversed = []
if self.ndb.following and self.ndb.following.location != self.location:
self.stop_follow()
if self.db.room_title:
self.attributes.remove("room_title")
if self.combat.combat and self in self.combat.combat.ndb.observers:
self.combat.combat.remove_observer(self)
if self.location:
self.location.triggerhandler.check_room_entry_triggers(self)
def return_appearance(self, pobject, detailed=False, format_desc=False, show_contents=False):
"""
This is a convenient hook for a 'look'
command to call.
"""
if not pobject:
return
# get and identify all objects
if pobject is self or pobject.check_permstring("builders"):
detailed = True
strip_ansi = pobject.db.stripansinames
string = "{c%s{n" % self.get_fancy_name()
# Health appearance will also determine whether we
# use an alternate appearance if we are dead.
health_appearance = self.get_health_appearance()
# desc is our current appearance, can be fake. self.perm_desc is 'true' form
desc = self.desc
# to do: check to see through disguises
if strip_ansi:
try:
from evennia.utils.ansi import parse_ansi
desc = parse_ansi(desc, strip_ansi=True)
except (AttributeError, ValueError, TypeError, UnicodeDecodeError):
pass
script = self.appearance_script
if desc:
extras = self.return_extras(pobject)
if extras:
extras += "\n"
string += "\n\n%s%s" % (extras, desc)
if script:
scent = script.db.scent
if scent:
string += "\n\n%s{n" % scent
if health_appearance:
string += "\n\n%s" % health_appearance
string += self.return_contents(pobject, detailed, strip_ansi=strip_ansi)
return string
@property
def species(self):
return self.db.species or "Human"
@property
def appearance_script(self):
scripts = self.scripts.get("Appearance")
if scripts:
return scripts[0]
def return_extras(self, pobject):
"""
Return a string from glancing at us
:param pobject: Character
:return:
"""
mask = self.db.mask
if not mask:
hair = self.db.haircolor or ""
eyes = self.db.eyecolor or ""
skin = self.db.skintone or ""
height = self.db.height or ""
species = self.species
gender = self.db.gender or ""
age = self.db.age
else:
hair = mask.db.haircolor or "--"
eyes = mask.db.eyecolor or "--"
skin = mask.db.skintone or "--"
height = mask.db.height or "--"
species = mask.db.species or "--"
gender = mask.db.gender or "--"
age = mask.db.age or "--"
hair = hair.capitalize()
eyes = eyes.capitalize()
skin = skin.capitalize()
gender = gender.capitalize()
if pobject.check_permstring("builders"):
true_age = self.db.real_age
if true_age and true_age != age:
pobject.msg("{wThis true age is:{n %s" % true_age)
string = """
{w.---------------------->Physical Characteristics<---------------------.{n
{w| |{n
{w| Species:{n %(species)-14s {wGender:{n %(gender)-15s {wAge:{n %(age)-15s{w|{n
{w| Height:{n %(height)-15s {wEye Color:{n %(eyes)-15s {w|{n
{w| Hair Color:{n %(hair)-11s {wSkin Tone:{n %(skin)-17s {w|{n
{w.---------------------------------------------------------------------.{n
""" % ({'species': species, 'hair': hair, 'eyes': eyes, 'height': height, 'gender': gender, 'age': age, 'skin': skin})
return string
def death_process(self, *args, **kwargs):
"""
This object dying. Set its state to dead, send out
death message to location. Add death commandset.
"""
if self.db.health_status == "dead":
return
self.db.health_status = "dead"
self.db.container = True
if self.location:
self.location.msg_contents("{r%s has died.{n" % self.name)
try:
from commands.cmdsets import death
cmds = death.DeathCmdSet
if cmds.key not in [ob.key for ob in self.cmdset.all()]:
self.cmdset.add(cmds, permanent=True)
except Exception as err:
print("<<ERROR>>: Error when importing death cmdset: %s" % err)
from server.utils.arx_utils import inform_staff
if not self.db.npc:
inform_staff("{rDeath{n: Character {c%s{n has died." % self.key)
def resurrect(self, *args, **kwargs):
"""
Cue 'Bring Me Back to Life' by Evanessence.
"""
self.db.health_status = "alive"
self.db.container = False
if self.location:
self.location.msg_contents("{w%s has returned to life.{n" % self.name)
try:
from commands.cmdsets import death
self.cmdset.delete(death.DeathCmdSet)
except Exception as err:
print("<<ERROR>>: Error when importing mobile cmdset: %s" % err)
# we'll also be asleep when we're dead, so that we're resurrected unconscious if we're brought back
self.fall_asleep(uncon=True, quiet=True)
def fall_asleep(self, uncon=False, quiet=False, verb=None, **kwargs):
"""
Falls asleep. Uncon flag determines if this is regular sleep,
or unconsciousness.
"""
reason = " is %s and" % verb if verb else ""
if uncon:
self.db.sleep_status = "unconscious"
else:
self.db.sleep_status = "asleep"
if self.location and not quiet:
self.location.msg_contents("%s%s falls %s." % (self.name, reason, self.db.sleep_status))
try:
from commands.cmdsets import sleep
cmds = sleep.SleepCmdSet
if cmds.key not in [ob.key for ob in self.cmdset.all()]:
self.cmdset.add(cmds, permanent=True)
except Exception as err:
print("<<ERROR>>: Error when importing sleep cmdset: %s" % err)
@property
def conscious(self):
return ((self.db.sleep_status != "unconscious" and self.db.sleep_status != "asleep")
and self.db.health_status != "dead")
def wake_up(self, quiet=False):
"""
Wakes up.
"""
if self.db.health_status == "dead":
return
if self.location:
if not quiet and not self.conscious:
self.location.msg_contents("%s wakes up." % self.name)
try:
from commands.cmdsets import sleep
self.cmdset.delete(sleep.SleepCmdSet)
except Exception as err:
print("<<ERROR>>: Error when importing mobile cmdset: %s" % err)
self.db.sleep_status = "awake"
return
def recovery_test(self, diff_mod=0, free=False):
"""
A mechanism for healing characters. Whenever they get a recovery
test, they heal the result of a willpower+stamina roll, against
a base difficulty of 0. diff_mod can change that difficulty value,
and with a higher difficulty can mean it can heal a negative value,
resulting in the character getting worse off. We go ahead and change
the player's health now, but leave the result of the roll in the
caller's hands to trigger other checks - death checks if we got
worse, unconsciousness checks, whatever.
"""
# no helping us if we're dead
if self.db.health_status == "dead":
return
diff = 0 + diff_mod
roll = do_dice_check(self, stat_list=["willpower", "stamina"], difficulty=diff)
self.change_health(roll)
if not free:
self.db.last_recovery_test = time.time()
return roll
def change_health(self, amount, quiet=False, affect_real_dmg=True, wake=True):
"""
Change character's health and maybe tell them about it.
Positive amount will 'heal'. Negative will 'harm'.
        A character healed to at or below their max hp will attempt to wake up.
"""
difference = self.get_health_percentage(abs(amount))
if not quiet:
msg = "You feel "
if difference <= 0:
msg += "no "
elif difference <= 0.1:
msg += "a little "
elif difference <= 0.25:
pass
elif difference <= 0.5:
msg += "a lot "
elif difference <= 0.75:
msg += "significantly "
else:
msg += "profoundly "
msg += "better" if amount > 0 else "worse"
punctuation = "." if difference < 0.5 else "!"
self.msg(msg + punctuation)
if affect_real_dmg:
self.real_dmg -= amount
else:
self.temp_dmg -= amount
if difference:
self.triggerhandler.check_health_change_triggers(amount)
if wake and self.dmg <= self.max_hp and self.db.sleep_status != "awake":
self.wake_up()
def get_health_percentage(self, damage=None):
"""Returns the float percentage of the health. If damage is not specified, we use self.dmg"""
if damage is None:
damage = self.dmg
return float(damage) / float(self.max_hp)
def get_health_appearance(self):
"""
Return a string based on our current health.
"""
wounds = self.get_health_percentage()
msg = "%s " % self.name
if self.db.health_status == "dead":
return msg + "is currently dead."
elif wounds <= 0:
msg += "is in perfect health"
elif 0 < wounds <= self.death_threshold:
msg += "seems to have %s injuries" % self.get_wound_descriptor(self.dmg)
else:
msg += "is in critical condition - possibly dying"
sleep_status = self.db.sleep_status
if sleep_status and sleep_status != "awake":
msg += ", and is %s" % sleep_status
return msg + "."
def get_wound_descriptor(self, dmg):
wound = self.get_health_percentage(dmg)
if wound <= 0:
wound_desc = "no"
elif wound <= 0.1:
wound_desc = "minor"
elif 0.1 < wound <= 0.25:
wound_desc = "moderate"
elif 0.25 < wound <= 0.5:
wound_desc = "serious"
elif 0.5 < wound <= 0.75:
wound_desc = "severe"
elif 0.75 < wound < 2.0:
wound_desc = "grievous"
else:
wound_desc = "grave"
return wound_desc
def sensing_check(self, difficulty=15, invis=False, allow_wake=False):
"""
See if the character detects something that is hiding or invisible.
The difficulty is supplied by the calling function.
        A target can be included for additional situational context.
"""
if not self.conscious and not allow_wake:
return -100
roll = do_dice_check(self, stat="perception", stat_keep=True, difficulty=difficulty)
return roll
def get_fancy_name(self, short=False, display_mask=True):
"""
Returns either an illusioned name, a long_name with titles, or our key.
"""
if self.db.false_name and display_mask:
return self.db.false_name
if not short and self.db.longname:
return self.db.longname
return self.db.colored_name or self.key
@property
def max_hp(self):
"""Returns our max hp"""
hp = self.db.stamina or 0
hp *= 20
hp += 20
bonus = self.db.bonus_max_hp or 0
hp += bonus
hp += self.boss_rating * 100
return hp
@property
def death_threshold(self):
"""
Multiplier on how much higher our damage must be than our max health for us to
roll to survive dying.
Returns:
float: Multiplier before death checks happen
"""
return 1.25
@property
def dmg(self):
"""Returns how much damage we've taken."""
return self.real_dmg + self.temp_dmg
@dmg.setter
def dmg(self, value):
self.real_dmg = value
# alias for dmg
damage = dmg
@property
def temp_dmg(self):
if self.ndb.temp_dmg is None:
self.ndb.temp_dmg = 0
return self.ndb.temp_dmg
@temp_dmg.setter
def temp_dmg(self, value):
self.ndb.temp_dmg = value
@property
def real_dmg(self):
return self.db.damage or 0
@real_dmg.setter
def real_dmg(self, dmg):
if dmg < 1:
dmg = 0
self.db.damage = dmg
self.start_recovery_script()
def start_recovery_script(self):
# start the script if we have damage
start_script = self.dmg > 0
scripts = [ob for ob in self.scripts.all() if ob.key == "Recovery"]
if scripts:
if start_script:
scripts[0].start()
else:
scripts[0].stop()
elif start_script:
self.scripts.add("typeclasses.scripts.recovery.Recovery")
@property
def xp(self):
return self.db.xp or 0
@xp.setter
def xp(self, value):
self.db.xp = value
def adjust_xp(self, value):
"""
Spend or earn xp. Total xp keeps track of all xp we've earned on this
character, and isn't lowered by spending xp. Checks for having sufficient
xp should be before this takes place, so we'll raise an exception if they
can't pay the cost.
"""
if not self.db.total_xp:
self.db.total_xp = 0
if not self.xp:
self.xp = 0
if value > 0:
self.db.total_xp += value
try:
self.roster.adjust_xp(value)
except (AttributeError, ValueError, TypeError):
pass
else:
if self.xp < abs(value):
raise ValueError("Bad value passed to adjust_xp -" +
" character did not have enough xp to pay for the value.")
self.xp += value
def follow(self, targ):
if not targ.ndb.followers:
targ.ndb.followers = []
targ.msg("%s starts to follow you. To remove them as a follower, use 'ditch'." % self.name)
if self not in targ.ndb.followers:
targ.ndb.followers.append(self)
self.msg("You start to follow %s. To stop following, use 'follow' with no arguments." % targ.name)
self.ndb.following = targ
def stop_follow(self):
f_targ = self.ndb.following
if not f_targ:
return
self.msg("You stop following %s." % f_targ.name)
if f_targ.ndb.followers:
try:
f_targ.ndb.followers.remove(self)
f_targ.msg("%s stops following you." % self.name)
except (ValueError, TypeError, AttributeError):
pass
self.ndb.following = None
def msg_watchlist(self, msg):
"""
Sends a message to all players who are watching this character if
we are not hiding from watch.
"""
watchers = self.db.watched_by or []
pc = self.player_ob
if not pc:
return
if not watchers or pc.db.hide_from_watch:
return
for watcher in watchers:
spam = watcher.ndb.journal_spam or []
if self not in spam:
watcher.msg(msg)
spam.append(self)
watcher.ndb.journal_spam = spam
def _get_max_support(self):
try:
dompc = self.player_ob.Dominion
remaining = 0
for member in dompc.memberships.filter(deguilded=False):
remaining += member.pool_share
for ren in dompc.renown.all():
remaining += ren.level
except (TypeError, AttributeError, ValueError):
return 0
interval = self.social_clout
multiplier = 1.0
total = 0
if interval <= 0:
return 0
while multiplier > 0:
if interval >= remaining:
total += remaining * multiplier
return int(total)
total += interval * multiplier
multiplier -= 0.25
remaining -= interval
return int(total)
max_support = property(_get_max_support)
@property
def social_clout(self):
"""Another representation of social value of a character"""
total = 0
my_skills = self.db.skills or {}
skills_used = {"diplomacy": 2, "empathy": 2, "seduction": 2, "etiquette": 2, "manipulation": 2, "propaganda": 2,
"intimidation": 1, "leadership": 1, "streetwise": 1, "performance": 1, "haggling": 1}
stats_used = {"charm": 2, "composure": 1, "command": 1}
for skill, exponent in skills_used.items():
total += pow(my_skills.get(skill, 0), exponent)
for stat, exponent in stats_used.items():
total += pow((self.attributes.get(stat) or 0), exponent)
return total // 5
@property
def guards(self):
if self.db.assigned_guards is None:
self.db.assigned_guards = []
return self.db.assigned_guards
def remove_guard(self, guard):
"""
This discontinues anything we were using the guard for.
Args:
guard: Previously a guard, possibly a retainer.
"""
if guard in self.guards:
self.guards.remove(guard)
if self.messages.custom_messenger == guard:
self.messages.custom_messenger = None
@property
def num_guards(self):
return sum(ob.quantity for ob in self.guards)
@property
def present_guards(self):
return [ob for ob in self.guards if ob.location == self.location]
@property
def num_armed_guards(self):
try:
return sum([ob.num_armed_guards for ob in self.present_guards])
except TypeError:
return 0
@property
def max_guards(self):
try:
return 15 - (self.db.social_rank or 10)
except TypeError:
return 5
def get_directions(self, room):
"""
Uses the ObjectDB manager and repeated related_set calls in order
to find the exit in the current room that directly points to it.
"""
loc = self.location
if not loc:
return
x_ori = loc.db.x_coord
y_ori = loc.db.y_coord
x_dest = room.db.x_coord
y_dest = room.db.y_coord
check_exits = []
try:
x = x_dest - x_ori
y = y_dest - y_ori
dest = ""
if y > 0:
dest += "north"
if y < 0:
dest += "south"
check_exits.append(dest)
if x > 0:
dest += "east"
check_exits.append("east")
if x < 0:
dest += "west"
check_exits.append("west")
if abs(x) > abs(y):
check_exits.reverse()
# inserts the NE/SE/SW/NW direction at 0 to be highest priority
check_exits.insert(0, dest)
for dirname in check_exits:
if loc.locations_set.filter(db_key__iexact=dirname).exclude(db_destination__in=self.ndb.traversed or []):
return "{c" + dirname + "{n"
dest = "{c" + dest + "{n roughly. Please use '{w@map{n' to determine an exact route"
except (AttributeError, TypeError, ValueError):
print("Error in using directions for rooms: %s, %s" % (loc.id, room.id))
print("origin is (%s,%s), destination is (%s, %s)" % (x_ori, y_ori, x_dest, y_dest))
self.msg("Rooms not properly set up for @directions. Logging error.")
return
# try to find it through traversal
base_query = "db_destination_id"
exit_name = []
iterations = 0
# anything beyond 10 squares becomes extremely lengthy
max_iter = 5
exit_ids = [ob.id for ob in loc.exits]
q_add = ""
from django.db.models import Q
exclude_ob = Q()
def get_new_exclude_ob():
"""Helper function to build Q() objects to exclude"""
base_exclude_query = "db_tags__db_key"
other_exclude_query = {q_add + "db_destination_id": loc.id}
traversed_query = {q_add + "db_destination_id__in": self.ndb.traversed or []}
exclude_query = q_add + base_exclude_query
exclude_dict = {exclude_query: "secret"}
return Q(**exclude_dict) | Q(**other_exclude_query) | Q(**traversed_query)
while not exit_name and iterations < max_iter:
q_add = "db_destination__locations_set__" * iterations
query = q_add + base_query
filter_dict = {query: room.id}
exclude_ob |= get_new_exclude_ob()
q_ob = Q(Q(**filter_dict) & ~exclude_ob)
exit_name = loc.locations_set.distinct().filter(id__in=exit_ids).exclude(exclude_ob).filter(q_ob)
iterations += 1
if not exit_name:
return "{c" + dest + "{n"
return "{c" + str(exit_name[0]) + "{n"
def at_post_puppet(self):
"""
Called just after puppeting has completed.
:type self: Character
"""
super(Character, self).at_post_puppet()
try:
self.messages.messenger_notification(2, force=True)
except (AttributeError, ValueError, TypeError):
import traceback
traceback.print_exc()
guards = self.guards
for guard in guards:
if guard.discreet:
continue
docked_location = guard.db.docked
if docked_location and docked_location == self.location:
guard.summon()
def at_post_unpuppet(self, player, session=None, **kwargs):
"""
        We stow away the character when the player goes ooc/logs off;
        otherwise the character object would remain in the room even after the
        player logged off ("headless", so to speak).
:type self: Character
:type player: Player
:type session: Session
"""
super(Character, self).at_post_unpuppet(player, session)
if not self.sessions.count():
table = self.db.sitting_at_table
if table:
table.leave(self)
guards = self.guards
for guard in guards:
try:
if guard.location and 'persistent_guard' not in guard.tags.all():
guard.dismiss()
except AttributeError:
continue
@property
def portrait(self):
from web.character.models import Photo
try:
return self.roster.profile_picture
except (AttributeError, Photo.DoesNotExist):
return None
def get_absolute_url(self):
from django.core.urlresolvers import reverse
return reverse('character:sheet', kwargs={'object_id': self.id})
@lazy_property
def combat(self):
from typeclasses.scripts.combat.combatant import CombatHandler
return CombatHandler(self)
def view_stats(self, viewer, combat=False):
from commands.base_commands.roster import display_stats, display_skills, display_abilities
display_stats(viewer, self)
display_skills(viewer, self)
display_abilities(viewer, self)
if combat:
viewer.msg(self.combat.display_stats())
@property
def posecount(self):
return self.db.pose_count or 0
@posecount.setter
def posecount(self, val):
self.db.pose_count = val
@property
def previous_posecount(self):
return self.db.previous_posecount or 0
@previous_posecount.setter
def previous_posecount(self, val):
self.db.previous_posecount = val
@property
def total_posecount(self):
return self.posecount + self.previous_posecount
def announce_move_from(self, destination, msg=None, mapping=None, **kwargs):
"""
Called if the move is to be announced. This is
called while we are still standing in the old
location.
Args:
destination (Object): The place we are going to.
msg (str, optional): a replacement message.
mapping (dict, optional): additional mapping objects.
"""
def format_string(viewer):
if msg:
return msg
if secret:
return "%s is leaving." % self.get_display_name(viewer)
else:
return "%s is leaving, heading for %s." % (self.get_display_name(viewer),
destination.get_display_name(viewer))
if not self.location:
return
secret = False
if mapping:
secret = mapping.get('secret', False)
for obj in self.location.contents:
if obj != self:
string = format_string(obj)
obj.msg(string)
def announce_move_to(self, source_location, msg=None, mapping=None, **kwargs):
"""
Called after the move if the move was not quiet. At this point
we are standing in the new location.
Args:
source_location (Object): The place we came from
msg (str, optional): the replacement message if location.
mapping (dict, optional): additional mapping objects.
You can override this method and call its parent with a
message to simply change the default message. In the string,
you can use the following as mappings (between braces):
object: the object which is moving.
exit: the exit from which the object is moving (if found).
origin: the location of the object before the move.
destination: the location of the object after moving.
"""
if not source_location and self.location.has_player:
# This was created from nowhere and added to a player's
# inventory; it's probably the result of a create command.
string = "You now have %s in your possession." % self.get_display_name(self.location)
self.location.msg(string)
return
secret = False
if mapping:
secret = mapping.get('secret', False)
def format_string(viewer):
if msg:
return msg
if secret:
return "%s arrives." % self.get_display_name(viewer)
else:
from_str = " from %s" % source_location.get_display_name(viewer) if source_location else ""
return "%s arrives%s." % (self.get_display_name(viewer), from_str)
for obj in self.location.contents:
if obj != self:
string = format_string(obj)
obj.msg(string)
@property
def can_crit(self):
try:
if self.roster.roster.name == "Active":
return True
else:
return False
except AttributeError:
return False
@property
def titles(self):
full_titles = self.db.titles or []
return ", ".join(str(ob) for ob in full_titles)
@property
def is_npc(self):
if self.tags.get("npc"):
return True
try:
if self.roster.roster.name == "Unavailable":
return True
except AttributeError:
pass
return False
@property
def attackable(self):
return not bool(self.tags.get("unattackable"))
@property
def boss_rating(self):
try:
return int(self.db.boss_rating)
except (TypeError, ValueError):
return 0
@boss_rating.setter
def boss_rating(self, value):
self.db.boss_rating = value
@property
def sleepless(self):
"""Cannot fall unconscious - undead, etc"""
return bool(self.tags.get("sleepless"))
@property
def defense_modifier(self):
return self.db.defense_modifier or 0
@defense_modifier.setter
def defense_modifier(self, value):
self.db.defense_modifier = value
@property
def attack_modifier(self):
base = self.db.attack_modifier or 0
return base + (self.boss_rating * 10)
@attack_modifier.setter
def attack_modifier(self, value):
self.db.attack_modifier = value
def search(self, # type: Character
searchdata, global_search=False, use_nicks=True, typeclass=None, location=None,
attribute_name=None, quiet=False, exact=False, candidates=None, nofound_string=None,
multimatch_string=None, use_dbref=True):
from django.conf import settings
# if we're staff, we just use the regular search method
if self.check_permstring("builders"):
return super(Character, self).search(searchdata, global_search=global_search, use_nicks=use_nicks,
typeclass=typeclass, location=location,
attribute_name=attribute_name, quiet=quiet, exact=exact,
candidates=candidates, nofound_string=nofound_string,
multimatch_string=multimatch_string, use_dbref=use_dbref)
        # we're not staff. We get search results, then throw out matches of people wearing masks that were matched by their real key
results = super(Character, self).search(searchdata, global_search=global_search, use_nicks=use_nicks,
typeclass=typeclass, location=location, attribute_name=attribute_name,
quiet=True, exact=exact, candidates=candidates,
nofound_string=nofound_string, multimatch_string=multimatch_string,
use_dbref=use_dbref)
# we prune results of keys for masked (false_name) objects in results
results = [ob for ob in results if not ob.db.false_name or searchdata.lower() != ob.key.lower()]
# quiet means that messaging is handled elsewhere
if quiet:
return results
if location == self:
nofound_string = nofound_string or "You don't carry '%s'." % searchdata
multimatch_string = multimatch_string or "You carry more than one '%s':" % searchdata
# call the _AT_SEARCH_RESULT func to transform our results and send messages
_AT_SEARCH_RESULT = variable_from_module(*settings.SEARCH_AT_RESULT.rsplit('.', 1))
return _AT_SEARCH_RESULT(results, self, query=searchdata, nofound_string=nofound_string,
multimatch_string=multimatch_string)
def can_be_trained_by(self, trainer):
"""
Checks if we can be trained by trainer. If False, send a message to trainer and let them know why. The default
implementation will just return True, but this is overridden in Retainers, for example.
Args:
trainer: Character to check training
Returns:
True if we can be trained, False otherwise.
"""
if self.db.trainer:
trainer.msg("They have already been trained.")
return False
return True
def post_training(self, trainer, trainer_msg="", targ_msg="", **kwargs):
"""
Handles bookkeeping after this character is trained.
Args:
trainer: Character that trained us.
trainer_msg (str): Message to send to trainer
targ_msg (str): Message to send to this Character
Returns:
True if everything went off. Used for trying to catch extremely elusive caching errors.
"""
from server.utils.arx_utils import trainer_diagnostics
currently_training = trainer.db.currently_training or []
        # num_trained is redundant bookkeeping meant to guard against cache errors.
num_trained = trainer.db.num_trained or len(currently_training)
if num_trained < len(currently_training):
num_trained = len(currently_training)
num_trained += 1
self.db.trainer = trainer
currently_training.append(self)
trainer.db.currently_training = currently_training
trainer.db.num_trained = num_trained
if trainer_msg:
trainer.msg(trainer_msg)
if targ_msg:
self.msg(targ_msg)
print "Character.post_training call: %s" % trainer_diagnostics(trainer)
return True
def show_online(self, caller, check_puppet=True):
"""
Args:
caller: Player who is checking to see if they can see us online
check_puppet: Whether the Character needs an active puppet to show as online
Returns:
True if we're online and the player has privileges to see us. False otherwise
"""
if check_puppet:
if not self.sessions.all():
return False
player = self.player
else:
player = self.player_ob
if not player:
return False
if not player.db.hide_from_watch:
return True
if caller.check_permstring("builders"):
return True
# we're hiding from watch and caller is not staff, so they don't see us online
return False
@property
def glass_jaw(self):
return False
@property
def valid_actions(self):
from world.dominion.models import PlotAction
from django.db.models import Q
return PlotAction.objects.filter(Q(dompc=self.dompc) | Q(assistants=self.dompc)).exclude(
status=PlotAction.CANCELLED).distinct()
@property
def past_actions(self):
return self.player_ob.past_actions
@property
def past_participated_actions(self):
return self.player_ob.past_participated_actions
@property
def recent_actions(self):
return self.player_ob.recent_actions
@property
def recent_assists(self):
return self.player_ob.recent_assists
@property
def skills(self):
if self.db.skills is None:
self.db.skills = {}
return self.db.skills
@property
def truesight(self):
return self.check_permstring("builders") or self.tags.get("story_npc")
def get_display_name(self, looker, **kwargs):
if not self.is_disguised:
return super(Character, self).get_display_name(looker, **kwargs)
name = self.name
try:
if looker.truesight:
name = "%s (%s)" % (self.name, self.key)
if looker.check_permstring("builders"):
name += "(%s)" % self.id
except AttributeError:
pass
return name
@property
def dompc(self):
"""Returns our Dominion object"""
return self.player_ob.Dominion
@property
def secrets(self):
from web.character.models import Clue
return self.roster.clue_discoveries.filter(clue__clue_type=Clue.CHARACTER_SECRET,
clue__tangible_object=self).exclude(clue__desc="").distinct()
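# Illustrative interaction sketch, not part of the original module; "char" is a
# hypothetical Character instance and the calls simply exercise the health
# helpers defined above:
#     char.change_health(-20)        # apply 20 damage; may trigger wake/health triggers
#     char.fall_asleep(uncon=True)   # knock the character unconscious
#     char.recovery_test()           # willpower+stamina roll that heals over time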
|
py | 1a47c8b220c0317d5207b4ea9059fd3e1324aba8 | class Solution:
def intToRoman(self, num: int) -> str:
        romans = {v: k for k, v in {
'I': 1,
'V': 5,
'IV': 4,
'X': 10,
'IX': 9,
'L': 50,
'XL': 40,
'C': 100,
'XC': 90,
'D': 500,
'CD': 400,
'M': 1000,
'CM': 900
}.items()}
values = list(sorted(romans.keys()))
if num == 0:
return ''
result = []
        value = 4000  # seed larger than any mapped value so the first pass pops 1000
while num > 0:
if num - value >= 0:
result.append(romans[value])
num -= value
else:
value = values.pop()
return ''.join(result)
s = Solution()
print(s.intToRoman(3) == 'III')
print(s.intToRoman(4) == 'IV')
print(s.intToRoman(9) == 'IX')
print(s.intToRoman(58) == 'LVIII')
print(s.intToRoman(1994) == 'MCMXCIV')
|
py | 1a47ca266b7afb7b98194434b89e606e4cd71505 | # -*- coding: utf-8 -*-
"""
pygments.lexers.r
~~~~~~~~~~~~~~~~~
Lexers for the R/S languages.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import Lexer, RegexLexer, include, words, do_insertions
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Generic
__all__ = ['RConsoleLexer', 'SLexer', 'RdLexer']
line_re = re.compile('.*?\n')
class RConsoleLexer(Lexer):
"""
For R console transcripts or R CMD BATCH output files.
"""
name = 'RConsole'
aliases = ['rconsole', 'rout']
filenames = ['*.Rout']
def get_tokens_unprocessed(self, text):
slexer = SLexer(**self.options)
current_code_block = ''
insertions = []
for match in line_re.finditer(text):
line = match.group()
if line.startswith('>') or line.startswith('+'):
# Colorize the prompt as such,
# then put rest of line into current_code_block
insertions.append((len(current_code_block),
[(0, Generic.Prompt, line[:2])]))
current_code_block += line[2:]
else:
# We have reached a non-prompt line!
# If we have stored prompt lines, need to process them first.
if current_code_block:
# Weave together the prompts and highlight code.
for item in do_insertions(
insertions, slexer.get_tokens_unprocessed(current_code_block)):
yield item
# Reset vars for next code block.
current_code_block = ''
insertions = []
# Now process the actual line itself, this is output from R.
yield match.start(), Generic.Output, line
# If we happen to end on a code block with nothing after it, need to
# process the last code block. This is neither elegant nor DRY so
# should be changed.
if current_code_block:
for item in do_insertions(
insertions, slexer.get_tokens_unprocessed(current_code_block)):
yield item
class SLexer(RegexLexer):
"""
For S, S-plus, and R source code.
.. versionadded:: 0.10
"""
name = 'S'
aliases = ['splus', 's', 'r']
filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
'text/x-R', 'text/x-r-history', 'text/x-r-profile']
builtins_base = (
'Arg', 'Conj', 'Cstack_info', 'Encoding', 'FALSE',
'Filter', 'Find', 'I', 'ISOdate', 'ISOdatetime', 'Im', 'Inf',
'La.svd', 'Map', 'Math.Date', 'Math.POSIXt', 'Math.data.frame',
'Math.difftime', 'Math.factor', 'Mod', 'NA_character_',
        'NA_complex_', 'NA_real_', 'NCOL', 'NROW', 'NULL', 'NA_integer_', 'NaN',
'Negate', 'NextMethod', 'Ops.Date', 'Ops.POSIXt', 'Ops.data.frame',
'Ops.difftime', 'Ops.factor', 'Ops.numeric_version', 'Ops.ordered',
'Position', 'R.Version', 'R.home', 'R.version', 'R.version.string',
'RNGkind', 'RNGversion', 'R_system_version', 'Re', 'Recall',
'Reduce', 'Summary.Date', 'Summary.POSIXct', 'Summary.POSIXlt',
'Summary.data.frame', 'Summary.difftime', 'Summary.factor',
'Summary.numeric_version', 'Summary.ordered', 'Sys.Date',
'Sys.chmod', 'Sys.getenv', 'Sys.getlocale', 'Sys.getpid',
'Sys.glob', 'Sys.info', 'Sys.localeconv', 'Sys.readlink',
'Sys.setFileTime', 'Sys.setenv', 'Sys.setlocale', 'Sys.sleep',
'Sys.time', 'Sys.timezone', 'Sys.umask', 'Sys.unsetenv',
'Sys.which', 'TRUE', 'UseMethod', 'Vectorize', 'abbreviate', 'abs',
'acos', 'acosh', 'addNA', 'addTaskCallback', 'agrep', 'alist',
'all', 'all.equal', 'all.equal.POSIXct', 'all.equal.character',
'all.equal.default', 'all.equal.factor', 'all.equal.formula',
'all.equal.language', 'all.equal.list', 'all.equal.numeric',
'all.equal.raw', 'all.names', 'all.vars', 'any', 'anyDuplicated',
'anyDuplicated.array', 'anyDuplicated.data.frame',
'anyDuplicated.default', 'anyDuplicated.matrix', 'aperm',
'aperm.default', 'aperm.table', 'append', 'apply', 'args',
'arrayInd', 'as.Date', 'as.Date.POSIXct', 'as.Date.POSIXlt',
'as.Date.character', 'as.Date.date', 'as.Date.dates',
'as.Date.default', 'as.Date.factor', 'as.Date.numeric',
'as.POSIXct', 'as.POSIXct.Date', 'as.POSIXct.POSIXlt',
'as.POSIXct.date', 'as.POSIXct.dates', 'as.POSIXct.default',
'as.POSIXct.numeric', 'as.POSIXlt', 'as.POSIXlt.Date',
'as.POSIXlt.POSIXct', 'as.POSIXlt.character', 'as.POSIXlt.date',
'as.POSIXlt.dates', 'as.POSIXlt.default', 'as.POSIXlt.factor',
'as.POSIXlt.numeric', 'as.array', 'as.array.default', 'as.call',
'as.character', 'as.character.Date', 'as.character.POSIXt',
'as.character.condition', 'as.character.default',
'as.character.error', 'as.character.factor', 'as.character.hexmode',
'as.character.numeric_version', 'as.character.octmode',
'as.character.srcref', 'as.complex', 'as.data.frame',
'as.data.frame.AsIs', 'as.data.frame.Date', 'as.data.frame.POSIXct',
'as.data.frame.POSIXlt', 'as.data.frame.array',
'as.data.frame.character', 'as.data.frame.complex',
'as.data.frame.data.frame', 'as.data.frame.default',
'as.data.frame.difftime', 'as.data.frame.factor',
'as.data.frame.integer', 'as.data.frame.list',
'as.data.frame.logical', 'as.data.frame.matrix',
'as.data.frame.model.matrix', 'as.data.frame.numeric',
'as.data.frame.numeric_version', 'as.data.frame.ordered',
'as.data.frame.raw', 'as.data.frame.table', 'as.data.frame.ts',
'as.data.frame.vector', 'as.difftime', 'as.double',
'as.double.POSIXlt', 'as.double.difftime', 'as.environment',
'as.expression', 'as.expression.default', 'as.factor',
'as.function', 'as.function.default', 'as.hexmode', 'as.integer',
'as.list', 'as.list.Date', 'as.list.POSIXct', 'as.list.data.frame',
'as.list.default', 'as.list.environment', 'as.list.factor',
'as.list.function', 'as.list.numeric_version', 'as.logical',
'as.logical.factor', 'as.matrix', 'as.matrix.POSIXlt',
'as.matrix.data.frame', 'as.matrix.default', 'as.matrix.noquote',
'as.name', 'as.null', 'as.null.default', 'as.numeric',
'as.numeric_version', 'as.octmode', 'as.ordered',
'as.package_version', 'as.pairlist', 'as.qr', 'as.raw', 'as.single',
'as.single.default', 'as.symbol', 'as.table', 'as.table.default',
'as.vector', 'as.vector.factor', 'asNamespace', 'asS3', 'asS4',
'asin', 'asinh', 'assign', 'atan', 'atan2', 'atanh',
'attachNamespace', 'attr', 'attr.all.equal', 'attributes',
'autoload', 'autoloader', 'backsolve', 'baseenv', 'basename',
'besselI', 'besselJ', 'besselK', 'besselY', 'beta',
'bindingIsActive', 'bindingIsLocked', 'bindtextdomain', 'bitwAnd',
'bitwNot', 'bitwOr', 'bitwShiftL', 'bitwShiftR', 'bitwXor', 'body',
'bquote', 'browser', 'browserCondition', 'browserSetDebug',
'browserText', 'builtins', 'by', 'by.data.frame', 'by.default',
'bzfile', 'c.Date', 'c.POSIXct', 'c.POSIXlt', 'c.noquote',
'c.numeric_version', 'call', 'callCC', 'capabilities', 'casefold',
'cat', 'category', 'cbind', 'cbind.data.frame', 'ceiling',
'char.expand', 'charToRaw', 'charmatch', 'chartr', 'check_tzones',
'chol', 'chol.default', 'chol2inv', 'choose', 'class',
'clearPushBack', 'close', 'close.connection', 'close.srcfile',
'close.srcfilealias', 'closeAllConnections', 'col', 'colMeans',
'colSums', 'colnames', 'commandArgs', 'comment', 'computeRestarts',
'conditionCall', 'conditionCall.condition', 'conditionMessage',
'conditionMessage.condition', 'conflicts', 'contributors', 'cos',
'cosh', 'crossprod', 'cummax', 'cummin', 'cumprod', 'cumsum', 'cut',
'cut.Date', 'cut.POSIXt', 'cut.default', 'dQuote', 'data.class',
'data.matrix', 'date', 'debug', 'debugonce',
'default.stringsAsFactors', 'delayedAssign', 'deparse', 'det',
'determinant', 'determinant.matrix', 'dget', 'diag', 'diff',
'diff.Date', 'diff.POSIXt', 'diff.default', 'difftime', 'digamma',
'dim', 'dim.data.frame', 'dimnames', 'dimnames.data.frame', 'dir',
'dir.create', 'dirname', 'do.call', 'dput', 'drop', 'droplevels',
'droplevels.data.frame', 'droplevels.factor', 'dump', 'duplicated',
'duplicated.POSIXlt', 'duplicated.array', 'duplicated.data.frame',
'duplicated.default', 'duplicated.matrix',
'duplicated.numeric_version', 'dyn.load', 'dyn.unload', 'eapply',
'eigen', 'else', 'emptyenv', 'enc2native', 'enc2utf8',
'encodeString', 'enquote', 'env.profile', 'environment',
'environmentIsLocked', 'environmentName', 'eval', 'eval.parent',
'evalq', 'exists', 'exp', 'expand.grid', 'expm1', 'expression',
'factor', 'factorial', 'fifo', 'file', 'file.access', 'file.append',
'file.choose', 'file.copy', 'file.create', 'file.exists',
'file.info', 'file.link', 'file.path', 'file.remove', 'file.rename',
'file.show', 'file.symlink', 'find.package', 'findInterval',
'findPackageEnv', 'findRestart', 'floor', 'flush',
'flush.connection', 'force', 'formals', 'format',
'format.AsIs', 'format.Date', 'format.POSIXct', 'format.POSIXlt',
'format.data.frame', 'format.default', 'format.difftime',
'format.factor', 'format.hexmode', 'format.info',
'format.libraryIQR', 'format.numeric_version', 'format.octmode',
'format.packageInfo', 'format.pval', 'format.summaryDefault',
'formatC', 'formatDL', 'forwardsolve', 'gamma', 'gc', 'gc.time',
'gcinfo', 'gctorture', 'gctorture2', 'get', 'getAllConnections',
'getCallingDLL', 'getCallingDLLe', 'getConnection',
'getDLLRegisteredRoutines', 'getDLLRegisteredRoutines.DLLInfo',
'getDLLRegisteredRoutines.character', 'getElement',
'getExportedValue', 'getHook', 'getLoadedDLLs', 'getNamespace',
'getNamespaceExports', 'getNamespaceImports', 'getNamespaceInfo',
'getNamespaceName', 'getNamespaceUsers', 'getNamespaceVersion',
'getNativeSymbolInfo', 'getOption', 'getRversion', 'getSrcLines',
'getTaskCallbackNames', 'geterrmessage', 'gettext', 'gettextf',
'getwd', 'gl', 'globalenv', 'gregexpr', 'grep', 'grepRaw', 'grepl',
'gsub', 'gzcon', 'gzfile', 'head', 'iconv', 'iconvlist',
'icuSetCollate', 'identical', 'identity', 'ifelse', 'importIntoEnv',
'in', 'inherits', 'intToBits', 'intToUtf8', 'interaction', 'interactive',
'intersect', 'inverse.rle', 'invisible', 'invokeRestart',
'invokeRestartInteractively', 'is.R', 'is.array', 'is.atomic',
'is.call', 'is.character', 'is.complex', 'is.data.frame',
'is.double', 'is.element', 'is.environment', 'is.expression',
'is.factor', 'is.finite', 'is.function', 'is.infinite',
'is.integer', 'is.language', 'is.list', 'is.loaded', 'is.logical',
'is.matrix', 'is.na', 'is.na.POSIXlt', 'is.na.data.frame',
'is.na.numeric_version', 'is.name', 'is.nan', 'is.null',
'is.numeric', 'is.numeric.Date', 'is.numeric.POSIXt',
'is.numeric.difftime', 'is.numeric_version', 'is.object',
'is.ordered', 'is.package_version', 'is.pairlist', 'is.primitive',
'is.qr', 'is.raw', 'is.recursive', 'is.single', 'is.symbol',
'is.table', 'is.unsorted', 'is.vector', 'isBaseNamespace',
'isIncomplete', 'isNamespace', 'isOpen', 'isRestart', 'isS4',
'isSeekable', 'isSymmetric', 'isSymmetric.matrix', 'isTRUE',
'isatty', 'isdebugged', 'jitter', 'julian', 'julian.Date',
'julian.POSIXt', 'kappa', 'kappa.default', 'kappa.lm', 'kappa.qr',
'kronecker', 'l10n_info', 'labels', 'labels.default', 'lapply',
'lazyLoad', 'lazyLoadDBexec', 'lazyLoadDBfetch', 'lbeta', 'lchoose',
'length', 'length.POSIXlt', 'letters', 'levels', 'levels.default',
'lfactorial', 'lgamma', 'library.dynam', 'library.dynam.unload',
'licence', 'license', 'list.dirs', 'list.files', 'list2env', 'load',
'loadNamespace', 'loadedNamespaces', 'loadingNamespaceInfo',
'local', 'lockBinding', 'lockEnvironment', 'log', 'log10', 'log1p',
'log2', 'logb', 'lower.tri', 'ls', 'make.names', 'make.unique',
'makeActiveBinding', 'mapply', 'margin.table', 'mat.or.vec',
'match', 'match.arg', 'match.call', 'match.fun', 'max', 'max.col',
'mean', 'mean.Date', 'mean.POSIXct', 'mean.POSIXlt', 'mean.default',
'mean.difftime', 'mem.limits', 'memCompress', 'memDecompress',
'memory.profile', 'merge', 'merge.data.frame', 'merge.default',
'message', 'mget', 'min', 'missing', 'mode', 'month.abb',
'month.name', 'months', 'months.Date', 'months.POSIXt',
'months.abb', 'months.nameletters', 'names', 'names.POSIXlt',
'namespaceExport', 'namespaceImport', 'namespaceImportClasses',
'namespaceImportFrom', 'namespaceImportMethods', 'nargs', 'nchar',
'ncol', 'new.env', 'ngettext', 'nlevels', 'noquote', 'norm',
'normalizePath', 'nrow', 'numeric_version', 'nzchar', 'objects',
'oldClass', 'on.exit', 'open', 'open.connection', 'open.srcfile',
'open.srcfilealias', 'open.srcfilecopy', 'options', 'order',
'ordered', 'outer', 'packBits', 'packageEvent',
'packageHasNamespace', 'packageStartupMessage', 'package_version',
'pairlist', 'parent.env', 'parent.frame', 'parse',
'parseNamespaceFile', 'paste', 'paste0', 'path.expand',
'path.package', 'pipe', 'pmatch', 'pmax', 'pmax.int', 'pmin',
'pmin.int', 'polyroot', 'pos.to.env', 'pretty', 'pretty.default',
'prettyNum', 'print', 'print.AsIs', 'print.DLLInfo',
'print.DLLInfoList', 'print.DLLRegisteredRoutines', 'print.Date',
'print.NativeRoutineList', 'print.POSIXct', 'print.POSIXlt',
'print.by', 'print.condition', 'print.connection',
'print.data.frame', 'print.default', 'print.difftime',
'print.factor', 'print.function', 'print.hexmode',
'print.libraryIQR', 'print.listof', 'print.noquote',
'print.numeric_version', 'print.octmode', 'print.packageInfo',
'print.proc_time', 'print.restart', 'print.rle',
'print.simple.list', 'print.srcfile', 'print.srcref',
'print.summary.table', 'print.summaryDefault', 'print.table',
'print.warnings', 'prmatrix', 'proc.time', 'prod', 'prop.table',
'provideDimnames', 'psigamma', 'pushBack', 'pushBackLength', 'q',
'qr', 'qr.Q', 'qr.R', 'qr.X', 'qr.coef', 'qr.default', 'qr.fitted',
'qr.qty', 'qr.qy', 'qr.resid', 'qr.solve', 'quarters',
'quarters.Date', 'quarters.POSIXt', 'quit', 'quote', 'range',
'range.default', 'rank', 'rapply', 'raw', 'rawConnection',
'rawConnectionValue', 'rawShift', 'rawToBits', 'rawToChar', 'rbind',
'rbind.data.frame', 'rcond', 'read.dcf', 'readBin', 'readChar',
'readLines', 'readRDS', 'readRenviron', 'readline', 'reg.finalizer',
'regexec', 'regexpr', 'registerS3method', 'registerS3methods',
'regmatches', 'remove', 'removeTaskCallback', 'rep', 'rep.Date',
'rep.POSIXct', 'rep.POSIXlt', 'rep.factor', 'rep.int',
'rep.numeric_version', 'rep_len', 'replace', 'replicate',
'requireNamespace', 'restartDescription', 'restartFormals',
'retracemem', 'rev', 'rev.default', 'rle', 'rm', 'round',
'round.Date', 'round.POSIXt', 'row', 'row.names',
'row.names.data.frame', 'row.names.default', 'rowMeans', 'rowSums',
'rownames', 'rowsum', 'rowsum.data.frame', 'rowsum.default',
'sQuote', 'sample', 'sample.int', 'sapply', 'save', 'save.image',
'saveRDS', 'scale', 'scale.default', 'scan', 'search',
'searchpaths', 'seek', 'seek.connection', 'seq', 'seq.Date',
'seq.POSIXt', 'seq.default', 'seq.int', 'seq_along', 'seq_len',
'sequence', 'serialize', 'set.seed', 'setHook', 'setNamespaceInfo',
'setSessionTimeLimit', 'setTimeLimit', 'setdiff', 'setequal',
'setwd', 'shQuote', 'showConnections', 'sign', 'signalCondition',
'signif', 'simpleCondition', 'simpleError', 'simpleMessage',
'simpleWarning', 'simplify2array', 'sin', 'single',
'sinh', 'sink', 'sink.number', 'slice.index', 'socketConnection',
'socketSelect', 'solve', 'solve.default', 'solve.qr', 'sort',
'sort.POSIXlt', 'sort.default', 'sort.int', 'sort.list', 'split',
'split.Date', 'split.POSIXct', 'split.data.frame', 'split.default',
'sprintf', 'sqrt', 'srcfile', 'srcfilealias', 'srcfilecopy',
'srcref', 'standardGeneric', 'stderr', 'stdin', 'stdout', 'stop',
'stopifnot', 'storage.mode', 'strftime', 'strptime', 'strsplit',
'strtoi', 'strtrim', 'structure', 'strwrap', 'sub', 'subset',
'subset.data.frame', 'subset.default', 'subset.matrix',
'substitute', 'substr', 'substring', 'sum', 'summary',
'summary.Date', 'summary.POSIXct', 'summary.POSIXlt',
'summary.connection', 'summary.data.frame', 'summary.default',
'summary.factor', 'summary.matrix', 'summary.proc_time',
'summary.srcfile', 'summary.srcref', 'summary.table',
'suppressMessages', 'suppressPackageStartupMessages',
'suppressWarnings', 'svd', 'sweep', 'sys.call', 'sys.calls',
'sys.frame', 'sys.frames', 'sys.function', 'sys.load.image',
'sys.nframe', 'sys.on.exit', 'sys.parent', 'sys.parents',
'sys.save.image', 'sys.source', 'sys.status', 'system',
'system.file', 'system.time', 'system2', 't', 't.data.frame',
't.default', 'table', 'tabulate', 'tail', 'tan', 'tanh', 'tapply',
'taskCallbackManager', 'tcrossprod', 'tempdir', 'tempfile',
'testPlatformEquivalence', 'textConnection', 'textConnectionValue',
'toString', 'toString.default', 'tolower', 'topenv', 'toupper',
'trace', 'traceback', 'tracemem', 'tracingState', 'transform',
'transform.data.frame', 'transform.default', 'trigamma', 'trunc',
'trunc.Date', 'trunc.POSIXt', 'truncate', 'truncate.connection',
'try', 'tryCatch', 'typeof', 'unclass', 'undebug', 'union',
'unique', 'unique.POSIXlt', 'unique.array', 'unique.data.frame',
'unique.default', 'unique.matrix', 'unique.numeric_version',
'units', 'units.difftime', 'unix.time', 'unlink', 'unlist',
'unloadNamespace', 'unlockBinding', 'unname', 'unserialize',
'unsplit', 'untrace', 'untracemem', 'unz', 'upper.tri', 'url',
'utf8ToInt', 'vapply', 'version', 'warning', 'warnings', 'weekdays',
'weekdays.Date', 'weekdays.POSIXt', 'which', 'which.max',
'which.min', 'with', 'with.default', 'withCallingHandlers',
'withRestarts', 'withVisible', 'within', 'within.data.frame',
'within.list', 'write', 'write.dcf', 'writeBin', 'writeChar',
'writeLines', 'xor', 'xor.hexmode', 'xor.octmode',
'xpdrows.data.frame', 'xtfrm', 'xtfrm.AsIs', 'xtfrm.Date',
'xtfrm.POSIXct', 'xtfrm.POSIXlt', 'xtfrm.Surv', 'xtfrm.default',
'xtfrm.difftime', 'xtfrm.factor', 'xtfrm.numeric_version', 'xzfile',
'zapsmall'
)
tokens = {
'comments': [
(r'#.*$', Comment.Single),
],
'valid_name': [
(r'[a-zA-Z][\w.]*', Text),
# can begin with ., but not if that is followed by a digit
(r'\.[a-zA-Z_][\w.]*', Text),
],
'punctuation': [
(r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
],
'keywords': [
(words(builtins_base, suffix=r'(?![\w. =])'),
Keyword.Pseudo),
(r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
r'(?![\w.])',
Keyword.Reserved),
(r'(array|category|character|complex|double|function|integer|list|'
r'logical|matrix|numeric|vector|data.frame|c)'
r'(?![\w.])',
Keyword.Type),
(r'(library|require|attach|detach|source)'
r'(?![\w.])',
Keyword.Namespace)
],
'operators': [
(r'<<?-|->>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
(r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator)
],
'builtin_symbols': [
(r'(NULL|NA(_(integer|real|complex|character)_)?|'
r'letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))'
r'(?![\w.])',
Keyword.Constant),
(r'(T|F)\b', Name.Builtin.Pseudo),
],
'numbers': [
# hex number
(r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
# decimal number
(r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?',
Number),
],
'statements': [
include('comments'),
# whitespaces
(r'\s+', Text),
(r'`.*?`', String.Backtick),
(r'\'', String, 'string_squote'),
(r'\"', String, 'string_dquote'),
include('builtin_symbols'),
include('numbers'),
include('keywords'),
include('punctuation'),
include('operators'),
include('valid_name'),
],
'root': [
include('statements'),
# blocks:
(r'\{|\}', Punctuation),
# (r'\{', Punctuation, 'block'),
(r'.', Text),
],
# 'block': [
# include('statements'),
# ('\{', Punctuation, '#push'),
# ('\}', Punctuation, '#pop')
# ],
'string_squote': [
(r'([^\'\\]|\\.)*\'', String, '#pop'),
],
'string_dquote': [
(r'([^"\\]|\\.)*"', String, '#pop'),
],
}
def analyse_text(text):
if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
return 0.11
class RdLexer(RegexLexer):
"""
Pygments Lexer for R documentation (Rd) files
This is a very minimal implementation, highlighting little more
than the macros. A description of Rd syntax is found in `Writing R
Extensions <http://cran.r-project.org/doc/manuals/R-exts.html>`_
and `Parsing Rd files <developer.r-project.org/parseRd.pdf>`_.
.. versionadded:: 1.6
"""
name = 'Rd'
aliases = ['rd']
filenames = ['*.Rd']
mimetypes = ['text/x-r-doc']
# To account for verbatim / LaTeX-like / and R-like areas
# would require parsing.
tokens = {
'root': [
# catch escaped brackets and percent sign
(r'\\[\\{}%]', String.Escape),
# comments
(r'%.*$', Comment),
# special macros with no arguments
(r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
# macros
(r'\\[a-zA-Z]+\b', Keyword),
# special preprocessor macros
(r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
# non-escaped brackets
(r'[{}]', Name.Builtin),
# everything else
(r'[^\\%\n{}]+', Text),
(r'.', Text),
]
}
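# Illustrative usage sketch, not part of the original module: these lexers are
# normally consumed through pygments' public API rather than instantiated by hand:
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     print(highlight('x <- c(1, 2, 3)  # a vector', SLexer(), TerminalFormatter()))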
|
py | 1a47ca51cc6ca724843f14f98cd45930a2e6245c | import numpy as np
from sklearn import datasets
from scipy.stats import f
EPSILON = 10e-10 # only to prevent division by zero
def mean_vector_similarity(X, Y):
x_mean = np.mean(X, axis=0)
y_mean = np.mean(Y, axis=0)
sim = float((x_mean.dot(y_mean)) / (np.linalg.norm(x_mean)
* np.linalg.norm(y_mean) + EPSILON))
return sim
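# Illustrative usage sketch (added for clarity, not part of the original module):
# compares the mean feature vectors of two small random matrices. The shapes and
# seed below are arbitrary assumptions.
if __name__ == '__main__':
    rng = np.random.RandomState(0)
    X = rng.rand(20, 5)  # 20 samples with 5 features each
    Y = rng.rand(30, 5)  # 30 samples with the same 5 features
    # cosine-style similarity of the two mean vectors; close to 1 for similar means
    print(mean_vector_similarity(X, Y))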
|
py | 1a47ca5a365c02284841a8ed5eabe55598ed0fe4 | """
Utilities for ESPEI
Classes and functions defined here should have some reuse potential.
"""
import itertools
import re
import os
from collections import namedtuple
import bibtexparser
import numpy as np
import sympy
import dask
from bibtexparser.bparser import BibTexParser
from bibtexparser.customization import convert_to_unicode
from distributed import Client
from pycalphad import variables as v
from six import string_types
from sympy import Symbol
from tinydb import TinyDB, where
from tinydb.storages import MemoryStorage
def unpack_piecewise(x):
if isinstance(x, sympy.Piecewise):
return float(x.args[0].expr)
else:
return float(x)
class PickleableTinyDB(TinyDB):
"""A pickleable version of TinyDB that uses MemoryStorage as a default."""
def __getstate__(self):
# first remove the query cache. The cache speed is not important to us.
for table_name in self.tables():
self.table(table_name)._query_cache = {}
pickle_dict = {}
for key, value in self.__dict__.items():
if key == '_table':
pickle_dict[key] = value.all()
else:
pickle_dict[key] = value
return pickle_dict
def __setstate__(self, state):
self.__init__(storage=MemoryStorage)
self.insert_multiple(state['_table'])
class ImmediateClient(Client):
"""
A subclass of distributed.Client that automatically unwraps the Futures
returned by map.
"""
def map(self, f, *iterators, **kwargs):
_client = super(ImmediateClient, self)
result = _client.gather(_client.map(f, *[list(it) for it in iterators], **kwargs))
return result
def sigfigs(x, n):
"""Round x to n significant digits"""
if x != 0:
return np.around(x, -(np.floor(np.log10(np.abs(x)))).astype(np.int) + (n - 1))
else:
return x
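# Worked example (added for illustration): sigfigs(123.456, 2) keeps two significant
# digits and returns 120.0, while sigfigs(0.012345, 3) returns 0.0123.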
def optimal_parameters(trace_array, lnprob_array, kth=0):
"""
Return the optimal parameters in the trace based on the highest likelihood.
If kth is specified, return the kth set of *unique* optimal parameters.
Parameters
----------
trace_array : ndarray
Array of shape (iterations, number of chains, number of parameters)
lnprob_array : ndarray
Array of shape (number of chains, iterations)
kth : int
Zero-indexed optimum. 0 (the default) is the most optimal solution. 1 is
        the second most optimal, etc. Only *unique* solutions will be returned.
Returns
-------
Array of optimal parameters
Notes
-----
It is ok if the calculation did not finish and the arrays are padded with
zeros. The number of chains and iterations in the trace and lnprob arrays
must match.
"""
    # Swap first two indices of trace_array for compatibility with new version of emcee.
trace_array = np.swapaxes(trace_array, 0, 1)
    # indices of chains + iterations that have non-zero parameters (that step has run)
nz = np.nonzero(np.all(trace_array != 0, axis=-1))
# chain + iteration index with the highest likelihood
unique_params = np.zeros(trace_array.shape[-1])
unique_params_found = -1
# loop through all possible nonzero iterations
for i in range(nz[-1][-1]):
        # find the next set of parameters
candidate_index = np.argpartition(-lnprob_array[nz], i)[i]
candidate_params = trace_array[nz][candidate_index]
# if the parameters are unique, make them the new unique parameters
if np.any(candidate_params != unique_params):
unique_params = candidate_params
unique_params_found += 1
# if we have found the kth set of unique parameters, stop
if unique_params_found == kth:
return unique_params
return np.zeros(trace_array.shape[-1])
def database_symbols_to_fit(dbf, symbol_regex="^V[V]?([0-9]+)$"):
"""
Return names of the symbols to fit that match the regular expression
Parameters
----------
dbf : Database
pycalphad Database
symbol_regex : str
Regular expression of the fitting symbols. Defaults to V or VV followed by one or more numbers.
Returns
-------
    list of str
        Sorted list of the symbol names in the Database that match the regular expression.
"""
pattern = re.compile(symbol_regex)
return sorted([x for x in sorted(dbf.symbols.keys()) if pattern.match(x)])
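# Example (added for illustration; the symbol names are hypothetical): for a Database
# whose symbols include {'VV0001', 'VV0010', 'GHSERAL'}, the default regex selects
# ['VV0001', 'VV0010'] and ignores 'GHSERAL'.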
def flexible_open_string(obj):
"""
    Return the string contents of an object that is either file-like, a file path, or a raw string.
Parameters
----------
obj : string-like or file-like
Either a multiline string, a path, or a file-like object
Returns
-------
str
"""
if isinstance(obj, string_types):
# the obj is a string
if '\n' in obj:
# if the string has linebreaks, then we assume it's a raw string. Return it.
return obj
else:
# assume it is a path
with open(obj) as fp:
read_string = fp.read()
return read_string
elif hasattr(obj, 'read'):
# assume it is file-like
read_string = obj.read()
return read_string
else:
raise ValueError('Unable to determine how to extract the string of the passed object ({}) of type {}. Expected a raw string, file-like, or path-like.'.format(obj, type(obj)))
bibliography_database = PickleableTinyDB(storage=MemoryStorage)
def add_bibtex_to_bib_database(bibtex, bib_db=None):
"""
Add entries from a BibTeX file to the bibliography database
Parameters
----------
bibtex : str
Either a multiline string, a path, or a file-like object of a BibTeX file
bib_db: PickleableTinyDB
Database to put the BibTeX entries. Defaults to a module-level default database
Returns
-------
The modified bibliographic database
"""
if not bib_db:
bib_db = bibliography_database
bibtex_string = flexible_open_string(bibtex)
parser = BibTexParser()
parser.customization = convert_to_unicode
parsed_bibtex = bibtexparser.loads(bibtex_string, parser=parser)
bib_db.insert_multiple(parsed_bibtex.entries)
return bib_db
def bib_marker_map(bib_keys, markers=None):
"""
Return a dict with reference keys and marker dicts
Parameters
----------
    bib_keys : list
        List of reference keys (e.g. BibTeX keys) to assign markers to.
    markers : list
        List of 2-tuples of ('fillstyle', 'marker') e.g. [('top', 'o'), ('full', 's')].
        Defaults to cycling through the filled markers with the different fill styles.
Returns
-------
dict
Dictionary with bib_keys as keys, dict values of formatted strings and marker dicts
Examples
--------
>>> mm = bib_marker_map(['otis2016', 'bocklund2018'])
>>> mm == {'bocklund2018': {'formatted': 'bocklund2018', 'markers': {'fillstyle': 'none', 'marker': 'o'}}, 'otis2016': {'formatted': 'otis2016', 'markers': {'fillstyle': 'none', 'marker': 'v'}}}
True
"""
# TODO: support custom formatting from looking up keys in a bib_db
if not markers:
filled_markers = ['o', 'v', 's', 'd', 'P', 'X', '^', '<', '>']
fill_styles = ['none', 'full', 'top', 'right', 'bottom', 'left']
markers = itertools.product(fill_styles, filled_markers)
b_m_map = dict()
for ref, marker_tuple in zip(sorted(bib_keys), markers):
fill, mark = marker_tuple
b_m_map[ref] = {
'formatted': ref, # just use the key for formatting
'markers': {
'fillstyle': fill,
'marker': mark
}
}
return b_m_map
def parameter_term(expression, symbol):
"""
Determine the term, e.g. T*log(T) that belongs to the symbol in expression
Parameters
----------
    expression : sympy object
        Parameter expression to search for the symbol in.
    symbol : sympy.Symbol
        Symbol whose multiplicative term should be extracted.
    Returns
    -------
    The multiplicative term (a sympy expression) that multiplies ``symbol``.
"""
if expression == symbol:
# the parameter is the symbol, so the multiplicative term is 1.
term = 1
else:
if isinstance(expression, sympy.Piecewise):
expression = expression.args[0][0]
if isinstance(expression, sympy.Symbol):
# this is not mathematically correct, but we just need to be able to split it into args
expression = sympy.Add(expression, 1)
if not isinstance(expression, sympy.Add):
raise ValueError('Parameter {} is a {} not a sympy.Add or a Piecewise Add'.format(expression, type(expression)))
expression_terms = expression.args
term = None
for term_coeff in expression_terms:
coeff, root = term_coeff.as_coeff_mul(symbol)
if root == (symbol,):
term = coeff
break
if term is None:
raise ValueError('No multiplicative terms found for Symbol {} in parameter {}'.format(symbol, expression))
return term
def formatted_constituent_array(constituent_array):
"""
Given a constituent array of Species, return the classic CALPHAD-style interaction.
Parameters
----------
constituent_array : list
List of sublattices, which are lists of Species in that sublattice
Returns
-------
str
String of the constituent array formatted in the classic CALPHAD style
Examples
--------
>>> from pycalphad import variables as v
>>> const_array = [[v.Species('CU'), v.Species('MG')], [v.Species('MG')]]
>>> formatted_constituent_array(const_array)
'CU,MG:MG'
"""
return ':'.join([','.join([sp.name for sp in subl]) for subl in constituent_array])
def formatted_parameter(dbf, symbol, unique=True):
"""
Get the deconstructed pretty parts of the parameter/term a symbol belongs to in a Database.
Parameters
----------
dbf : pycalphad.Database
symbol : string or sympy.Symbol
Symbol in the Database to get the parameter for.
unique : bool
If True, will raise if more than one parameter containing the symbol is found.
Returns
-------
FormattedParameter
A named tuple with the following attributes:
``phase_name``, ``interaction``, ``symbol``, ``term``, ``parameter_type``
or ``term_symbol`` (which is just the Symbol * temperature term)
"""
FormattedParameter = namedtuple('FormattedParameter', ['phase_name', 'interaction', 'symbol', 'term', 'parameter_type', 'term_symbol'])
if not isinstance(symbol, Symbol):
symbol = Symbol(symbol)
search_res = dbf._parameters.search(
where('parameter').test(lambda x: symbol in x.free_symbols))
if len(search_res) == 0:
raise ValueError('Symbol {} not found in any parameters.'.format(symbol))
elif (len(search_res) > 1) and unique:
raise ValueError('Parameters found containing Symbol {} are not unique. Found {}.'.format(symbol, search_res))
formatted_parameters = []
for result in search_res:
const_array = formatted_constituent_array(result['constituent_array'])
        # format the parameter type to G or L0, L1, ...
parameter_type = '{}{}'.format(result['parameter_type'], result['parameter_order'])
# override non-interacting to G if there's no interaction
has_interaction = ',' in const_array
if not has_interaction:
if (result['parameter_type'] == 'G') or (result['parameter_type'] == 'L'):
parameter_type = 'G'
term = parameter_term(result['parameter'], symbol)
formatted_param = FormattedParameter(result['phase_name'],
const_array,
symbol,
term,
parameter_type,
term*symbol
)
formatted_parameters.append(formatted_param)
if unique:
return formatted_parameters[0]
else:
return formatted_parameters
def build_sitefractions(phase_name, sublattice_configurations, sublattice_occupancies):
"""Convert nested lists of sublattice configurations and occupancies to a list
of dictionaries. The dictionaries map SiteFraction symbols to occupancy
values. Note that zero occupancy site fractions will need to be added
separately since the total degrees of freedom aren't known in this function.
Parameters
----------
phase_name : str
Name of the phase
sublattice_configurations : [[str]]
sublattice configuration
sublattice_occupancies : [[float]]
occupancy of each sublattice
Returns
-------
    list of dict
        A list with one dict per configuration, mapping SiteFraction symbols to occupancy values
"""
result = []
for config, occ in zip(sublattice_configurations, sublattice_occupancies):
sitefracs = {}
config = [[c] if not isinstance(c, (list, tuple)) else c for c in config]
occ = [[o] if not isinstance(o, (list, tuple)) else o for o in occ]
if len(config) != len(occ):
raise ValueError('Sublattice configuration length differs from occupancies')
for sublattice_idx in range(len(config)):
if isinstance(config[sublattice_idx], (list, tuple)) != isinstance(occ[sublattice_idx], (list, tuple)):
raise ValueError('Sublattice configuration type differs from occupancies')
if not isinstance(config[sublattice_idx], (list, tuple)):
# This sublattice is fully occupied by one component
sitefracs[v.SiteFraction(phase_name, sublattice_idx, config[sublattice_idx])] = occ[sublattice_idx]
else:
# This sublattice is occupied by multiple elements
if len(config[sublattice_idx]) != len(occ[sublattice_idx]):
raise ValueError('Length mismatch in sublattice configuration')
for comp, val in zip(config[sublattice_idx], occ[sublattice_idx]):
sitefracs[v.SiteFraction(phase_name, sublattice_idx, comp)] = val
result.append(sitefracs)
return result
def popget(d, key, default=None):
"""
    Pop the key from the dict, returning its value, or the default if the key is not found.
Parameters
----------
d : dict
Dictionary to get key from.
key : object
Key to get from the dictionary.
default : object
Default to return if key is not found in dictionary.
Returns
-------
object
Examples
---------
>>> d = {'ABC': 5.0}
>>> popget(d, 'ZPF', 1.0) == 1.0
True
>>> popget(d, 'ABC', 1.0) == 5.0
True
"""
try:
return d.pop(key)
except KeyError:
return default
def get_dask_config_paths():
"""
Return a list of configuration file paths for dask.
The last path in the list has the highest precedence.
Returns
-------
list
Examples
--------
>>> config_files = get_dask_config_paths()
>>> len(config_files) > 1
True
"""
candidates = dask.config.paths
file_paths = []
for path in candidates:
if os.path.exists(path):
if os.path.isdir(path):
file_paths.extend(sorted([
os.path.join(path, p)
for p in os.listdir(path)
if os.path.splitext(p)[1].lower() in ('.json', '.yaml', '.yml')
]))
else:
file_paths.append(path)
return file_paths
|
py | 1a47caa7e19102cf16e741a3b58296e9247364e7 | #!/usr/bin/env python3
"""Common library for reading from and writing to files.
This module provides functions for reading in formatted data from system files
and writing it back out. Examples include reading a string list or integer
matrix from a file.
"""
import csv
from typing import Iterable, Iterator, List, Optional
def ints_from_file(file_name: str, sep: str = ' ') -> Iterator[List[int]]:
"""Reads a list of integer rows from a file.
Args:
file_name: A relative path to the file to read from.
sep: A separator token that appears between integers within a row.
Yields:
Each integer row, in sequence, from ``file_name``, where each row is on
a separate line and integers within a row are separated by ``sep``.
"""
with open(file_name) as input_file:
for line in input_file:
yield [int(token) for token in line.rstrip().split(sep)]
def strings_from_file(
file_name: str,
sep: str = ',',
quote: Optional[str] = '"',
) -> Iterable[str]:
"""Reads a sequence of formatted strings from a file.
Args:
file_name: A relative path to the file to read from.
sep: A separator token that appears between input strings in the file.
quote: If present, designates a custom quotation mark character to be
stripped from the start and end of each input string. If ``None``,
each input string will be interpreted verbatim.
Yields:
Each input string, in sequence, from ``file_name``, where strings are
separated by ``sep`` and quoted with ``quote`` characters.
"""
with open(file_name, newline='') as input_file:
if quote is None or quote == '':
quote_style = csv.QUOTE_NONE
else:
quote_style = csv.QUOTE_ALL
reader = csv.reader(
input_file, delimiter=sep, quotechar=quote, quoting=quote_style)
for row in reader:
for token in row:
yield token
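# Illustrative usage sketch (added for clarity; the file names below are assumptions):
#
#     for row in ints_from_file('matrix.txt'):        # one integer row per line
#         print(sum(row))
#     names = list(strings_from_file('names.txt'))    # comma-separated, quoted strings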
|
py | 1a47cb0968e2a72ec6aa08cdf363462310163cd2 | name = 'John'
#1 basic method
print('Hello, his name is ' + name)
#2 .format method
print('Hello, his name is {}'.format(name))
#3 f-string literal method
print(f'Hello, his name is {name}')
#4 Template
from string import Template
s = Template('$who likes $what')
s.substitute(who='tim', what='kung pao')
# 'tim likes kung pao'
#1: won't work if name isn't a string.
#2: is fine on any version but is a little unwieldy to type.
# Generally the best for compatibility across python versions.
#3: Is the best option in terms of readability (and performance).
# But it only works on python3.6+, so not a good idea if you want your code to be backwards compatible.
#4: Template strings support $-based substitutions
#5: old style formatting (a la %s, %d, etc.), which is now discouraged in favour of str.format.
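# For completeness, a sketch of the old-style formatting mentioned in #5 (added for
# illustration, not part of the original snippet):
print('Hello, his name is %s' % name)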
|
py | 1a47cb863a3b59258d8b687c983ebea3b0dd7f1f | import sys
import os
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
ROOT_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
import time
import json
import numpy as np
import cv2
import random
import torch
from torch.utils.data import DataLoader
from tqdm import tqdm
from lib.options import BaseOptions
from lib.mesh_util import *
from lib.sample_util import *
from lib.train_util import *
from lib.data import *
from lib.model import *
from lib.geometry import index
# get options
opt = BaseOptions().parse()
def train(opt):
# set cuda
cuda = torch.device('cuda:%d' % opt.gpu_id)
# train_dataset = TrainDataset(opt, phase='train')
# test_dataset = TrainDataset(opt, phase='test')
train_dataset = MRIDataset(opt, phase='train')
test_dataset = MRIDataset(opt, phase='test')
projection_mode = train_dataset.projection_mode
# create data loader
train_data_loader = DataLoader(train_dataset,
batch_size=opt.batch_size, shuffle=not opt.serial_batches,
num_workers=opt.num_threads, pin_memory=opt.pin_memory)
print('train data size: ', len(train_data_loader))
# NOTE: batch size should be 1 and use all the points for evaluation
test_data_loader = DataLoader(test_dataset,
batch_size=1, shuffle=False,
num_workers=opt.num_threads, pin_memory=opt.pin_memory)
print('test data size: ', len(test_data_loader))
# create net
netG = HGPIFuNet(opt, projection_mode).to(device=cuda)
optimizerG = torch.optim.RMSprop(netG.parameters(), lr=opt.learning_rate, momentum=0, weight_decay=0)
lr = opt.learning_rate
print('Using Network: ', netG.name)
def set_train():
netG.train()
def set_eval():
netG.eval()
# load checkpoints
if opt.load_netG_checkpoint_path is not None:
print('loading for net G ...', opt.load_netG_checkpoint_path)
netG.load_state_dict(torch.load(opt.load_netG_checkpoint_path, map_location=cuda))
if opt.continue_train:
if opt.resume_epoch < 0:
model_path = '%s/%s/netG_latest' % (opt.checkpoints_path, opt.name)
else:
model_path = '%s/%s/netG_epoch_%d' % (opt.checkpoints_path, opt.name, opt.resume_epoch)
print('Resuming from ', model_path)
netG.load_state_dict(torch.load(model_path, map_location=cuda))
os.makedirs(opt.checkpoints_path, exist_ok=True)
os.makedirs(opt.results_path, exist_ok=True)
os.makedirs('%s/%s' % (opt.checkpoints_path, opt.name), exist_ok=True)
os.makedirs('%s/%s' % (opt.results_path, opt.name), exist_ok=True)
opt_log = os.path.join(opt.results_path, opt.name, 'opt.txt')
with open(opt_log, 'w') as outfile:
outfile.write(json.dumps(vars(opt), indent=2))
# training
start_epoch = 0 if not opt.continue_train else max(opt.resume_epoch,0)
for epoch in range(start_epoch, opt.num_epoch):
epoch_start_time = time.time()
if not opt.eval_only:
set_train()
iter_data_time = time.time()
for train_idx, train_data in enumerate(train_data_loader):
iter_start_time = time.time()
# retrieve the data
image_tensor = train_data['img'].to(device=cuda)
calib_tensor = train_data['calib'].to(device=cuda)
sample_tensor = train_data['samples'].to(device=cuda)
image_tensor, calib_tensor = reshape_multiview_tensors(image_tensor, calib_tensor)
if opt.num_views > 1:
sample_tensor = reshape_sample_tensor(sample_tensor, opt.num_views)
label_tensor = train_data['labels'].to(device=cuda)
# network input and output
res, error = netG.forward(image_tensor, sample_tensor, calib_tensor, labels=label_tensor)
optimizerG.zero_grad()
error.backward()
optimizerG.step()
iter_net_time = time.time()
eta = ((iter_net_time - epoch_start_time) / (train_idx + 1)) * len(train_data_loader) - (
iter_net_time - epoch_start_time)
if train_idx % opt.freq_plot == 0:
print(
'Name: {0} | Epoch: {1} | {2}/{3} | Err: {4:.06f} | LR: {5:.06f} | Sigma: {6:.02f} | dataT: {7:.05f} | netT: {8:.05f} | ETA: {9:02d}:{10:02d}'.format(
opt.name, epoch, train_idx, len(train_data_loader), error.item(), lr, opt.sigma,
iter_start_time - iter_data_time,
iter_net_time - iter_start_time, int(eta // 60),
int(eta - 60 * (eta // 60))))
if train_idx % opt.freq_save == 0 and train_idx != 0:
torch.save(netG.state_dict(), '%s/%s/netG_latest' % (opt.checkpoints_path, opt.name))
torch.save(netG.state_dict(), '%s/%s/netG_epoch_%d' % (opt.checkpoints_path, opt.name, epoch))
if train_idx % opt.freq_save_ply == 0:
save_path = '%s/%s/pred.ply' % (opt.results_path, opt.name)
r = res[0].cpu()
points = sample_tensor[0].transpose(0, 1).cpu()
save_samples_truncted_prob(save_path, points.detach().numpy(), r.detach().numpy())
iter_data_time = time.time()
# update learning rate
lr = adjust_learning_rate(optimizerG, epoch, lr, opt.schedule, opt.gamma)
#### test
with torch.no_grad():
set_eval()
if not opt.no_num_eval:
test_losses = {}
print('calc error (test) ...')
test_errors = calc_error(opt, netG, cuda, test_dataset, 100)
print('eval test MSE: {0:06f} IOU: {1:06f} prec: {2:06f} recall: {3:06f}'.format(*test_errors))
MSE, IOU, prec, recall = test_errors
test_losses['MSE(test)'] = MSE
test_losses['IOU(test)'] = IOU
test_losses['prec(test)'] = prec
test_losses['recall(test)'] = recall
print('calc error (train) ...')
train_dataset.is_train = False
train_errors = calc_error(opt, netG, cuda, train_dataset, 100)
train_dataset.is_train = True
print('eval train MSE: {0:06f} IOU: {1:06f} prec: {2:06f} recall: {3:06f}'.format(*train_errors))
MSE, IOU, prec, recall = train_errors
test_losses['MSE(train)'] = MSE
test_losses['IOU(train)'] = IOU
test_losses['prec(train)'] = prec
test_losses['recall(train)'] = recall
# if not opt.no_gen_mesh:
# print('generate mesh (test) ...')
# for gen_idx in tqdm(range(opt.num_gen_mesh_test)):
# test_data = random.choice(test_dataset)
# save_path = '%s/%s/test_eval_epoch%d_%s.obj' % (
# opt.results_path, opt.name, epoch, test_data['name'])
# gen_mesh(opt, netG, cuda, test_data, save_path)
#
# print('generate mesh (train) ...')
# train_dataset.is_train = False
# for gen_idx in tqdm(range(opt.num_gen_mesh_test)):
# train_data = random.choice(train_dataset)
# save_path = '%s/%s/train_eval_epoch%d_%s.obj' % (
# opt.results_path, opt.name, epoch, train_data['name'])
# gen_mesh(opt, netG, cuda, train_data, save_path)
# train_dataset.is_train = True
if not opt.no_gen_mri:
print('generate mri (test) ...')
for gen_idx in tqdm(range(opt.num_gen_mesh_test)):
test_data = random.choice(test_dataset)
save_path = '%s/%s/test_eval_epoch%d_%s.obj' % (
opt.results_path, opt.name, epoch, test_data['name'])
gen_mri(opt, netG, cuda, test_data, save_path)
print('generate mri (train) ...')
train_dataset.is_train = False
for gen_idx in tqdm(range(opt.num_gen_mesh_test)):
train_data = random.choice(train_dataset)
save_path = '%s/%s/train_eval_epoch%d_%s.obj' % (
opt.results_path, opt.name, epoch, train_data['name'])
gen_mri(opt, netG, cuda, train_data, save_path)
train_dataset.is_train = True
if opt.eval_only:
break
if __name__ == '__main__':
train(opt) |
py | 1a47cbf568269631c3a68dff7c3bf85ee79d37c6 | """
Copyright [2009-2019] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from caching.base import CachingMixin, CachingManager
from django.db import models
from portal.models import RnaPrecomputed
class EnsemblCompara(CachingMixin, models.Model):
id = models.IntegerField(primary_key=True)
ensembl_transcript_id = models.TextField()
urs_taxid = models.ForeignKey(RnaPrecomputed, to_field='id', db_column='urs_taxid', on_delete=models.CASCADE)
homology_id = models.IntegerField()
objects = CachingManager()
class Meta:
db_table = 'ensembl_compara'
|
py | 1a47cf1fe075a1ebdb51ad10c45c0539082dc4ae | import rolls
class Atributes:
def __init__(self):
self.values = {
"strength": 10,
"dexterity": 10,
"constitution": 10,
"intelligence": 10,
"wisdom": 10,
"charisma": 10
}
self.modifiers = {
"strength": 0,
"dexterity": 0,
"constitution": 0,
"intelligence": 0,
"wisdom": 0,
"charisma": 0
}
self.maxValue = {
"strength": 20,
"dexterity": 20,
"constitution": 20,
"intelligence": 20,
"wisdom": 20,
"charisma": 20
}
def rollAtributes(self,min_val=1,max_val=6, maxAtributeValue=20, numberOfDice=4):
self.values = {
"strength": min(rolls.rollAtribute(min_val,max_val,numberOfDice),maxAtributeValue),
"dexterity": min(rolls.rollAtribute(min_val,max_val,numberOfDice),maxAtributeValue),
"constitution": min(rolls.rollAtribute(min_val,max_val,numberOfDice),maxAtributeValue),
"intelligence": min(rolls.rollAtribute(min_val,max_val,numberOfDice),maxAtributeValue),
"wisdom": min(rolls.rollAtribute(min_val,max_val,numberOfDice),maxAtributeValue),
"charisma": min(rolls.rollAtribute(min_val,max_val,numberOfDice),maxAtributeValue)
}
self.setModifiers()
def setAtribute(self,atribute,value):
try:
self.values[atribute] = value
self.setModifiers()
except:
print("bad selection")
def setAtributes(self,atributes):
if isinstance(atributes,dict):
# If you pass in atributes as a dictionary
for key in self.values.keys():
self.values[key] = atributes[key]
else:
# If you pass in atributes as a list
for k,key in enumerate(self.values.keys()):
self.values[key] = atributes[k]
self.setModifiers()
def setModifiers(self):
keys = self.modifiers.keys()
for key in keys:
self.modifiers[key] = (self.values[key] - 10)//2
def addClassMods(self, classMods):
for key in self.values.keys():
self.maxValue[key] = 20+classMods[key]
self.values[key] = min(self.maxValue[key], self.values[key]+classMods[key])
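# Illustrative usage sketch (added for clarity, not part of the original module):
#
#     a = Atributes()
#     a.setAtributes([15, 14, 13, 12, 10, 8])  # STR, DEX, CON, INT, WIS, CHA
#     a.modifiers['strength']                  # -> (15 - 10) // 2 == 2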
|
py | 1a47cf671cfa48b3b0ff9236f16be0fedadd2f3d | #!/usr/bin/env python
##
# Author: Yorick Peterse
# Website: http://www.yorickpeterse.com/
# Description: SpeedTouch Key is a Python script that generates (possible) keys
# for a SpeedTouch Wireless network that uses the default SSID/password
# combination based on the serial number.
#
# Imports
from hashlib import sha1
from multiprocessing import Process
import sys
import os
import itertools
# =============================================
# ============= 1: Initialization =============
# =============================================
# Fancy boot screen
print "==================================\n==== SpeedTouch Key Generator ====\n==================================\n"
# SSID part
ssid_part = raw_input("Enter the last 6 characters of the SSID: ")
ssid_part = ssid_part.upper().strip()
# Validate the SSID part
if len(ssid_part) != 6:
print "ERROR: The specified part of the SSID is invalid, it should be exactly 6 characters long"
sys.exit()
else:
pass
# Required variables in order to generate the serial number
production_years = ['04', '05','06','07','08','09','10', '11']
production_weeks = range(1,53)
# Required in order to generate the unit number
chars_string = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
list_items = []
unit_numbers = []
# Convert the string to a list
for char in chars_string:
list_items.append(char)
# Dict containing the final results
processed_years = []
password_dict = {}
# Get all possible 3-character arrangements (permutations)
combinations = itertools.permutations(list_items,3)
# =============================================
# ========= 2: Combination generation =========
# =============================================
# Loop through each combination, convert it to a string and add it to the list
for combination in combinations:
# Convert to string and to hex
to_append = '%s%s%s' % (combination[0],combination[1],combination[2])
to_append = to_append.encode('hex')
    # Append the hex-encoded unit number to the list (permutations are already unique)
unit_numbers.append(to_append)
# =============================================
# ============ 3: Main application ============
# =============================================
# Generator function
def generator(year,weeks,units):
# Loop through each week
for week in weeks:
# Loop through each possible unit number
for unit in units:
# Create the serial number
serial = 'CP%s%s%s' % (year,week,unit)
serial = serial.upper()
# Hash the serial using SHA-1
serial_hash = sha1(serial).hexdigest().upper()
# Get the last bit and compare it to the input, print the key if it matches
last_bit = serial_hash[-6:]
password = serial_hash[:10]
if last_bit == ssid_part:
# Add the password to the dictionary
print
print " * Possible password: %s" % (password)
print " Year: %s" % (year)
print " Week: %s" % (week)
print " Combo: %s" % (unit)
print " Serial: %s" % (serial)
sys.exit()
# Main part, this is where most of the work is done
# Loop through each year and create a new process
if __name__ == '__main__':
print "Generating possible passwords..."
for year in production_years:
p = Process(target=generator, args=(year,production_weeks,unit_numbers))
p.start()
|
py | 1a47cfd2c805016bfbe18624153075fc5c0cc25c | from .goosepaper import Goosepaper # noqa
__version__ = "0.5.0"
|
py | 1a47d0652aa8e14ac61346e6b52f6e841ee4dd89 | # -*- coding: utf-8 -*-
#/usr/bin/python2
'''
By kyubyong park. [email protected].
https://www.github.com/kyubyong/kss
'''
from __future__ import print_function, division
import numpy as np
import librosa
import os, copy
import matplotlib
matplotlib.use('pdf')
import matplotlib.pyplot as plt
from scipy import signal
from hyperparams import Hyperparams as hp
import tensorflow as tf
def get_spectrograms(fpath):
'''Parse the wave file in `fpath` and
    return the normalized mel spectrogram and linear spectrogram.
Args:
fpath: A string. The full path of a sound file.
Returns:
mel: A 2d array of shape (T, n_mels) and dtype of float32.
mag: A 2d array of shape (T, 1+n_fft/2) and dtype of float32.
'''
# Loading sound file
y, sr = librosa.load(fpath, sr=hp.sr)
# Trimming
y, _ = librosa.effects.trim(y, top_db=40)
# Preemphasis
y = np.append(y[0], y[1:] - hp.preemphasis * y[:-1])
# stft
linear = librosa.stft(y=y,
n_fft=hp.n_fft,
hop_length=hp.hop_length,
win_length=hp.win_length)
# magnitude spectrogram
mag = np.abs(linear) # (1+n_fft//2, T)
# mel spectrogram
mel_basis = librosa.filters.mel(hp.sr, hp.n_fft, hp.n_mels) # (n_mels, 1+n_fft//2)
mel = np.dot(mel_basis, mag) # (n_mels, t)
# to decibel
mel = 20 * np.log10(np.maximum(1e-5, mel))
mag = 20 * np.log10(np.maximum(1e-5, mag))
# normalize
mel = np.clip((mel - hp.ref_db + hp.max_db) / hp.max_db, 1e-8, 1)
mag = np.clip((mag - hp.ref_db + hp.max_db) / hp.max_db, 1e-8, 1)
# Transpose
mel = mel.T.astype(np.float32) # (T, n_mels)
mag = mag.T.astype(np.float32) # (T, 1+n_fft//2)
return mel, mag
def spectrogram2wav(mag):
    '''Generate a wave file from a linear magnitude spectrogram.
Args:
mag: A numpy array of (T, 1+n_fft//2)
Returns:
wav: A 1-D numpy array.
'''
# transpose
mag = mag.T
    # de-normalize
mag = (np.clip(mag, 0, 1) * hp.max_db) - hp.max_db + hp.ref_db
# to amplitude
mag = np.power(10.0, mag * 0.05)
# wav reconstruction
wav = griffin_lim(mag**hp.power)
# de-preemphasis
wav = signal.lfilter([1], [1, -hp.preemphasis], wav)
# trim
wav = trim(wav)
return wav.astype(np.float32)
def griffin_lim(spectrogram):
    '''Applies the Griffin-Lim algorithm.'''
X_best = copy.deepcopy(spectrogram)
for i in range(hp.n_iter):
X_t = invert_spectrogram(X_best)
est = librosa.stft(X_t, hp.n_fft, hp.hop_length, win_length=hp.win_length)
phase = est / np.maximum(1e-8, np.abs(est))
X_best = spectrogram * phase
X_t = invert_spectrogram(X_best)
y = np.real(X_t)
return y
def invert_spectrogram(spectrogram):
'''Applies inverse fft.
Args:
spectrogram: [1+n_fft//2, t]
'''
return librosa.istft(spectrogram, hp.hop_length, win_length=hp.win_length, window="hann")
def plot_alignment(alignment, gs, dir=hp.logdir):
"""Plots the alignment.
Args:
alignment: A numpy array with shape of (encoder_steps, decoder_steps)
gs: (int) global step.
dir: Output path.
"""
if not os.path.exists(dir): os.mkdir(dir)
fig, ax = plt.subplots()
im = ax.imshow(alignment)
fig.colorbar(im)
plt.title('{} Steps'.format(gs))
plt.savefig('{}/alignment_{}.png'.format(dir, gs), format='png')
def guided_attention(g=0.2):
    '''Guided attention. Refer to page 3 of the paper.'''
W = np.zeros((hp.max_N, hp.max_T), dtype=np.float32)
for n_pos in range(W.shape[0]):
for t_pos in range(W.shape[1]):
W[n_pos, t_pos] = 1 - np.exp(-(t_pos / float(hp.max_T) - n_pos / float(hp.max_N)) ** 2 / (2 * g * g))
return W
def learning_rate_decay(init_lr, global_step, warmup_steps = 4000.0):
'''Noam scheme from tensor2tensor'''
step = tf.to_float(global_step + 1)
return init_lr * warmup_steps**0.5 * tf.minimum(step * warmup_steps**-1.5, step**-0.5)
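# Note added for illustration (not in the original): with the Noam schedule above, the
# rate rises linearly to exactly init_lr at step == warmup_steps and then decays
# proportionally to step**-0.5; e.g. init_lr=1e-3, warmup_steps=4000 peaks at 1e-3.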
def load_spectrograms(fpath):
'''Read the wave file in `fpath`
    and extract its spectrograms.'''
fname = os.path.basename(fpath)
mel, mag = get_spectrograms(fpath)
t = mel.shape[0]
# Marginal padding for reduction shape sync.
num_paddings = hp.r - (t % hp.r) if t % hp.r != 0 else 0
mel = np.pad(mel, [[0, num_paddings], [0, 0]], mode="constant")
mag = np.pad(mag, [[0, num_paddings], [0, 0]], mode="constant")
# Reduction
mel = mel[::hp.r, :]
return fname, mel, mag
# This is adapted from
# https://github.com/keithito/tacotron/blob/master/util/audio.py#L55-62
def trim(wav, top_db=40, min_silence_sec=0.8):
frame_length = int(hp.sr * min_silence_sec)
hop_length = int(frame_length / 4)
endpoint = librosa.effects.split(wav, frame_length=frame_length,
hop_length=hop_length,
top_db=top_db)[0, 1]
return wav[:endpoint]
def load_j2hcj():
'''
Arg:
jamo: A Hangul Jamo character(0x01100-0x011FF)
Returns:
A dictionary that converts jamo into Hangul Compatibility Jamo(0x03130 - 0x0318F) Character
'''
jamo = u'''␀␃ !,.?ᄀᄁᄂᄃᄄᄅᄆᄇᄈᄉᄊᄋᄌᄍᄎᄏᄐᄑ하ᅢᅣᅤᅥᅦᅧᅨᅩᅪᅫᅬᅭᅮᅯᅰᅱᅲᅳᅴᅵᆨᆩᆪᆫᆬᆭᆮᆯᆰᆱᆲᆴᆶᆷᆸᆹᆺᆻᆼᆽᆾᆿᇀᇁᇂ'''
hcj = u'''␀␃ !,.?ㄱㄲㄴㄷㄸㄹㅁㅂㅃㅅㅆㅇㅈㅉㅊㅋㅌㅍㅎㅏㅐㅑㅒㅓㅔㅕㅖㅗㅘㅙㅚㅛㅜㅝㅞㅟㅠㅡㅢㅣㄱㄲㄳㄴㄵㄶㄷㄹㄺㄻㄼㄾㅀㅁㅂㅄㅅㅆㅇㅈㅊㅋㅌㅍㅎ'''
assert len(jamo) == len(hcj)
j2hcj = {j: h for j, h in zip(jamo, hcj)}
return j2hcj
def load_j2sj():
'''
Arg:
jamo: A Hangul Jamo character(0x01100-0x011FF)
Returns:
A dictionary that decomposes double consonants into two single consonants.
'''
jamo = u'''␀␃ !,.?ᄀᄁᄂᄃᄄᄅᄆᄇᄈᄉᄊᄋᄌᄍᄎᄏᄐᄑ하ᅢᅣᅤᅥᅦᅧᅨᅩᅪᅫᅬᅭᅮᅯᅰᅱᅲᅳᅴᅵᆨᆩᆪᆫᆬᆭᆮᆯᆰᆱᆲᆴᆶᆷᆸᆹᆺᆻᆼᆽᆾᆿᇀᇁᇂ'''
sj = u'''␀|␃| |!|,|.|?|ᄀ|ᄀᄀ|ᄂ|ᄃ|ᄃᄃ|ᄅ|ᄆ|ᄇ|ᄇᄇ|ᄉ|ᄉᄉ|ᄋ|ᄌ|ᄌᄌ|ᄎ|ᄏ|ᄐ|ᄑ|ᄒ|ᅡ|ᅢ|ᅣ|ᅤ|ᅥ|ᅦ|ᅧ|ᅨ|ᅩ|ᅪ|ᅫ|ᅬ|ᅭ|ᅮ|ᅯ|ᅰ|ᅱ|ᅲ|ᅳ|ᅴ|ᅵ|ᆨ|ᆨᆨ|ᆨᆺ|ᆫ|ᆫᆽ|ᆫᇂ|ᆮ|ᆯ|ᆯᆨ|ᆯᆷ|ᆯᆸ|ᆯᇀ|ᆯᇂ|ᆷ|ᆸ|ᆸᆺ|ᆺ|ᆺᆺ|ᆼ|ᆽ|ᆾ|ᆿ|ᇀ|ᇁ|ᇂ'''
assert len(jamo)==len(sj.split("|"))
j2sj = {j: s for j, s in zip(jamo, sj.split("|"))}
return j2sj
def load_j2shcj():
'''
Arg:
jamo: A Hangul Jamo character(0x01100-0x011FF)
Returns:
A dictionary that converts jamo into Hangul Compatibility Jamo(0x03130 - 0x0318F) Character.
Double consonants are further decomposed into single consonants.
'''
jamo = u'''␀␃ !,.?ᄀᄁᄂᄃᄄᄅᄆᄇᄈᄉᄊᄋᄌᄍᄎᄏᄐᄑ하ᅢᅣᅤᅥᅦᅧᅨᅩᅪᅫᅬᅭᅮᅯᅰᅱᅲᅳᅴᅵᆨᆩᆪᆫᆬᆭᆮᆯᆰᆱᆲᆴᆶᆷᆸᆹᆺᆻᆼᆽᆾᆿᇀᇁᇂ'''
shcj = u'''␀|␃| |!|,|.|?|ㄱ|ㄱㄱ|ㄴ|ㄷ|ㄷㄷ|ㄹ|ㅁ|ㅂ|ㅂㅂ|ㅅ|ㅅㅅ|ㅇ|ㅈ|ㅈㅈ|ㅊ|ㅋ|ㅌ|ㅍ|ㅎ|ㅏ|ㅐ|ㅑ|ㅒ|ㅓ|ㅔ|ㅕ|ㅖ|ㅗ|ㅘ|ㅙ|ㅚ|ㅛ|ㅜ|ㅝ|ㅞ|ㅟ|ㅠ|ㅡ|ㅢ|ㅣ|ㄱ|ㄱㄱ|ㄱㅅ|ㄴ|ㄴㅈ|ㄴㅎ|ㄷ|ㄹ|ㄹㄱ|ㄹㅁ|ㄹㅂ|ㄹㅌ|ㄹㅎ|ㅁ|ㅂ|ㅂㅅ|ㅅ|ㅅㅅ|ㅇ|ㅈ|ㅊ|ㅋ|ㅌ|ㅍ|ㅎ'''
assert len(jamo)==len(shcj.split("|"))
j2shcj = {j: s for j, s in zip(jamo, shcj.split("|"))}
return j2shcj
|
py | 1a47d29101c20ffbdaf69e2acd45c039bebc035a | #
# Copyright (c) 2018 TECHNICAL UNIVERSITY OF MUNICH, DEPARTMENT OF MECHANICAL ENGINEERING, CHAIR OF APPLIED MECHANICS,
# BOLTZMANNSTRASSE 15, 85748 GARCHING/MUNICH, GERMANY, [email protected].
#
# Distributed under 3-Clause BSD license. See LICENSE file for more information.
#
"""
Tools for assembly module.
"""
__all__ = [
'get_index_of_csr_data',
'fill_csr_matrix'
]
# try to import the fortran routines
use_fortran = False
try:
import amfe.f90_assembly
use_fortran = True
except ImportError:
print('Python was not able to load the fast fortran assembly routines.')
def get_index_of_csr_data(i, j, indptr, indices):
"""
Get the value index of the i,j-element of a matrix in CSR format.
Parameters
----------
    i : int
        row index of the entry whose index in the CSR data array is requested
    j : int
        column index of the entry whose index in the CSR data array is requested
indptr : ndarray
index-ptr-Array of the CSR-Matrix.
indices : ndarray
indices array of CSR-matrix (represents the nonzero column indices)
Returns
-------
k : int
index of the value array of the CSR-matrix, in which value [i,j] is stored.
Notes
-----
    This routine works only if the tuple (i, j) is actually a stored (nonzero) entry of the matrix. Otherwise the value k=0 will be
    returned and an error message will be printed.
"""
    # indices for row i are stored in indices[indptr[i]:indptr[i+1]]; thus the indptr marks the start and end of the
    # part of the indices and vals vectors where all entries of a row are stored
    # set k to the start of data of row i
k = indptr[i]
    # search for the appearance of j in the nonzero column indices, which are stored in indices[indptr[i]] up to
    # indices[indptr[i+1]]
while j != indices[k]:
# while column j not found search for j in next entry
k += 1
# Check if next search would be in next (wrong) row
if k > indptr[i + 1]:
print('ERROR! The index in the csr matrix is not preallocated!')
k = 0
break
return k
def fill_csr_matrix(indptr, indices, vals, K, k_indices):
"""
Fill the values of K into the vals-array of a sparse CSR Matrix given the k_indices array. The values of K are
added to the current values (typically for assembly processes)
Parameters
----------
indptr : ndarray
indptr-array of a preallocated CSR-Matrix
indices : ndarray
indices-array of a preallocated CSR-Matrix
vals : ndarray
        vals-array of a preallocated CSR-Matrix
K : ndarray
'small' square array whose values will be distributed into the
CSR-Matrix, Shape is (n,n)
k_indices : ndarray
mapping array of the global indices for the 'small' K array.
The (i,j) entry of K has the global indices (k_indices[i], k_indices[j])
Shape is (n,)
Returns
-------
None
"""
ndof_l = K.shape[0]
for i in range(ndof_l):
for j in range(ndof_l):
l = get_index_of_csr_data(k_indices[i], k_indices[j], indptr, indices)
vals[l] += K[i, j]
return
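# Illustrative usage sketch (added for clarity; NumPy/SciPy are assumed here and are
# not imported by this module):
#
#     import numpy as np
#     from scipy.sparse import csr_matrix
#     A = csr_matrix(np.array([[1.0, 0.0], [2.0, 3.0]]))
#     k = get_index_of_csr_data(1, 0, A.indptr, A.indices)             # entry (1, 0)
#     assert A.data[k] == 2.0
#     fill_csr_matrix(A.indptr, A.indices, A.data, np.array([[1.0]]), np.array([0]))
#     assert A[0, 0] == 2.0                                            # 1.0 was added in place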
if use_fortran:
###########################################################################
# Fortran routine that will override the functions above for massive speedup.
###########################################################################
get_index_of_csr_data = amfe.f90_assembly.get_index_of_csr_data
fill_csr_matrix = amfe.f90_assembly.fill_csr_matrix
|
py | 1a47d35a76bc4b08bd13fdc99fe44334614b43f0 | #!/usr/bin/env python
import requests
from dnacentersdk import DNACenterAPI
from pprint import pprint
dnac_url = "https://sandboxdnac2.cisco.com"
dnac_username = "devnetuser"
dnac_password = "Cisco123!"
if (__name__ == "__main__"):
dnac = DNACenterAPI(username=dnac_username, password=dnac_password, base_url=dnac_url)
dnac_devices = dnac.devices.get_device_list()
headers = ["Hostname","IP","Family"]
header_format = "{:<25}{:<15}{:<15}"
print(header_format.format(*headers))
for dnac_device in dnac_devices['response']:
dnac_device_details = [
dnac_device['hostname'] or "N/A",
dnac_device['managementIpAddress'] or "N/A",
dnac_device['family'] or "N/A"
]
print(header_format.format(*dnac_device_details))
|
py | 1a47d377ae8e8367381ca425e257193efb3d4f3e | # Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib import exceptions
from highlanderclient.tests.functional.cli.v1 import base_v1
class StandardItemsAvailabilityCLITests(base_v1.HighlanderClientTestBase):
def test_std_workflows_availability(self):
wfs = self.highlander_admin("workflow-list")
self.assertTableStruct(
wfs,
["Name", "Tags", "Input", "Created at", "Updated at"]
)
self.assertIn("std.create_instance",
[workflow["Name"] for workflow in wfs])
wfs = self.highlander_alt_user("workflow-list")
self.assertTableStruct(
wfs,
["Name", "Tags", "Input", "Created at", "Updated at"]
)
self.assertIn("std.create_instance",
[workflow["Name"] for workflow in wfs])
def test_std_actions_availability(self):
acts = self.highlander_admin("action-list")
self.assertTableStruct(
acts,
["Name", "Is system", "Input", "Description",
"Tags", "Created at", "Updated at"]
)
self.assertIn("glance.images_list",
[action["Name"] for action in acts])
acts = self.highlander_alt_user("action-list")
self.assertTableStruct(
acts,
["Name", "Is system", "Input", "Description",
"Tags", "Created at", "Updated at"]
)
self.assertIn("glance.images_list",
[action["Name"] for action in acts])
class WorkbookIsolationCLITests(base_v1.HighlanderClientTestBase):
def test_workbook_name_uniqueness(self):
self.workbook_create(self.wb_def)
self.assertRaises(
exceptions.CommandFailed,
self.highlander_admin,
"workbook-create",
params="{0}".format(self.wb_def)
)
self.workbook_create(self.wb_def, admin=False)
self.assertRaises(
exceptions.CommandFailed,
self.highlander_alt_user,
"workbook-create",
params="{0}".format(self.wb_def)
)
def test_wb_isolation(self):
wb = self.workbook_create(self.wb_def)
wb_name = self.get_value_of_field(wb, "Name")
wbs = self.highlander_admin("workbook-list")
self.assertIn(wb_name, [w["Name"] for w in wbs])
alt_wbs = self.highlander_alt_user("workbook-list")
self.assertNotIn(wb_name, [w["Name"] for w in alt_wbs])
def test_get_wb_from_another_tenant(self):
wb = self.workbook_create(self.wb_def)
name = self.get_value_of_field(wb, "Name")
self.assertRaises(
exceptions.CommandFailed,
self.highlander_alt_user,
"workbook-get",
params=name
)
def test_delete_wb_from_another_tenant(self):
wb = self.workbook_create(self.wb_def)
name = self.get_value_of_field(wb, "Name")
self.assertRaises(
exceptions.CommandFailed,
self.highlander_alt_user,
"workbook-delete",
params=name
)
class WorkflowIsolationCLITests(base_v1.HighlanderClientTestBase):
def test_workflow_name_uniqueness(self):
self.workflow_create(self.wf_def)
self.assertRaises(
exceptions.CommandFailed,
self.highlander_admin,
"workflow-create",
params="{0}".format(self.wf_def)
)
self.workflow_create(self.wf_def, admin=False)
self.assertRaises(
exceptions.CommandFailed,
self.highlander_alt_user,
"workflow-create",
params="{0}".format(self.wf_def)
)
def test_wf_isolation(self):
wf = self.workflow_create(self.wf_def)
wfs = self.highlander_admin("workflow-list")
self.assertIn(wf[0]["Name"], [w["Name"] for w in wfs])
alt_wfs = self.highlander_alt_user("workflow-list")
self.assertNotIn(wf[0]["Name"], [w["Name"] for w in alt_wfs])
def test_get_wf_from_another_tenant(self):
wf = self.workflow_create(self.wf_def)
self.assertRaises(
exceptions.CommandFailed,
self.highlander_alt_user,
"workflow-get",
params=wf[0]["Name"]
)
def test_delete_wf_from_another_tenant(self):
wf = self.workflow_create(self.wf_def)
self.assertRaises(
exceptions.CommandFailed,
self.highlander_alt_user,
"workflow-delete",
params=wf[0]["Name"]
)
class ActionIsolationCLITests(base_v1.HighlanderClientTestBase):
def test_actions_name_uniqueness(self):
self.action_create(self.act_def)
self.assertRaises(
exceptions.CommandFailed,
self.highlander_admin,
"action-create",
params="{0}".format(self.act_def)
)
self.action_create(self.act_def, admin=False)
self.assertRaises(
exceptions.CommandFailed,
self.highlander_alt_user,
"action-create",
params="{0}".format(self.act_def)
)
def test_action_isolation(self):
act = self.action_create(self.act_def)
acts = self.highlander_admin("action-list")
self.assertIn(act[0]["Name"], [a["Name"] for a in acts])
alt_acts = self.highlander_alt_user("action-list")
self.assertNotIn(act[0]["Name"], [a["Name"] for a in alt_acts])
def test_get_action_from_another_tenant(self):
act = self.action_create(self.act_def)
self.assertRaises(
exceptions.CommandFailed,
self.highlander_alt_user,
"action-get",
params=act[0]["Name"]
)
def test_delete_action_from_another_tenant(self):
act = self.action_create(self.act_def)
self.assertRaises(
exceptions.CommandFailed,
self.highlander_alt_user,
"action-delete",
params=act[0]["Name"]
)
class CronTriggerIsolationCLITests(base_v1.HighlanderClientTestBase):
def test_cron_trigger_name_uniqueness(self):
wf = self.workflow_create(self.wf_def)
self.cron_trigger_create(
"trigger", wf[0]["Name"], "{}", "5 * * * *")
self.assertRaises(
exceptions.CommandFailed,
self.cron_trigger_create,
"trigger",
"5 * * * *",
wf[0]["Name"],
"{}"
)
wf = self.workflow_create(self.wf_def, admin=False)
self.cron_trigger_create("trigger", wf[0]["Name"], "{}", "5 * * * *",
None, None, admin=False)
self.assertRaises(
exceptions.CommandFailed,
self.cron_trigger_create,
"trigger", wf[0]["Name"], "{}", "5 * * * *",
None, None, admin=False
)
def test_cron_trigger_isolation(self):
wf = self.workflow_create(self.wf_def)
self.cron_trigger_create(
"trigger", wf[0]["Name"], "{}", "5 * * * *")
alt_trs = self.highlander_alt_user("cron-trigger-list")
self.assertNotIn("trigger", [t["Name"] for t in alt_trs])
class ExecutionIsolationCLITests(base_v1.HighlanderClientTestBase):
def test_execution_isolation(self):
wf = self.workflow_create(self.wf_def)
ex = self.execution_create(wf[0]["Name"])
exec_id = self.get_value_of_field(ex, "ID")
execs = self.highlander_admin("execution-list")
self.assertIn(exec_id, [e["ID"] for e in execs])
alt_execs = self.highlander_alt_user("execution-list")
self.assertNotIn(exec_id, [e["ID"] for e in alt_execs])
def test_get_execution_from_another_tenant(self):
wf = self.workflow_create(self.wf_def)
ex = self.execution_create(wf[0]["Name"])
exec_id = self.get_value_of_field(ex, "ID")
self.assertRaises(
exceptions.CommandFailed,
self.highlander_alt_user,
"execution-get",
params=exec_id
)
class EnvironmentIsolationCLITests(base_v1.HighlanderClientTestBase):
def setUp(self):
super(EnvironmentIsolationCLITests, self).setUp()
self.env_file = "env.yaml"
self.create_file("{0}".format(self.env_file),
"name: env\n"
"description: Test env\n"
"variables:\n"
" var: value")
def test_environment_name_uniqueness(self):
self.environment_create(self.env_file)
self.assertRaises(
exceptions.CommandFailed,
self.highlander_admin,
"environment-create",
params=self.env_file
)
self.environment_create(self.env_file, admin=False)
self.assertRaises(
exceptions.CommandFailed,
self.highlander_alt_user,
"environment-create",
params=self.env_file
)
def test_environment_isolation(self):
env = self.environment_create(self.env_file)
env_name = self.get_value_of_field(env, "Name")
envs = self.highlander_admin("environment-list")
self.assertIn(env_name, [en["Name"] for en in envs])
alt_envs = self.highlander_alt_user("environment-list")
self.assertNotIn(env_name, [en["Name"] for en in alt_envs])
def test_get_env_from_another_tenant(self):
env = self.environment_create(self.env_file)
env_name = self.get_value_of_field(env, "Name")
self.assertRaises(
exceptions.CommandFailed,
self.highlander_alt_user,
"environment-get",
params=env_name
)
def test_delete_env_from_another_tenant(self):
env = self.environment_create(self.env_file)
env_name = self.get_value_of_field(env, "Name")
self.assertRaises(
exceptions.CommandFailed,
self.highlander_alt_user,
"environment-delete",
params=env_name
)
class ActionExecutionIsolationCLITests(base_v1.HighlanderClientTestBase):
def test_action_execution_isolation(self):
wf = self.workflow_create(self.wf_def)
wf_exec = self.execution_create(wf[0]["Name"])
direct_ex_id = self.get_value_of_field(wf_exec, 'ID')
self.wait_execution_success(direct_ex_id)
act_execs = self.highlander_admin("action-execution-list")
self.assertIn(wf[0]["Name"],
[act["Workflow name"] for act in act_execs])
alt_act_execs = self.highlander_alt_user("action-execution-list")
self.assertNotIn(wf[0]["Name"],
[act["Workflow name"] for act in alt_act_execs])
def test_get_action_execution_from_another_tenant(self):
wf = self.workflow_create(self.wf_def)
ex = self.execution_create(wf[0]["Name"])
exec_id = self.get_value_of_field(ex, "ID")
self.assertRaises(
exceptions.CommandFailed,
self.highlander_alt_user,
"action-execution-get",
params=exec_id
)
|
py | 1a47d42e4c893c87a5f2196cd35e569fb0b9c9bb | import sys
import ASAPPpy.feature_extraction as fe
import ASAPPpy.chatbot as cht
from importlib import reload
word2vec_model = None
fasttext_model = None
ptlkb64_model = None
glove300_model = None
numberbatch_model = None
if __name__ == "__main__":
models_loaded = 0
while True:
if models_loaded == 0:
word2vec_model, fasttext_model, ptlkb64_model, glove300_model, numberbatch_model = fe.load_embeddings_models()
models_loaded = 1
cht.chatbot(word2vec_model=word2vec_model, fasttext_model=fasttext_model, ptlkb64_model=ptlkb64_model, glove300_model=glove300_model, numberbatch_model=numberbatch_model)
print("Press enter to re-run the script, CTRL-C to exit")
sys.stdin.readline()
reload(cht)
|
py | 1a47d434716fed7c98a58fd7625f833f83eb414e | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class CreateJobExecutionPlanFolderRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Emr', '2016-04-08', 'CreateJobExecutionPlanFolder')
def get_ResourceOwnerId(self):
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self,ResourceOwnerId):
self.add_query_param('ResourceOwnerId',ResourceOwnerId)
def get_Name(self):
return self.get_query_params().get('Name')
def set_Name(self,Name):
self.add_query_param('Name',Name)
def get_ParentId(self):
return self.get_query_params().get('ParentId')
def set_ParentId(self,ParentId):
self.add_query_param('ParentId',ParentId) |
py | 1a47d467c97accde09c7d91fc3997a0d60af8479 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Company.logo'
db.add_column('companies_company', 'logo',
self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Company.logo'
db.delete_column('companies_company', 'logo')
models = {
'companies.company': {
'Meta': {'object_name': 'Company'},
'_about_rendered': ('django.db.models.fields.TextField', [], {}),
'about': ('markupfield.fields.MarkupField', [], {'rendered_field': 'True', 'blank': 'True'}),
'about_markup_type': ('django.db.models.fields.CharField', [], {'default': "'restructuredtext'", 'max_length': '30', 'blank': 'True'}),
'contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['companies'] |
py | 1a47d489e9401559e508f004639a94ea2ae80165 | """ Module to run the example files and report their success/failure results
Add a function to the ExampleTest class corresponding to an example script to
be tested.
This is done till better strategy for parallel testing is implemented
"""
from pytest import mark
from .example_test_case import ExampleTestCase, get_example_script
from pysph.base.nnps import get_number_of_threads
@mark.skipif(get_number_of_threads() == 1, reason="N_threads=1; OpenMP does not seem available.")
class TestOpenMPExamples(ExampleTestCase):
@mark.slow
def test_3Ddam_break_example(self):
dt = 2e-5; tf = 13*dt
serial_kwargs = dict(
timestep=dt, tf=tf, pfreq=100, test=None
)
extra_parallel_kwargs = dict(openmp=None)
# Note that we set nprocs=1 here since we do not want
# to run this with mpirun.
self.run_example(
get_example_script('sphysics/dambreak_sphysics.py'),
nprocs=1, atol=1e-14,
serial_kwargs=serial_kwargs,
extra_parallel_kwargs=extra_parallel_kwargs
)
@mark.slow
def test_elliptical_drop_example(self):
tf = 0.0076*0.25
serial_kwargs = dict(kernel='CubicSpline', tf=tf)
extra_parallel_kwargs = dict(openmp=None)
# Note that we set nprocs=1 here since we do not want
# to run this with mpirun.
self.run_example(
'elliptical_drop.py', nprocs=1, atol=1e-14,
serial_kwargs=serial_kwargs,
extra_parallel_kwargs=extra_parallel_kwargs
)
def test_ldcavity_example(self):
dt=1e-4; tf=200*dt
serial_kwargs = dict(timestep=dt, tf=tf, pfreq=500)
extra_parallel_kwargs = dict(openmp=None)
# Note that we set nprocs=1 here since we do not want
# to run this with mpirun.
self.run_example(
'cavity.py', nprocs=1, atol=1e-14,
serial_kwargs=serial_kwargs,
extra_parallel_kwargs=extra_parallel_kwargs
)
|
py | 1a47d4d70a0b19d920a420542fbfe91a67ba220e | # coding: utf-8
from __future__ import unicode_literals
from ..utils import month_by_name
from .common import InfoExtractor
class FranceInterIE(InfoExtractor):
_VALID_URL = r"https?://(?:www\.)?franceinter\.fr/emissions/(?P<id>[^?#]+)"
_TEST = {
"url": "https://www.franceinter.fr/emissions/affaires-sensibles/affaires-sensibles-07-septembre-2016",
"md5": "9e54d7bdb6fdc02a841007f8a975c094",
"info_dict": {
"id": "affaires-sensibles/affaires-sensibles-07-septembre-2016",
"ext": "mp3",
"title": "Affaire Cahuzac : le contentieux du compte en Suisse",
"description": "md5:401969c5d318c061f86bda1fa359292b",
"thumbnail": r"re:^https?://.*\.jpg",
"upload_date": "20160907",
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_url = self._search_regex(
r'(?s)<div[^>]+class=["\']page-diffusion["\'][^>]*>.*?<button[^>]+data-url=(["\'])(?P<url>(?:(?!\1).)+)\1',
webpage,
"video url",
group="url",
)
title = self._og_search_title(webpage)
description = self._og_search_description(webpage)
thumbnail = self._html_search_meta(["og:image", "twitter:image"], webpage)
upload_date_str = self._search_regex(
r'class=["\']\s*cover-emission-period\s*["\'][^>]*>[^<]+\s+(\d{1,2}\s+[^\s]+\s+\d{4})<',
webpage,
"upload date",
fatal=False,
)
if upload_date_str:
upload_date_list = upload_date_str.split()
upload_date_list.reverse()
upload_date_list[1] = "%02d" % (
month_by_name(upload_date_list[1], lang="fr") or 0
)
upload_date_list[2] = "%02d" % int(upload_date_list[2])
upload_date = "".join(upload_date_list)
else:
upload_date = None
return {
"id": video_id,
"title": title,
"description": description,
"thumbnail": thumbnail,
"upload_date": upload_date,
"formats": [
{
"url": video_url,
"vcodec": "none",
}
],
}
|
py | 1a47d5a854af857972f07497864c08dce7a76915 | """ Tensorflow implementation of the face detection / alignment algorithm found at
https://github.com/kpzhang93/MTCNN_face_detection_alignment
"""
# MIT License
#
# Copyright (c) 2016 David Sandberg
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six import string_types, iteritems
import numpy as np
import tensorflow as tf
#from math import floor
import cv2
import os
def layer(op):
'''Decorator for composable network layers.'''
def layer_decorated(self, *args, **kwargs):
# Automatically set a name if not provided.
name = kwargs.setdefault('name', self.get_unique_name(op.__name__))
# Figure out the layer inputs.
if len(self.terminals) == 0:
raise RuntimeError('No input variables found for layer %s.' % name)
elif len(self.terminals) == 1:
layer_input = self.terminals[0]
else:
layer_input = list(self.terminals)
# Perform the operation and get the output.
layer_output = op(self, layer_input, *args, **kwargs)
# Add to layer LUT.
self.layers[name] = layer_output
# This output is now the input for the next layer.
self.feed(layer_output)
# Return self for chained calls.
return self
return layer_decorated
class Network(object):
def __init__(self, inputs, trainable=True):
# The input nodes for this network
self.inputs = inputs
# The current list of terminal nodes
self.terminals = []
# Mapping from layer names to layers
self.layers = dict(inputs)
# If true, the resulting variables are set as trainable
self.trainable = trainable
self.setup()
def setup(self):
'''Construct the network. '''
raise NotImplementedError('Must be implemented by the subclass.')
def load(self, data_path, session, ignore_missing=False):
'''Load network weights.
data_path: The path to the numpy-serialized network weights
session: The current TensorFlow session
ignore_missing: If true, serialized weights for missing layers are ignored.
'''
data_dict = np.load(data_path, encoding='latin1').item() #pylint: disable=no-member
for op_name in data_dict:
with tf.variable_scope(op_name, reuse=True):
for param_name, data in iteritems(data_dict[op_name]):
try:
var = tf.get_variable(param_name)
session.run(var.assign(data))
except ValueError:
if not ignore_missing:
raise
def feed(self, *args):
'''Set the input(s) for the next operation by replacing the terminal nodes.
The arguments can be either layer names or the actual layers.
'''
assert len(args) != 0
self.terminals = []
for fed_layer in args:
if isinstance(fed_layer, string_types):
try:
fed_layer = self.layers[fed_layer]
except KeyError:
raise KeyError('Unknown layer name fed: %s' % fed_layer)
self.terminals.append(fed_layer)
return self
def get_output(self):
'''Returns the current network output.'''
return self.terminals[-1]
def get_unique_name(self, prefix):
'''Returns an index-suffixed unique name for the given prefix.
This is used for auto-generating layer names based on the type-prefix.
'''
ident = sum(t.startswith(prefix) for t, _ in self.layers.items()) + 1
return '%s_%d' % (prefix, ident)
def make_var(self, name, shape):
'''Creates a new TensorFlow variable.'''
return tf.get_variable(name, shape, trainable=self.trainable)
def validate_padding(self, padding):
'''Verifies that the padding is one of the supported ones.'''
assert padding in ('SAME', 'VALID')
@layer
def conv(self,
inp,
k_h,
k_w,
c_o,
s_h,
s_w,
name,
relu=True,
padding='SAME',
group=1,
biased=True):
# Verify that the padding is acceptable
self.validate_padding(padding)
# Get the number of channels in the input
c_i = int(inp.get_shape()[-1])
# Verify that the grouping parameter is valid
assert c_i % group == 0
assert c_o % group == 0
# Convolution for a given input and kernel
convolve = lambda i, k: tf.nn.conv2d(i, k, [1, s_h, s_w, 1], padding=padding)
with tf.variable_scope(name) as scope:
kernel = self.make_var('weights', shape=[k_h, k_w, c_i // group, c_o])
# This is the common-case. Convolve the input without any further complications.
output = convolve(inp, kernel)
# Add the biases
if biased:
biases = self.make_var('biases', [c_o])
output = tf.nn.bias_add(output, biases)
if relu:
# ReLU non-linearity
output = tf.nn.relu(output, name=scope.name)
return output
@layer
def prelu(self, inp, name):
with tf.variable_scope(name):
i = int(inp.get_shape()[-1])
alpha = self.make_var('alpha', shape=(i,))
output = tf.nn.relu(inp) + tf.multiply(alpha, -tf.nn.relu(-inp))
return output
@layer
def max_pool(self, inp, k_h, k_w, s_h, s_w, name, padding='SAME'):
self.validate_padding(padding)
return tf.nn.max_pool(inp,
ksize=[1, k_h, k_w, 1],
strides=[1, s_h, s_w, 1],
padding=padding,
name=name)
@layer
def fc(self, inp, num_out, name, relu=True):
with tf.variable_scope(name):
input_shape = inp.get_shape()
if input_shape.ndims == 4:
# The input is spatial. Vectorize it first.
dim = 1
for d in input_shape[1:].as_list():
dim *= int(d)
feed_in = tf.reshape(inp, [-1, dim])
else:
feed_in, dim = (inp, input_shape[-1].value)
weights = self.make_var('weights', shape=[dim, num_out])
biases = self.make_var('biases', [num_out])
op = tf.nn.relu_layer if relu else tf.nn.xw_plus_b
fc = op(feed_in, weights, biases, name=name)
return fc
"""
Multi-dimensional softmax.
Refer to https://github.com/tensorflow/tensorflow/issues/210.
Computes softmax along the given axis of the target tensor; the native
softmax only supports batch_size x dimension inputs.
"""
@layer
def softmax(self, target, axis, name=None):
max_axis = tf.reduce_max(target, axis, keep_dims=True)
target_exp = tf.exp(target-max_axis)
normalize = tf.reduce_sum(target_exp, axis, keep_dims=True)
softmax = tf.div(target_exp, normalize, name)
return softmax
class PNet(Network):
def setup(self):
(self.feed('data') #pylint: disable=no-value-for-parameter, no-member
.conv(3, 3, 10, 1, 1, padding='VALID', relu=False, name='conv1')
.prelu(name='PReLU1')
.max_pool(2, 2, 2, 2, name='pool1')
.conv(3, 3, 16, 1, 1, padding='VALID', relu=False, name='conv2')
.prelu(name='PReLU2')
.conv(3, 3, 32, 1, 1, padding='VALID', relu=False, name='conv3')
.prelu(name='PReLU3')
.conv(1, 1, 2, 1, 1, relu=False, name='conv4-1')
.softmax(3,name='prob1'))
(self.feed('PReLU3') #pylint: disable=no-value-for-parameter
.conv(1, 1, 4, 1, 1, relu=False, name='conv4-2'))
class RNet(Network):
def setup(self):
(self.feed('data') #pylint: disable=no-value-for-parameter, no-member
.conv(3, 3, 28, 1, 1, padding='VALID', relu=False, name='conv1')
.prelu(name='prelu1')
.max_pool(3, 3, 2, 2, name='pool1')
.conv(3, 3, 48, 1, 1, padding='VALID', relu=False, name='conv2')
.prelu(name='prelu2')
.max_pool(3, 3, 2, 2, padding='VALID', name='pool2')
.conv(2, 2, 64, 1, 1, padding='VALID', relu=False, name='conv3')
.prelu(name='prelu3')
.fc(128, relu=False, name='conv4')
.prelu(name='prelu4')
.fc(2, relu=False, name='conv5-1')
.softmax(1,name='prob1'))
(self.feed('prelu4') #pylint: disable=no-value-for-parameter
.fc(4, relu=False, name='conv5-2'))
class ONet(Network):
def setup(self):
(self.feed('data') #pylint: disable=no-value-for-parameter, no-member
.conv(3, 3, 32, 1, 1, padding='VALID', relu=False, name='conv1')
.prelu(name='prelu1')
.max_pool(3, 3, 2, 2, name='pool1')
.conv(3, 3, 64, 1, 1, padding='VALID', relu=False, name='conv2')
.prelu(name='prelu2')
.max_pool(3, 3, 2, 2, padding='VALID', name='pool2')
.conv(3, 3, 64, 1, 1, padding='VALID', relu=False, name='conv3')
.prelu(name='prelu3')
.max_pool(2, 2, 2, 2, name='pool3')
.conv(2, 2, 128, 1, 1, padding='VALID', relu=False, name='conv4')
.prelu(name='prelu4')
.fc(256, relu=False, name='conv5')
.prelu(name='prelu5')
.fc(2, relu=False, name='conv6-1')
.softmax(1, name='prob1'))
(self.feed('prelu5') #pylint: disable=no-value-for-parameter
.fc(4, relu=False, name='conv6-2'))
(self.feed('prelu5') #pylint: disable=no-value-for-parameter
.fc(10, relu=False, name='conv6-3'))
def create_mtcnn(sess, model_path):
if not model_path:
model_path,_ = os.path.split(os.path.realpath(__file__))
with tf.variable_scope('pnet'):
data = tf.placeholder(tf.float32, (None,None,None,3), 'input')
pnet = PNet({'data':data})
pnet.load(os.path.join(model_path, 'det1.npy'), sess)
with tf.variable_scope('rnet'):
data = tf.placeholder(tf.float32, (None,24,24,3), 'input')
rnet = RNet({'data':data})
rnet.load(os.path.join(model_path, 'det2.npy'), sess)
with tf.variable_scope('onet'):
data = tf.placeholder(tf.float32, (None,48,48,3), 'input')
onet = ONet({'data':data})
onet.load(os.path.join(model_path, 'det3.npy'), sess)
pnet_fun = lambda img : sess.run(('pnet/conv4-2/BiasAdd:0', 'pnet/prob1:0'), feed_dict={'pnet/input:0':img})
rnet_fun = lambda img : sess.run(('rnet/conv5-2/conv5-2:0', 'rnet/prob1:0'), feed_dict={'rnet/input:0':img})
onet_fun = lambda img : sess.run(('onet/conv6-2/conv6-2:0', 'onet/conv6-3/conv6-3:0', 'onet/prob1:0'), feed_dict={'onet/input:0':img})
return pnet_fun, rnet_fun, onet_fun
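# Example usage sketch (illustrative values; assumes det1.npy/det2.npy/det3.npy sit
# alongside this file and that img is an RGB image array):
#   sess = tf.Session()
#   pnet, rnet, onet = create_mtcnn(sess, None)
#   boxes, points = detect_face(img, 20, pnet, rnet, onet, [0.6, 0.7, 0.7], 0.709)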
def detect_face(img, minsize, pnet, rnet, onet, threshold, factor):
# img: input image
# minsize: minimum face size to detect
# pnet, rnet, onet: the three cascade networks (PNet, RNet, ONet)
# threshold: [th1, th2, th3], detection thresholds for the three stages
factor_count=0
total_boxes=np.empty((0,9))
points=np.empty(0)
h=img.shape[0]
w=img.shape[1]
minl=np.amin([h, w])
m=12.0/minsize
minl=minl*m
# create scale pyramid
scales=[]
while minl>=12:
scales += [m*np.power(factor, factor_count)]
minl = minl*factor
factor_count += 1
# first stage
for j in range(len(scales)):
scale=scales[j]
hs=int(np.ceil(h*scale))
ws=int(np.ceil(w*scale))
im_data = imresample(img, (hs, ws))
im_data = (im_data-127.5)*0.0078125
img_x = np.expand_dims(im_data, 0)
img_y = np.transpose(img_x, (0,2,1,3))
out = pnet(img_y)
out0 = np.transpose(out[0], (0,2,1,3))
out1 = np.transpose(out[1], (0,2,1,3))
boxes, _ = generateBoundingBox(out1[0,:,:,1].copy(), out0[0,:,:,:].copy(), scale, threshold[0])
# inter-scale nms
pick = nms(boxes.copy(), 0.5, 'Union')
if boxes.size>0 and pick.size>0:
boxes = boxes[pick,:]
total_boxes = np.append(total_boxes, boxes, axis=0)
numbox = total_boxes.shape[0]
if numbox>0:
pick = nms(total_boxes.copy(), 0.7, 'Union')
total_boxes = total_boxes[pick,:]
regw = total_boxes[:,2]-total_boxes[:,0]
regh = total_boxes[:,3]-total_boxes[:,1]
qq1 = total_boxes[:,0]+total_boxes[:,5]*regw
qq2 = total_boxes[:,1]+total_boxes[:,6]*regh
qq3 = total_boxes[:,2]+total_boxes[:,7]*regw
qq4 = total_boxes[:,3]+total_boxes[:,8]*regh
total_boxes = np.transpose(np.vstack([qq1, qq2, qq3, qq4, total_boxes[:,4]]))
total_boxes = rerec(total_boxes.copy())
total_boxes[:,0:4] = np.fix(total_boxes[:,0:4]).astype(np.int32)
dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph = pad(total_boxes.copy(), w, h)
numbox = total_boxes.shape[0]
if numbox>0:
# second stage
tempimg = np.zeros((24,24,3,numbox))
for k in range(0,numbox):
tmp = np.zeros((int(tmph[k]),int(tmpw[k]),3))
tmp[dy[k]-1:edy[k],dx[k]-1:edx[k],:] = img[y[k]-1:ey[k],x[k]-1:ex[k],:]
if tmp.shape[0]>0 and tmp.shape[1]>0 or tmp.shape[0]==0 and tmp.shape[1]==0:
tempimg[:,:,:,k] = imresample(tmp, (24, 24))
else:
return np.empty(0)
tempimg = (tempimg-127.5)*0.0078125
tempimg1 = np.transpose(tempimg, (3,1,0,2))
out = rnet(tempimg1)
out0 = np.transpose(out[0])
out1 = np.transpose(out[1])
score = out1[1,:]
ipass = np.where(score>threshold[1])
total_boxes = np.hstack([total_boxes[ipass[0],0:4].copy(), np.expand_dims(score[ipass].copy(),1)])
mv = out0[:,ipass[0]]
if total_boxes.shape[0]>0:
pick = nms(total_boxes, 0.7, 'Union')
total_boxes = total_boxes[pick,:]
total_boxes = bbreg(total_boxes.copy(), np.transpose(mv[:,pick]))
total_boxes = rerec(total_boxes.copy())
numbox = total_boxes.shape[0]
if numbox>0:
# third stage
total_boxes = np.fix(total_boxes).astype(np.int32)
dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph = pad(total_boxes.copy(), w, h)
tempimg = np.zeros((48,48,3,numbox))
for k in range(0,numbox):
tmp = np.zeros((int(tmph[k]),int(tmpw[k]),3))
tmp[dy[k]-1:edy[k],dx[k]-1:edx[k],:] = img[y[k]-1:ey[k],x[k]-1:ex[k],:]
if tmp.shape[0]>0 and tmp.shape[1]>0 or tmp.shape[0]==0 and tmp.shape[1]==0:
tempimg[:,:,:,k] = imresample(tmp, (48, 48))
else:
return np.empty(0)
tempimg = (tempimg-127.5)*0.0078125
tempimg1 = np.transpose(tempimg, (3,1,0,2))
out = onet(tempimg1)
out0 = np.transpose(out[0])
out1 = np.transpose(out[1])
out2 = np.transpose(out[2])
score = out2[1,:]
points = out1
ipass = np.where(score>threshold[2])
points = points[:,ipass[0]]
total_boxes = np.hstack([total_boxes[ipass[0],0:4].copy(), np.expand_dims(score[ipass].copy(),1)])
mv = out0[:,ipass[0]]
w = total_boxes[:,2]-total_boxes[:,0]+1
h = total_boxes[:,3]-total_boxes[:,1]+1
points[0:5,:] = np.tile(w,(5, 1))*points[0:5,:] + np.tile(total_boxes[:,0],(5, 1))-1
points[5:10,:] = np.tile(h,(5, 1))*points[5:10,:] + np.tile(total_boxes[:,1],(5, 1))-1
if total_boxes.shape[0]>0:
total_boxes = bbreg(total_boxes.copy(), np.transpose(mv))
pick = nms(total_boxes.copy(), 0.7, 'Min')
total_boxes = total_boxes[pick,:]
points = points[:,pick]
return total_boxes, points
def bulk_detect_face(images, detection_window_size_ratio, pnet, rnet, onet, threshold, factor):
# images: list of input images
# detection_window_size_ratio: ratio of the smaller image dimension used as the minimum face size
# pnet, rnet, onet: the three cascade networks (PNet, RNet, ONet)
# threshold: [th1, th2, th3], detection thresholds for the three stages, each in [0, 1]
all_scales = [None] * len(images)
images_with_boxes = [None] * len(images)
for i in range(len(images)):
images_with_boxes[i] = {'total_boxes': np.empty((0, 9))}
# create scale pyramid
for index, img in enumerate(images):
all_scales[index] = []
h = img.shape[0]
w = img.shape[1]
minsize = int(detection_window_size_ratio * np.minimum(w, h))
factor_count = 0
minl = np.amin([h, w])
if minsize <= 12:
minsize = 12
m = 12.0 / minsize
minl = minl * m
while minl >= 12:
all_scales[index].append(m * np.power(factor, factor_count))
minl = minl * factor
factor_count += 1
# # # # # # # # # # # # #
# first stage - fast proposal network (pnet) to obtain face candidates
# # # # # # # # # # # # #
images_obj_per_resolution = {}
# TODO: use some type of rounding to a multiple of 8 to increase the probability that pyramid images will have the same resolution across input images
for index, scales in enumerate(all_scales):
h = images[index].shape[0]
w = images[index].shape[1]
for scale in scales:
hs = int(np.ceil(h * scale))
ws = int(np.ceil(w * scale))
if (ws, hs) not in images_obj_per_resolution:
images_obj_per_resolution[(ws, hs)] = []
im_data = imresample(images[index], (hs, ws))
im_data = (im_data - 127.5) * 0.0078125
img_y = np.transpose(im_data, (1, 0, 2)) # caffe uses different dimensions ordering
images_obj_per_resolution[(ws, hs)].append({'scale': scale, 'image': img_y, 'index': index})
for resolution in images_obj_per_resolution:
images_per_resolution = [i['image'] for i in images_obj_per_resolution[resolution]]
outs = pnet(images_per_resolution)
for index in range(len(outs[0])):
scale = images_obj_per_resolution[resolution][index]['scale']
image_index = images_obj_per_resolution[resolution][index]['index']
out0 = np.transpose(outs[0][index], (1, 0, 2))
out1 = np.transpose(outs[1][index], (1, 0, 2))
boxes, _ = generateBoundingBox(out1[:, :, 1].copy(), out0[:, :, :].copy(), scale, threshold[0])
# inter-scale nms
pick = nms(boxes.copy(), 0.5, 'Union')
if boxes.size > 0 and pick.size > 0:
boxes = boxes[pick, :]
images_with_boxes[image_index]['total_boxes'] = np.append(images_with_boxes[image_index]['total_boxes'],
boxes,
axis=0)
for index, image_obj in enumerate(images_with_boxes):
numbox = image_obj['total_boxes'].shape[0]
if numbox > 0:
h = images[index].shape[0]
w = images[index].shape[1]
pick = nms(image_obj['total_boxes'].copy(), 0.7, 'Union')
image_obj['total_boxes'] = image_obj['total_boxes'][pick, :]
regw = image_obj['total_boxes'][:, 2] - image_obj['total_boxes'][:, 0]
regh = image_obj['total_boxes'][:, 3] - image_obj['total_boxes'][:, 1]
qq1 = image_obj['total_boxes'][:, 0] + image_obj['total_boxes'][:, 5] * regw
qq2 = image_obj['total_boxes'][:, 1] + image_obj['total_boxes'][:, 6] * regh
qq3 = image_obj['total_boxes'][:, 2] + image_obj['total_boxes'][:, 7] * regw
qq4 = image_obj['total_boxes'][:, 3] + image_obj['total_boxes'][:, 8] * regh
image_obj['total_boxes'] = np.transpose(np.vstack([qq1, qq2, qq3, qq4, image_obj['total_boxes'][:, 4]]))
image_obj['total_boxes'] = rerec(image_obj['total_boxes'].copy())
image_obj['total_boxes'][:, 0:4] = np.fix(image_obj['total_boxes'][:, 0:4]).astype(np.int32)
dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph = pad(image_obj['total_boxes'].copy(), w, h)
numbox = image_obj['total_boxes'].shape[0]
tempimg = np.zeros((24, 24, 3, numbox))
if numbox > 0:
for k in range(0, numbox):
tmp = np.zeros((int(tmph[k]), int(tmpw[k]), 3))
tmp[dy[k] - 1:edy[k], dx[k] - 1:edx[k], :] = images[index][y[k] - 1:ey[k], x[k] - 1:ex[k], :]
if tmp.shape[0] > 0 and tmp.shape[1] > 0 or tmp.shape[0] == 0 and tmp.shape[1] == 0:
tempimg[:, :, :, k] = imresample(tmp, (24, 24))
else:
return np.empty(0)
tempimg = (tempimg - 127.5) * 0.0078125
image_obj['rnet_input'] = np.transpose(tempimg, (3, 1, 0, 2))
# # # # # # # # # # # # #
# second stage - refinement of face candidates with rnet
# # # # # # # # # # # # #
bulk_rnet_input = np.empty((0, 24, 24, 3))
for index, image_obj in enumerate(images_with_boxes):
if 'rnet_input' in image_obj:
bulk_rnet_input = np.append(bulk_rnet_input, image_obj['rnet_input'], axis=0)
out = rnet(bulk_rnet_input)
out0 = np.transpose(out[0])
out1 = np.transpose(out[1])
score = out1[1, :]
i = 0
for index, image_obj in enumerate(images_with_boxes):
if 'rnet_input' not in image_obj:
continue
rnet_input_count = image_obj['rnet_input'].shape[0]
score_per_image = score[i:i + rnet_input_count]
out0_per_image = out0[:, i:i + rnet_input_count]
ipass = np.where(score_per_image > threshold[1])
image_obj['total_boxes'] = np.hstack([image_obj['total_boxes'][ipass[0], 0:4].copy(),
np.expand_dims(score_per_image[ipass].copy(), 1)])
mv = out0_per_image[:, ipass[0]]
if image_obj['total_boxes'].shape[0] > 0:
h = images[index].shape[0]
w = images[index].shape[1]
pick = nms(image_obj['total_boxes'], 0.7, 'Union')
image_obj['total_boxes'] = image_obj['total_boxes'][pick, :]
image_obj['total_boxes'] = bbreg(image_obj['total_boxes'].copy(), np.transpose(mv[:, pick]))
image_obj['total_boxes'] = rerec(image_obj['total_boxes'].copy())
numbox = image_obj['total_boxes'].shape[0]
if numbox > 0:
tempimg = np.zeros((48, 48, 3, numbox))
image_obj['total_boxes'] = np.fix(image_obj['total_boxes']).astype(np.int32)
dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph = pad(image_obj['total_boxes'].copy(), w, h)
for k in range(0, numbox):
tmp = np.zeros((int(tmph[k]), int(tmpw[k]), 3))
tmp[dy[k] - 1:edy[k], dx[k] - 1:edx[k], :] = images[index][y[k] - 1:ey[k], x[k] - 1:ex[k], :]
if tmp.shape[0] > 0 and tmp.shape[1] > 0 or tmp.shape[0] == 0 and tmp.shape[1] == 0:
tempimg[:, :, :, k] = imresample(tmp, (48, 48))
else:
return np.empty(0)
tempimg = (tempimg - 127.5) * 0.0078125
image_obj['onet_input'] = np.transpose(tempimg, (3, 1, 0, 2))
i += rnet_input_count
# # # # # # # # # # # # #
# third stage - further refinement and facial landmarks positions with onet
# # # # # # # # # # # # #
bulk_onet_input = np.empty((0, 48, 48, 3))
for index, image_obj in enumerate(images_with_boxes):
if 'onet_input' in image_obj:
bulk_onet_input = np.append(bulk_onet_input, image_obj['onet_input'], axis=0)
out = onet(bulk_onet_input)
out0 = np.transpose(out[0])
out1 = np.transpose(out[1])
out2 = np.transpose(out[2])
score = out2[1, :]
points = out1
i = 0
ret = []
for index, image_obj in enumerate(images_with_boxes):
if 'onet_input' not in image_obj:
ret.append(None)
continue
onet_input_count = image_obj['onet_input'].shape[0]
out0_per_image = out0[:, i:i + onet_input_count]
score_per_image = score[i:i + onet_input_count]
points_per_image = points[:, i:i + onet_input_count]
ipass = np.where(score_per_image > threshold[2])
points_per_image = points_per_image[:, ipass[0]]
image_obj['total_boxes'] = np.hstack([image_obj['total_boxes'][ipass[0], 0:4].copy(),
np.expand_dims(score_per_image[ipass].copy(), 1)])
mv = out0_per_image[:, ipass[0]]
w = image_obj['total_boxes'][:, 2] - image_obj['total_boxes'][:, 0] + 1
h = image_obj['total_boxes'][:, 3] - image_obj['total_boxes'][:, 1] + 1
points_per_image[0:5, :] = np.tile(w, (5, 1)) * points_per_image[0:5, :] + np.tile(
image_obj['total_boxes'][:, 0], (5, 1)) - 1
points_per_image[5:10, :] = np.tile(h, (5, 1)) * points_per_image[5:10, :] + np.tile(
image_obj['total_boxes'][:, 1], (5, 1)) - 1
if image_obj['total_boxes'].shape[0] > 0:
image_obj['total_boxes'] = bbreg(image_obj['total_boxes'].copy(), np.transpose(mv))
pick = nms(image_obj['total_boxes'].copy(), 0.7, 'Min')
image_obj['total_boxes'] = image_obj['total_boxes'][pick, :]
points_per_image = points_per_image[:, pick]
ret.append((image_obj['total_boxes'], points_per_image))
else:
ret.append(None)
i += onet_input_count
return ret
# function [boundingbox] = bbreg(boundingbox,reg)
def bbreg(boundingbox,reg):
# calibrate bounding boxes
if reg.shape[1]==1:
reg = np.reshape(reg, (reg.shape[2], reg.shape[3]))
w = boundingbox[:,2]-boundingbox[:,0]+1
h = boundingbox[:,3]-boundingbox[:,1]+1
b1 = boundingbox[:,0]+reg[:,0]*w
b2 = boundingbox[:,1]+reg[:,1]*h
b3 = boundingbox[:,2]+reg[:,2]*w
b4 = boundingbox[:,3]+reg[:,3]*h
boundingbox[:,0:4] = np.transpose(np.vstack([b1, b2, b3, b4 ]))
return boundingbox
def generateBoundingBox(imap, reg, scale, t):
# use heatmap to generate bounding boxes
stride=2
cellsize=12
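# Each heatmap cell (y, x) corresponds to a 12x12 (cellsize) window in the scaled
# image, sampled with stride 2; q1/q2 below map the window corners back to the
# original image coordinates by dividing out the pyramid scale.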
imap = np.transpose(imap)
dx1 = np.transpose(reg[:,:,0])
dy1 = np.transpose(reg[:,:,1])
dx2 = np.transpose(reg[:,:,2])
dy2 = np.transpose(reg[:,:,3])
y, x = np.where(imap >= t)
if y.shape[0]==1:
dx1 = np.flipud(dx1)
dy1 = np.flipud(dy1)
dx2 = np.flipud(dx2)
dy2 = np.flipud(dy2)
score = imap[(y,x)]
reg = np.transpose(np.vstack([ dx1[(y,x)], dy1[(y,x)], dx2[(y,x)], dy2[(y,x)] ]))
if reg.size==0:
reg = np.empty((0,3))
bb = np.transpose(np.vstack([y,x]))
q1 = np.fix((stride*bb+1)/scale)
q2 = np.fix((stride*bb+cellsize-1+1)/scale)
boundingbox = np.hstack([q1, q2, np.expand_dims(score,1), reg])
return boundingbox, reg
# function pick = nms(boxes,threshold,type)
def nms(boxes, threshold, method):
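# Greedy non-maximum suppression over [x1, y1, x2, y2, score] rows.
# method 'Union' scores overlap as IoU (intersection / union); 'Min' divides the
# intersection by the smaller of the two box areas, which also suppresses boxes
# nested inside larger ones.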
if boxes.size==0:
return np.empty((0,3))
x1 = boxes[:,0]
y1 = boxes[:,1]
x2 = boxes[:,2]
y2 = boxes[:,3]
s = boxes[:,4]
area = (x2-x1+1) * (y2-y1+1)
I = np.argsort(s)
pick = np.zeros_like(s, dtype=np.int16)
counter = 0
while I.size>0:
i = I[-1]
pick[counter] = i
counter += 1
idx = I[0:-1]
xx1 = np.maximum(x1[i], x1[idx])
yy1 = np.maximum(y1[i], y1[idx])
xx2 = np.minimum(x2[i], x2[idx])
yy2 = np.minimum(y2[i], y2[idx])
w = np.maximum(0.0, xx2-xx1+1)
h = np.maximum(0.0, yy2-yy1+1)
inter = w * h
if method == 'Min':
o = inter / np.minimum(area[i], area[idx])
else:
o = inter / (area[i] + area[idx] - inter)
I = I[np.where(o<=threshold)]
pick = pick[0:counter]
return pick
# function [dy edy dx edx y ey x ex tmpw tmph] = pad(total_boxes,w,h)
def pad(total_boxes, w, h):
# compute the padding coordinates (pad the bounding boxes to square)
tmpw = (total_boxes[:,2]-total_boxes[:,0]+1).astype(np.int32)
tmph = (total_boxes[:,3]-total_boxes[:,1]+1).astype(np.int32)
numbox = total_boxes.shape[0]
dx = np.ones((numbox), dtype=np.int32)
dy = np.ones((numbox), dtype=np.int32)
edx = tmpw.copy().astype(np.int32)
edy = tmph.copy().astype(np.int32)
x = total_boxes[:,0].copy().astype(np.int32)
y = total_boxes[:,1].copy().astype(np.int32)
ex = total_boxes[:,2].copy().astype(np.int32)
ey = total_boxes[:,3].copy().astype(np.int32)
tmp = np.where(ex>w)
edx.flat[tmp] = np.expand_dims(-ex[tmp]+w+tmpw[tmp],1)
ex[tmp] = w
tmp = np.where(ey>h)
edy.flat[tmp] = np.expand_dims(-ey[tmp]+h+tmph[tmp],1)
ey[tmp] = h
tmp = np.where(x<1)
dx.flat[tmp] = np.expand_dims(2-x[tmp],1)
x[tmp] = 1
tmp = np.where(y<1)
dy.flat[tmp] = np.expand_dims(2-y[tmp],1)
y[tmp] = 1
return dy, edy, dx, edx, y, ey, x, ex, tmpw, tmph
# function [bboxA] = rerec(bboxA)
def rerec(bboxA):
# convert bboxA to square
h = bboxA[:,3]-bboxA[:,1]
w = bboxA[:,2]-bboxA[:,0]
l = np.maximum(w, h)
bboxA[:,0] = bboxA[:,0]+w*0.5-l*0.5
bboxA[:,1] = bboxA[:,1]+h*0.5-l*0.5
bboxA[:,2:4] = bboxA[:,0:2] + np.transpose(np.tile(l,(2,1)))
return bboxA
def imresample(img, sz):
im_data = cv2.resize(img, (sz[1], sz[0]), interpolation=cv2.INTER_AREA) #@UndefinedVariable
return im_data
# This method is kept for debugging purposes
# h=img.shape[0]
# w=img.shape[1]
# hs, ws = sz
# dx = float(w) / ws
# dy = float(h) / hs
# im_data = np.zeros((hs,ws,3))
# for a1 in range(0,hs):
# for a2 in range(0,ws):
# for a3 in range(0,3):
# im_data[a1,a2,a3] = img[int(floor(a1*dy)),int(floor(a2*dx)),a3]
# return im_data
|
py | 1a47d709d373977dba3bdb4e750ff42e13ae3937 | #!/usr/bin/python
# Copyright 2013 The Emscripten Authors. All rights reserved.
# Emscripten is available under two separate licenses, the MIT license and the
# University of Illinois/NCSA Open Source License. Both these licenses can be
# found in the LICENSE file.
"""Runs csmith, a C fuzzer, and looks for bugs.
CSMITH_PATH should be set to something like /usr/local/include/csmith
"""
import os
import sys
import shutil
import random
from distutils.spawn import find_executable
from subprocess import check_call, Popen, PIPE, CalledProcessError
script_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(os.path.dirname(os.path.dirname(script_dir))))
from tools import shared
# can add flags like --no-threads --ion-offthread-compile=off
engine = eval('shared.' + sys.argv[1]) if len(sys.argv) > 1 else shared.JS_ENGINES[0]
print('testing js engine', engine)
TEST_BINARYEN = 1
CSMITH = os.environ.get('CSMITH', find_executable('csmith'))
assert CSMITH, 'Could not find CSmith on your PATH. Please set the environment variable CSMITH.'
CSMITH_PATH = os.environ.get('CSMITH_PATH', '/usr/include/csmith')
assert os.path.exists(CSMITH_PATH), 'Please set the environment variable CSMITH_PATH.'
CSMITH_CFLAGS = ['-I', CSMITH_PATH]
filename = os.path.join(os.getcwd(), 'temp_fuzzcode' + str(os.getpid()) + '_')
shared.DEFAULT_TIMEOUT = 5
tried = 0
notes = {'invalid': 0, 'embug': 0}
fails = 0
while 1:
if random.random() < 0.666:
opts = '-O' + str(random.randint(0, 3))
else:
if random.random() < 0.5:
opts = '-Os'
else:
opts = '-Oz'
print('opt level:', opts)
llvm_opts = []
if random.random() < 0.5:
llvm_opts = ['--llvm-opts', str(random.randint(0, 3))]
print('Tried %d, notes: %s' % (tried, notes))
print('1) Generate source')
extra_args = []
if random.random() < 0.5:
extra_args += ['--no-math64']
extra_args += ['--no-bitfields'] # due to pnacl bug 4027, "LLVM ERROR: can't convert calls with illegal types"
# if random.random() < 0.5: extra_args += ['--float'] # XXX hits undefined behavior on float=>int conversions (too big to fit)
if random.random() < 0.5:
extra_args += ['--max-funcs', str(random.randint(10, 30))]
suffix = '.c'
COMP = shared.CLANG_CC
fullname = filename + suffix
check_call([CSMITH, '--no-volatiles', '--no-packed-struct'] + extra_args,
# ['--max-block-depth', '2', '--max-block-size', '2', '--max-expr-complexity', '2', '--max-funcs', '2'],
stdout=open(fullname, 'w'))
print('1) Generate source... %.2f K' % (len(open(fullname).read()) / 1024.))
tried += 1
print('2) Compile natively')
shared.try_delete(filename)
try:
shared.run_process([COMP, '-m32', opts, fullname, '-o', filename + '1'] + CSMITH_CFLAGS + ['-w']) # + shared.get_cflags()
except CalledProcessError:
print('Failed to compile natively using clang')
notes['invalid'] += 1
continue
shared.run_process([COMP, '-m32', opts, '-emit-llvm', '-c', fullname, '-o', filename + '.bc'] + CSMITH_CFLAGS + shared.get_cflags() + ['-w'])
shared.run_process([shared.path_from_root('tools', 'nativize_llvm.py'), filename + '.bc'], stderr=PIPE)
shutil.move(filename + '.bc.run', filename + '2')
shared.run_process([COMP, fullname, '-o', filename + '3'] + CSMITH_CFLAGS + ['-w'])
print('3) Run natively')
try:
correct1 = shared.timeout_run(Popen([filename + '1'], stdout=PIPE, stderr=PIPE), 3)
if 'Segmentation fault' in correct1 or len(correct1) < 10:
raise Exception('segfault')
correct2 = shared.timeout_run(Popen([filename + '2'], stdout=PIPE, stderr=PIPE), 3)
if 'Segmentation fault' in correct2 or len(correct2) < 10:
raise Exception('segfault')
correct3 = shared.timeout_run(Popen([filename + '3'], stdout=PIPE, stderr=PIPE), 3)
if 'Segmentation fault' in correct3 or len(correct3) < 10:
raise Exception('segfault')
if correct1 != correct3:
raise Exception('clang opts change result')
except Exception as e:
print('Failed or infinite looping in native, skipping', e)
notes['invalid'] += 1
continue
fail_output_name = 'newfail_%d_%d%s' % (os.getpid(), fails, suffix)
print('4) Compile JS-ly and compare')
def try_js(args=[]):
shared.try_delete(filename + '.js')
js_args = [shared.EMCC, fullname, '-o', filename + '.js'] + [opts] + llvm_opts + CSMITH_CFLAGS + args + ['-w']
if TEST_BINARYEN:
if random.random() < 0.5:
js_args += ['-g']
if random.random() < 0.5:
# pick random passes
BINARYEN_EXTRA_PASSES = [
"code-pushing",
"duplicate-function-elimination",
"dce",
"remove-unused-brs",
"remove-unused-names",
"local-cse",
"optimize-instructions",
"post-emscripten",
"precompute",
"simplify-locals",
"simplify-locals-nostructure",
"vacuum",
"coalesce-locals",
"reorder-locals",
"merge-blocks",
"remove-unused-module-elements",
"memory-packing",
]
passes = []
while 1:
passes.append(random.choice(BINARYEN_EXTRA_PASSES))
if random.random() < 0.1:
break
js_args += ['-s', 'BINARYEN_EXTRA_PASSES="' + ','.join(passes) + '"']
if random.random() < 0.5:
js_args += ['-s', 'ALLOW_MEMORY_GROWTH=1']
if random.random() < 0.5 and 'ALLOW_MEMORY_GROWTH=1' not in js_args and 'BINARYEN=1' not in js_args:
js_args += ['-s', 'MAIN_MODULE=1']
if random.random() < 0.25:
js_args += ['-s', 'INLINING_LIMIT=1'] # inline nothing, for more call interaction
if random.random() < 0.5:
js_args += ["--memory-init-file", "0", "-s", "MEM_INIT_METHOD=2"]
if random.random() < 0.5:
js_args += ['-s', 'ASSERTIONS=1']
print('(compile)', ' '.join(js_args))
short_args = [shared.EMCC, fail_output_name] + js_args[5:]
escaped_short_args = map(lambda x: ("'" + x + "'") if '"' in x else x, short_args)
open(fullname, 'a').write('\n// ' + ' '.join(escaped_short_args) + '\n\n')
try:
shared.run_process(js_args)
assert os.path.exists(filename + '.js')
return js_args
except Exception:
return False
def execute_js(engine):
print('(run in %s)' % engine)
try:
js = shared.timeout_run(Popen(shared.NODE_JS + [filename + '.js'], stdout=PIPE, stderr=PIPE), 15 * 60)
except Exception:
print('failed to run in primary')
return False
js = js.split('\n')[0] + '\n' # remove any extra printed stuff (node workarounds)
return correct1 == js or correct2 == js
def fail():
global fails
print("EMSCRIPTEN BUG")
notes['embug'] += 1
fails += 1
shutil.copyfile(fullname, fail_output_name)
js_args = try_js()
if not js_args:
fail()
continue
if not execute_js(engine):
fail()
continue
|
py | 1a47d77da0c4290f8f4c254053b23f60863892fc | from direct.distributed.AstronInternalRepository import AstronInternalRepository
from pandac.PandaModules import *
from RootObjectUD import RootObjectUD
from AvatarManagerUD import AvatarManagerUD
class ToonUDRepository(AstronInternalRepository):
def __init__(self, threadedNet = True):
dcFileNames = ['direct.dc', 'toon.dc']
self.baseChannel = 100000000
self.GameGlobalsId = 1000
self.serverId = 4002
AstronInternalRepository.__init__(self, self.baseChannel, self.serverId, dcFileNames = dcFileNames,
dcSuffix = 'UD', connectMethod = self.CM_NET,
threadedNet = threadedNet)
# Allow some time for other processes.
base.setSleep(0.01)
tcpPort = base.config.GetInt('ai-server-port', 7199)
hostname = base.config.GetString('ai-server-host', '127.0.0.1')
self.acceptOnce('airConnected', self.connectSuccess)
self.connect(hostname, tcpPort)
def connectSuccess(self):
""" Successfully connected to the Message Director.
Now to generate the AvatarManagerAI """
rootObj = RootObjectUD(self)
rootObj.generateWithRequiredAndId(self.GameGlobalsId, 0, 0)
self.setAI(self.GameGlobalsId, self.baseChannel)
avatarManager = AvatarManagerUD(self)
avatarManager.generateWithRequiredAndId(1001, self.GameGlobalsId, 0)
print 'Connected successfully!'
def getAvatarIdFromSender(self):
return self.getMsgSender() & 0xFFFFFFFFL
|
py | 1a47d8af261466dd914775ef2275be6efc5c5b02 | import testmodule
def writetofile(args):
with open(args[0], 'w') as f:
f.write(' '.join(args[1:]))
|
py | 1a47d8af6f8442d37edff4184a88b7956a009173 | import click
import pkg_resources
from cloudshell.recorder.recorder_orchestrator import RecorderOrchestrator
@click.group()
def cli():
pass
@cli.command()
def version():
"""
Displays the cloudshell-recorder version
"""
click.echo(
u'cloudshell-recorder version ' + pkg_resources.get_distribution('cloudshell-recorder').version)
@cli.command()
@click.argument(u'ip')
@click.option(u'--cli-user', help="CLI user")
@click.option(u'--cli-password', help="CLI password")
@click.option(u'--cli-enable-password', help="CLI enable password")
@click.option(u'--cli-session-type', default="auto",
help="CLI session type: auto (for autodetect session type), ssh, telnet")
@click.option(u'--rest-user', help="REST user")
@click.option(u'--rest-password', help="REST password")
@click.option(u'--rest-token', help="REST token")
@click.option(u'--snmp-community', help="SNMP v1 or v2 community")
@click.option(u'--snmp-user', help="SNMP v3 user")
@click.option(u'--snmp-password', help="SNMP password or auth")
@click.option(u'--snmp-private-key', help="SNMP privacy key")
@click.option(u'--snmp-auth-protocol', default="NONE",
help="SNMP auth encryption type: SHA, MD5, SHA224, SHA256, SHA384, SHA512, NONE. Default is NONE.")
@click.option(u'--snmp-priv-protocol', default="NONE",
help="SNMP privacy encryption type: DES, 3DES, AES, AES128, AES192, AES192BLMT, AES256, AES256BLMT, "
"NONE. Default is NONE.")
@click.option(u"--record-type", default="all", help="Defines what will be recorded. "
"Multiple values supported, i.e.: cli,snmp"
"Possible values: cli, rest, snmp, all. Default is all")
@click.option(u'--snmp-auto-detect-vendor', is_flag=True, default=False,
help="Enables auto detect of device manufacturer")
@click.option(u'--snmp-record-oids', default="shells_based",
help="Specify an OID template file for adding records 'template:PATH_TO_FILE' "
"or set it to 'all' to record entire device. "
"Default is 'shells_based', which will record all OIDs used by the shells.")
@click.option(u'--destination-path', default="%APPDATA%\\Quali\\Recordings",
help="Destination path, i.e. %APPDATA%\\Quali\\Recordings")
@click.option(u'--snmp-timeout', default=2000, help="SNMP timeout")
@click.option(u'--snmp-retries', default=2, help="Number of SNMP retries")
@click.option(u'--snmp-bulk', is_flag=True, default=False, help="Add to use snmpbulk for better performance")
@click.option(u'--snmp-bulk-repetitions', default=25, help="Number of snmpbulk repetitions")
def new(ip,
destination_path,
record_type="all",
cli_user=None,
cli_password=None,
cli_enable_password=None,
cli_session_type="auto",
rest_user=None,
rest_password=None,
rest_token=None,
snmp_community=None,
snmp_user=None,
snmp_password=None,
snmp_private_key=None,
snmp_auth_protocol=None,
snmp_priv_protocol=None,
snmp_record_oids=None,
snmp_timeout=2000,
snmp_retries=2,
snmp_bulk=False,
snmp_bulk_repetitions=25,
snmp_auto_detect_vendor=False):
"""
Creates a new device recording based on a template
"""
try:
RecorderOrchestrator(ip, recording_type=record_type, destination_path=destination_path).new_recording(
cli_user=cli_user, cli_password=cli_password,
cli_enable_password=cli_enable_password,
cli_session_type=cli_session_type,
rest_user=rest_user,
rest_password=rest_password,
rest_token=rest_token,
snmp_community=snmp_community,
snmp_user=snmp_user, snmp_password=snmp_password,
snmp_private_key=snmp_private_key,
snmp_auth_protocol=snmp_auth_protocol,
snmp_priv_protocol=snmp_priv_protocol,
snmp_record=snmp_record_oids,
snmp_timeout=snmp_timeout,
snmp_bulk=snmp_bulk,
snmp_retries=snmp_retries,
snmp_bulk_repetitions=snmp_bulk_repetitions,
snmp_auto_detect_vendor=snmp_auto_detect_vendor)
except Exception as e:
click.secho("\n {0}\nERROR: {1}\n{0}\n".format("*" * 80, str(e)))
with click.Context(new) as context:
click.echo(new.get_help(context))
# return
raise e
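# Example invocation (illustrative values; the 'cloudshell-recorder' entry point name is an assumption):
#   cloudshell-recorder new 192.168.10.1 --record-type snmp --snmp-community public \
#       --destination-path ./recordings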
|
py | 1a47d9b3a08f458ba591bd88aa64541a8d3c48a0 | import pandas as pd
import csv as csv
import glob
#Script that loops through sample CSV data and writes EDA results to .txt file
path = 'sample_data'
files = glob.glob(path + "/*.csv")
def eda():
try:
print("Writing sample data exploratory analysis to file 'eda_info.txt'...")
with open('eda_info.txt', 'w') as f:
for filename in files:
f.write(filename + "\n")
data = pd.read_csv(filename)
data.info(verbose = True, buf=f)
print("File successfully written!")
except OSError:
print("File not found.")
# no explicit close is needed: the 'with' statement closes the file automatically
eda() |
py | 1a47da00eda38c09c3bc4a5b1726ced22aeb004a | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" Test sketch generation. """
import pytest
import tvm
import tvm.testing
from tvm import te, auto_scheduler
from tvm.auto_scheduler import _ffi_api
from tvm.auto_scheduler.loop_state import Stage
from test_auto_scheduler_common import (
matmul_auto_scheduler_test,
double_matmul_auto_scheduler_test,
conv2d_nchw_bn_relu_auto_scheduler_test,
max_pool2d_auto_scheduler_test,
min_nm_auto_scheduler_test,
softmax_nm_auto_scheduler_test,
softmax_abcd_auto_scheduler_test,
conv2d_winograd_nhwc_auto_scheduler_test,
)
def generate_sketches(workload_func, args, target, print_for_debug=False):
task = auto_scheduler.create_task(workload_func, args, tvm.target.Target(target))
policy = auto_scheduler.SketchPolicy(task, verbose=0)
return policy.generate_sketches(print_for_debug)
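# For example, generate_sketches(matmul_auto_scheduler_test, (512, 512, 512), "llvm")
# returns the list of sketch states explored by SketchPolicy; the tests below assert
# structural properties (tiling, cache stages, compute_at) of each returned sketch.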
def assert_compute_at_condition(stage, condition):
assert stage.compute_at == Stage.COMPUTE_AT_TRANS_TABLE[condition]
def assert_is_tiled(stage):
assert _ffi_api.SearchPolicyUtilsIsTiled(stage)
def assert_is_not_tiled(stage):
assert not _ffi_api.SearchPolicyUtilsIsTiled(stage)
def assert_has_cache_write(state, stage_id):
assert _ffi_api.SearchPolicyUtilsHasCacheWriteStage(state, stage_id)
def assert_has_cache_read(state, stage_id):
assert _ffi_api.SearchPolicyUtilsHasCacheReadStage(state, stage_id)
def assert_has_rfactor(state, stage_id):
assert _ffi_api.SearchPolicyUtilsHasRfactorStage(state, stage_id)
def assert_has_cross_thread_reduction(state, stage_id):
assert _ffi_api.SearchPolicyUtilsHasCrossThreadReduction(state, stage_id)
@pytest.mark.skip("neo-ai/tvm: skip due to different number of sketches")
def test_cpu_matmul_sketch():
sketches = generate_sketches(matmul_auto_scheduler_test, (512, 512, 512), "llvm")
""" 3 multi-level tiling sketches
No.0 : Multi-level tiling
No.1 : Multi-level tiling with cache write on position 0
No.2 : Multi-level tiling with cache write on position 1
"""
assert len(sketches) == 3
# Sketch 0
assert_is_tiled(sketches[0].stages[2])
# Sketch 1
assert_is_tiled(sketches[1].stages[2])
assert_has_cache_write(sketches[1], 2)
assert_compute_at_condition(sketches[1].stages[2], "iter")
# Sketch 2
assert_is_tiled(sketches[2].stages[2])
assert_has_cache_write(sketches[2], 2)
assert_compute_at_condition(sketches[2].stages[2], "iter")
assert sketches[1] != sketches[2]
sketches = generate_sketches(matmul_auto_scheduler_test, (8, 8, 512), "llvm")
""" 2 rfactor sketches + 3 multi-level tiling sketches
No.0 : Rfactor with factor position 0
No.1 : Rfactor with factor position 1
No.2 : Multi-level tiling
No.3 : Multi-level tiling with cache write on position 0
No.4 : Multi-level tiling with cache write on position 1
"""
assert len(sketches) == 5
# Sketch 0
assert_has_rfactor(sketches[0], 2)
# Sketch 1
assert_has_rfactor(sketches[1], 2)
assert sketches[0] != sketches[1]
# Sketch 2
assert_is_tiled(sketches[2].stages[2])
# Sketch 3
assert_is_tiled(sketches[3].stages[2])
assert_has_cache_write(sketches[3], 2)
assert_compute_at_condition(sketches[3].stages[2], "iter")
# Sketch 4
assert_is_tiled(sketches[4].stages[2])
assert_has_cache_write(sketches[4], 2)
assert_compute_at_condition(sketches[4].stages[2], "iter")
assert sketches[3] != sketches[4]
sketches = generate_sketches(double_matmul_auto_scheduler_test, (512,), "llvm")
""" 3 multi-level tiling sketches for one matmul, so 3 * 3 = 9 sketches in total """
assert len(sketches) == 9
assert_is_tiled(sketches[8].stages[5])
def test_cpu_conv2d_bn_relu_sketch():
sketches = generate_sketches(
conv2d_nchw_bn_relu_auto_scheduler_test, (1, 56, 56, 512, 512, 3, 1, 1), "llvm"
)
""" 3 multi-level tiling sketches
No.0 : Conv2d multi-level tiling with fusion on position 0
No.1 : Conv2d multi-level tiling with fusion on position 1
No.2 : Conv2d multi-level tiling without fusion
"""
assert len(sketches) == 3
# Sketch 0
assert_is_not_tiled(sketches[0].stages[1])
assert_is_tiled(sketches[0].stages[3])
assert_compute_at_condition(sketches[0].stages[3], "iter")
assert_compute_at_condition(sketches[0].stages[5], "inlined")
assert_compute_at_condition(sketches[0].stages[7], "inlined")
assert_compute_at_condition(sketches[0].stages[9], "inlined")
assert_is_tiled(sketches[0].stages[10])
# Sketch 1
assert_is_not_tiled(sketches[1].stages[1])
assert_is_tiled(sketches[1].stages[3])
assert_compute_at_condition(sketches[1].stages[3], "iter")
assert_compute_at_condition(sketches[1].stages[5], "inlined")
assert_compute_at_condition(sketches[1].stages[7], "inlined")
assert_compute_at_condition(sketches[1].stages[9], "inlined")
assert_is_tiled(sketches[1].stages[10])
# Sketch 2
assert_is_not_tiled(sketches[2].stages[1])
assert_is_tiled(sketches[2].stages[3])
assert_compute_at_condition(sketches[2].stages[3], "root")
assert_compute_at_condition(sketches[2].stages[5], "inlined")
assert_compute_at_condition(sketches[2].stages[7], "inlined")
assert_compute_at_condition(sketches[2].stages[9], "inlined")
assert_is_not_tiled(sketches[2].stages[10])
@pytest.mark.skip("neo-ai/tvm: skip due to different number of sketches")
def test_cpu_max_pool2d_sketch():
sketches = generate_sketches(max_pool2d_auto_scheduler_test, (1, 56, 56, 512, 1), "llvm")
""" 1 default sketch """
assert len(sketches) == 1
# Sketch 0
assert len(sketches[0].transform_steps) == 0
def test_cpu_min_sketch():
sketches = generate_sketches(min_nm_auto_scheduler_test, (10, 1024), "llvm")
""" 2 rfactor sketches + 1 default sketch
No.0 : Rfactor with factor position 0
No.1 : Rfactor with factor position 1
No.2 : Default sketch
"""
assert len(sketches) == 3
# Sketch 0
assert_has_rfactor(sketches[0], 1)
# Sketch 1
assert_has_rfactor(sketches[1], 1)
assert sketches[0] != sketches[1]
# Sketch 2
assert len(sketches[2].transform_steps) == 0
def test_cpu_softmax_sketch():
sketches = generate_sketches(softmax_nm_auto_scheduler_test, (1, 1024), "llvm")
""" (2 rfactor sketches + 1 default sketch) * (2 rfactor sketches + 1 default sketch) """
assert len(sketches) == (3 * 3)
for i in range(0, 3):
for j in range(0, 3):
sketch = sketches[i * 3 + j]
if j in [0, 1]:
assert_has_rfactor(sketch, 1)
if i in [0, 1]:
assert_has_rfactor(sketch, 4 if j in [0, 1] else 3)
assert len(sketches[8].transform_steps) == 0
sketches = generate_sketches(softmax_abcd_auto_scheduler_test, (1, 12, 128, 128), "llvm")
""" (2 rfactor sketches + 1 default sketch) * (2 rfactor sketches + 1 default sketch) """
assert len(sketches) == (3 * 3)
for i in range(0, 3):
for j in range(0, 3):
sketch = sketches[i * 3 + j]
if j in [0, 1]:
assert_has_rfactor(sketch, 1)
if i in [0, 1]:
assert_has_rfactor(sketch, 4 if j in [0, 1] else 3)
assert len(sketches[8].transform_steps) == 0
def test_cpu_conv2d_winograd_sketch():
sketches = generate_sketches(
conv2d_winograd_nhwc_auto_scheduler_test, (1, 28, 28, 128, 128, 3, 1, 1), "llvm"
)
""" 3 multi-level tiling sketches
No.0 : Bgemm multi-level tiling
No.1 : Bgemm multi-level tiling with cache write on position 0
No.2 : Bgemm multi-level tiling with cache write on position 1
"""
assert len(sketches) == 3
# Sketch 0
assert_is_not_tiled(sketches[0].stages[1])
assert_is_not_tiled(sketches[0].stages[2])
assert_compute_at_condition(sketches[0].stages[3], "inlined")
assert_is_tiled(sketches[0].stages[4])
assert_is_tiled(sketches[0].stages[6])
assert_compute_at_condition(sketches[0].stages[7], "inlined")
assert_is_tiled(sketches[0].stages[8])
assert_is_not_tiled(sketches[0].stages[9])
# Sketch 1
assert_is_not_tiled(sketches[1].stages[1])
assert_is_not_tiled(sketches[1].stages[2])
assert_compute_at_condition(sketches[1].stages[3], "inlined")
assert_is_tiled(sketches[1].stages[4])
assert_is_tiled(sketches[1].stages[6])
assert_has_cache_write(sketches[1], 6)
assert_compute_at_condition(sketches[1].stages[6], "iter")
assert_compute_at_condition(sketches[1].stages[8], "inlined")
assert_is_tiled(sketches[1].stages[9])
assert_is_not_tiled(sketches[1].stages[10])
# Sketch 2
assert_is_not_tiled(sketches[2].stages[1])
assert_is_not_tiled(sketches[2].stages[2])
assert_compute_at_condition(sketches[2].stages[3], "inlined")
assert_is_tiled(sketches[2].stages[4])
assert_is_tiled(sketches[2].stages[6])
assert_has_cache_write(sketches[2], 6)
assert_compute_at_condition(sketches[2].stages[6], "iter")
assert_compute_at_condition(sketches[2].stages[8], "inlined")
assert_is_tiled(sketches[2].stages[9])
assert_is_not_tiled(sketches[2].stages[10])
assert sketches[1] != sketches[2]
@tvm.testing.requires_cuda
def test_cuda_matmul_sketch():
sketches = generate_sketches(matmul_auto_scheduler_test, (512, 512, 512), "cuda")
""" 1 multi-level tiling sketch """
assert len(sketches) == 1
assert_has_cache_read(sketches[0], 0)
assert_compute_at_condition(sketches[0].stages[1], "iter")
assert_has_cache_read(sketches[0], 2)
assert_compute_at_condition(sketches[0].stages[3], "iter")
assert_has_cache_write(sketches[0], 4)
assert_is_tiled(sketches[0].stages[4])
assert_compute_at_condition(sketches[0].stages[4], "iter")
assert_is_tiled(sketches[0].stages[5])
sketches = generate_sketches(matmul_auto_scheduler_test, (8, 8, 1024), "cuda")
""" 1 cross thread reuction sketch + 1 multi-level tiling sketch """
assert len(sketches) == 2
# Sketch 0
assert_has_cross_thread_reduction(sketches[0], 2)
# Sketch 1
assert_has_cache_read(sketches[1], 0)
assert_compute_at_condition(sketches[1].stages[1], "iter")
assert_has_cache_read(sketches[1], 2)
assert_compute_at_condition(sketches[1].stages[3], "iter")
assert_has_cache_write(sketches[1], 4)
assert_is_tiled(sketches[1].stages[4])
assert_compute_at_condition(sketches[1].stages[4], "iter")
assert_is_tiled(sketches[1].stages[5])
sketches = generate_sketches(double_matmul_auto_scheduler_test, (512,), "cuda")
""" 1 multi-level tiling sketch for one matmul, so 1 x 1 = 1 sketch in total """
assert len(sketches) == 1
assert_compute_at_condition(sketches[0].stages[5], "root")
assert_compute_at_condition(sketches[0].stages[6], "iter")
@tvm.testing.requires_cuda
def test_cuda_conv2d_bn_relu_sketch():
sketches = generate_sketches(
conv2d_nchw_bn_relu_auto_scheduler_test, (1, 56, 56, 512, 512, 3, 1, 1), "cuda"
)
""" 1 multi-level tiling sketch """
assert len(sketches) == 1
assert_has_cache_read(sketches[0], 1)
assert_compute_at_condition(sketches[0].stages[1], "inlined")
assert_compute_at_condition(sketches[0].stages[2], "iter")
assert_has_cache_read(sketches[0], 3)
assert_compute_at_condition(sketches[0].stages[4], "iter")
assert_is_tiled(sketches[0].stages[5])
assert_compute_at_condition(sketches[0].stages[5], "iter")
assert_compute_at_condition(sketches[0].stages[7], "inlined")
assert_compute_at_condition(sketches[0].stages[9], "inlined")
assert_compute_at_condition(sketches[0].stages[11], "inlined")
assert_is_tiled(sketches[0].stages[12])
@tvm.testing.requires_cuda
def test_cuda_max_pool2d_sketch():
sketches = generate_sketches(max_pool2d_auto_scheduler_test, (1, 56, 56, 512, 0), "cuda")
""" 1 default sketch """
assert len(sketches) == 1
assert len(sketches[0].transform_steps) == 0
@tvm.testing.requires_cuda
def test_cuda_min_sketch():
sketches = generate_sketches(min_nm_auto_scheduler_test, (10, 1024), "cuda")
""" 1 cross thread reuction sketch + 1 default sketch """
assert len(sketches) == 2
# Sketch 0
assert_has_cross_thread_reduction(sketches[0], 1)
# Sketch 1
assert len(sketches[1].transform_steps) == 0
@tvm.testing.requires_cuda
def test_cuda_softmax_sketch():
sketches = generate_sketches(softmax_nm_auto_scheduler_test, (2, 1024), "cuda")
""" (1 cross thread reuction sketch + 1 default sketch) * (1 cross thread reuction sketch + 1 default sketch) """
assert len(sketches) == (2 * 2)
# Sketch 0
assert_has_cross_thread_reduction(sketches[0], 1)
assert_compute_at_condition(sketches[3].stages[2], "inlined")
assert_has_cross_thread_reduction(sketches[0], 3)
# Sketch 1
assert_compute_at_condition(sketches[3].stages[2], "inlined")
assert_has_cross_thread_reduction(sketches[1], 3)
# Sketch 2
assert_has_cross_thread_reduction(sketches[2], 1)
assert_compute_at_condition(sketches[3].stages[2], "inlined")
# Sketch 3
assert_compute_at_condition(sketches[3].stages[2], "inlined")
sketches = generate_sketches(softmax_abcd_auto_scheduler_test, (1, 12, 128, 128), "cuda")
""" (1 cross thread reuction sketch + 1 default sketch) * (1 cross thread reuction sketch + 1 default sketch) """
assert len(sketches) == (2 * 2)
# Sketch 0
assert_has_cross_thread_reduction(sketches[0], 1)
assert_compute_at_condition(sketches[3].stages[2], "inlined")
assert_has_cross_thread_reduction(sketches[0], 3)
# Sketch 1
assert_compute_at_condition(sketches[3].stages[2], "inlined")
assert_has_cross_thread_reduction(sketches[1], 3)
# Sketch 2
assert_has_cross_thread_reduction(sketches[2], 1)
assert_compute_at_condition(sketches[3].stages[2], "inlined")
# Sketch 3
assert_compute_at_condition(sketches[3].stages[2], "inlined")
@tvm.testing.requires_cuda
def test_cuda_conv2d_winograd_sketch():
sketches = generate_sketches(
conv2d_winograd_nhwc_auto_scheduler_test, (1, 28, 28, 128, 128, 3, 1, 1), "cuda"
)
""" 1 multi-level tiling sketch """
assert len(sketches) == 1
assert_compute_at_condition(sketches[0].stages[1], "inlined")
assert_compute_at_condition(sketches[0].stages[2], "inlined")
assert_compute_at_condition(sketches[0].stages[3], "inlined")
assert_is_tiled(sketches[0].stages[4])
assert_has_cache_read(sketches[0], 4)
assert_compute_at_condition(sketches[0].stages[5], "iter")
assert_has_cache_read(sketches[0], 6)
assert_compute_at_condition(sketches[0].stages[7], "iter")
assert_is_not_tiled(sketches[0].stages[8])
assert_compute_at_condition(sketches[0].stages[8], "iter")
assert_compute_at_condition(sketches[0].stages[9], "inlined")
assert_is_tiled(sketches[0].stages[10])
assert_is_not_tiled(sketches[0].stages[11])
if __name__ == "__main__":
# TODO(trevmorr): Disabled in neo-ai/tvm due to missing sketches
# test_cpu_matmul_sketch()
test_cpu_conv2d_bn_relu_sketch()
# test_cpu_max_pool2d_sketch()
test_cpu_min_sketch()
test_cpu_softmax_sketch()
test_cpu_conv2d_winograd_sketch()
test_cuda_matmul_sketch()
test_cuda_conv2d_bn_relu_sketch()
test_cuda_max_pool2d_sketch()
test_cuda_min_sketch()
test_cuda_softmax_sketch()
test_cuda_conv2d_winograd_sketch()
|
py | 1a47da5498555f0b0564b76fac267ad09526c766 | from .dataIO import DataIO
from .dataObjects import PriceVolume, BookEntry, ArbitrageEntry, read_orders, read_orders_generator
|
py | 1a47dacdd210442560379a9a0a6e148d3dd68f63 | #!/usr/bin/env pytest
# -*- coding: utf-8 -*-
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: ogr2ogr testing
# Author: Even Rouault <even dot rouault @ spatialys.com>
#
###############################################################################
# Copyright (c) 2008-2014, Even Rouault <even dot rouault at spatialys.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import sys
import os
import shutil
import pytest
from osgeo import gdal, ogr, osr
import gdaltest
import ogrtest
import test_cli_utilities
###############################################################################
# Simple test
def test_ogr2ogr_1():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
(_, err) = gdaltest.runexternal_out_and_err(test_cli_utilities.get_ogr2ogr_path() + ' tmp/poly.shp ../ogr/data/poly.shp')
assert (err is None or err == ''), 'got error/warning'
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 10
feat0 = ds.GetLayer(0).GetFeature(0)
assert feat0.GetFieldAsDouble('AREA') == 215229.266, \
'Did not get expected value for field AREA'
assert feat0.GetFieldAsString('PRFEDEA') == '35043411', \
'Did not get expected value for field PRFEDEA'
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -sql
def test_ogr2ogr_2():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/poly.shp ../ogr/data/poly.shp -sql "select * from poly"')
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 10
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -spat
def test_ogr2ogr_3():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/poly.shp ../ogr/data/poly.shp -spat 479609 4764629 479764 4764817')
ds = ogr.Open('tmp/poly.shp')
if ogrtest.have_geos():
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 4
else:
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 5
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -where
def test_ogr2ogr_4():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/poly.shp ../ogr/data/poly.shp -where "EAS_ID=171"')
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 1
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -append
def test_ogr2ogr_5():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/poly.shp ../ogr/data/poly.shp')
# All 3 variants below should be equivalent
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -update -append tmp/poly.shp ../ogr/data/poly.shp')
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -append tmp/poly.shp ../ogr/data/poly.shp')
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -append -update tmp/poly.shp ../ogr/data/poly.shp')
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 40
feat10 = ds.GetLayer(0).GetFeature(10)
assert feat10.GetFieldAsDouble('AREA') == 215229.266, \
'Did not get expected value for field AREA'
assert feat10.GetFieldAsString('PRFEDEA') == '35043411', \
'Did not get expected value for field PRFEDEA'
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
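# Helper reused by the PostgreSQL-based tests below: it imports the ogr_pg test
# module from ../ogr (which is expected to set up gdaltest.pg_ds and
# gdaltest.pg_connection_string) and skips the caller when no PostgreSQL
# datasource is available.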
def check_if_has_ogr_pg():
path = '../ogr'
if path not in sys.path:
sys.path.append(path)
try:
import ogr_pg
    except ImportError:
pytest.skip()
ogr_pg.test_ogr_pg_1()
if gdaltest.pg_ds is None:
pytest.skip()
gdaltest.pg_ds.Destroy()
###############################################################################
# Test -overwrite
def test_ogr2ogr_6():
check_if_has_ogr_pg()
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
if test_cli_utilities.get_ogrinfo_path() is None:
pytest.skip()
gdaltest.runexternal(test_cli_utilities.get_ogrinfo_path() + ' PG:"' + gdaltest.pg_connection_string + '" -sql "DELLAYER:tpoly"')
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f PostgreSQL PG:"' + gdaltest.pg_connection_string + '" ../ogr/data/poly.shp -nln tpoly')
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -update -overwrite -f PostgreSQL PG:"' + gdaltest.pg_connection_string + '" ../ogr/data/poly.shp -nln tpoly')
ds = ogr.Open('PG:' + gdaltest.pg_connection_string)
assert ds is not None and ds.GetLayerByName('tpoly').GetFeatureCount() == 10
ds.Destroy()
gdaltest.runexternal(test_cli_utilities.get_ogrinfo_path() + ' PG:"' + gdaltest.pg_connection_string + '" -sql "DELLAYER:tpoly"')
###############################################################################
# Test -gt
def test_ogr2ogr_7():
check_if_has_ogr_pg()
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
if test_cli_utilities.get_ogrinfo_path() is None:
pytest.skip()
gdaltest.runexternal(test_cli_utilities.get_ogrinfo_path() + ' PG:"' + gdaltest.pg_connection_string + '" -sql "DELLAYER:tpoly"')
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f PostgreSQL PG:"' + gdaltest.pg_connection_string + '" ../ogr/data/poly.shp -nln tpoly -gt 1')
ds = ogr.Open('PG:' + gdaltest.pg_connection_string)
assert ds is not None and ds.GetLayerByName('tpoly').GetFeatureCount() == 10
ds.Destroy()
gdaltest.runexternal(test_cli_utilities.get_ogrinfo_path() + ' PG:"' + gdaltest.pg_connection_string + '" -sql "DELLAYER:tpoly"')
###############################################################################
# Test -t_srs
def test_ogr2ogr_8():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -t_srs EPSG:4326 tmp/poly.shp ../ogr/data/poly.shp')
ds = ogr.Open('tmp/poly.shp')
assert str(ds.GetLayer(0).GetSpatialRef()).find('1984') != -1
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -a_srs
def test_ogr2ogr_9():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -a_srs EPSG:4326 tmp/poly.shp ../ogr/data/poly.shp')
ds = ogr.Open('tmp/poly.shp')
assert str(ds.GetLayer(0).GetSpatialRef()).find('1984') != -1
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -select
def test_ogr2ogr_10():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
    # Intentionally do not use the exact case of the source field names (#4502)
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -select eas_id,prfedea tmp/poly.shp ../ogr/data/poly.shp')
ds = ogr.Open('tmp/poly.shp')
lyr = ds.GetLayer(0)
assert lyr.GetLayerDefn().GetFieldCount() == 2
feat = lyr.GetNextFeature()
    assert feat.GetFieldAsDouble('EAS_ID') == 168, \
        'did not get expected value for EAS_ID'
    assert feat.GetFieldAsString('PRFEDEA') == '35043411', \
        'did not get expected value for PRFEDEA'
    feat = None
    ds = None
    ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -lco
def test_ogr2ogr_11():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -lco SHPT=POLYGONZ tmp/poly.shp ../ogr/data/poly.shp')
ds = ogr.Open('tmp/poly.shp')
assert ds.GetLayer(0).GetLayerDefn().GetGeomType() == ogr.wkbPolygon25D
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -nlt
def test_ogr2ogr_12():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -nlt POLYGON25D tmp/poly.shp ../ogr/data/poly.shp')
ds = ogr.Open('tmp/poly.shp')
assert ds.GetLayer(0).GetLayerDefn().GetGeomType() == ogr.wkbPolygon25D
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Add explicit source layer name
def test_ogr2ogr_13():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/poly.shp ../ogr/data/poly.shp poly')
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 10
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -segmentize
def test_ogr2ogr_14():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -segmentize 100 tmp/poly.shp ../ogr/data/poly.shp poly')
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 10
feat = ds.GetLayer(0).GetNextFeature()
assert feat.GetGeometryRef().GetGeometryRef(0).GetPointCount() == 36
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -overwrite with a shapefile
def test_ogr2ogr_15():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/poly.shp ../ogr/data/poly.shp')
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 10
ds.Destroy()
# Overwrite
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -overwrite tmp ../ogr/data/poly.shp')
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 10
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -fid
def test_ogr2ogr_16():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -fid 8 tmp/poly.shp ../ogr/data/poly.shp')
src_ds = ogr.Open('../ogr/data/poly.shp')
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 1
src_feat = src_ds.GetLayer(0).GetFeature(8)
feat = ds.GetLayer(0).GetNextFeature()
assert feat.GetField("EAS_ID") == src_feat.GetField("EAS_ID")
ds.Destroy()
src_ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -progress
def test_ogr2ogr_17():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
ret = gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -progress tmp/poly.shp ../ogr/data/poly.shp')
assert ret.find('0...10...20...30...40...50...60...70...80...90...100 - done.') != -1
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 10
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -wrapdateline
def test_ogr2ogr_18():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
if not ogrtest.have_geos():
pytest.skip()
try:
os.stat('tmp/wrapdateline_src.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/wrapdateline_src.shp')
except (OSError, AttributeError):
pass
try:
os.stat('tmp/wrapdateline_dst.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/wrapdateline_dst.shp')
except (OSError, AttributeError):
pass
ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource('tmp/wrapdateline_src.shp')
srs = osr.SpatialReference()
srs.ImportFromEPSG(32660)
lyr = ds.CreateLayer('wrapdateline_src', srs=srs)
feat = ogr.Feature(lyr.GetLayerDefn())
geom = ogr.CreateGeometryFromWkt('POLYGON((700000 4000000,800000 4000000,800000 3000000,700000 3000000,700000 4000000))')
feat.SetGeometryDirectly(geom)
lyr.CreateFeature(feat)
feat.Destroy()
ds.Destroy()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -wrapdateline -t_srs EPSG:4326 tmp/wrapdateline_dst.shp tmp/wrapdateline_src.shp')
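    # Several geometrically equivalent WKT variants are accepted below, since the
    # ring start point and vertex ordering can differ between GEOS versions
    # (e.g. with or without OverlayNG).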
expected_wkt = 'MULTIPOLYGON (((179.222391385437 36.124095832137,180.0 36.1071354434926,180.0 36.107135443432,180.0 27.0904291237556,179.017505655194 27.1079795236266,179.222391385437 36.124095832137)),((-180 36.1071354434425,-179.667822828784 36.0983491954849,-179.974688335432 27.0898861430914,-180 27.0904291237129,-180 27.090429123727,-180 36.107135443432,-180 36.1071354434425)))'
expected_wkt2 = 'MULTIPOLYGON (((179.017505655194 27.1079795236266,179.222391385437 36.124095832137,180.0 36.1071354434926,180.0 36.107135443432,180.0 27.0904291237556,179.017505655194 27.1079795236266)),((-180 27.090429123727,-180 36.107135443432,-180 36.1071354434425,-179.667822828784 36.0983491954849,-179.974688335432 27.0898861430914,-180 27.0904291237129,-180 27.090429123727)))' # with geos OverlayNG
expected_wkt3 = 'MULTIPOLYGON (((180.0 36.1071354434926,180.0 36.107135443432,180.0 27.0904291237556,179.017505655194 27.1079795236266,179.222391385437 36.124095832137,180.0 36.1071354434926)),((-179.667822828784 36.0983491954849,-179.974688335432 27.0898861430914,-180 27.0904291237129,-180 27.090429123727,-180 36.107135443432,-180 36.1071354434425,-179.667822828784 36.0983491954849)))'
ds = ogr.Open('tmp/wrapdateline_dst.shp')
lyr = ds.GetLayer(0)
feat = lyr.GetNextFeature()
got_wkt = feat.GetGeometryRef().ExportToWkt()
ok = ogrtest.check_feature_geometry(feat, expected_wkt) == 0 or \
ogrtest.check_feature_geometry(feat, expected_wkt2) == 0 or \
ogrtest.check_feature_geometry(feat, expected_wkt3) == 0
feat.Destroy()
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/wrapdateline_src.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/wrapdateline_dst.shp')
assert ok, got_wkt
###############################################################################
# Test -clipsrc
def test_ogr2ogr_19():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
if not ogrtest.have_geos():
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/poly.shp ../ogr/data/poly.shp -clipsrc spat_extent -spat 479609 4764629 479764 4764817')
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 4
assert ds.GetLayer(0).GetExtent() == (479609, 479764, 4764629, 4764817), \
'unexpected extent'
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test correct remapping of fields when laundering to the Shapefile format
# Test that the data goes into the right field
# FIXME: a field is skipped if a subsequent field with the same name is found.
def test_ogr2ogr_20():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
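    # DBF field names are limited to 10 characters, so laundered duplicates are
    # expected to be disambiguated with numeric suffixes as listed below.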
expected_fields = ['a',
'A_1',
'a_1_2',
'aaaaaAAAAA',
'aAaaaAAA_1',
'aaaaaAAAAB',
'aaaaaAAA_2',
'aaaaaAAA_3',
'aaaaaAAA_4',
'aaaaaAAA_5',
'aaaaaAAA_6',
'aaaaaAAA_7',
'aaaaaAAA_8',
'aaaaaAAA_9',
'aaaaaAAA10']
expected_data = ['1',
'2',
'3',
'4',
'5',
'6',
'7',
'8',
'9',
'10',
'11',
'12',
'13',
'14',
'15']
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp data/Fields.csv')
ds = ogr.Open('tmp/Fields.dbf')
assert ds is not None
layer_defn = ds.GetLayer(0).GetLayerDefn()
    field_count = layer_defn.GetFieldCount()
    if field_count != 15:
        ds.Destroy()
        ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/Fields.dbf')
        pytest.fail('Unexpected field count: ' + str(field_count))
error_occurred = False
feat = ds.GetLayer(0).GetNextFeature()
for i in range(layer_defn.GetFieldCount()):
if layer_defn.GetFieldDefn(i).GetNameRef() != expected_fields[i]:
print('Expected ', expected_fields[i], ',but got', layer_defn.GetFieldDefn(i).GetNameRef())
error_occurred = True
if feat.GetFieldAsString(i) != expected_data[i]:
print('Expected the value ', expected_data[i], ',but got', feat.GetFieldAsString(i))
error_occurred = True
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/Fields.dbf')
assert not error_occurred
###############################################################################
# Test ogr2ogr when the output driver has already created the fields
# at dataset creation (#3247)
def test_ogr2ogr_21():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.remove('tmp/testogr2ogr21.gtm')
except OSError:
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() +
' -f GPSTrackMaker tmp/testogr2ogr21.gtm data/dataforogr2ogr21.csv ' +
'-sql "SELECT comment, name FROM dataforogr2ogr21" -nlt POINT')
ds = ogr.Open('tmp/testogr2ogr21.gtm')
assert ds is not None
ds.GetLayer(0).GetLayerDefn()
lyr = ds.GetLayer(0)
feat = lyr.GetNextFeature()
if feat.GetFieldAsString('name') != 'NAME' or \
feat.GetFieldAsString('comment') != 'COMMENT':
print(feat.GetFieldAsString('comment'))
ds.Destroy()
os.remove('tmp/testogr2ogr21.gtm')
pytest.fail(feat.GetFieldAsString('name'))
ds.Destroy()
os.remove('tmp/testogr2ogr21.gtm')
###############################################################################
# Test ogr2ogr when the output driver delays the destination layer defn creation (#3384)
def test_ogr2ogr_22():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() +
' -f "MapInfo File" tmp/testogr2ogr22.mif data/dataforogr2ogr21.csv ' +
'-sql "SELECT comment, name FROM dataforogr2ogr21" -nlt POINT')
ds = ogr.Open('tmp/testogr2ogr22.mif')
assert ds is not None
ds.GetLayer(0).GetLayerDefn()
lyr = ds.GetLayer(0)
feat = lyr.GetNextFeature()
if feat.GetFieldAsString('name') != 'NAME' or \
feat.GetFieldAsString('comment') != 'COMMENT':
print(feat.GetFieldAsString('comment'))
ds.Destroy()
ogr.GetDriverByName('MapInfo File').DeleteDataSource('tmp/testogr2ogr22.mif')
pytest.fail(feat.GetFieldAsString('name'))
ds.Destroy()
ogr.GetDriverByName('MapInfo File').DeleteDataSource('tmp/testogr2ogr22.mif')
###############################################################################
# Same as previous but with -select
def test_ogr2ogr_23():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() +
' -f "MapInfo File" tmp/testogr2ogr23.mif data/dataforogr2ogr21.csv ' +
'-sql "SELECT comment, name FROM dataforogr2ogr21" -select comment,name -nlt POINT')
ds = ogr.Open('tmp/testogr2ogr23.mif')
assert ds is not None
ds.GetLayer(0).GetLayerDefn()
lyr = ds.GetLayer(0)
feat = lyr.GetNextFeature()
if feat.GetFieldAsString('name') != 'NAME' or \
feat.GetFieldAsString('comment') != 'COMMENT':
print(feat.GetFieldAsString('comment'))
ds.Destroy()
ogr.GetDriverByName('MapInfo File').DeleteDataSource('tmp/testogr2ogr23.mif')
pytest.fail(feat.GetFieldAsString('name'))
ds.Destroy()
ogr.GetDriverByName('MapInfo File').DeleteDataSource('tmp/testogr2ogr23.mif')
###############################################################################
# Test -clipsrc with WKT geometry (#3530)
def test_ogr2ogr_24():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
if not ogrtest.have_geos():
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/poly.shp ../ogr/data/poly.shp -clipsrc "POLYGON((479609 4764629,479609 4764817,479764 4764817,479764 4764629,479609 4764629))"')
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 4
assert ds.GetLayer(0).GetExtent() == (479609, 479764, 4764629, 4764817), \
'unexpected extent'
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -clipsrc with clip from external datasource
def test_ogr2ogr_25():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
if not ogrtest.have_geos():
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
f = open('tmp/clip.csv', 'wt')
f.write('foo,WKT\n')
f.write('foo,"POLYGON((479609 4764629,479609 4764817,479764 4764817,479764 4764629,479609 4764629))"\n')
f.close()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/poly.shp ../ogr/data/poly.shp -clipsrc tmp/clip.csv -clipsrcwhere foo=\'foo\'')
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 4
assert ds.GetLayer(0).GetExtent() == (479609, 479764, 4764629, 4764817), \
'unexpected extent'
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
os.remove('tmp/clip.csv')
###############################################################################
# Test -clipdst with WKT geometry (#3530)
def test_ogr2ogr_26():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
if not ogrtest.have_geos():
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/poly.shp ../ogr/data/poly.shp -clipdst "POLYGON((479609 4764629,479609 4764817,479764 4764817,479764 4764629,479609 4764629))"')
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 4
assert ds.GetLayer(0).GetExtent() == (479609, 479764, 4764629, 4764817), \
'unexpected extent'
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test -clipdst with clip from external datasource
def test_ogr2ogr_27():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
if not ogrtest.have_geos():
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
f = open('tmp/clip.csv', 'wt')
f.write('foo,WKT\n')
f.write('foo,"POLYGON((479609 4764629,479609 4764817,479764 4764817,479764 4764629,479609 4764629))"\n')
f.close()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -nlt MULTIPOLYGON tmp/poly.shp ../ogr/data/poly.shp -clipdst tmp/clip.csv -clipdstsql "SELECT * from clip"')
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 4
assert ds.GetLayer(0).GetExtent() == (479609, 479764, 4764629, 4764817), \
'unexpected extent'
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
os.remove('tmp/clip.csv')
###############################################################################
# Test -wrapdateline on linestrings
def test_ogr2ogr_28():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/wrapdateline_src.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/wrapdateline_src.shp')
except (OSError, AttributeError):
pass
try:
os.stat('tmp/wrapdateline_dst.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/wrapdateline_dst.shp')
except (OSError, AttributeError):
pass
ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource('tmp/wrapdateline_src.shp')
srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)
lyr = ds.CreateLayer('wrapdateline_src', srs=srs)
feat = ogr.Feature(lyr.GetLayerDefn())
geom = ogr.CreateGeometryFromWkt('LINESTRING(160 0,165 1,170 2,175 3,177 4,-177 5,-175 6,-170 7,-177 8,177 9,170 10)')
feat.SetGeometryDirectly(geom)
lyr.CreateFeature(feat)
feat.Destroy()
ds.Destroy()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -wrapdateline tmp/wrapdateline_dst.shp tmp/wrapdateline_src.shp')
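    # The source line crosses the antimeridian twice, so -wrapdateline is expected
    # to split it into three pieces joined at longitude +/-180.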
expected_wkt = 'MULTILINESTRING ((160 0,165 1,170 2,175 3,177 4,180 4.5),(-180 4.5,-177 5,-175 6,-170 7,-177 8,-180 8.5),(180 8.5,177 9,170 10))'
expected_geom = ogr.CreateGeometryFromWkt(expected_wkt)
ds = ogr.Open('tmp/wrapdateline_dst.shp')
lyr = ds.GetLayer(0)
feat = lyr.GetNextFeature()
ret = ogrtest.check_feature_geometry(feat, expected_geom)
feat.Destroy()
expected_geom.Destroy()
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/wrapdateline_src.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/wrapdateline_dst.shp')
assert ret == 0
###############################################################################
# Test -wrapdateline on polygons
def test_ogr2ogr_29():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
if not ogrtest.have_geos():
pytest.skip()
for i in range(2):
try:
os.stat('tmp/wrapdateline_src.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/wrapdateline_src.shp')
except (OSError, AttributeError):
pass
try:
os.stat('tmp/wrapdateline_dst.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/wrapdateline_dst.shp')
except (OSError, AttributeError):
pass
ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource('tmp/wrapdateline_src.shp')
srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)
lyr = ds.CreateLayer('wrapdateline_src', srs=srs)
feat = ogr.Feature(lyr.GetLayerDefn())
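        # Both iterations use the same antimeridian-crossing polygon, only with a
        # different starting vertex, and should wrap to the same multipolygon.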
if i == 0:
geom = ogr.CreateGeometryFromWkt('POLYGON((179 40,179.5 40,-179.5 40,-179 40,-170 40,-165 40,-165 30,-170 30,-179 30,-179.5 30,179.5 30,179 30,179 40))')
else:
geom = ogr.CreateGeometryFromWkt('POLYGON((-165 30,-170 30,-179 30,-179.5 30,179.5 30,179 30,179 40,179.5 40,-179.5 40,-179 40,-170 40,-165 40,-165 30))')
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
feat.Destroy()
ds.Destroy()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -wrapdateline tmp/wrapdateline_dst.shp tmp/wrapdateline_src.shp')
expected_wkt = 'MULTIPOLYGON (((180 30,179.5 30.0,179 30,179 40,179.5 40.0,180 40,180 30)),((-180 40,-179.5 40.0,-179 40,-170 40,-165 40,-165 30,-170 30,-179 30,-179.5 30.0,-180 30,-180 40)))'
expected_geom = ogr.CreateGeometryFromWkt(expected_wkt)
ds = ogr.Open('tmp/wrapdateline_dst.shp')
lyr = ds.GetLayer(0)
feat = lyr.GetNextFeature()
ret = ogrtest.check_feature_geometry(feat, expected_geom)
if ret != 0:
print('src is : %s' % geom.ExportToWkt())
print('got : %s' % feat.GetGeometryRef().ExportToWkt())
feat.Destroy()
expected_geom.Destroy()
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/wrapdateline_src.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/wrapdateline_dst.shp')
assert ret == 0
###############################################################################
# Test -splitlistfields option
def test_ogr2ogr_30():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
ds = ogr.Open('../ogr/data/gml/testlistfields.gml')
if ds is None:
pytest.skip()
ds = None
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -splitlistfields tmp/test_ogr2ogr_30.dbf ../ogr/data/gml/testlistfields.gml')
gdal.Unlink('../ogr/data/gml/testlistfields.gfs')
ds = ogr.Open('tmp/test_ogr2ogr_30.dbf')
assert ds is not None
lyr = ds.GetLayer(0)
feat = lyr.GetNextFeature()
if feat.GetField('attrib11') != 'value1' or \
feat.GetField('attrib12') != 'value2' or \
feat.GetField('attrib2') != 'value3' or \
feat.GetField('attrib31') != 4 or \
feat.GetField('attrib32') != 5 or \
feat.GetField('attrib41') != 6.1 or \
feat.GetField('attrib42') != 7.1:
feat.DumpReadable()
pytest.fail('did not get expected attribs')
ds = None
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_30.dbf')
###############################################################################
# Test that -overwrite works if the output file doesn't yet exist (#3825)
def test_ogr2ogr_31():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -overwrite tmp/poly.shp ../ogr/data/poly.shp')
ds = ogr.Open('tmp/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 10
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
###############################################################################
# Test that -append and -overwrite to a single-file shapefile work without specifying -nln
def test_ogr2ogr_32():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/test_ogr2ogr_32.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_32.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/test_ogr2ogr_32.shp ../ogr/data/poly.shp')
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -append tmp/test_ogr2ogr_32.shp ../ogr/data/poly.shp')
ds = ogr.Open('tmp/test_ogr2ogr_32.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 20, '-append failed'
ds = None
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -overwrite tmp/test_ogr2ogr_32.shp ../ogr/data/poly.shp')
ds = ogr.Open('tmp/test_ogr2ogr_32.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 10, \
'-overwrite failed'
ds = None
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_32.shp')
###############################################################################
# Test -explodecollections
def test_ogr2ogr_33():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/test_ogr2ogr_33_src.csv')
ogr.GetDriverByName('CSV').DeleteDataSource('tmp/test_ogr2ogr_33_src.csv')
except (OSError, AttributeError):
pass
try:
os.stat('tmp/test_ogr2ogr_33_dst.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_33_dst.shp')
except (OSError, AttributeError):
pass
f = open('tmp/test_ogr2ogr_33_src.csv', 'wt')
f.write('foo,WKT\n')
f.write('bar,"MULTIPOLYGON (((10 10,10 11,11 11,11 10,10 10)),((100 100,100 200,200 200,200 100,100 100),(125 125,175 125,175 175,125 175,125 125)))"\n')
f.write('baz,"POLYGON ((0 0,0 1,1 1,1 0,0 0))"\n')
f.close()
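    # -explodecollections should split the two-part multipolygon into two features,
    # giving three features in total.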
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -explodecollections tmp/test_ogr2ogr_33_dst.shp tmp/test_ogr2ogr_33_src.csv -select foo')
ds = ogr.Open('tmp/test_ogr2ogr_33_dst.shp')
lyr = ds.GetLayer(0)
assert lyr.GetFeatureCount() == 3, '-explodecollections failed'
feat = lyr.GetFeature(0)
if feat.GetField("foo") != 'bar':
feat.DumpReadable()
pytest.fail()
if feat.GetGeometryRef().ExportToWkt() != 'POLYGON ((10 10,10 11,11 11,11 10,10 10))':
feat.DumpReadable()
pytest.fail()
feat = lyr.GetFeature(1)
if feat.GetField("foo") != 'bar':
feat.DumpReadable()
pytest.fail()
if feat.GetGeometryRef().ExportToWkt() != 'POLYGON ((100 100,100 200,200 200,200 100,100 100),(125 125,175 125,175 175,125 175,125 125))':
feat.DumpReadable()
pytest.fail()
feat = lyr.GetFeature(2)
if feat.GetField("foo") != 'baz':
feat.DumpReadable()
pytest.fail()
if feat.GetGeometryRef().ExportToWkt() != 'POLYGON ((0 0,0 1,1 1,1 0,0 0))':
feat.DumpReadable()
pytest.fail()
ds = None
ogr.GetDriverByName('CSV').DeleteDataSource('tmp/test_ogr2ogr_33_src.csv')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_33_dst.shp')
###############################################################################
# Test 'ogr2ogr someDirThatDoesNotExist src.shp -nln someDirThatDoesNotExist'
# This should result in creating a someDirThatDoesNotExist directory with
# someDirThatDoesNotExist.shp/dbf/shx inside this directory
def test_ogr2ogr_34():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/test_ogr2ogr_34_dir')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_34_dir')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/test_ogr2ogr_34_dir ../ogr/data/poly.shp -nln test_ogr2ogr_34_dir')
ds = ogr.Open('tmp/test_ogr2ogr_34_dir/test_ogr2ogr_34_dir.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 10, \
'initial shapefile creation failed'
ds = None
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -append tmp/test_ogr2ogr_34_dir ../ogr/data/poly.shp -nln test_ogr2ogr_34_dir')
ds = ogr.Open('tmp/test_ogr2ogr_34_dir/test_ogr2ogr_34_dir.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 20, '-append failed'
ds = None
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -overwrite tmp/test_ogr2ogr_34_dir ../ogr/data/poly.shp -nln test_ogr2ogr_34_dir')
ds = ogr.Open('tmp/test_ogr2ogr_34_dir/test_ogr2ogr_34_dir.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 10, \
'-overwrite failed'
ds = None
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_34_dir')
###############################################################################
# Test 'ogr2ogr someDirThatDoesNotExist src.shp'
def test_ogr2ogr_35():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/test_ogr2ogr_35_dir')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_35_dir')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/test_ogr2ogr_35_dir ../ogr/data/poly.shp ')
ds = ogr.Open('tmp/test_ogr2ogr_35_dir/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 10, \
'initial shapefile creation failed'
ds = None
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -append tmp/test_ogr2ogr_35_dir ../ogr/data/poly.shp')
ds = ogr.Open('tmp/test_ogr2ogr_35_dir/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 20, '-append failed'
ds = None
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -overwrite tmp/test_ogr2ogr_35_dir ../ogr/data/poly.shp')
ds = ogr.Open('tmp/test_ogr2ogr_35_dir/poly.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 10, \
'-overwrite failed'
ds = None
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_35_dir')
###############################################################################
# Test ogr2ogr -zfield
def test_ogr2ogr_36():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/test_ogr2ogr_36.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_36.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/test_ogr2ogr_36.shp ../ogr/data/poly.shp -zfield EAS_ID')
ds = ogr.Open('tmp/test_ogr2ogr_36.shp')
feat = ds.GetLayer(0).GetNextFeature()
wkt = feat.GetGeometryRef().ExportToWkt()
ds = None
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_36.shp')
assert wkt.find(' 168,') != -1
###############################################################################
# Test 'ogr2ogr someDirThatDoesNotExist.shp dataSourceWithMultipleLayer'
def test_ogr2ogr_37():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/test_ogr2ogr_37_dir.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_37_dir.shp')
except (OSError, AttributeError):
pass
try:
os.mkdir('tmp/test_ogr2ogr_37_src')
except OSError:
pass
shutil.copy('../ogr/data/poly.shp', 'tmp/test_ogr2ogr_37_src')
shutil.copy('../ogr/data/poly.shx', 'tmp/test_ogr2ogr_37_src')
shutil.copy('../ogr/data/poly.dbf', 'tmp/test_ogr2ogr_37_src')
shutil.copy('../ogr/data/shp/testpoly.shp', 'tmp/test_ogr2ogr_37_src')
shutil.copy('../ogr/data/shp/testpoly.shx', 'tmp/test_ogr2ogr_37_src')
shutil.copy('../ogr/data/shp/testpoly.dbf', 'tmp/test_ogr2ogr_37_src')
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/test_ogr2ogr_37_dir.shp tmp/test_ogr2ogr_37_src')
ds = ogr.Open('tmp/test_ogr2ogr_37_dir.shp')
assert ds is not None and ds.GetLayerCount() == 2
ds = None
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_37_src')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_37_dir.shp')
###############################################################################
# Test that the fields referenced by the -where clause are taken into account
# when combining -select and -where (#4015)
def test_ogr2ogr_38():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/test_ogr2ogr_38.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_38.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/test_ogr2ogr_38.shp ../ogr/data/poly.shp -select AREA -where "EAS_ID = 170"')
ds = ogr.Open('tmp/test_ogr2ogr_38.shp')
lyr = ds.GetLayer(0)
feat = lyr.GetNextFeature()
assert feat is not None
ds = None
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_38.shp')
###############################################################################
# Test 'ogr2ogr someDirThatDoesNotExist.shp dataSourceWithMultipleLayer -sql "select * from alayer"' (#4268)
def test_ogr2ogr_39():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
        os.stat('tmp/test_ogr2ogr_39.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_39.shp')
except (OSError, AttributeError):
pass
try:
os.mkdir('tmp/test_ogr2ogr_39_src')
except OSError:
pass
shutil.copy('../ogr/data/poly.shp', 'tmp/test_ogr2ogr_39_src')
shutil.copy('../ogr/data/poly.shx', 'tmp/test_ogr2ogr_39_src')
shutil.copy('../ogr/data/poly.dbf', 'tmp/test_ogr2ogr_39_src')
shutil.copy('../ogr/data/shp/testpoly.shp', 'tmp/test_ogr2ogr_39_src')
shutil.copy('../ogr/data/shp/testpoly.shx', 'tmp/test_ogr2ogr_39_src')
shutil.copy('../ogr/data/shp/testpoly.dbf', 'tmp/test_ogr2ogr_39_src')
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/test_ogr2ogr_39.shp tmp/test_ogr2ogr_39_src -sql "select * from poly"')
ds = ogr.Open('tmp/test_ogr2ogr_39.shp')
assert ds is not None and ds.GetLayerCount() == 1
ds = None
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_39_src')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_39.shp')
###############################################################################
# Test 'ogr2ogr -update asqlite.db asqlite.db layersrc -nln layerdst' (#4270)
def test_ogr2ogr_40():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
drv = ogr.GetDriverByName('SQLite')
if drv is None:
pytest.skip()
try:
ogr.GetDriverByName('SQLite').DeleteDataSource('tmp/test_ogr2ogr_40.db')
except AttributeError:
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f SQlite tmp/test_ogr2ogr_40.db ../ogr/data/poly.shp')
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -update tmp/test_ogr2ogr_40.db tmp/test_ogr2ogr_40.db poly -nln poly2')
ds = ogr.Open('tmp/test_ogr2ogr_40.db')
lyr = ds.GetLayerByName('poly2')
assert lyr.GetFeatureCount() == 10
ds = None
ogr.GetDriverByName('SQLite').DeleteDataSource('tmp/test_ogr2ogr_40.db')
###############################################################################
# Test 'ogr2ogr -update PG:xxxx PG:xxxx layersrc -nln layerdst' (#4270)
def test_ogr2ogr_41():
check_if_has_ogr_pg()
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
ds = ogr.Open('PG:' + gdaltest.pg_connection_string)
ds.ExecuteSQL('DELLAYER:test_ogr2ogr_41_src')
ds.ExecuteSQL('DELLAYER:test_ogr2ogr_41_target')
lyr = ds.CreateLayer('test_ogr2ogr_41_src')
lyr.CreateField(ogr.FieldDefn('foo', ogr.OFTString))
lyr.StartTransaction()
for i in range(501):
feat = ogr.Feature(lyr.GetLayerDefn())
feat['foo'] = '%d' % i
lyr.CreateFeature(feat)
feat = None
lyr.CommitTransaction()
lyr = None
ds = None
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -update PG:"' + gdaltest.pg_connection_string + '" PG:"' + gdaltest.pg_connection_string + '" test_ogr2ogr_41_src -nln test_ogr2ogr_41_target')
ds = ogr.Open('PG:' + gdaltest.pg_connection_string)
lyr = ds.GetLayerByName('test_ogr2ogr_41_target')
assert lyr.GetFeatureCount() == 501
ds.ExecuteSQL('DELLAYER:test_ogr2ogr_41_src')
ds.ExecuteSQL('DELLAYER:test_ogr2ogr_41_target')
ds = None
###############################################################################
# Test combination of -select and -where FID=xx (#4500)
def test_ogr2ogr_42():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/test_ogr2ogr_42.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_42.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/test_ogr2ogr_42.shp ../ogr/data/poly.shp -select AREA -where "FID = 0"')
ds = ogr.Open('tmp/test_ogr2ogr_42.shp')
lyr = ds.GetLayerByIndex(0)
assert lyr.GetFeatureCount() == 1
ds = None
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_42.shp')
###############################################################################
# Test -dim 3 and -dim 2
def test_ogr2ogr_43():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/test_ogr2ogr_43_3d.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_43_3d.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/test_ogr2ogr_43_3d.shp ../ogr/data/poly.shp -dim 3')
ds = ogr.Open('tmp/test_ogr2ogr_43_3d.shp')
lyr = ds.GetLayerByIndex(0)
assert lyr.GetGeomType() == ogr.wkbPolygon25D
ds = None
try:
os.stat('tmp/test_ogr2ogr_43_2d.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_43_2d.shp')
except (OSError, AttributeError):
pass
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/test_ogr2ogr_43_2d.shp tmp/test_ogr2ogr_43_3d.shp -dim 2')
ds = ogr.Open('tmp/test_ogr2ogr_43_2d.shp')
lyr = ds.GetLayerByIndex(0)
assert lyr.GetGeomType() == ogr.wkbPolygon
ds = None
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_43_2d.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_43_3d.shp')
###############################################################################
# Test -nlt PROMOTE_TO_MULTI for polygon/multipolygon
def test_ogr2ogr_44():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/test_ogr2ogr_44_src.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_44_src.shp')
except (OSError, AttributeError):
pass
gdal.Unlink('tmp/test_ogr2ogr_44.gml')
gdal.Unlink('tmp/test_ogr2ogr_44.xsd')
ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource('tmp/test_ogr2ogr_44_src.shp')
lyr = ds.CreateLayer('test_ogr2ogr_44_src', geom_type=ogr.wkbPolygon)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetGeometry(ogr.CreateGeometryFromWkt('POLYGON((0 0,0 1,1 1,0 0))'))
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetGeometry(ogr.CreateGeometryFromWkt('MULTIPOLYGON(((0 0,0 1,1 1,0 0)),((10 0,10 1,11 1,10 0)))'))
lyr.CreateFeature(feat)
ds = None
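    # With -nlt PROMOTE_TO_MULTI, the mixed polygon/multipolygon input should be
    # written as MultiSurface geometries in the GML output.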
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f GML tmp/test_ogr2ogr_44.gml tmp/test_ogr2ogr_44_src.shp -nlt PROMOTE_TO_MULTI')
f = open('tmp/test_ogr2ogr_44.xsd')
data = f.read()
f.close()
assert 'type="gml:MultiSurfacePropertyType"' in data
f = open('tmp/test_ogr2ogr_44.gml')
data = f.read()
f.close()
assert '<gml:MultiSurface gml:id="test_ogr2ogr_44_src.geom.0"><gml:surfaceMember><gml:Polygon gml:id="test_ogr2ogr_44_src.geom.0.0"><gml:exterior><gml:LinearRing><gml:posList>0 0 0 1 1 1 0 0</gml:posList></gml:LinearRing></gml:exterior></gml:Polygon></gml:surfaceMember></gml:MultiSurface>' in data
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_44_src.shp')
os.unlink('tmp/test_ogr2ogr_44.gml')
os.unlink('tmp/test_ogr2ogr_44.xsd')
###############################################################################
# Test -nlt PROMOTE_TO_MULTI for linestring/multilinestring
def test_ogr2ogr_45():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/test_ogr2ogr_45_src.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_45_src.shp')
except (OSError, AttributeError):
pass
gdal.Unlink('tmp/test_ogr2ogr_45.gml')
gdal.Unlink('tmp/test_ogr2ogr_45.xsd')
ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource('tmp/test_ogr2ogr_45_src.shp')
lyr = ds.CreateLayer('test_ogr2ogr_45_src', geom_type=ogr.wkbLineString)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetGeometry(ogr.CreateGeometryFromWkt('LINESTRING(0 0,0 1,1 1,0 0)'))
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetGeometry(ogr.CreateGeometryFromWkt('MULTILINESTRING((0 0,0 1,1 1,0 0),(10 0,10 1,11 1,10 0))'))
lyr.CreateFeature(feat)
ds = None
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f GML tmp/test_ogr2ogr_45.gml tmp/test_ogr2ogr_45_src.shp -nlt PROMOTE_TO_MULTI')
f = open('tmp/test_ogr2ogr_45.xsd')
data = f.read()
f.close()
assert 'type="gml:MultiCurvePropertyType"' in data
f = open('tmp/test_ogr2ogr_45.gml')
data = f.read()
f.close()
assert '<gml:MultiCurve gml:id="test_ogr2ogr_45_src.geom.0"><gml:curveMember><gml:LineString gml:id="test_ogr2ogr_45_src.geom.0.0"><gml:posList>0 0 0 1 1 1 0 0</gml:posList></gml:LineString></gml:curveMember></gml:MultiCurve>' in data
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_45_src.shp')
os.unlink('tmp/test_ogr2ogr_45.gml')
os.unlink('tmp/test_ogr2ogr_45.xsd')
###############################################################################
# Test -gcp (#4604)
def test_ogr2ogr_46():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/test_ogr2ogr_46_src.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_46_src.shp')
except (OSError, AttributeError):
pass
gdal.Unlink('tmp/test_ogr2ogr_46.gml')
gdal.Unlink('tmp/test_ogr2ogr_46.xsd')
ds = ogr.GetDriverByName('ESRI Shapefile').CreateDataSource('tmp/test_ogr2ogr_46_src.shp')
lyr = ds.CreateLayer('test_ogr2ogr_46_src', geom_type=ogr.wkbPoint)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetGeometry(ogr.CreateGeometryFromWkt('POINT(0 0)'))
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
feat.SetGeometry(ogr.CreateGeometryFromWkt('POINT(1 1)'))
lyr.CreateFeature(feat)
ds = None
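    # The GCPs map (0,0)->(2,49), (0,1)->(2,50) and (1,0)->(3,49), so the two input
    # points should land near (2,49) and (3,50) in degrees, or near their
    # EPSG:3857 equivalents (~222638 m / ~333958 m) when -t_srs EPSG:3857 is used.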
for option in ['', ' -tps', ' -order 1', ' -a_srs EPSG:4326', ' -s_srs EPSG:4326 -t_srs EPSG:3857']:
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f GML -dsco FORMAT=GML2 tmp/test_ogr2ogr_46.gml tmp/test_ogr2ogr_46_src.shp -gcp 0 0 2 49 -gcp 0 1 2 50 -gcp 1 0 3 49%s' % option)
f = open('tmp/test_ogr2ogr_46.gml')
data = f.read()
f.close()
assert not ('2,49' not in data and '2.0,49.0' not in data and '222638.' not in data), \
option
assert not ('3,50' not in data and '3.0,50.0' not in data and '333958.' not in data), \
option
os.unlink('tmp/test_ogr2ogr_46.gml')
os.unlink('tmp/test_ogr2ogr_46.xsd')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_46_src.shp')
###############################################################################
# Test reprojection with features with different SRS
def test_ogr2ogr_47():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
f = open('tmp/test_ogr2ogr_47_src.gml', 'wt')
f.write("""<foo xmlns:gml="http://www.opengis.net/gml">
<gml:featureMember>
<features>
<geometry>
<gml:Point srsName="http://www.opengis.net/gml/srs/epsg.xml#32630">
<gml:coordinates>500000,4500000</gml:coordinates>
</gml:Point>
</geometry>
</features>
</gml:featureMember>
<gml:featureMember>
<features >
<geometry>
<gml:Point srsName="http://www.opengis.net/gml/srs/epsg.xml#32631">
<gml:coordinates>500000,4500000</gml:coordinates>
</gml:Point>
</geometry>
</features>
</gml:featureMember>
</foo>""")
f.close()
gdal.Unlink('tmp/test_ogr2ogr_47_src.gfs')
ds = ogr.Open('tmp/test_ogr2ogr_47_src.gml')
if ds is None:
os.unlink('tmp/test_ogr2ogr_47_src.gml')
pytest.skip()
ds = None
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f GML -dsco FORMAT=GML2 -t_srs EPSG:4326 tmp/test_ogr2ogr_47_dst.gml tmp/test_ogr2ogr_47_src.gml')
f = open('tmp/test_ogr2ogr_47_dst.gml')
data = f.read()
f.close()
assert ('>-3.0,40.65' in data and '>3.0,40.65' in data) or \
('>-3,40.65' in data and '>3.0,40.65' in data) or \
('>-2.99999999999999,40.65' in data and '>2.99999999999999,40.65' in data), data
os.unlink('tmp/test_ogr2ogr_47_dst.gml')
os.unlink('tmp/test_ogr2ogr_47_dst.xsd')
os.unlink('tmp/test_ogr2ogr_47_src.gml')
os.unlink('tmp/test_ogr2ogr_47_src.gfs')
###############################################################################
# Test fieldmap option
def test_ogr2ogr_48():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
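    # With -fieldmap, the n-th value is the destination field index for the n-th
    # source field: 'identity' keeps the original order, while the reversed list
    # below stores source field n into destination field 14-n.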
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp data/Fields.csv')
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -append -fieldmap identity tmp data/Fields.csv')
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -append -fieldmap 14,13,12,11,10,9,8,7,6,5,4,3,2,1,0 tmp data/Fields.csv')
ds = ogr.Open('tmp/Fields.dbf')
assert ds is not None
layer_defn = ds.GetLayer(0).GetLayerDefn()
    field_count = layer_defn.GetFieldCount()
    if field_count != 15:
        ds.Destroy()
        ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/Fields.dbf')
        pytest.fail('Unexpected field count: ' + str(field_count))
error_occurred = False
lyr = ds.GetLayer(0)
lyr.GetNextFeature()
feat = lyr.GetNextFeature()
for i in range(layer_defn.GetFieldCount()):
if feat.GetFieldAsString(i) != str(i + 1):
print('Expected the value ', str(i + 1), ',but got', feat.GetFieldAsString(i))
error_occurred = True
feat = lyr.GetNextFeature()
for i in range(layer_defn.GetFieldCount()):
if feat.GetFieldAsString(i) != str(layer_defn.GetFieldCount() - i):
print('Expected the value ', str(layer_defn.GetFieldCount() - i), ',but got', feat.GetFieldAsString(i))
error_occurred = True
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/Fields.dbf')
assert not error_occurred
###############################################################################
# Test detection of duplicated field names in source layer and renaming
# in target layer
def test_ogr2ogr_49():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f CSV tmp/test_ogr2ogr_49.csv data/duplicatedfields.csv')
f = open('tmp/test_ogr2ogr_49.csv')
lines = f.readlines()
f.close()
os.unlink('tmp/test_ogr2ogr_49.csv')
assert (lines[0].find('foo,bar,foo3,foo2,baz,foo4') == 0 and \
lines[1].find('val_foo,val_bar,val_foo3,val_foo2,val_baz,val_foo4') == 0)
###############################################################################
# Test that the detection of duplicated field names is case insensitive (#5208)
def test_ogr2ogr_49_bis():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f KML tmp/test_ogr2ogr_49_bis.kml data/grid.csv -sql "SELECT field_1 AS name FROM grid WHERE fid = 1"')
f = open('tmp/test_ogr2ogr_49_bis.kml')
lines = f.readlines()
f.close()
os.unlink('tmp/test_ogr2ogr_49_bis.kml')
expected_lines = [
"""<?xml version="1.0" encoding="utf-8" ?>""",
"""<kml xmlns="http://www.opengis.net/kml/2.2">""",
"""<Document id="root_doc">""",
"""<Folder><name>grid</name>""",
""" <Placemark>""",
""" <name>440750.000</name>""",
""" </Placemark>""",
"""</Folder>""",
"""</Document></kml>"""]
assert len(lines) == len(expected_lines)
for i, line in enumerate(lines):
assert line.strip() == expected_lines[i].strip(), lines
###############################################################################
# Test -addfields
def test_ogr2ogr_50():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
f = open('tmp/test_ogr2ogr_50_1.csv', 'wt')
f.write('id,field1\n')
f.write('1,foo\n')
f.close()
f = open('tmp/test_ogr2ogr_50_2.csv', 'wt')
f.write('id,field1,field2\n')
f.write('2,bar,baz\n')
f.close()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/test_ogr2ogr_50.dbf tmp/test_ogr2ogr_50_1.csv -nln test_ogr2ogr_50')
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -addfields tmp/test_ogr2ogr_50.dbf tmp/test_ogr2ogr_50_2.csv -nln test_ogr2ogr_50')
ds = ogr.Open('tmp/test_ogr2ogr_50.dbf')
lyr = ds.GetLayer(0)
feat = lyr.GetNextFeature()
if feat.GetField('field1') != 'foo' or not feat.IsFieldNull('field2'):
feat.DumpReadable()
pytest.fail()
feat = lyr.GetNextFeature()
if feat.GetField('field1') != 'bar' or feat.GetField('field2') != 'baz':
feat.DumpReadable()
pytest.fail()
ds = None
os.unlink('tmp/test_ogr2ogr_50.dbf')
os.unlink('tmp/test_ogr2ogr_50_1.csv')
os.unlink('tmp/test_ogr2ogr_50_2.csv')
###############################################################################
# Test RFC 41 support
def test_ogr2ogr_51():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
f = open('tmp/test_ogr2ogr_51_src.csv', 'wt')
f.write('id,_WKTgeom1_EPSG_4326,foo,_WKTgeom2_EPSG_32631\n')
f.write('1,"POINT(1 2)","bar","POINT(3 4)"\n')
f.close()
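    # Column names of the form _WKTgeomN_EPSG_XXXX are treated by the CSV driver as
    # WKT geometry columns, so this source exposes two geometry fields (RFC 41).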
# Test conversion from a multi-geometry format into a multi-geometry format
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f CSV tmp/test_ogr2ogr_51_dst.csv tmp/test_ogr2ogr_51_src.csv -nln test_ogr2ogr_51_dst -dsco GEOMETRY=AS_WKT -lco STRING_QUOTING=ALWAYS')
f = open('tmp/test_ogr2ogr_51_dst.csv', 'rt')
lines = f.readlines()
f.close()
expected_lines = ['"_WKTgeom1_EPSG_4326","_WKTgeom2_EPSG_32631","id","foo"', '"POINT (1 2)","POINT (3 4)","1","bar"']
for i in range(2):
assert lines[i].strip() == expected_lines[i]
# Test conversion from a multi-geometry format into a single-geometry format
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/test_ogr2ogr_51_dst.shp tmp/test_ogr2ogr_51_src.csv -nln test_ogr2ogr_51_dst')
ds = ogr.Open('tmp/test_ogr2ogr_51_dst.shp')
lyr = ds.GetLayer(0)
sr = lyr.GetSpatialRef()
assert sr is not None and sr.ExportToWkt().find('GEOGCS["WGS 84"') == 0
feat = lyr.GetNextFeature()
assert feat.GetGeometryRef().ExportToWkt() == 'POINT (1 2)'
ds = None
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_51_dst.shp')
# Test -append into a multi-geometry format
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -append tmp/test_ogr2ogr_51_dst.csv tmp/test_ogr2ogr_51_src.csv -nln test_ogr2ogr_51_dst')
f = open('tmp/test_ogr2ogr_51_dst.csv', 'rt')
lines = f.readlines()
f.close()
expected_lines = ['"_WKTgeom1_EPSG_4326","_WKTgeom2_EPSG_32631","id","foo"',
'"POINT (1 2)","POINT (3 4)","1","bar"',
'"POINT (1 2)","POINT (3 4)","1","bar"']
for i in range(3):
assert lines[i].strip() == expected_lines[i]
os.unlink('tmp/test_ogr2ogr_51_dst.csv')
# Test -select with geometry field names
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -select foo,geom__WKTgeom2_EPSG_32631,id,geom__WKTgeom1_EPSG_4326 -f CSV tmp/test_ogr2ogr_51_dst.csv tmp/test_ogr2ogr_51_src.csv -nln test_ogr2ogr_51_dst -dsco GEOMETRY=AS_WKT -lco STRING_QUOTING=ALWAYS')
f = open('tmp/test_ogr2ogr_51_dst.csv', 'rt')
lines = f.readlines()
f.close()
expected_lines = ['"_WKTgeom2_EPSG_32631","_WKTgeom1_EPSG_4326","foo","id"', '"POINT (3 4)","POINT (1 2)","bar","1"']
for i in range(2):
assert lines[i].strip() == expected_lines[i]
# Test -geomfield option
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -append tmp/test_ogr2ogr_51_dst.csv tmp/test_ogr2ogr_51_src.csv -nln test_ogr2ogr_51_dst -spat 1 2 1 2 -geomfield geom__WKTgeom1_EPSG_4326')
f = open('tmp/test_ogr2ogr_51_dst.csv', 'rt')
lines = f.readlines()
f.close()
expected_lines = ['"_WKTgeom2_EPSG_32631","_WKTgeom1_EPSG_4326","foo","id"',
'"POINT (3 4)","POINT (1 2)","bar","1"',
'"POINT (3 4)","POINT (1 2)","bar","1"']
for i in range(2):
assert lines[i].strip() == expected_lines[i]
os.unlink('tmp/test_ogr2ogr_51_src.csv')
os.unlink('tmp/test_ogr2ogr_51_dst.csv')
###############################################################################
# Test -nlt CONVERT_TO_LINEAR and -nlt CONVERT_TO_CURVE
def test_ogr2ogr_52():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
f = open('tmp/test_ogr2ogr_52_src.csv', 'wt')
f.write('id,WKT\n')
f.write('1,"CIRCULARSTRING(0 0,1 0,0 0)"\n')
f.close()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f CSV tmp/test_ogr2ogr_52_dst.csv tmp/test_ogr2ogr_52_src.csv -select id -nln test_ogr2ogr_52_dst -dsco GEOMETRY=AS_WKT -nlt CONVERT_TO_LINEAR')
f = open('tmp/test_ogr2ogr_52_dst.csv', 'rt')
content = f.read()
f.close()
assert 'LINESTRING (0 0,' in content
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f CSV tmp/test_ogr2ogr_52_dst2.csv tmp/test_ogr2ogr_52_dst.csv -select id -nln test_ogr2ogr_52_dst2 -dsco GEOMETRY=AS_WKT -nlt CONVERT_TO_CURVE')
f = open('tmp/test_ogr2ogr_52_dst2.csv', 'rt')
content = f.read()
f.close()
assert 'COMPOUNDCURVE ((0 0,' in content
os.unlink('tmp/test_ogr2ogr_52_src.csv')
os.unlink('tmp/test_ogr2ogr_52_dst.csv')
os.unlink('tmp/test_ogr2ogr_52_dst2.csv')
###############################################################################
# Test -mapFieldType and 64 bit integers
def test_ogr2ogr_53():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
f = open('tmp/test_ogr2ogr_53.csv', 'wt')
f.write('id,i64,b,WKT\n')
f.write('1,123456789012,true,"POINT(0 0)"\n')
f.close()
f = open('tmp/test_ogr2ogr_53.csvt', 'wt')
f.write('Integer,Integer64,Integer(Boolean),String\n')
f.close()
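    # The .csvt sidecar declares the column types, so the source layer reports an
    # Integer, an Integer64 and a boolean field; -mapFieldType then controls how
    # those types are mapped by the output driver.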
# Default behaviour with a driver that declares GDAL_DMD_CREATIONFIELDDATATYPES
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f KML tmp/test_ogr2ogr_53.kml tmp/test_ogr2ogr_53.csv -mapFieldType "Integer(Boolean)=String"')
f = open('tmp/test_ogr2ogr_53.kml', 'rt')
content = f.read()
f.close()
assert ('<SimpleField name="id" type="int"></SimpleField>' in content and \
'<SimpleData name="id">1</SimpleData>' in content and \
'<SimpleField name="i64" type="float"></SimpleField>' in content and \
'<SimpleData name="i64">123456789012</SimpleData>' in content and \
'<SimpleField name="b" type="string"></SimpleField>' in content and \
'<SimpleData name="b">1</SimpleData>' in content)
os.unlink('tmp/test_ogr2ogr_53.kml')
    # Default behaviour with a driver that does not declare GDAL_DMD_CREATIONFIELDDATATYPES
#gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f BNA tmp/test_ogr2ogr_53.bna tmp/test_ogr2ogr_53.csv -nlt POINT')
#f = open('tmp/test_ogr2ogr_53.bna', 'rt')
#content = f.read()
#f.close()
#assert '"123456789012.0"' in content
#os.unlink('tmp/test_ogr2ogr_53.bna')
# with -mapFieldType
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f KML tmp/test_ogr2ogr_53.kml tmp/test_ogr2ogr_53.csv -mapFieldType Integer64=String')
f = open('tmp/test_ogr2ogr_53.kml', 'rt')
content = f.read()
f.close()
assert ('<SimpleField name="i64" type="string"></SimpleField>' in content and \
'<SimpleData name="i64">123456789012</SimpleData>' in content)
os.unlink('tmp/test_ogr2ogr_53.kml')
os.unlink('tmp/test_ogr2ogr_53.csv')
os.unlink('tmp/test_ogr2ogr_53.csvt')
###############################################################################
# Test behaviour with nullable fields
def test_ogr2ogr_54():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
f = open('tmp/test_ogr2ogr_54.csv', 'wt')
f.write('fld1,fld2,WKT\n')
f.write('1,2,"POINT(0 0)"\n')
f.close()
f = open('tmp/test_ogr2ogr_54.vrt', 'wt')
f.write("""<OGRVRTDataSource>
<OGRVRTLayer name="test_ogr2ogr_54">
<SrcDataSource relativeToVRT="1" shared="1">test_ogr2ogr_54.csv</SrcDataSource>
<SrcLayer>test_ogr2ogr_54</SrcLayer>
<GeometryType>wkbUnknown</GeometryType>
<GeometryField name="WKT" nullable="false"/>
<Field name="fld1" type="String" src="fld1" nullable="no"/>
<Field name="fld2" type="String" src="fld2"/>
</OGRVRTLayer>
</OGRVRTDataSource>
""")
f.close()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f GML tmp/test_ogr2ogr_54.gml tmp/test_ogr2ogr_54.vrt')
f = open('tmp/test_ogr2ogr_54.xsd', 'rt')
content = f.read()
f.close()
assert ('<xs:element name="WKT" type="gml:GeometryPropertyType" nillable="true" minOccurs="1" maxOccurs="1"/>' in content and \
'<xs:element name="fld1" nillable="true" minOccurs="1" maxOccurs="1">' in content and \
'<xs:element name="fld2" nillable="true" minOccurs="0" maxOccurs="1">' in content)
os.unlink('tmp/test_ogr2ogr_54.gml')
os.unlink('tmp/test_ogr2ogr_54.xsd')
# Test -forceNullable
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -forceNullable -f GML tmp/test_ogr2ogr_54.gml tmp/test_ogr2ogr_54.vrt')
f = open('tmp/test_ogr2ogr_54.xsd', 'rt')
content = f.read()
f.close()
assert ('<xs:element name="WKT" type="gml:GeometryPropertyType" nillable="true" minOccurs="0" maxOccurs="1"/>' in content and \
'<xs:element name="fld1" nillable="true" minOccurs="0" maxOccurs="1">' in content and \
'<xs:element name="fld2" nillable="true" minOccurs="0" maxOccurs="1">' in content)
os.unlink('tmp/test_ogr2ogr_54.gml')
os.unlink('tmp/test_ogr2ogr_54.xsd')
os.unlink('tmp/test_ogr2ogr_54.csv')
os.unlink('tmp/test_ogr2ogr_54.vrt')
###############################################################################
# Test behaviour with default values
def test_ogr2ogr_55():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
f = open('tmp/test_ogr2ogr_55.csv', 'wt')
f.write('fld1,fld2,WKT\n')
f.write('1,,"POINT(0 0)"\n')
f.close()
f = open('tmp/test_ogr2ogr_55.csvt', 'wt')
f.write('Integer,Integer,String\n')
f.close()
f = open('tmp/test_ogr2ogr_55.vrt', 'wt')
f.write("""<OGRVRTDataSource>
<OGRVRTLayer name="test_ogr2ogr_55">
<SrcDataSource relativeToVRT="1" shared="1">test_ogr2ogr_55.csv</SrcDataSource>
<SrcLayer>test_ogr2ogr_55</SrcLayer>
<GeometryType>wkbUnknown</GeometryType>
<GeometryField name="WKT"/>
<Field name="fld1" type="Integer" src="fld1"/>
<Field name="fld2" type="Integer" src="fld2" nullable="false" default="2"/>
</OGRVRTLayer>
</OGRVRTDataSource>
""")
f.close()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f GML tmp/test_ogr2ogr_55.gml tmp/test_ogr2ogr_55.vrt')
f = open('tmp/test_ogr2ogr_55.gml', 'rt')
content = f.read()
f.close()
assert '<ogr:fld2>2</ogr:fld2>' in content
os.unlink('tmp/test_ogr2ogr_55.gml')
os.unlink('tmp/test_ogr2ogr_55.xsd')
# Test -unsetDefault
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -forceNullable -unsetDefault -f GML tmp/test_ogr2ogr_55.gml tmp/test_ogr2ogr_55.vrt')
f = open('tmp/test_ogr2ogr_55.gml', 'rt')
content = f.read()
f.close()
assert '<ogr:fld2>' not in content
os.unlink('tmp/test_ogr2ogr_55.gml')
os.unlink('tmp/test_ogr2ogr_55.xsd')
os.unlink('tmp/test_ogr2ogr_55.csv')
os.unlink('tmp/test_ogr2ogr_55.csvt')
os.unlink('tmp/test_ogr2ogr_55.vrt')
###############################################################################
# Test behaviour when creating a field with same name as FID column.
def test_ogr2ogr_56():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
f = open('tmp/test_ogr2ogr_56.csv', 'wt')
f.write('str,myid,WKT\n')
f.write('aaa,10,"POINT(0 0)"\n')
f.close()
f = open('tmp/test_ogr2ogr_56.csvt', 'wt')
f.write('String,Integer,String\n')
f.close()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f PGDump tmp/test_ogr2ogr_56.sql tmp/test_ogr2ogr_56.csv -lco FID=myid --config PGDUMP_DEBUG_ALLOW_CREATION_FIELD_WITH_FID_NAME NO')
f = open('tmp/test_ogr2ogr_56.sql', 'rt')
content = f.read()
f.close()
assert ("""ALTER TABLE "public"."test_ogr2ogr_56" ADD COLUMN "myid"" """ not in content and \
"""INSERT INTO "public"."test_ogr2ogr_56" ("wkb_geometry" , "myid" , "str", "wkt") VALUES ('010100000000000000000000000000000000000000', 10, 'aaa', 'POINT(0 0)');""" in content)
os.unlink('tmp/test_ogr2ogr_56.sql')
os.unlink('tmp/test_ogr2ogr_56.csv')
os.unlink('tmp/test_ogr2ogr_56.csvt')
###############################################################################
# Test default propagation of FID column name and values, and -unsetFid
def test_ogr2ogr_57():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
f = open('tmp/test_ogr2ogr_57.csv', 'wt')
f.write('id,str,WKT\n')
f.write('10,a,"POINT(0 0)"\n')
f.close()
f = open('tmp/test_ogr2ogr_57.csvt', 'wt')
f.write('Integer,String,String\n')
f.close()
f = open('tmp/test_ogr2ogr_57.vrt', 'wt')
f.write("""<OGRVRTDataSource>
<OGRVRTLayer name="test_ogr2ogr_57">
<SrcDataSource relativeToVRT="1" shared="1">test_ogr2ogr_57.csv</SrcDataSource>
<SrcLayer>test_ogr2ogr_57</SrcLayer>
<GeometryType>wkbUnknown</GeometryType>
<GeometryField name="WKT"/>
<FID name="id">id</FID>
<Field name="str"/>
</OGRVRTLayer>
</OGRVRTDataSource>
""")
f.close()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f PGDump tmp/test_ogr2ogr_57.sql tmp/test_ogr2ogr_57.vrt')
f = open('tmp/test_ogr2ogr_57.sql', 'rt')
content = f.read()
f.close()
assert ("""CREATE TABLE "public"."test_ogr2ogr_57" ( "id" SERIAL, CONSTRAINT "test_ogr2ogr_57_pk" PRIMARY KEY ("id") )""" in content and \
"""INSERT INTO "public"."test_ogr2ogr_57" ("wkt" , "id" , "str") VALUES ('010100000000000000000000000000000000000000', 10, 'a')""" in content)
os.unlink('tmp/test_ogr2ogr_57.sql')
# Test -unsetFid
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f PGDump tmp/test_ogr2ogr_57.sql tmp/test_ogr2ogr_57.vrt -unsetFid')
f = open('tmp/test_ogr2ogr_57.sql', 'rt')
content = f.read()
f.close()
assert ("""CREATE TABLE "public"."test_ogr2ogr_57" ( "ogc_fid" SERIAL, CONSTRAINT "test_ogr2ogr_57_pk" PRIMARY KEY ("ogc_fid") )""" in content and \
"""INSERT INTO "public"."test_ogr2ogr_57" ("wkt" , "str") VALUES ('010100000000000000000000000000000000000000', 'a')""" in content)
os.unlink('tmp/test_ogr2ogr_57.sql')
os.unlink('tmp/test_ogr2ogr_57.csv')
os.unlink('tmp/test_ogr2ogr_57.csvt')
os.unlink('tmp/test_ogr2ogr_57.vrt')
###############################################################################
# Test datasource transactions
def test_ogr2ogr_58():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
if ogr.GetDriverByName('SQLite') is None:
pytest.skip()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -gt 3 -f SQLite tmp/test_ogr2ogr_58.sqlite ../ogr/data/poly.shp')
ds = ogr.Open('tmp/test_ogr2ogr_58.sqlite')
lyr = ds.GetLayer(0)
assert lyr.GetFeatureCount() == 10
ds = None
ogr.GetDriverByName('SQLite').DeleteDataSource('tmp/test_ogr2ogr_58.sqlite')
###############################################################################
# Test metadata support
def test_ogr2ogr_59():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
if ogr.GetDriverByName('GPKG') is None:
pytest.skip()
ds = ogr.GetDriverByName('GPKG').CreateDataSource('tmp/test_ogr2ogr_59_src.gpkg')
ds.SetMetadataItem('FOO', 'BAR')
ds.SetMetadataItem('BAR', 'BAZ', 'another_domain')
lyr = ds.CreateLayer('mylayer')
lyr.SetMetadataItem('lyr_FOO', 'lyr_BAR')
lyr.SetMetadataItem('lyr_BAR', 'lyr_BAZ', 'lyr_another_domain')
ds = None
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f GPKG tmp/test_ogr2ogr_59_dest.gpkg tmp/test_ogr2ogr_59_src.gpkg -mo BAZ=BAW')
ds = ogr.Open('tmp/test_ogr2ogr_59_dest.gpkg')
assert ds.GetMetadata() == {'FOO': 'BAR', 'BAZ': 'BAW'}
assert ds.GetMetadata('another_domain') == {'BAR': 'BAZ'}
lyr = ds.GetLayer(0)
assert lyr.GetMetadata() == {'lyr_FOO': 'lyr_BAR'}
assert lyr.GetMetadata('lyr_another_domain') == {'lyr_BAR': 'lyr_BAZ'}
ds = None
ogr.GetDriverByName('GPKG').DeleteDataSource('tmp/test_ogr2ogr_59_dest.gpkg')
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f GPKG tmp/test_ogr2ogr_59_dest.gpkg tmp/test_ogr2ogr_59_src.gpkg -nomd')
ds = ogr.Open('tmp/test_ogr2ogr_59_dest.gpkg')
assert ds.GetMetadata() == {}
lyr = ds.GetLayer(0)
assert lyr.GetMetadata() == {}
ds = None
ogr.GetDriverByName('GPKG').DeleteDataSource('tmp/test_ogr2ogr_59_dest.gpkg')
ogr.GetDriverByName('GPKG').DeleteDataSource('tmp/test_ogr2ogr_59_src.gpkg')
###############################################################################
# Test forced datasource transactions
def test_ogr2ogr_60():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
if ogr.GetDriverByName('FileGDB') is None:
pytest.skip()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -ds_transaction -f FileGDB tmp/test_ogr2ogr_60.gdb ../ogr/data/poly.shp -mapFieldType Integer64=Integer')
ds = ogr.Open('tmp/test_ogr2ogr_60.gdb')
lyr = ds.GetLayer(0)
assert lyr.GetFeatureCount() == 10
ds = None
ogr.GetDriverByName('FileGDB').DeleteDataSource('tmp/test_ogr2ogr_60.gdb')
###############################################################################
# Test -spat_srs
def test_ogr2ogr_61():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
f = open('tmp/test_ogr2ogr_61.csv', 'wt')
f.write('foo,WKT\n')
f.write('1,"POINT(2 49)"\n')
f.close()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/test_ogr2ogr_61.shp tmp/test_ogr2ogr_61.csv -spat 426857 5427937 426858 5427938 -spat_srs EPSG:32631 -s_srs EPSG:4326 -a_srs EPSG:4326')
ds = ogr.Open('tmp/test_ogr2ogr_61.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 1
ds.Destroy()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/test_ogr2ogr_61_2.shp tmp/test_ogr2ogr_61.shp -spat 426857 5427937 426858 5427938 -spat_srs EPSG:32631')
ds = ogr.Open('tmp/test_ogr2ogr_61_2.shp')
assert ds is not None and ds.GetLayer(0).GetFeatureCount() == 1
ds.Destroy()
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_61.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/test_ogr2ogr_61_2.shp')
os.unlink('tmp/test_ogr2ogr_61.csv')
###############################################################################
# Test -noNativeData
def test_ogr2ogr_62():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
# Default behaviour
fp = open('tmp/test_ogr2ogr_62_in.json', 'wt')
fp.write('{"type": "FeatureCollection", "foo": "bar", "features":[ { "type": "Feature", "bar": "baz", "properties": { "myprop": "myvalue" }, "geometry": null } ]}')
fp = None
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + """ -f GeoJSON tmp/test_ogr2ogr_62.json tmp/test_ogr2ogr_62_in.json""")
fp = gdal.VSIFOpenL('tmp/test_ogr2ogr_62.json', 'rb')
assert fp is not None
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
os.unlink('tmp/test_ogr2ogr_62.json')
assert 'bar' in data and 'baz' in data
# Test -noNativeData
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + """ -f GeoJSON tmp/test_ogr2ogr_62.json tmp/test_ogr2ogr_62_in.json -noNativeData""")
fp = gdal.VSIFOpenL('tmp/test_ogr2ogr_62.json', 'rb')
assert fp is not None
data = gdal.VSIFReadL(1, 10000, fp).decode('ascii')
gdal.VSIFCloseL(fp)
os.unlink('tmp/test_ogr2ogr_62.json')
os.unlink('tmp/test_ogr2ogr_62_in.json')
assert 'bar' not in data and 'baz' not in data
###############################################################################
# Test --formats
def test_ogr2ogr_63():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
os.stat('tmp/poly.shp')
ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource('tmp/poly.shp')
except (OSError, AttributeError):
pass
(ret, err) = gdaltest.runexternal_out_and_err(test_cli_utilities.get_ogr2ogr_path() + ' --formats')
assert 'Supported Formats' in ret, err
assert 'ERROR' not in err, ret
###############################################################################
# Test appending multiple layers, one of which already exists (#6345)
def test_ogr2ogr_64():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
try:
shutil.rmtree('tmp/in_csv')
except OSError:
pass
try:
shutil.rmtree('tmp/out_csv')
except OSError:
pass
os.mkdir('tmp/in_csv')
open('tmp/in_csv/lyr1.csv', 'wt').write("id,col\n1,1\n")
open('tmp/in_csv/lyr2.csv', 'wt').write("id,col\n1,1\n")
ds = ogr.Open('tmp/in_csv')
first_layer = ds.GetLayer(0).GetName()
second_layer = ds.GetLayer(1).GetName()
ds = None
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -f CSV tmp/out_csv tmp/in_csv ' + second_layer)
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' -append tmp/out_csv tmp/in_csv')
ds = ogr.Open('tmp/out_csv')
assert ds.GetLayerByName(first_layer).GetFeatureCount() == 1
assert ds.GetLayerByName(second_layer).GetFeatureCount() == 2
ds = None
shutil.rmtree('tmp/in_csv')
shutil.rmtree('tmp/out_csv')
###############################################################################
# Test detection of extension
def test_ogr2ogr_65():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + ' tmp/out.csv ../ogr/data/poly.shp')
ds = gdal.OpenEx('tmp/out.csv')
assert ds.GetDriver().ShortName == 'CSV'
ds = None
gdal.Unlink('tmp/out.csv')
(ret, err) = gdaltest.runexternal_out_and_err(test_cli_utilities.get_ogr2ogr_path() + ' /vsimem/out.xxx ../ogr/data/poly.shp')
if "Cannot guess" not in err:
print(ret)
pytest.fail('expected a warning about probably wrong extension')
###############################################################################
# Test accidental overwriting of the dataset when dst and src filenames are the same (#1465)
def test_ogr2ogr_66():
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
(ret, err) = gdaltest.runexternal_out_and_err(test_cli_utilities.get_ogr2ogr_path() + ' ../ogr/data/poly.shp ../ogr/data/poly.shp')
assert "Source and destination datasets must be different in non-update mode" in err, \
ret
def hexify_double(val):
val = hex(val)
# On 32bit Linux, we might get a trailing L
return val.rstrip('L').lstrip('0x').zfill(16).upper()
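# For example, hexify_double(struct.unpack('>Q', struct.pack('<d', 2.0))[0]) -- i.e. 0x40 --
# yields '0000000000000040', the little-endian IEEE-754 bytes of 2.0 written as 16 hex digits.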
def check_identity_transformation(x, y, srid):
import struct
if test_cli_utilities.get_ogr2ogr_path() is None:
pytest.skip()
shape_drv = ogr.GetDriverByName('ESRI Shapefile')
for output_shp in ['tmp/output_point.shp', 'tmp/output_point2.shp']:
try:
os.stat(output_shp)
shape_drv.DeleteDataSource(output_shp)
except OSError:
pass
# Generate CSV file with test point
xy_wkb = '0101000000' + ''.join(hexify_double(q) for q in struct.unpack('>QQ', struct.pack("<dd", x, y)))
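# The leading '0101000000' is the little-endian WKB header: byte order 01 (NDR) followed by
# geometry type 1 (Point) encoded as a little-endian uint32; the two doubles follow it.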
f = open('tmp/input_point.csv', 'wt')
f.write('id,wkb_geom\n')
f.write('1,' + xy_wkb + '\n')
f.close()
# To check that the transformed values are identical to the original ones we need
# to use a binary format with the same accuracy as the source (WKB).
# CSV cannot be used for this purpose because WKB is not supported as a geometry output format.
# Note that when transforming CSV to SHP the same internal definition of EPSG:srid is being used for source and target,
# so that this transformation will have identically defined input and output units
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + " tmp/output_point.shp tmp/input_point.csv -oo GEOM_POSSIBLE_NAMES=wkb_geom -s_srs EPSG:%(srid)d -t_srs EPSG:%(srid)d" % locals())
ds = ogr.Open('tmp/output_point.shp')
feat = ds.GetLayer(0).GetNextFeature()
ok = feat.GetGeometryRef().GetX() == x and feat.GetGeometryRef().GetY() == y
feat.Destroy()
ds.Destroy()
if ok:
# Now, transforming SHP to SHP will have a different definition of the SRS (EPSG:srid) which comes from the previously saved .prj file
# For angular units in degrees the .prj is saved with greater precision than the internally used value.
# We perform this additional transformation to exercise the case of units defined with different precision
gdaltest.runexternal(test_cli_utilities.get_ogr2ogr_path() + " tmp/output_point2.shp tmp/output_point.shp -t_srs EPSG:%(srid)d" % locals())
ds = ogr.Open('tmp/output_point2.shp')
feat = ds.GetLayer(0).GetNextFeature()
ok = feat.GetGeometryRef().GetX() == x and feat.GetGeometryRef().GetY() == y
feat.Destroy()
ds.Destroy()
shape_drv.DeleteDataSource('tmp/output_point2.shp')
shape_drv.DeleteDataSource('tmp/output_point.shp')
os.remove('tmp/input_point.csv')
assert ok
###############################################################################
# Test coordinates values are preserved for identity transformations
def test_ogr2ogr_67():
# Test coordinates
# The x value is such that x * k * (1/k) != x with k the common factor used in degrees unit definition
# If the coordinates are converted to radians and back to degrees the value of x will be altered
x = float.fromhex('0x1.5EB3ED959A307p6')
y = 0.0
# Now we will check the value of x is preserved in a transformation with same target and source SRS,
# both as latitude/longitude in degrees.
check_identity_transformation(x, y, 4326)
|
py | 1a47daf688920cbbfad72caeb1081d995a30c7b7 | # coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 4
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from isi_sdk_8_0_1.models.ndmp_session import NdmpSession # noqa: F401,E501
class NdmpSessions(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'sessions': 'list[NdmpSession]'
}
attribute_map = {
'sessions': 'sessions'
}
def __init__(self, sessions=None): # noqa: E501
"""NdmpSessions - a model defined in Swagger""" # noqa: E501
self._sessions = None
self.discriminator = None
if sessions is not None:
self.sessions = sessions
@property
def sessions(self):
"""Gets the sessions of this NdmpSessions. # noqa: E501
:return: The sessions of this NdmpSessions. # noqa: E501
:rtype: list[NdmpSession]
"""
return self._sessions
@sessions.setter
def sessions(self, sessions):
"""Sets the sessions of this NdmpSessions.
:param sessions: The sessions of this NdmpSessions. # noqa: E501
:type: list[NdmpSession]
"""
self._sessions = sessions
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, NdmpSessions):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
py | 1a47db88e783c93837acb0886207b13554395d53 |
APP_NAME='shournal-to-snakemake' |
py | 1a47dbf31208baa02a2462084be944f4699948df | from pydantic import BaseModel, validator, Extra
import typing
from pydantic.error_wrappers import ValidationError
from pydantic.errors import ArbitraryTypeError
try:
from typing import Literal
except ImportError:
# backport
from typing_extensions import Literal
class CommandTypes:
"""Easy access to command types.
Attributes:
BOOLEAN (int): Represents Type 5
CHANNEL (int): Represents Type 7
INTEGER (int): Represents Type 4
ROLE (int): Represents Type 8
STRING (int): Represents Type 3
SUB_COMMAND (int): Represents Type 1
SUB_COMMAND_GROUP (int): Represents Type 2
USER (int): Represents Type 6
"""
SUB_COMMAND = 1
SUB_COMMAND_GROUP = 2
STRING = 3
INTEGER = 4
BOOLEAN = 5
USER = 6
CHANNEL = 7
ROLE = 8
class CommandChoice(BaseModel):
"""Represents a key-value command choice."""
name: str
value: str
class CommandOption(BaseModel):
"""Represents a standard command option (not a subcommand)."""
class Config:
arbitrary_types_allowed = True
name: str
description: str
type: int
required: bool = False
choices: typing.Optional[
typing.Union[typing.List[dict], typing.List[CommandChoice]]
] = None
options: typing.Optional[
typing.Union[typing.List[CommandChoice], typing.List]
] = None
# @validator("options")
# def options_allow_only_if_subcommand(cls, v):
# if cls.type != 1:
# raise ValidationError("Type must be 1 in order to have options.")
# return v
class SubcommandOption(BaseModel):
"""Represents a subcommand, usually you would put this as an option in a DiscordCommand"""
class Config:
arbitrary_types_allowed = True
name: str
description: str
type: Literal[2] = 2
options: typing.List[CommandOption]
@validator("options")
def options_must_contain_type_1(cls, v): # pylint: disable=no-self-argument
item: CommandOption
for item_location, item in enumerate(v):
if item.type != 1:
raise ValueError(
f"CommandOptions <{item.name}> located <{item_location}> must be have type of 1 due to parent being a subcommand."
)
return v
class DiscordCommand(BaseModel):
"""Represents a discord command."""
id: typing.Optional[int]
name: str
description: str
options: typing.List[typing.Union[SubcommandOption, CommandOption]]
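# Illustrative sketch (not part of the original module): one way the models above compose.
# The command and option names used here are hypothetical.
def build_example_command() -> DiscordCommand:
    """Build a /utils command with a 'user' subcommand group containing a 'ping' subcommand."""
    target = CommandOption(
        name="target", description="Who to ping", type=CommandTypes.USER, required=True
    )
    ping = CommandOption(
        name="ping", description="Ping a user", type=CommandTypes.SUB_COMMAND, options=[target]
    )
    return DiscordCommand(
        name="utils",
        description="Utility commands",
        options=[SubcommandOption(name="user", description="User utilities", options=[ping])],
    )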
|
py | 1a47dc8cbe5300ba705da2da1ca1a1a50760fe7f | # -*- python -*-
#
# pyqmc.utils.gafqmc_cost module
#
# Wirawan Purwanto
# Created: 20130923
#
#
"""
pyqmc.utils.gafqmc_cost
Cost estimator and analyzer for GAFQMC code.
"""
import numpy
from pyqmc.utils import cost
from pyqmc.utils import linalg_cost
class gafqmc_sparse1_cost_estimator(cost.qmc_cost_estimator):
"""Cost estimator specifically for GAFQMC calculation.
Names of precomputed objects:
* Vijkl = four-indexed two-body operator
* Vss = product of two trial wfn orbitals (same spin) with Vijkl
* Vxs = product of two trial wfn orbitals (opposite spins) with Vijkl
* Ls = product of one trial wfn orbital (for all spins) with L
one-body operator.
"""
# (SPARSE) PRECOMPUTED MATRICES
# Default sparse matrix density (ballpark estimate)
# These values MAY NOT BE CORRECT for your particular calculation!
# These are ok for GTO-basis calculations without frozen core:
dens_Vss = 0.5
dens_Vxs = 0.5
dens_Ls = 0.7
tpref_gf1_ovlp = 9.74193418e-09
tpref_gf1_ovlpinv = 2.25385979e-09
tpref_FB = 1.83499926e-08
tpref_Elocal = 1.09604036e-08
# Turns out, empirically Vss and Vxs densities are the same
def __init__(self):
self.linalg = linalg_cost.linalg_cost_ops()
def compute_mem_cost(self, Print=False):
"""
Estimate a calculation's MEMORY cost based on the given input sizes.
For original sparse method due to Wissam.
Required input:
- nbasis
- nflds
- nwlkmax
- nptot, nup, ndn
- npsitdet
Objects:
Output:
- mem_Vss =
- mem_Vxs =
- mem_rhos =
"""
nwlkmax_proc = self.get_nwlkmax_proc
(M, Nptot, Nu, Nd, F, D) = self.params_wlkr(0)
(dpc, dp, it) = self.params_sys(0)
self.wlk_size = Nptot * M * dpc
self.mem_wlk = self.wlk_size * nwlkmax_proc
self.mem_Lvec = self.get_hsop_dim * F * dp # so far it is double precision
# number of elements in the sparse objects, per determinant
self.count_Vuu_det = self.dens_Vss * M**2 * Nu**2
self.count_Vdd_det = self.dens_Vss * M**2 * Nd**2
self.count_Vud_det = self.dens_Vxs * M**2 * Nu*Nd
self.count_Ls_det = self.dens_Ls * F * M * (Nu+Nd)
# number of elements in the sparse objects, ALL determinants
self.count_Vuu = D * self.count_Vuu_det
self.count_Vdd = D * self.count_Vdd_det
self.count_Vud = D * self.count_Vud_det
self.count_Ls = self.dens_Ls * D * F * M * (Nu+Nd)
# Sparse object are currently stored as records, so here are their sizes:
self.size_Vuu1 = dp + 4 * it
self.size_Vdd1 = dp + 4 * it
self.size_Vud1 = dp + 4 * it
self.size_Ls1 = dp + 2 * it
# memory required by the sparse objects, ALL determinants
self.mem_Vss = (self.count_Vuu + self.count_Vdd) * self.size_Vuu1
self.mem_Vxs = (self.count_Vud) * self.size_Vud1
self.mem_Ls = (self.count_Ls) * self.size_Ls1
if Print:
self.printout_mem()
def printout_mem(self, out=None):
"""
Prints out a report for memory estimate.
"""
# Tentative way to compute naive multithreading task sharing
def task_div(self, D, th):
"""`Task divide-and-share':
The division of D iterations into th threads ---
to approximately account for imperfect task balance in OpenMP way.
"""
inv_th = 1.0 / th
return numpy.ceil(D * inv_th)
def compute_step_cost(self, Print=False):
# Placeholder: will be replaced by fancier stuff later
# for more "symbolic" feel, or function that can give more actual
# estimate of the operation cost.
# For now these are merely rough operation counts multiplied by empirically fitted time prefactors.
LA = self.linalg
mxm, mxv, vdot, mmtrace, tmmtrace = LA.mxm, LA.mxv, LA.vdot, LA.mmtrace, LA.tmmtrace
(M, Nptot, Nu, Nd, F, D) = self.params_wlkr(0)
try:
th = self.num_threads
except:
th = 1
d_fac = self.task_div(D, th)
#self.cost_pre_Q = d_fac * F * mxm(M,N,N)
#self.cost_Theta = d_fac * mxm(M,N,N) # -- not considered for now
self.ops_gf1_ovlp = d_fac * (mxm(Nu,Nu,M) + mxm(Nd,Nd,M)) # matmul of Psi^hc * Phi
self.ops_gf1_ovlpinv = d_fac * (mxm(Nu,Nu,Nu) + mxm(Nd,Nd,Nd)) # the inverse of ovlp matrix
self.ops_FB = d_fac * self.dens_Ls * F * (mmtrace(M,Nu) + mmtrace(M,Nd)) # the trace part
self.ops_Elocal = d_fac * self.dens_Vss * (2*tmmtrace(M,M,Nu,Nu) + 2*tmmtrace(M,M,Nd,Nd) + tmmtrace(M,M,Nu,Nd)) # the trace part
self.cost_gf1_ovlp = self.tpref_gf1_ovlp * self.ops_gf1_ovlp
self.cost_gf1_ovlpinv = self.tpref_gf1_ovlpinv * self.ops_gf1_ovlpinv
self.cost_FB = self.tpref_FB * self.ops_FB
self.cost_Elocal = self.tpref_Elocal * self.ops_Elocal
if Print:
self.printout_compute()
|
py | 1a47ddc04baf80b4c2d181b02b0f9e85155fe717 | """Tests for autoscale."""
import unittest
import mock
from botocore import exceptions as botoexc
from treadmill import context
from treadmill_aws import autoscale
from treadmill_aws import awscontext
def _mock_cell(admin_mock, stateapi_mock,
partitions, servers, servers_state, apps_state):
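# Wires the admin (LDAP) and state-API mocks so each test can describe a cell as plain data:
# partition configs, server records, reported server states and app placements.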
admin_cell_mock = admin_mock.cell.return_value
admin_srv_mock = admin_mock.server.return_value
admin_cell_mock.get.return_value = {
'partitions': partitions,
}
admin_srv_mock.list.return_value = servers
apps_columns = ['instance', 'partition', 'server']
servers_columns = ['name', 'state', 'cpu', 'mem', 'disk']
stateapi_mock.return_value = (
{'columns': apps_columns, 'data': apps_state},
{'columns': servers_columns, 'data': servers_state},
)
def _raise_if(check, err):
if check:
raise err
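# Used by the mock side_effect lambdas below to raise a boto client error only when the
# condition holds (e.g. only for spot requests or for a particular subnet).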
@mock.patch('treadmill.context.GLOBAL.zk', mock.Mock())
class AutoscaleTest(unittest.TestCase):
"""Test autoscale."""
def setUp(self):
context.GLOBAL.cell = 'test'
context.GLOBAL.ldap_suffix = 'dc=test'
context.GLOBAL.dns_domain = 'foo.com'
awscontext.GLOBAL.ipa_domain = 'foo.com'
@mock.patch('treadmill_aws.autoscale.create_n_servers', mock.Mock())
@mock.patch('treadmill_aws.autoscale.delete_servers_by_name', mock.Mock())
@mock.patch('treadmill_aws.autoscale._query_stateapi')
@mock.patch('treadmill.context.Context.admin')
def test_scale_up_no_idle(self, admin_mock, stateapi_mock):
"""Test scaling up with no idle servers present."""
mock_zkclient = context.GLOBAL.zk.conn
mock_zkclient.get_children.return_value = []
# Ratio: 0.5
# Pending apps: 3, no servers - create 2 servers.
_mock_cell(
admin_mock, stateapi_mock,
partitions=[
{'_id': 'partition',
'data': {'autoscale': {'min_servers': 1, 'max_servers': 9}}},
],
servers=[],
servers_state=[],
apps_state=[
('proid.app#001', 'partition', None),
('proid.app#002', 'partition', None),
('proid.app#003', 'partition', None),
],
)
autoscale.scale(0.5)
autoscale.create_n_servers.assert_called_once_with(
2, 'partition', pool=None
)
autoscale.delete_servers_by_name.assert_not_called()
autoscale.create_n_servers.reset_mock()
autoscale.delete_servers_by_name.reset_mock()
# Ratio: 0.5
# Pending apps: 100, no servers, max servers: 9 - create 9 severs.
_mock_cell(
admin_mock, stateapi_mock,
partitions=[
{'_id': 'partition',
'data': {'autoscale': {'min_servers': 1, 'max_servers': 9}}},
],
servers=[],
servers_state=[],
apps_state=[
('proid.app#%03d' % i, 'partition', None) for i in range(100)
],
)
autoscale.scale(0.5)
autoscale.create_n_servers.assert_called_once_with(
9, 'partition', pool=None
)
autoscale.delete_servers_by_name.assert_not_called()
autoscale.create_n_servers.reset_mock()
autoscale.delete_servers_by_name.reset_mock()
# Ratio: 1.0 (down and frozen servers excluded).
# Pending apps: 3, no idle servers - create 3 servers.
# Down and frozen servers have apps placed on them - don't delete.
_mock_cell(
admin_mock, stateapi_mock,
partitions=[
{'_id': 'partition',
'data': {'autoscale': {'min_servers': 1, 'max_servers': 9}}},
],
servers=[
{'_id': 'server1', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server2', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server3', 'partition': 'partition',
'_create_timestamp': 100.0},
],
servers_state=[
('server1', 'up', 100, 100, 100),
('server2', 'down', 100, 100, 100),
('server3', 'frozen', 100, 100, 100),
],
apps_state=[
('proid.app#001', 'partition', 'server1'),
('proid.app#002', 'partition', 'server2'),
('proid.app#003', 'partition', 'server3'),
('proid.app#004', 'partition', 'server3'),
('proid.app#005', 'partition', 'server3'),
('proid.app#006', 'partition', 'server3'),
('proid.app#007', 'partition', None),
('proid.app#008', 'partition', None),
('proid.app#009', 'partition', None),
],
)
autoscale.scale(0.5)
autoscale.create_n_servers.assert_called_once_with(
3, 'partition', pool=None
)
autoscale.delete_servers_by_name.assert_not_called()
@mock.patch('treadmill_aws.autoscale.create_n_servers', mock.Mock())
@mock.patch('treadmill_aws.autoscale.delete_servers_by_name', mock.Mock())
@mock.patch('treadmill_aws.autoscale._query_stateapi')
@mock.patch('treadmill.context.Context.admin')
@mock.patch('time.time', mock.Mock(return_value=1000.0))
def test_scale_up_with_idle(self, admin_mock, stateapi_mock):
"""Test scaling up with some idle servers present."""
mock_zkclient = context.GLOBAL.zk.conn
mock_zkclient.get_children.return_value = []
# Ratio: 0.5
# Pending apps: 1, idle servers: 1 - don't create anything.
_mock_cell(
admin_mock, stateapi_mock,
partitions=[
{'_id': 'partition',
'data': {'autoscale': {'min_servers': 1, 'max_servers': 9}}},
],
servers=[
{'_id': 'server1', 'partition': 'partition',
'_create_timestamp': 100.0},
],
servers_state=[
('server1', 'up', 100, 100, 100),
],
apps_state=[
('proid.app#001', 'partition', None),
],
)
autoscale.scale(0.5)
autoscale.create_n_servers.assert_not_called()
autoscale.delete_servers_by_name.assert_not_called()
autoscale.create_n_servers.reset_mock()
autoscale.delete_servers_by_name.reset_mock()
# Ratio: 0.5
# Pending apps: 3, idle servers: 2 - don't create anything.
_mock_cell(
admin_mock, stateapi_mock,
partitions=[
{'_id': 'partition',
'data': {'autoscale': {'min_servers': 1, 'max_servers': 9}}},
],
servers=[
{'_id': 'server1', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server2', 'partition': 'partition',
'_create_timestamp': 100.0},
],
servers_state=[
('server1', 'up', 100, 100, 100),
('server2', 'up', 100, 100, 100),
],
apps_state=[
('proid.app#001', 'partition', None),
('proid.app#002', 'partition', None),
('proid.app#003', 'partition', None),
],
)
autoscale.scale(0.5)
autoscale.create_n_servers.assert_not_called()
autoscale.delete_servers_by_name.assert_not_called()
autoscale.create_n_servers.reset_mock()
autoscale.delete_servers_by_name.reset_mock()
# Ratio: 1.0.
# Pending apps: 3, idle servers: 2 - create 1 server.
_mock_cell(
admin_mock, stateapi_mock,
partitions=[
{'_id': 'partition',
'data': {'autoscale': {'min_servers': 1, 'max_servers': 9}}},
],
servers=[
{'_id': 'server1', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server2', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server3', 'partition': 'partition',
'_create_timestamp': 999.0},
{'_id': 'server4', 'partition': 'partition',
'_create_timestamp': 100.0},
],
servers_state=[
('server1', 'up', 100, 100, 100),
('server2', 'up', 100, 100, 100),
('server3', 'down', None, None, None), # Didn't report - new.
('server4', 'down', None, None, None), # Didn't report - down.
],
apps_state=[
('proid.app#001', 'partition', 'server1'),
('proid.app#002', 'partition', None),
('proid.app#003', 'partition', None),
('proid.app#004', 'partition', None),
],
)
autoscale.scale(0.5)
autoscale.create_n_servers.assert_called_once_with(
1, 'partition', pool=None
)
autoscale.delete_servers_by_name.assert_called_once_with(['server4'])
@mock.patch('treadmill_aws.autoscale.create_n_servers', mock.Mock())
@mock.patch('treadmill_aws.autoscale.delete_servers_by_name', mock.Mock())
@mock.patch('treadmill_aws.autoscale._query_stateapi')
@mock.patch('treadmill.context.Context.admin')
def test_scale_up_min_servers(self, admin_mock, stateapi_mock):
"""Test scaling up to min (active) servers."""
mock_zkclient = context.GLOBAL.zk.conn
mock_zkclient.get_children.return_value = []
# Empty partition, min servers: 3 - create 3 servers.
_mock_cell(
admin_mock, stateapi_mock,
partitions=[
{'_id': 'partition',
'data': {'autoscale': {'min_servers': 3, 'max_servers': 9}}},
],
servers=[],
servers_state=[],
apps_state=[],
)
autoscale.scale(0.5)
autoscale.create_n_servers.assert_called_once_with(
3, 'partition', pool=None
)
autoscale.delete_servers_by_name.assert_not_called()
autoscale.create_n_servers.reset_mock()
autoscale.delete_servers_by_name.reset_mock()
# 1 up, 1 down and 1 frozen server, min servers: 3 - create 2 servers.
# Down and frozen servers have apps placed on them - don't delete.
_mock_cell(
admin_mock, stateapi_mock,
partitions=[
{'_id': 'partition',
'data': {'autoscale': {'min_servers': 3, 'max_servers': 9}}},
],
servers=[
{'_id': 'server1', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server2', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server3', 'partition': 'partition',
'_create_timestamp': 100.0},
],
servers_state=[
('server1', 'up', 100, 100, 100),
('server2', 'down', 100, 100, 100),
('server3', 'frozen', 100, 100, 100),
],
apps_state=[
('proid.app#001', 'partition', 'server1'),
('proid.app#002', 'partition', 'server2'),
('proid.app#003', 'partition', 'server3'),
],
)
autoscale.scale(0.5)
autoscale.create_n_servers.assert_called_once_with(
2, 'partition', pool=None
)
autoscale.delete_servers_by_name.assert_not_called()
@mock.patch('treadmill_aws.autoscale.create_n_servers', mock.Mock())
@mock.patch('treadmill_aws.autoscale.delete_servers_by_name', mock.Mock())
@mock.patch('treadmill_aws.autoscale._query_stateapi')
@mock.patch('treadmill.context.Context.admin')
def test_scale_down(self, admin_mock, stateapi_mock):
"""Test scaling down."""
mock_zkclient = context.GLOBAL.zk.conn
mock_zkclient.get_children.return_value = []
# Pending apps: 1, idle servers: 2 - delete 1 server.
_mock_cell(
admin_mock, stateapi_mock,
partitions=[
{'_id': 'partition',
'data': {'autoscale': {'min_servers': 3, 'max_servers': 9}}},
],
servers=[
{'_id': 'server1', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server2', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server3', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server4', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server5', 'partition': 'partition',
'_create_timestamp': 100.0},
],
servers_state=[
('server1', 'up', 100, 100, 100),
('server2', 'up', 100, 100, 100),
('server3', 'up', 100, 100, 100),
('server4', 'up', 100, 100, 100),
('server5', 'up', 100, 100, 100),
],
apps_state=[
('proid.app#001', 'partition', 'server1'),
('proid.app#002', 'partition', 'server2'),
('proid.app#003', 'partition', 'server3'),
('proid.app#004', 'partition', None),
],
)
autoscale.scale(0.5)
autoscale.create_n_servers.assert_not_called()
autoscale.delete_servers_by_name.assert_called_once_with(['server4'])
autoscale.create_n_servers.reset_mock()
autoscale.delete_servers_by_name.reset_mock()
# No pending apps, idle servers: 5, min servers: 3 - delete 2 servers.
_mock_cell(
admin_mock, stateapi_mock,
partitions=[
{'_id': 'partition',
'data': {'autoscale': {'min_servers': 3, 'max_servers': 9}}},
],
servers=[
{'_id': 'server1', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server2', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server3', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server4', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server5', 'partition': 'partition',
'_create_timestamp': 100.0},
],
servers_state=[
('server1', 'up', 100, 100, 100),
('server2', 'up', 100, 100, 100),
('server3', 'up', 100, 100, 100),
('server4', 'up', 100, 100, 100),
('server5', 'up', 100, 100, 100),
],
apps_state=[],
)
autoscale.scale(0.5)
autoscale.create_n_servers.assert_not_called()
autoscale.delete_servers_by_name.assert_called_once_with(
['server1', 'server2']
)
autoscale.create_n_servers.reset_mock()
autoscale.delete_servers_by_name.reset_mock()
# Delete empty down and frozen servers.
_mock_cell(
admin_mock, stateapi_mock,
partitions=[
{'_id': 'partition',
'data': {'autoscale': {'min_servers': 3, 'max_servers': 9}}},
],
servers=[
{'_id': 'server1', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server2', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server3', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server4', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server5', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server6', 'partition': 'partition',
'_create_timestamp': 100.0},
{'_id': 'server7', 'partition': 'partition',
'_create_timestamp': 100.0},
],
servers_state=[
('server1', 'up', 100, 100, 100),
('server2', 'up', 100, 100, 100),
('server3', 'up', 100, 100, 100),
('server4', 'down', 100, 100, 100),
('server5', 'down', 100, 100, 100),
('server6', 'down', 100, 100, 100),
('server7', 'frozen', 100, 100, 100),
],
apps_state=[
('proid.app#001', 'partition', 'server6'),
],
)
mock_zkclient.get_children.return_value = ['server5']
autoscale.scale(0.5)
autoscale.create_n_servers.assert_not_called()
autoscale.delete_servers_by_name.assert_called_once_with(
['server4']
)
@mock.patch('treadmill.context.Context.ldap',
mock.Mock(url=['ldap://foo:1234']))
@mock.patch('treadmill.context.Context.admin')
@mock.patch('treadmill.syscall.krb5.get_host_realm',
mock.Mock(return_value=['FOO.COM']))
@mock.patch('treadmill_aws.hostmanager.create_host')
@mock.patch('treadmill_aws.hostmanager.create_otp', mock.Mock())
@mock.patch('treadmill_aws.awscontext.AWSContext.ec2', mock.Mock())
@mock.patch('treadmill_aws.awscontext.AWSContext.sts', mock.Mock())
@mock.patch('treadmill_aws.awscontext.AWSContext.ipaclient', mock.Mock())
@mock.patch('time.time', mock.Mock(return_value=1000.0))
@mock.patch('random.shuffle', mock.Mock(side_effect=lambda x: x))
def test_create_n_servers(self, create_host_mock, admin_mock):
"""Test creating new servers in the cell."""
admin_cell_mock = admin_mock.cell.return_value
admin_cell_mock.get.return_value = {
'data': {
'image': 'ami-test',
'size': 'm5.large',
'subnets': ['subnet-4c76610a', 'subnet-4c76610b'],
'secgroup': 'test',
'hostgroups': ['test'],
'instance_profile': 'test',
'disk_size': '100',
'aws_account': 'test',
}
}
admin_part_mock = admin_mock.partition.return_value
admin_part_mock.get.return_value = {
'data': {
'instance_types': ['m5.large', 'm5.xlarge'],
'spot_instance_types': ['m5.large', 'm5.xlarge', 'm5.2xlarge'],
}
}
# Create 3 on-demand hosts.
hosts_created = autoscale.create_n_servers(3, 'partition')
self.assertEqual(
hosts_created,
[
{
'hostname': 'test-partition-8s6u9ns20000.foo.com',
'type': 'm5.large',
'lifecycle': 'on-demand',
'subnet': 'subnet-4c76610a',
},
{
'hostname': 'test-partition-8s6u9ns20001.foo.com',
'type': 'm5.large',
'lifecycle': 'on-demand',
'subnet': 'subnet-4c76610a',
},
{
'hostname': 'test-partition-8s6u9ns20002.foo.com',
'type': 'm5.large',
'lifecycle': 'on-demand',
'subnet': 'subnet-4c76610a',
},
]
)
create_host_mock.reset_mock()
# Create 1 on-demand host and 2 spot hosts.
hosts_created = autoscale.create_n_servers(
3, 'partition', min_on_demand=1, max_on_demand=1
)
self.assertEqual(
hosts_created,
[
{
'hostname': 'test-partition-8s6u9ns20000.foo.com',
'type': 'm5.large',
'lifecycle': 'on-demand',
'subnet': 'subnet-4c76610a',
},
{
'hostname': 'test-partition-8s6u9ns20001.foo.com',
'type': 'm5.large',
'lifecycle': 'spot',
'subnet': 'subnet-4c76610a',
},
{
'hostname': 'test-partition-8s6u9ns20002.foo.com',
'type': 'm5.large',
'lifecycle': 'spot',
'subnet': 'subnet-4c76610a',
},
]
)
create_host_mock.reset_mock()
# Create 1 on-demand host and try 2 spot hosts, fallback to on-demand.
create_host_mock.side_effect = lambda *args, **kwargs: _raise_if(
kwargs['spot'],
botoexc.ClientError(
{'Error': {'Code': 'SpotMaxPriceTooLow'}}, None
)
)
hosts_created = autoscale.create_n_servers(
3, 'partition', min_on_demand=1, max_on_demand=3
)
self.assertEqual(
hosts_created,
[
{
'hostname': 'test-partition-8s6u9ns20000.foo.com',
'type': 'm5.large',
'lifecycle': 'on-demand',
'subnet': 'subnet-4c76610a',
},
{
'hostname': 'test-partition-8s6u9ns20001.foo.com',
'type': 'm5.large',
'lifecycle': 'on-demand',
'subnet': 'subnet-4c76610a',
},
{
'hostname': 'test-partition-8s6u9ns20002.foo.com',
'type': 'm5.large',
'lifecycle': 'on-demand',
'subnet': 'subnet-4c76610a',
},
]
)
# Check if each spot type was tried once in each subnet, check order.
self.assertEqual(
[
(kwargs['instance_type'], kwargs['subnet'])
for _args, kwargs in create_host_mock.call_args_list
if kwargs['spot']
],
[
('m5.large', 'subnet-4c76610a'),
('m5.large', 'subnet-4c76610b'),
('m5.xlarge', 'subnet-4c76610a'),
('m5.xlarge', 'subnet-4c76610b'),
('m5.2xlarge', 'subnet-4c76610a'),
('m5.2xlarge', 'subnet-4c76610b'),
]
)
create_host_mock.reset_mock()
# Create 1 on-demand host and try 2 spot hosts, no fallback - fail.
with self.assertRaisesRegex(
Exception,
'Failed to create host test-partition-8s6u9ns20001.foo.com'
):
hosts_created = autoscale.create_n_servers(
3, 'partition', min_on_demand=1, max_on_demand=1
)
create_host_mock.reset_mock()
# Create 1 on-demand host and 2 spot hosts, m5.large spot not feasible.
create_host_mock.side_effect = lambda *args, **kwargs: _raise_if(
kwargs['spot'] and kwargs['instance_type'] == 'm5.large',
botoexc.ClientError(
{'Error': {'Code': 'InsufficientInstanceCapacity'}}, None
)
)
hosts_created = autoscale.create_n_servers(
3, 'partition', min_on_demand=1, max_on_demand=1
)
self.assertEqual(
hosts_created,
[
{
'hostname': 'test-partition-8s6u9ns20000.foo.com',
'type': 'm5.large',
'lifecycle': 'on-demand',
'subnet': 'subnet-4c76610a',
},
{
'hostname': 'test-partition-8s6u9ns20001.foo.com',
'type': 'm5.xlarge',
'lifecycle': 'spot',
'subnet': 'subnet-4c76610a',
},
{
'hostname': 'test-partition-8s6u9ns20002.foo.com',
'type': 'm5.xlarge',
'lifecycle': 'spot',
'subnet': 'subnet-4c76610a',
},
]
)
# Check if m5.large spot was tried once in each subnet, then m5.xlarge.
self.assertEqual(
[
(kwargs['instance_type'], kwargs['subnet'])
for _args, kwargs in create_host_mock.call_args_list
if kwargs['spot']
],
[
('m5.large', 'subnet-4c76610a'),
('m5.large', 'subnet-4c76610b'),
('m5.xlarge', 'subnet-4c76610a'),
('m5.xlarge', 'subnet-4c76610a'),
]
)
create_host_mock.reset_mock()
# Create 1 on-demand host and 2 spot hosts, m5.large not feasible in
# subnet-4c76610a, but feasible in subnet-4c76610b.
create_host_mock.side_effect = lambda *args, **kwargs: _raise_if(
(
kwargs['instance_type'] == 'm5.large' and
kwargs['subnet'] == 'subnet-4c76610a'
),
botoexc.ClientError(
{'Error': {'Code': 'InsufficientInstanceCapacity'}}, None
)
)
hosts_created = autoscale.create_n_servers(
3, 'partition', min_on_demand=1, max_on_demand=1
)
self.assertEqual(
hosts_created,
[
{
'hostname': 'test-partition-8s6u9ns20000.foo.com',
'type': 'm5.large',
'lifecycle': 'on-demand',
'subnet': 'subnet-4c76610b',
},
{
'hostname': 'test-partition-8s6u9ns20001.foo.com',
'type': 'm5.large',
'lifecycle': 'spot',
'subnet': 'subnet-4c76610b',
},
{
'hostname': 'test-partition-8s6u9ns20002.foo.com',
'type': 'm5.large',
'lifecycle': 'spot',
'subnet': 'subnet-4c76610b',
},
]
)
# Check if subnet-4c76610a was tried once for on-demand and spot.
self.assertEqual(
[
(kwargs['instance_type'], kwargs['spot'], kwargs['subnet'])
for _args, kwargs in create_host_mock.call_args_list
],
[
('m5.large', False, 'subnet-4c76610a'),
('m5.large', False, 'subnet-4c76610b'),
('m5.large', True, 'subnet-4c76610a'),
('m5.large', True, 'subnet-4c76610b'),
('m5.large', True, 'subnet-4c76610b'),
]
)
create_host_mock.reset_mock()
# Create 1 on-demand host and 2 spot hosts, subnet-4c76610a exhausted.
create_host_mock.side_effect = lambda *args, **kwargs: _raise_if(
(
kwargs['subnet'] == 'subnet-4c76610a'
),
botoexc.ClientError(
{'Error': {'Code': 'InsufficientFreeAddressesInSubnet'}}, None
)
)
hosts_created = autoscale.create_n_servers(
3, 'partition', min_on_demand=1, max_on_demand=1
)
self.assertEqual(
hosts_created,
[
{
'hostname': 'test-partition-8s6u9ns20000.foo.com',
'type': 'm5.large',
'lifecycle': 'on-demand',
'subnet': 'subnet-4c76610b',
},
{
'hostname': 'test-partition-8s6u9ns20001.foo.com',
'type': 'm5.large',
'lifecycle': 'spot',
'subnet': 'subnet-4c76610b',
},
{
'hostname': 'test-partition-8s6u9ns20002.foo.com',
'type': 'm5.large',
'lifecycle': 'spot',
'subnet': 'subnet-4c76610b',
},
]
)
# Check if subnet-4c76610a was tried once (entire subnet excluded).
self.assertEqual(
[
(kwargs['instance_type'], kwargs['spot'], kwargs['subnet'])
for _args, kwargs in create_host_mock.call_args_list
],
[
('m5.large', False, 'subnet-4c76610a'),
('m5.large', False, 'subnet-4c76610b'),
('m5.large', True, 'subnet-4c76610b'),
('m5.large', True, 'subnet-4c76610b'),
]
)
create_host_mock.reset_mock()
# Create 1 on-demand host and 2 spot hosts, retry on InternalError.
raise_err = {
'test-partition-8s6u9ns20000.foo.com': True,
'test-partition-8s6u9ns20001.foo.com': True,
'test-partition-8s6u9ns20002.foo.com': True,
}
create_host_mock.side_effect = lambda *args, **kwargs: _raise_if(
raise_err.pop(kwargs['hostname'], False),
botoexc.ClientError(
{'Error': {'Code': 'InternalError'}}, None
)
)
hosts_created = autoscale.create_n_servers(
3, 'partition', min_on_demand=1, max_on_demand=1
)
self.assertEqual(
hosts_created,
[
{
'hostname': 'test-partition-8s6u9ns20000.foo.com',
'type': 'm5.large',
'lifecycle': 'on-demand',
'subnet': 'subnet-4c76610a',
},
{
'hostname': 'test-partition-8s6u9ns20001.foo.com',
'type': 'm5.large',
'lifecycle': 'spot',
'subnet': 'subnet-4c76610a',
},
{
'hostname': 'test-partition-8s6u9ns20002.foo.com',
'type': 'm5.large',
'lifecycle': 'spot',
'subnet': 'subnet-4c76610a',
},
]
)
# Check retries.
self.assertEqual(
[
(
kwargs['hostname'],
kwargs['instance_type'],
kwargs['spot'],
kwargs['subnet']
)
for _args, kwargs in create_host_mock.call_args_list
],
[
(
'test-partition-8s6u9ns20000.foo.com',
'm5.large', False, 'subnet-4c76610a'
),
(
'test-partition-8s6u9ns20000.foo.com',
'm5.large', False, 'subnet-4c76610a'
),
(
'test-partition-8s6u9ns20001.foo.com',
'm5.large', True, 'subnet-4c76610a'
),
(
'test-partition-8s6u9ns20001.foo.com',
'm5.large', True, 'subnet-4c76610a'
),
(
'test-partition-8s6u9ns20002.foo.com',
'm5.large', True, 'subnet-4c76610a'
),
(
'test-partition-8s6u9ns20002.foo.com',
'm5.large', True, 'subnet-4c76610a'
),
]
)
create_host_mock.reset_mock()
@mock.patch('treadmill.presence.kill_node')
@mock.patch('treadmill.context.Context.ldap',
mock.Mock(url=['ldap://foo:1234']))
@mock.patch('treadmill.context.Context.admin')
@mock.patch('treadmill_aws.hostmanager.delete_hosts')
@mock.patch('treadmill_aws.awscontext.AWSContext.ec2', mock.Mock())
@mock.patch('treadmill_aws.awscontext.AWSContext.ipaclient', mock.Mock())
def test_delete_servers_by_name(self, delete_hosts_mock, admin_mock,
kill_node_mock):
"""Test deleting servers by name."""
admin_srv_mock = admin_mock.server.return_value
autoscale.delete_servers_by_name([
'test-partition-dq2opb2qrfj.foo.com',
'test-partition-dq2opbqskkq.foo.com',
'test-partition-dq2opc7ao37.foo.com',
])
delete_hosts_mock.assert_called_once_with(
ipa_client=mock.ANY,
ec2_conn=mock.ANY,
hostnames=[
'test-partition-dq2opb2qrfj.foo.com',
'test-partition-dq2opbqskkq.foo.com',
'test-partition-dq2opc7ao37.foo.com',
],
)
admin_srv_mock.delete.assert_has_calls([
mock.call('test-partition-dq2opb2qrfj.foo.com'),
mock.call('test-partition-dq2opbqskkq.foo.com'),
mock.call('test-partition-dq2opc7ao37.foo.com'),
])
kill_node_mock.assert_has_calls([
mock.call(mock.ANY, 'test-partition-dq2opb2qrfj.foo.com'),
mock.call(mock.ANY, 'test-partition-dq2opbqskkq.foo.com'),
mock.call(mock.ANY, 'test-partition-dq2opc7ao37.foo.com'),
])
@mock.patch('treadmill.presence.kill_node')
@mock.patch('treadmill.context.Context.ldap',
mock.Mock(url=['ldap://foo:1234']))
@mock.patch('treadmill.context.Context.admin')
@mock.patch('treadmill_aws.hostmanager.delete_hosts')
@mock.patch('treadmill_aws.awscontext.AWSContext.ec2', mock.Mock())
@mock.patch('treadmill_aws.awscontext.AWSContext.ipaclient', mock.Mock())
def test_delete_n_servers(self, delete_hosts_mock, admin_mock,
kill_node_mock):
"""Test deleting n servers."""
admin_srv_mock = admin_mock.server.return_value
admin_srv_mock.list.return_value = [
{'_id': 'test-partition-dq2opb2qrfj.foo.com'},
{'_id': 'test-partition-dq2opbqskkq.foo.com'},
{'_id': 'test-partition-dq2opc7ao37.foo.com'},
]
autoscale.delete_n_servers(2, partition='partition')
delete_hosts_mock.assert_called_once_with(
ipa_client=mock.ANY,
ec2_conn=mock.ANY,
hostnames=[
'test-partition-dq2opb2qrfj.foo.com',
'test-partition-dq2opbqskkq.foo.com',
],
)
admin_srv_mock.delete.assert_has_calls([
mock.call('test-partition-dq2opb2qrfj.foo.com'),
mock.call('test-partition-dq2opbqskkq.foo.com'),
])
kill_node_mock.assert_has_calls([
mock.call(mock.ANY, 'test-partition-dq2opb2qrfj.foo.com'),
mock.call(mock.ANY, 'test-partition-dq2opbqskkq.foo.com'),
])
|
py | 1a47de32628e16bb3a4870305a95c84223a19818 | from typing import Any, Dict, List
from sciwing.data.seq_label import SeqLabel
from sciwing.data.line import Line
from sciwing.data.token import Token
from sciwing.data.datasets_manager import DatasetsManager
from sciwing.metrics.BaseMetric import BaseMetric
import subprocess
import wasabi
from collections import defaultdict, Counter
import pathlib
import os
import numpy as np
import uuid
class SummarizationMetrics(BaseMetric):
"""
Computes ROUGE-1, ROUGE-2 and ROUGE-L for every namespace.
For every batch it also writes the input lines together with the ground-truth and
predicted summaries to a file, and accumulates per-namespace ROUGE scores
which can then be used to select the best model.
"""
def __init__(
self,
datasets_manager: DatasetsManager,
predicted_tags_namespace_prefix="predicted_tags",
words_namespace: str = "tokens",
):
super(SummarizationMetrics, self).__init__(datasets_manager=datasets_manager)
self.datasets_manager = datasets_manager
self.label_namespaces = datasets_manager.label_namespaces
self.words_namespace = words_namespace
self.namespace_to_vocab = self.datasets_manager.namespace_to_vocab
self.predicted_tags_namespace_prefix = predicted_tags_namespace_prefix
self.msg_printer = wasabi.Printer()
self.rouge_1_counter: Dict[str, List[float]] = defaultdict(list)
self.rouge_2_counter: Dict[str, List[float]] = defaultdict(list)
self.rouge_l_counter: Dict[str, List[float]] = defaultdict(list)
def calc_metric(
self, lines: List[Line], labels: List[Line], model_forward_dict: Dict[str, Any]
) -> None:
# line_tokens: List[List[Token]] = [line.tokens["tokens"] for line in lines]
# true_label_text = [label.text for label in labels]
cwd = os.path.dirname(os.path.realpath(__file__))
for namespace in [self.words_namespace]:
predicted_tags = model_forward_dict.get(
f"{self.predicted_tags_namespace_prefix}_{namespace}"
)
true_summary_tokens: List[List[Token]] = [
summary.tokens[namespace] for summary in labels
]
true_summary_token_strs: List[List[str]] = [
[token.text for token in tokens] for tokens in true_summary_tokens
]
namespace_filename = f"{cwd}/{str(uuid.uuid4())}_{namespace}_pred.txt"
namespace_filename = pathlib.Path(namespace_filename)
predicted_summary_token_strs = []
with open(namespace_filename, "w") as fp:
for line, true_summary_token_strs_, predicted_tags_ in zip(
lines, true_summary_token_strs, predicted_tags
):
predicted_summary_token_strs_ = []
for predicted_tag in predicted_tags_:
predicted_tag = self.namespace_to_vocab[
namespace
].get_token_from_idx(predicted_tag)
predicted_summary_token_strs_.append(predicted_tag)
predicted_summary_token_strs.append(predicted_summary_token_strs_)
fp.write(line.text)
fp.write("Ground Truth")
fp.write(
" ".join([f'"{token}"' for token in true_summary_token_strs_])
)
fp.write("Predicted")
fp.write(
" ".join(
[f'"{token}"' for token in predicted_summary_token_strs_]
)
)
fp.write("\n")
for true_summary_token_strs_, predicted_summary_token_strs_ in zip(
true_summary_token_strs, predicted_summary_token_strs
):
rouge_1 = self._rouge_n(
predicted_summary_token_strs_, true_summary_token_strs_, 1
)
rouge_2 = self._rouge_n(
predicted_summary_token_strs_, true_summary_token_strs_, 2
)
rouge_l = self._rouge_l(
predicted_summary_token_strs_, true_summary_token_strs_
)
rouge_1 = np.round(rouge_1, decimals=3)
rouge_2 = np.round(rouge_2, decimals=3)
rouge_l = np.round(rouge_l, decimals=3)
# update the counter
self.rouge_1_counter[namespace].append(rouge_1)
self.rouge_2_counter[namespace].append(rouge_2)
self.rouge_l_counter[namespace].append(rouge_l)
def get_metric(self) -> Dict[str, Any]:
metrics = {}
for namespace in [self.words_namespace]:
rouge_1s = self.rouge_1_counter[namespace]
rouge_2s = self.rouge_2_counter[namespace]
rouge_ls = self.rouge_l_counter[namespace]
rouge_1 = sum(rouge_1s) / len(rouge_1s)
rouge_2 = sum(rouge_2s) / len(rouge_2s)
rouge_l = sum(rouge_ls) / len(rouge_ls)
rouge_1 = np.round(rouge_1, decimals=3)
rouge_2 = np.round(rouge_2, decimals=3)
rouge_l = np.round(rouge_l, decimals=3)
metrics[namespace] = {
"rouge_1": rouge_1,
"rouge_2": rouge_2,
"rouge_l": rouge_l,
}
return metrics
def report_metrics(self, report_type: str = "wasabi") -> Any:
reports = {}
if report_type == "wasabi":
for namespace in [self.words_namespace]:
metric = self.get_metric()[namespace]
rouge_1 = metric["rouge_1"]
rouge_2 = metric["rouge_2"]
rouge_l = metric["rouge_l"]
# build table
header_row = ["Metric", "Value"]
rows = [
("Rouge_1", rouge_1),
("Rouge_2", rouge_2),
("Rouge_l", rouge_l),
]
table = wasabi.table(rows, header=header_row, divider=True)
reports[namespace] = table
return reports
def reset(self):
self.rouge_1_counter = defaultdict(list)
self.rouge_2_counter = defaultdict(list)
self.rouge_l_counter = defaultdict(list)
def _calc_f1(self, matches, count_for_recall, count_for_precision, alpha):
def safe_div(x1, x2):
return 0 if x2 == 0 else x1 / x2
recall = safe_div(matches, count_for_recall)
precision = safe_div(matches, count_for_precision)
denom = (1.0 - alpha) * precision + alpha * recall
return safe_div(precision * recall, denom)
def _lcs(self, a, b):
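# Length of the longest common subsequence of a and b, computed with a single rolling
# DP row so memory stays proportional to the shorter of the two sequences.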
longer = a
base = b
if len(longer) < len(base):
longer, base = base, longer
if len(base) == 0:
return 0
row = [0] * len(base)
for c_a in longer:
left = 0
upper_left = 0
for i, c_b in enumerate(base):
up = row[i]
if c_a == c_b:
value = upper_left + 1
else:
value = max(left, up)
row[i] = value
left = value
upper_left = up
return left
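# Worked example (illustrative, not from the original source): for
# a = ["the", "cat", "sat"] and b = ["the", "dog", "sat"], the rolling row
# ends with `left` equal to 2 ("the" ... "sat"), i.e. the length of the
# longest common subsequence.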
def _len_ngram(self, words, n):
return max(len(words) - n + 1, 0)
def _ngram_iter(self, words, n):
for i in range(self._len_ngram(words, n)):
n_gram = words[i : i + n]
yield tuple(n_gram)
def _count_ngrams(self, words, n):
c = Counter(self._ngram_iter(words, n))
return c
def _count_overlap(self, summary_ngrams, reference_ngrams):
result = 0
for k, v in summary_ngrams.items():
result += min(v, reference_ngrams[k])
return result
def _rouge_n(self, pred_summary, true_summary, n, alpha=0.5):
"""
Calculate ROUGE-N score.
Parameters
----------
pred_summary: list of str
generated summary tokens
true_summary: list of str
reference summary tokens to evaluate against
n: int
n-gram order; n=1 computes ROUGE-1, n=2 computes ROUGE-2
alpha: float (0~1)
alpha -> 0: recall is weighted more heavily
alpha -> 1: precision is weighted more heavily
F = 1/(alpha * (1/P) + (1 - alpha) * (1/R))
Returns
-------
f1: float
f1 score
"""
pred_ngrams = self._count_ngrams(pred_summary, n)
r_ngrams = self._count_ngrams(true_summary, n)
matches = self._count_overlap(pred_ngrams, r_ngrams)
count_for_recall = self._len_ngram(true_summary, n)
count_for_prec = self._len_ngram(pred_summary, n)
f1 = self._calc_f1(matches, count_for_recall, count_for_prec, alpha)
return f1
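# Worked example (illustrative, not from the original source): with
# pred_summary = ["the", "cat", "sat"] and true_summary = ["the", "cat", "slept"],
# the unigram overlap is 2 ("the", "cat"), recall = 2/3, precision = 2/3, so for
# the default alpha = 0.5 the returned ROUGE-1 F score is 2/3 ~= 0.667.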
def _rouge_l(self, pred_summary, true_summary, alpha=0.5):
"""
Calculate ROUGE-L score.
Parameters
----------
pred_summary: list of str
generated summary tokens
true_summary: list of str
reference summary tokens to evaluate against
alpha: float (0~1)
alpha -> 0: recall is weighted more heavily
alpha -> 1: precision is weighted more heavily
F = 1/(alpha * (1/P) + (1 - alpha) * (1/R))
Returns
-------
f1: float
f1 score
"""
matches = self._lcs(true_summary, pred_summary)
count_for_recall = len(true_summary)
count_for_prec = len(pred_summary)
f1 = self._calc_f1(matches, count_for_recall, count_for_prec, alpha)
return f1
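# Worked example (illustrative, not from the original source): with
# pred_summary = ["the", "cat", "sat"] and true_summary = ["the", "dog", "sat"],
# the LCS length is 2, recall = 2/3, precision = 2/3, so for alpha = 0.5 the
# returned ROUGE-L F score is 2/3 ~= 0.667.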
|
py | 1a47dedd2dcb9d28eff3209bb8e591e2d6a91f39 | import os
try:
import cPickle as pickle
except ImportError:
import pickle
from rtree import index
from rtree.index import Rtree
data = """34.3776829412 26.7375853734 49.3776829412 41.7375853734
-51.7912278527 56.5716384064 -36.7912278527 71.5716384064
-132.417278478 -96.7177218184 -117.417278478 -81.7177218184
19.9788779448 -53.1068061438 34.9788779448 -38.1068061438
50.9432853241 53.830194296 65.9432853241 68.830194296
114.777310066 -42.0534139041 129.777310066 -27.0534139041
-80.5201136918 -60.5173650142 -65.5201136918 -45.5173650142
-109.709042971 -88.8853631128 -94.7090429709 -73.8853631128
163.797701593 49.0535662325 178.797701593 64.0535662325
119.52474488 -47.8047995045 134.52474488 -32.8047995045
-49.6358346107 25.7591536504 -34.6358346107 40.7591536504
43.1951329802 -61.7003551556 58.1951329802 -46.7003551556
5.07182469992 -32.9621617938 20.0718246999 -17.9621617938
157.392784956 -59.9967638674 172.392784956 -44.9967638674
169.761387556 77.3118040104 184.761387556 92.3118040104
-90.9030625259 23.7969275036 -75.9030625259 38.7969275036
13.3161023563 35.5651016032 28.3161023563 50.5651016032
-71.4124633746 -27.8098115487 -56.4124633746 -12.8098115487
-101.490578923 40.5161619529 -86.4905789231 55.5161619529
-22.5493804457 -9.48190527182 -7.54938044566 5.51809472818
22.7819453953 81.6043699778 37.7819453953 96.6043699778
163.851232856 52.6576397095 178.851232856 67.6576397095
8.7520267341 -82.9532179134 23.7520267341 -67.9532179134
-25.1295517688 -52.9753074372 -10.1295517688 -37.9753074372
125.380855923 53.093317371 140.380855923 68.093317371
-79.9963004315 -8.58901526761 -64.9963004315 6.41098473239
-3.49476632412 -93.5592177527 11.5052336759 -78.5592177527
5.12311663372 38.9766284779 20.1231166337 53.9766284779
-126.802193031 72.7620993955 -111.802193031 87.7620993955
144.816733092 33.8296664631 159.816733092 48.8296664631
-124.187243051 30.4856075292 -109.187243051 45.4856075292
63.8011147852 -64.8232471563 78.8011147852 -49.8232471563
125.091625278 10.0243913301 140.091625278 25.0243913301
-79.6265618345 37.4238531184 -64.6265618345 52.4238531184
84.0917344559 -61.9889564492 99.0917344559 -46.9889564492
44.1303873224 36.9948838398 59.1303873224 51.9948838398
57.579189376 -44.3308895399 72.579189376 -29.3308895399
-135.915887605 -68.4604833795 -120.915887605 -53.4604833795
-52.5931165731 -83.132095062 -37.5931165731 -68.132095062
-3.66134703734 -24.6160151663 11.3386529627 -9.61601516627
50.9138603775 6.66349450637 65.9138603775 21.6634945064
-59.0308862561 -28.7050068456 -44.0308862561 -13.7050068456
51.6601755093 -32.4794848001 66.6601755093 -17.4794848001
-174.739939684 35.8453347176 -159.739939684 50.8453347176
-107.905359545 -33.9905804035 -92.9053595447 -18.9905804035
-43.8298865873 -38.8139629115 -28.8298865873 -23.8139629115
-186.673789279 15.8707951216 -171.673789279 30.8707951216
13.0878151873 18.9267257542 28.0878151873 33.9267257542
-19.7764534411 -15.1648038653 -4.7764534411 -0.16480386529
-136.725385806 -62.3357813894 -121.725385806 -47.3357813894
56.3180682679 27.7748493606 71.3180682679 42.7748493606
-117.234207271 -95.984091959 -102.234207271 -80.984091959
-112.676334783 69.8614225716 -97.6763347829 84.8614225716
63.4481415226 49.5185084111 78.4481415226 64.5185084111
-164.583933393 -24.3224792074 -149.583933393 -9.32247920738
29.8740632141 -94.4036564677 44.8740632141 -79.4036564677
111.222002785 27.3091348937 126.222002785 42.3091348937
153.388416036 -51.7982686059 168.388416036 -36.7982686059
101.187835391 -79.2096166175 116.187835391 -64.2096166175
88.5716895369 -0.592196575665 103.571689537 14.4078034243
121.697565289 -20.4740930579 136.697565289 -5.47409305786
-57.6430699458 32.6596016791 -42.6430699458 47.6596016791
-51.9988160106 -16.5263906642 -36.9988160106 -1.52639066423
-128.45654531 40.0833021378 -113.45654531 55.0833021378
104.084274855 1.04302798395 119.084274855 16.0430279839
-65.3078063084 52.8659272125 -50.3078063084 67.8659272125
-185.575231871 0.603830128936 -170.575231871 15.6038301289
-99.670852574 63.077063843 -84.670852574 78.077063843
-97.5397037499 24.1544066414 -82.5397037499 39.1544066414
17.1213365558 80.8998469932 32.1213365558 95.8998469932
-66.0514693697 -67.879371904 -51.0514693697 -52.879371904
-165.624597131 -28.2121530482 -150.624597131 -13.2121530482
-153.938620771 -22.5333324395 -138.938620771 -7.5333324395
108.059653776 -30.1015722619 123.059653776 -15.1015722619
66.3357992327 33.4460170804 81.3357992327 48.4460170804
122.051245261 62.1986667929 137.051245261 77.1986667929
-9.14331797752 -4.94220638202 5.85668202248 10.057793618
-6.21767716831 -37.4474638489 8.78232283169 -22.4474638489
-10.2422235441 -36.7771789022 4.75777645591 -21.7771789022
151.39952872 5.78259379576 166.39952872 20.7825937958
53.0412866301 27.1060539476 68.0412866301 42.1060539476
-179.969415049 -86.9431323167 -164.969415049 -71.9431323167
-122.143517094 52.4812451482 -107.143517094 67.4812451482
126.651232891 -71.3593917404 141.651232891 -56.3593917404
35.5628371672 -44.4833782826 50.5628371672 -29.4833782826
106.338230585 74.4980976394 121.338230585 89.4980976394
2.49246106376 64.4571886404 17.4924610638 79.4571886404
26.9239556956 74.8154250821 41.9239556956 89.8154250821
-145.467051901 -23.3901235678 -130.467051901 -8.39012356782
-31.1747618493 -78.3450857919 -16.1747618493 -63.3450857919
-45.6363494594 41.8549865381 -30.6363494594 56.8549865381
-139.598628861 -76.0620586165 -124.598628861 -61.0620586165
75.3893757582 -96.3227872859 90.3893757582 -81.3227872859
66.4127845964 -29.3758752649 81.4127845964 -14.3758752649
71.002709831 5.93248532466 86.002709831 20.9324853247
-166.73585749 -91.958750292 -151.73585749 -76.958750292
-122.966652056 -44.5184865975 -107.966652056 -29.5184865975
-114.787601823 -21.1179486167 -99.7876018227 -6.11794861667
-37.7449906403 -70.1494304858 -22.7449906403 -55.1494304858
70.2802523802 34.6578320934 85.2802523802 49.6578320934"""
boxes15 = []
for line in data.split('\n'):
if not line:
break
[left, bottom, right, top] = [float(x) for x in line.split()]
boxes15.append((left, bottom, right, top))
def boxes15_stream(interleaved=True):
for i, (minx, miny, maxx, maxy) in enumerate(boxes15):
if interleaved:
yield (i, (minx, miny, maxx, maxy), 42)
else:
yield (i, (minx, maxx, miny, maxy), 42)
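# Note (added for clarity, not in the original source): with interleaved=True the
# coordinate order is (minx, miny, maxx, maxy); with interleaved=False rtree
# expects (minx, maxx, miny, maxy), which is what the second branch yields.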
def check_c_api():
assert int(str(index.__c_api_version__).split('.')[1]) >= 7
def test_non_stream_input():
p = index.Property()
idx = index.Index(properties=p)
for i, coords in enumerate(boxes15):
idx.add(i, coords)
assert 0 in idx.intersection((0, 0, 60, 60))
hits = list(idx.intersection((0, 0, 60, 60)))
assert hits == [0, 4, 16, 27, 35, 40, 47, 50, 76, 80]
def test_stream_input():
p = index.Property()
sindex = index.Index(boxes15_stream(), properties=p)
bounds = (0, 0, 60, 60)
hits = sindex.intersection(bounds)
assert sorted(hits) == [0, 4, 16, 27, 35, 40, 47, 50, 76, 80]
def test_index():
idx = index.Index()
unpickled = pickle.loads(pickle.dumps(idx))
assert idx.handle is not unpickled.handle
assert idx.properties.as_dict() == unpickled.properties.as_dict()
assert idx.interleaved == unpickled.interleaved
def test_property():
p = index.Property()
unpickled = pickle.loads(pickle.dumps(p))
assert p.handle is not unpickled.handle
assert p.as_dict() == unpickled.as_dict()
check_c_api()
test_non_stream_input()
test_stream_input()
test_index()
test_property()
|
py | 1a47dfa668e92f954a7d79884659b83f19954660 | # Copyright 2013-2014, Simon Kennedy, [email protected]
#
# Part of 'hiss' the asynchronous notification library
import uuid
import asyncio
import logging
from itertools import product
from collections import namedtuple
from hiss.target import Target
from hiss.exception import NotifierError
from hiss.notification import Notification
from hiss.handler.gntp.async import GNTPHandler
from hiss.handler.snp import SNPHandler
from hiss.handler.kodi import KodiHandler
__all__ = ['Notifier', 'USE_NOTIFIER', 'USE_REGISTERED']
NotificationInfo = namedtuple('NotificationInfo', ['name', 'title', 'text',
'icon', 'sound', 'enabled'])
USE_NOTIFIER = object()
USE_REGISTERED = object()
class Notifier(object):
"""Maintains a list of targets to handle notifications for.
:param name: The name of this notifier
:type name: str
:param signature: Application signature for this notifier.
:type signature: str
:param icon: Notifier icon. Used when registering the notifier and
as the default icon for notifications.
:type icon: :class:`~hiss.resource.Icon` or str
:param sound: Sound to play when displaying the notification.
:type sound: str
:param response_handler: Coroutine that is called whenever a response
is received.
:type response_handler: asyncio coroutine
:param async_handler: Coroutine that is called whenever an asynchronous
event arrives. Receives a list of Responses
:type async_handler: asyncio coroutine
:param loop: :mod:`asyncio` event loop to use.
:type loop: :class:`asyncio.BaseEventLoop`
"""
#TODO: standardised icon and sound handling between handler types
def __init__(self, name, signature,
icon=None, sound=None,
asynchronous=True,
handlers=(None, None),
loop=None):
self.name = name
self.signature = signature
self.icon = icon
self.sound = sound
self.asynchronous = asynchronous
self.notification_classes = {}
self.targets = TargetList()
if asynchronous and handlers[0] and not asyncio.iscoroutinefunction(handlers[0]):
raise ValueError('response_handler must be an asyncio coroutine')
self._response_handler = handlers[0]
if asynchronous and handlers[1] and not asyncio.iscoroutinefunction(handlers[1]):
raise ValueError('async_handler must be an asyncio coroutine')
self._async_handler = handlers[1]
if asynchronous:
if loop is None:
self.loop = asyncio.get_event_loop()
else:
self.loop = loop
else:
self.loop = None
self._handlers = {}
self._notifications = {}
def add_notification(self, name, title=None, text=None,
icon=None, sound=None,
enabled=True, class_id=None):
"""Add a notification class.
:param name: Notification class name
:type name: str
:param title: Default notification title
:type title: str or None
:param text: Default notification text
:type text: str or None
:param icon: Default notification icon
:type icon: str or None
:param sound: Default notification sound
:type sound: str or None
:param enabled: Whether the notification is enabled or not
:type enabled: bool
:param class_id: The class id to use. If not provided one will be
generated.
:type class_id: int
:returns: The class id of the newly added notification
:rtype: int
Default values will be used when creating a notification with
:meth:`~hiss.notifier.Notifier.create_notification`
"""
ni = NotificationInfo(name, title, text, icon, sound, enabled)
if class_id is None or class_id in self.notification_classes:
if len(self.notification_classes) == 0:
class_id = 1
else:
class_id = max(self.notification_classes.keys()) + 1
self.notification_classes[class_id] = ni
return class_id
def create_notification(self, class_id=-1, name='',
title=USE_REGISTERED,
text=USE_REGISTERED,
icon=USE_REGISTERED,
sound=USE_REGISTERED):
"""Create a notification that is ready to send.
Either ``class_id`` or ``name`` can be provided. If ``class_id`` is
provided it will be used instead of ``name`` to
lookup the defaults registered in
:meth:`~hiss.notifier.Notifier.add_notification`
:param class_id: The notification class id
:type class_id: int
:param name: The notification name
:type name: str
:param title: The title of the notification
:type title: str, None for no title or
:data:`~hiss.notifier.USE_REGISTERED` (default)
to use title provided during registration,
:data:`~hiss.notifier.USE_NOTIFIER` to use the
Notifier's name
:param text: The text to display in the notification
:type text: str, None for no text or
:data:`~hiss.notifier.USE_REGISTERED` (default)
to use text provided during registration
:param icon: Icon to display
:type icon: str, :class:`~hiss.resource.Icon`, None for no
icon or
:data:`~hiss.notifier.USE_REGISTERED` (default)
to use icon provided during registration
:param sound: Sound to play when showing notification
:type sound: str, None (default) for no sound or
:data:`~hiss.notifier.USE_REGISTERED`
to use sound provided during registration
"""
if class_id != -1:
if class_id not in self.notification_classes:
raise NotifierError('%d is not a known notification class id' % \
class_id)
registration_info = self.notification_classes[class_id]
elif name != '':
registration_info = self.find_notification(name)
else:
raise NotifierError('Either a class id or name must be specified.',
'hiss.notifier.Notifier')
if title is USE_REGISTERED:
title = registration_info.title
elif title is USE_NOTIFIER:
title = self.name
if text is USE_REGISTERED:
text = registration_info.text
if icon is USE_REGISTERED:
if registration_info.icon is not None:
icon = registration_info.icon
else:
icon = self.icon
if registration_info.sound is None:
sound = None
elif sound == USE_REGISTERED:
sound = registration_info.sound
uid = self._unique_id()
n = Notification(title, text, icon, sound, uid=uid)
n.name = registration_info.name
n.class_id = class_id
n.notifier = self
return n
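# Illustrative sketch (added; not part of the original source). It exercises only
# the defaults machinery defined above and assumes a synchronous Notifier, so no
# handlers or event loop are needed:
#
#     notifier = Notifier('Demo', 'application/x-vnd.example.demo',
#                         asynchronous=False)
#     class_id = notifier.add_notification('new_item', title='New item',
#                                          text='Something arrived')
#     n = notifier.create_notification(class_id=class_id)
#     # n.title == 'New item' and n.text == 'Something arrived' because both
#     # default to USE_REGISTERED and fall back to the registered values.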
def find_notification(self, name):
for value in self.notification_classes.values():
if value.name == name:
return value
raise NotifierError('Notification name %s not found.' % name)
@asyncio.coroutine
def add_target(self, targets):
"""Add a single target or list of targets to the known targets
and connects to them
:param targets: The Target or list of Targets to add.
:type targets: :class:`~hiss.target.Target`
:returns: Result dict or list of dict if more than one target added.
"""
if isinstance(targets, Target):
targets = [targets]
if self.asynchronous:
wait_for = []
for target in targets:
if target.scheme in self._handlers:
handler = self._handlers[target.scheme]
elif target.scheme == 'snp':
handler = SNPHandler(self.loop)
self._handlers[target.scheme] = handler
elif target.scheme == 'gntp':
handler = GNTPHandler(self.loop)
self._handlers[target.scheme] = handler
elif target.scheme == 'xbmc':
handler = KodiHandler()
self._handlers[target.scheme] = handler
wait_for.append(handler.connect(target))
done, _pending = yield from asyncio.wait(wait_for)
results = []
for task in done:
tr = task.result()
result = {}
if tr is None:
result['status'] = 'ERROR'
result['reason'] = 'Unable to connect to target'
else:
result['target'] = tr.target
result['status'] = 'OK'
self.targets.append(tr.target)
results.append(result)
else:
self.targets.append(target)
results = [dict(target=target,
status='OK')]
if len(results) == 1:
return results[0]
else:
return results
def remove_target(self, target):
"""Remove a target from the known targets.
:param target: The Target to remove.
:type target: :class:`~hiss.target.Target`
"""
self.targets.remove(target)
def log(self, message):
logging.log(logging.DEBUG, message)
@asyncio.coroutine
def register(self, targets=None):
"""Register this notifier with the target specified.
:param targets: The target or targets to register with or ``None``
to register with all known targets
:type targets: :class:`~hiss.target.Target`,
[:class:`~hiss.target.Target`] or ``None``
"""
targets = self.targets.valid_targets(targets)
wait_for = []
for target in targets:
wait_for.append(target.handler.register(self, target))
done, _pending = yield from asyncio.wait(wait_for)
results = []
for task in done:
result = task.result()
response = {}
response.update(result)
results.append(response)
if len(results) == 1:
return results[0]
else:
return results
@asyncio.coroutine
def notify(self, notifications, targets=None):
"""Send a notification to a specific targets or all targets.
:param notifications: A notification or list of notifications to send
:type notifications: :class:`hiss.notification.Notification`
:param targets: The targets to send the notification to. If no
targets are specified then the notification will
be sent to all known targets.
:type targets: :class:`hiss.target.Target` or ``None``
"""
if isinstance(notifications, Notification):
notifications = [notifications]
for notification in notifications:
notification.notifier = self
targets = self.targets.valid_targets(targets)
wait_for = []
combos = product(notifications, targets)
for notification, target in combos:
wait_for.append(target.handler.notify(notification, target))
done, _pending = yield from asyncio.wait(wait_for)
#TODO: Handling of sticky notifications for show/hide
responses = []
for task in done:
result = task.result()
response = {}
response.update(result)
responses.append(response)
if len(responses) == 1:
return responses[0]
else:
return responses
@asyncio.coroutine
def subscribe(self, signatures=[], targets=None):
"""Subscribe to notifications from a list of signatures.
:param signatures: List of signatures to listen to events from. If an
empty list is specified then subscribe to events
from all applications.
:type signatures: List of strings or empty list.
"""
targets = self.targets.valid_targets(targets)
responses = []
for target in targets:
response = yield from target.handler.subscribe(self, signatures,
target)
response['handler'] = type(target.handler).__name__
responses.append(response)
if len(responses) == 1:
return responses[0]
else:
return responses
@asyncio.coroutine
def unregister(self, targets=None):
"""Unregister this notifier with all targets
:param targets: The targets to unregister from.
If not specified or ``None`` then the notifier
will be unregistered from all known targets
:type targets: :class:`hiss.Target` or ``None``
"""
for handler in self._handlers.values():
if handler.capabilities['unregister']:
handler.unregister(targets, notifier=self)
@asyncio.coroutine
def show(self, uid):
"""If ``uid`` is in the list of current notifications then show it."""
if uid in self._notifications:
for handler in self._handlers.values():
if 'show' in handler.capabilities:
handler.show(self, uid)
@asyncio.coroutine
def hide(self, uid):
"""If ``uid`` is in the list of current notifications then hide it."""
if uid in self._notifications:
for handler in self._handlers.values():
if 'hide' in handler.capabilities:
handler.hide(self, uid)
@asyncio.coroutine
def responses_received(self, responses):
"""Event handler for callback events.
Default handler calls the response handler provided to `init`.
:param responses: The event
:type responses: :class:`hiss.NotificationEvent`
"""
if self._response_handler:
yield from self._response_handler(responses)
@asyncio.coroutine
def events_received(self, events):
"""Event handler for callback events.
Default handler calls the event handler provided to `init`.
:param responses: The event
:type responses: :class:`hiss.NotificationEvent`
"""
if self._async_handler:
yield from self._async_handler(events)
@asyncio.coroutine
def _handler(self, responses):
logging.debug(responses)
yield from self.responses_received(responses)
def _unique_id(self):
return str(uuid.uuid4())
class TargetList(object):
def __init__(self):
self.targets = []
def __contains__(self, target):
for t in self.targets:
if target == t:
return True
return False
def __iter__(self):
return self.targets.__iter__()
def append(self, target):
self.targets.append(target)
def remove(self, target):
index_to_delete = -1
for idx, t in enumerate(self.targets):
if target == t:
index_to_delete = idx
break
if index_to_delete != -1:
del self.targets[index_to_delete]
def valid_targets(self, target_or_targets):
if target_or_targets is None:
target_or_targets = self.targets
else:
target_or_targets = self._known_targets(target_or_targets)
return target_or_targets
def _known_targets(self, target_or_targets):
"""Filter out unknown target_or_targets"""
if isinstance(target_or_targets, Target) and target_or_targets in self.targets:
return [target_or_targets]
_targets = []
for target in target_or_targets:
if target in self.targets:
_targets.append(target)
return _targets
|
py | 1a47e00277fb3a9c85045a0bcb980168a83cbdf4 | import logging
from logging import getLogger
from typing import Sequence, Optional
import base58
from indy_crypto import IndyCryptoError
from crypto.bls.bls_crypto import GroupParams, BlsGroupParamsLoader, BlsCryptoVerifier, BlsCryptoSigner
from indy_crypto.bls import BlsEntity, Generator, VerKey, SignKey, Bls, \
Signature, MultiSignature, ProofOfPossession
logging.getLogger("indy_crypto").setLevel(logging.WARNING)
logger = getLogger()
class BlsGroupParamsLoaderIndyCrypto(BlsGroupParamsLoader):
def load_group_params(self) -> GroupParams:
group_name = 'generator'
g = "3LHpUjiyFC2q2hD7MnwwNmVXiuaFbQx2XkAFJWzswCjgN1utjsCeLzHsKk1nJvFEaS4fcrUmVAkdhtPCYbrVyATZcmzwJReTcJqwqBCPTmTQ9uWPwz6rEncKb2pYYYFcdHa8N17HzVyTqKfgPi4X9pMetfT3A5xCHq54R2pDNYWVLDX"
return GroupParams(group_name, g)
class IndyCryptoBlsUtils:
SEED_LEN = 32
@staticmethod
def bls_to_str(v: BlsEntity) -> str:
try:
return base58.b58encode(v.as_bytes()).decode("utf-8")
except ValueError:
logger.warning('BLS: BLS Entity can not be encoded as base58')
@staticmethod
def bls_from_str(v: str, cls) -> Optional[BlsEntity]:
try:
bts = base58.b58decode(v)
except ValueError:
logger.warning('BLS: value {} can not be decoded from base58'.format(v))
return None
try:
return cls.from_bytes(bts)
except IndyCryptoError as e:
logger.warning('BLS: Indy Crypto error: {}'.format(e))
return None
@staticmethod
def bls_pk_from_str(v: str) -> Optional[VerKey]:
return IndyCryptoBlsUtils.bls_from_str(v, VerKey)
@staticmethod
def prepare_seed(seed):
seed_bytes = None
if isinstance(seed, str):
seed_bytes = seed.encode()
if isinstance(seed, (bytes, bytearray)):
seed_bytes = seed
# TODO: FIXME: indy-crypto supports 32-bit seeds only
if seed_bytes:
if len(seed_bytes) < IndyCryptoBlsUtils.SEED_LEN:
seed_bytes += b'0' * (IndyCryptoBlsUtils.SEED_LEN - len(seed_bytes))
assert (len(seed_bytes) >= IndyCryptoBlsUtils.SEED_LEN)
return seed_bytes
class BlsCryptoVerifierIndyCrypto(BlsCryptoVerifier):
def __init__(self, params: GroupParams):
self._generator = \
IndyCryptoBlsUtils.bls_from_str(params.g, Generator) # type: Generator
def verify_sig(self, signature: str, message: bytes, bls_pk: Optional[VerKey]) -> bool:
bls_signature = IndyCryptoBlsUtils.bls_from_str(signature, Signature)
if bls_signature is None:
return False
if bls_pk is None:
return False
return Bls.verify(bls_signature,
message,
bls_pk,
self._generator)
def verify_multi_sig(self, signature: str, message: bytes, pks: Sequence[Optional[VerKey]]) -> bool:
# TODO: is it expected that we return False if one of the keys is None?
if None in pks:
return False
multi_signature = \
IndyCryptoBlsUtils.bls_from_str(signature, MultiSignature) # type: MultiSignature
if multi_signature is None:
return False
return Bls.verify_multi_sig(multi_sig=multi_signature,
message=message,
ver_keys=pks,
gen=self._generator)
def create_multi_sig(self, signatures: Sequence[str]) -> str:
sigs = [IndyCryptoBlsUtils.bls_from_str(s, Signature) for s in signatures]
bts = MultiSignature.new(sigs)
return IndyCryptoBlsUtils.bls_to_str(bts)
def verify_key_proof_of_possession(self, key_proof: Optional[ProofOfPossession], bls_pk: Optional[VerKey]) -> bool:
if None in [key_proof, bls_pk]:
return False
return Bls.verify_pop(key_proof,
bls_pk,
self._generator)
class BlsCryptoSignerIndyCrypto(BlsCryptoSigner):
def __init__(self, sk: SignKey, pk: VerKey, params: GroupParams):
self._sk = sk # type: SignKey
self.pk = pk # type: VerKey
self._generator = \
IndyCryptoBlsUtils.bls_from_str(params.g, Generator) # type: Generator
@staticmethod
def generate_keys(params: GroupParams, seed=None) -> (SignKey, VerKey, ProofOfPossession):
seed = IndyCryptoBlsUtils.prepare_seed(seed)
gen = IndyCryptoBlsUtils.bls_from_str(params.g, Generator)
sk = SignKey.new(seed)
vk = VerKey.new(gen, sk)
key_proof = ProofOfPossession.new(ver_key=vk, sign_key=sk)
return sk, vk, key_proof
@staticmethod
def generate_key_proof(sk: SignKey, pk: VerKey) -> ProofOfPossession:
return ProofOfPossession.new(ver_key=pk, sign_key=sk)
def sign(self, message: bytes) -> str:
sign = Bls.sign(message, self._sk)
return IndyCryptoBlsUtils.bls_to_str(sign)
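# Illustrative end-to-end sketch (added; not part of the original module). It
# only uses the classes defined above and assumes indy-crypto is installed:
#
#     params = BlsGroupParamsLoaderIndyCrypto().load_group_params()
#     sk, pk, pop = BlsCryptoSignerIndyCrypto.generate_keys(params, seed='my seed')
#     signer = BlsCryptoSignerIndyCrypto(sk, pk, params)
#     verifier = BlsCryptoVerifierIndyCrypto(params)
#     sig = signer.sign(b'some message')
#     assert verifier.verify_sig(sig, b'some message', pk)
#     assert verifier.verify_key_proof_of_possession(pop, pk)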
|
py | 1a47e0150f541b4a150cc6f8b78926407f38d66d | # ----------------------------------------------------------------------------
# Copyright (c) 2016-2020, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import unittest
from qiime2.core.type import (
parse_primitive, Int, Float, Bool, Str, List, Set, Metadata,
MetadataColumn)
class TestParsePrimitiveNonCollectionsSimple(unittest.TestCase):
def test_metadata_expr(self):
with self.assertRaisesRegex(ValueError, 'Metadata may not be parsed'):
parse_primitive(Metadata, '42')
def test_metadata_column_expr(self):
with self.assertRaisesRegex(ValueError,
'MetadataColumn.* may not be parsed'):
parse_primitive(MetadataColumn, '42')
def test_int_type_int_value(self):
obs = parse_primitive(Int, '42')
self.assertEqual(obs, 42)
self.assertIsInstance(obs, int)
def test_float_type_int_value(self):
obs = parse_primitive(Float, '42')
self.assertEqual(obs, 42.0)
self.assertIsInstance(obs, float)
def test_bool_type_int_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(Bool, '42')
def test_str_type_int_value(self):
obs = parse_primitive(Str, '42')
self.assertEqual(obs, '42')
self.assertIsInstance(obs, str)
def test_int_type_float_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(Int, '42.0')
def test_float_type_float_value(self):
obs = parse_primitive(Float, '42.0')
self.assertEqual(obs, 42.0)
self.assertIsInstance(obs, float)
def test_bool_type_float_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(Bool, '42.0')
def test_str_type_float_value(self):
obs = parse_primitive(Str, '42.0')
self.assertEqual(obs, '42.0')
self.assertIsInstance(obs, str)
def test_int_type_bool_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(Int, 'True')
def test_float_type_bool_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(Float, 'True')
def test_bool_type_bool_value(self):
obs = parse_primitive(Bool, 'True')
self.assertEqual(obs, True)
self.assertIsInstance(obs, bool)
def test_str_type_bool_value(self):
obs = parse_primitive(Str, 'True')
self.assertEqual(obs, 'True')
self.assertIsInstance(obs, str)
def test_int_type_str_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(Int, 'peanut')
def test_float_type_str_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(Float, 'peanut')
def test_bool_type_str_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(Bool, 'peanut')
def test_str_type_str_value(self):
obs = parse_primitive(Str, 'peanut')
self.assertEqual(obs, 'peanut')
self.assertIsInstance(obs, str)
class TestParsePrimitiveNonCollectionNonStringInputs(unittest.TestCase):
def test_int_type_int_value(self):
obs = parse_primitive(Int, 1)
self.assertEqual(obs, 1)
self.assertIsInstance(obs, int)
def test_float_type_float_value(self):
obs = parse_primitive(Float, 3.3)
self.assertEqual(obs, 3.3)
self.assertIsInstance(obs, float)
def test_bool_type_bool_value(self):
obs = parse_primitive(Bool, True)
self.assertEqual(obs, True)
self.assertIsInstance(obs, bool)
def test_str_type_str_value(self):
obs = parse_primitive(Str, 'peanut')
self.assertEqual(obs, 'peanut')
self.assertIsInstance(obs, str)
def test_int_type_bool_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(Int, True)
class TestParsePrimitiveNonCollectionsSimpleUnions(unittest.TestCase):
def setUp(self):
super().setUp()
self.exprs = [
Int | Bool,
Int | Str,
Float | Bool,
Float | Str,
Bool | Str,
]
def test_int_union_float_expr_int_value(self):
# Int | Float == Float
obs = parse_primitive(Int | Float, '42')
self.assertEqual(obs, 42.0)
self.assertIsInstance(obs, float)
def test_int_union_float_expr_float_value(self):
# Int | Float == Float
obs = parse_primitive(Int | Float, '42.0')
self.assertEqual(obs, 42.0)
self.assertIsInstance(obs, float)
def test_int_union_float_expr_bool_value(self):
# Int | Float == Float
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(Int | Float, 'True')
def test_int_union_float_expr_str_value(self):
# Int | Float == Float
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(Int | Float, 'peanut')
def test_simple_unions_with_int_value(self):
for expr in self.exprs:
with self.subTest(expr=expr):
obs = parse_primitive(expr, '42')
self.assertEqual(obs, 42)
self.assertIsInstance(obs, int)
def test_simple_unions_with_float_value(self):
for expr in self.exprs:
with self.subTest(expr=expr):
obs = parse_primitive(expr, '42.1')
self.assertEqual(obs, 42.1)
self.assertIsInstance(obs, float)
def test_simple_unions_with_bool_value(self):
for expr in self.exprs:
with self.subTest(expr=expr):
obs = parse_primitive(expr, 'True')
self.assertEqual(obs, True)
self.assertIsInstance(obs, bool)
def test_simple_unions_with_str_value(self):
for expr in self.exprs:
with self.subTest(expr=expr):
obs = parse_primitive(expr, 'peanut')
self.assertEqual(obs, 'peanut')
self.assertIsInstance(obs, str)
class TestParsePrimitiveCollectionsSimple(unittest.TestCase):
def test_list_of_int(self):
obs = parse_primitive(List[Int], ('1', '2', '3'))
self.assertEqual(obs, [1, 2, 3])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], int)
def test_list_of_int_bad_value_variant_a(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Int], ('True', '2', '3'))
def test_list_of_int_bad_value_variant_b(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Int], ('1', '2', 'False'))
def test_set_of_int(self):
obs = parse_primitive(Set[Int], ('1', '2', '3'))
self.assertEqual(obs, {1, 2, 3})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), int)
def test_list_of_float(self):
obs = parse_primitive(List[Float], ('1.0', '2.0', '3.0'))
self.assertEqual(obs, [1.0, 2.0, 3.0])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], float)
def test_set_of_float(self):
obs = parse_primitive(Set[Float], ('1.0', '2.0', '3.0'))
self.assertEqual(obs, {1.0, 2.0, 3.0})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), float)
def test_list_of_bool(self):
obs = parse_primitive(List[Bool], ('True', 'False', 'True'))
self.assertEqual(obs, [True, False, True])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], bool)
def test_set_of_bool(self):
obs = parse_primitive(Set[Bool], ('True', 'False'))
self.assertEqual(obs, {True, False})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), bool)
def test_list_of_str(self):
obs = parse_primitive(List[Str], ('peanut', 'the', 'dog'))
self.assertEqual(obs, ['peanut', 'the', 'dog'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
def test_set_of_str(self):
obs = parse_primitive(Set[Str], ('peanut', 'the', 'dog'))
self.assertEqual(obs, {'peanut', 'the', 'dog'})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), str)
# The next tests _aren't_ monomorphic, because unions of Int and Float
# always yield a Float (List[Int] | List[Float] == List[Float]).
def test_list_int_or_float_with_int_value(self):
obs = parse_primitive(List[Int] | List[Float], ('1', '2', '3'))
self.assertEqual(obs, [1.0, 2.0, 3.0])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], float)
def test_set_int_or_float_with_int_value(self):
obs = parse_primitive(Set[Int] | Set[Float], ('1', '2', '3'))
self.assertEqual(obs, {1.0, 2.0, 3.0})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), float)
def test_list_int_or_float_with_float_value(self):
obs = parse_primitive(List[Int] | List[Float], ('1.1', '2.2', '3.3'))
self.assertEqual(obs, [1.1, 2.2, 3.3])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], float)
def test_set_int_or_float_with_float_value(self):
obs = parse_primitive(Set[Int] | Set[Float], ('1.1', '2.2', '3.3'))
self.assertEqual(obs, {1.1, 2.2, 3.3})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), float)
def test_list_int_or_float_int_value(self):
obs = parse_primitive(List[Int | Float], ('1', '2', '3'))
self.assertEqual(obs, [1.0, 2.0, 3.0])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], float)
def test_set_int_or_float_int_value(self):
obs = parse_primitive(Set[Int | Float], ('1', '2', '3'))
self.assertEqual(obs, {1.0, 2.0, 3.0})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), float)
class TestParsePrimitiveCollectionsMonomorphic(unittest.TestCase):
def test_list_int_or_bool_with_int_value(self):
obs = parse_primitive(List[Int] | List[Bool], ('1', '2', '3'))
self.assertEqual(obs, [1, 2, 3])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], int)
def test_list_int_or_bool_with_bool_value(self):
obs = parse_primitive(List[Int] | List[Bool],
('True', 'False', 'True'))
self.assertEqual(obs, [True, False, True])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], bool)
def test_list_int_or_bool_with_mixed_value_variant_a(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Int] | List[Bool], ('True', '2', '3'))
def test_list_int_or_bool_with_mixed_value_variant_b(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Int] | List[Bool], ('1', '2', 'True'))
def test_list_int_or_bool_with_mixed_value_variant_c(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Int] | List[Bool], ('False', '2', 'True'))
def test_set_int_or_bool_with_int_value(self):
obs = parse_primitive(Set[Int] | Set[Bool], ('1', '2', '3'))
self.assertEqual(obs, {1, 2, 3})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), int)
def test_set_int_or_bool_with_bool_value(self):
obs = parse_primitive(Set[Int] | Set[Bool], ('True', 'False'))
self.assertEqual(obs, {True, False})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), bool)
def test_list_int_or_str_with_int_value(self):
obs = parse_primitive(List[Int] | List[Str], ('1', '2', '3'))
self.assertEqual(obs, [1, 2, 3])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], int)
def test_list_int_or_str_with_str_value(self):
obs = parse_primitive(List[Int] | List[Str], ('peanut', 'the', 'dog'))
self.assertEqual(obs, ['peanut', 'the', 'dog'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
def test_list_int_or_str_with_mixed_value_variant_a(self):
obs = parse_primitive(List[Int] | List[Str], ('1', 'the', 'dog'))
self.assertEqual(obs, ['1', 'the', 'dog'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
self.assertIsInstance(obs[1], str)
def test_list_int_or_str_with_mixed_value_variant_b(self):
obs = parse_primitive(List[Int] | List[Str], ('peanut', 'the', '1'))
self.assertEqual(obs, ['peanut', 'the', '1'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
self.assertIsInstance(obs[2], str)
def test_set_int_or_str_with_int_value(self):
obs = parse_primitive(Set[Int] | Set[Str], ('1', '2', '3'))
self.assertEqual(obs, {1, 2, 3})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), int)
def test_set_int_or_str_with_str_value(self):
obs = parse_primitive(Set[Int] | Set[Str], ('peanut', 'the', 'dog'))
self.assertEqual(obs, {'peanut', 'the', 'dog'})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), str)
def test_list_float_or_bool_with_float_value(self):
obs = parse_primitive(List[Float] | List[Bool], ('1.1', '2.2', '3.3'))
self.assertEqual(obs, [1.1, 2.2, 3.3])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], float)
def test_list_float_or_bool_with_bool_value(self):
obs = parse_primitive(List[Float] | List[Bool],
('True', 'False', 'True'))
self.assertEqual(obs, [True, False, True])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], bool)
def test_list_float_or_bool_with_mixed_value_variant_a(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Float] | List[Bool],
('1.1', 'False', 'True'))
def test_list_float_or_bool_with_mixed_value_variant_b(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Float] | List[Bool],
('True', 'False', '3.3'))
def test_set_float_or_bool_with_float_value(self):
obs = parse_primitive(Set[Float] | Set[Bool], ('1.1', '2.2', '3.3'))
self.assertEqual(obs, {1.1, 2.2, 3.3})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), float)
def test_set_float_or_bool_with_bool_value(self):
obs = parse_primitive(Set[Float] | Set[Bool],
('True', 'False', 'True'))
self.assertEqual(obs, {True, False})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), bool)
def test_list_float_or_str_with_float_value(self):
obs = parse_primitive(List[Float] | List[Str], ('1.1', '2.2', '3.3'))
self.assertEqual(obs, [1.1, 2.2, 3.3])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], float)
def test_list_float_or_str_with_str_value(self):
obs = parse_primitive(List[Float] | List[Str],
('peanut', 'the', 'dog'))
self.assertEqual(obs, ['peanut', 'the', 'dog'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
def test_list_float_or_str_with_mixed_value_variant_a(self):
obs = parse_primitive(List[Float] | List[Str],
('1.1', 'the', 'dog'))
self.assertEqual(obs, ['1.1', 'the', 'dog'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
def test_list_float_or_str_with_mixed_value_variant_b(self):
obs = parse_primitive(List[Float] | List[Str],
('peanut', 'the', '3.3'))
self.assertEqual(obs, ['peanut', 'the', '3.3'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[-1], str)
def test_set_float_or_str_with_float_value(self):
obs = parse_primitive(Set[Float] | Set[Str], ('1.1', '2.2', '3.3'))
self.assertEqual(obs, {1.1, 2.2, 3.3})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), float)
def test_set_float_or_str_with_str_value(self):
obs = parse_primitive(Set[Float] | Set[Str], ('peanut', 'the', 'dog'))
self.assertEqual(obs, {'peanut', 'the', 'dog'})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), str)
def test_list_bool_or_str_with_bool_value(self):
obs = parse_primitive(List[Bool] | List[Str],
('True', 'False', 'True'))
self.assertEqual(obs, [True, False, True])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], bool)
def test_list_bool_or_str_with_str_value(self):
obs = parse_primitive(List[Bool] | List[Str], ('peanut', 'the', 'dog'))
self.assertEqual(obs, ['peanut', 'the', 'dog'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
def test_list_bool_or_str_with_mixed_value_variant_a(self):
obs = parse_primitive(List[Bool] | List[Str], ('True', 'the', 'dog'))
self.assertEqual(obs, ['True', 'the', 'dog'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
def test_list_bool_or_str_with_mixed_value_variant_b(self):
obs = parse_primitive(List[Bool] | List[Str],
('peanut', 'the', 'True'))
self.assertEqual(obs, ['peanut', 'the', 'True'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[-1], str)
def test_set_bool_or_str_with_bool_value(self):
obs = parse_primitive(Set[Bool] | Set[Str],
('True', 'False', 'True'))
self.assertEqual(obs, {True, False})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), bool)
def test_set_bool_or_str_with_str_value(self):
obs = parse_primitive(Set[Bool] | Set[Str], ('peanut', 'the', 'dog'))
self.assertEqual(obs, {'peanut', 'the', 'dog'})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), str)
def test_list_bool_or_str_with_mixed_value(self):
obs = parse_primitive(List[Bool] | List[Str],
('peanut', 'the', 'True'))
self.assertEqual(obs, ['peanut', 'the', 'True'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
self.assertIsInstance(obs[-1], str)
class TestParsePrimitiveCollectionsComposite(unittest.TestCase):
def test_list_int_or_bool_with_int_value(self):
obs = parse_primitive(List[Int | Bool], ('1', '2', '3'))
self.assertEqual(obs, [1, 2, 3])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], int)
def test_list_int_or_bool_with_float_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Int | Bool], ('1.1', '2.2', '3.3'))
def test_list_int_or_bool_with_bool_value(self):
obs = parse_primitive(List[Int | Bool], ('True', 'False', 'True'))
self.assertEqual(obs, [True, False, True])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], bool)
def test_list_int_or_bool_with_str_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Int | Bool], ('peanut', 'the', 'dog'))
def test_list_int_or_bool_with_mixed_value(self):
obs = parse_primitive(List[Int | Bool], ('1', 'False', '2', 'True'))
self.assertEqual(obs, [1, False, 2, True])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], int)
self.assertIsInstance(obs[1], bool)
def test_list_int_or_bool_with_mixed_value_variant_a(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Int | Bool], ('peanut', 'False', '2', 'True'))
def test_list_int_or_bool_with_mixed_value_variant_b(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Int | Bool], ('1', 'False', '2', 'peanut'))
def test_list_int_or_bool_with_bad_mix_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Int | Bool], ('1', 'True', 'dog'))
def test_set_int_or_bool_with_int_value(self):
obs = parse_primitive(Set[Int | Bool], ('1', '2', '3'))
self.assertEqual(obs, {1, 2, 3})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), int)
def test_set_int_or_bool_with_bool_value(self):
obs = parse_primitive(Set[Int | Bool], ('True', 'False', 'True'))
self.assertEqual(obs, {True, False})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), bool)
def test_set_int_or_bool_with_mixed_value(self):
obs = parse_primitive(Set[Int | Bool], ('1', 'False', '2', 'True'))
self.assertEqual(obs, {1, False, 2, True})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), bool)
self.assertIsInstance(obs.pop(), int)
def test_list_int_or_str_with_int_value(self):
obs = parse_primitive(List[Int | Str], ('1', '2', '3'))
self.assertEqual(obs, [1, 2, 3])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], int)
def test_list_int_or_str_with_str_value(self):
obs = parse_primitive(List[Int | Str], ('peanut', 'the', 'dog'))
self.assertEqual(obs, ['peanut', 'the', 'dog'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
def test_list_int_or_str_with_mixed_value_variant_a(self):
obs = parse_primitive(List[Int | Str], ('1', 'the', 'dog'))
self.assertEqual(obs, [1, 'the', 'dog'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], int)
self.assertIsInstance(obs[1], str)
def test_list_int_or_str_with_mixed_value_variant_b(self):
obs = parse_primitive(List[Int | Str], ('peanut', 'the', '1'))
self.assertEqual(obs, ['peanut', 'the', 1])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
self.assertIsInstance(obs[2], int)
def test_set_int_or_str_with_int_value(self):
obs = parse_primitive(Set[Int | Str], ('1', '2', '3'))
self.assertEqual(obs, {1, 2, 3})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), int)
def test_set_int_or_str_with_str_value(self):
obs = parse_primitive(Set[Int | Str], ('peanut', 'the', 'dog'))
self.assertEqual(obs, {'peanut', 'the', 'dog'})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), str)
def test_set_int_or_str_with_mixed_value(self):
obs = parse_primitive(Set[Int | Str], ('1', 'the', '2', 'dog'))
self.assertEqual(obs, {1, 'the', 2, 'dog'})
self.assertIsInstance(obs, set)
def test_list_float_or_bool_with_float_value(self):
obs = parse_primitive(List[Float | Bool], ('1.1', '2.2', '3.3'))
self.assertEqual(obs, [1.1, 2.2, 3.3])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], float)
def test_list_float_or_bool_with_bool_value(self):
obs = parse_primitive(List[Float | Bool], ('True', 'False', 'True'))
self.assertEqual(obs, [True, False, True])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], bool)
def test_list_float_or_bool_with_mixed_value_variant_a(self):
obs = parse_primitive(List[Float | Bool], ('True', '2.2', '3.3'))
self.assertEqual(obs, [True, 2.2, 3.3])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], bool)
self.assertIsInstance(obs[1], float)
def test_list_float_or_bool_with_mixed_value_variant_b(self):
obs = parse_primitive(List[Float | Bool], ('1.1', '2.2', 'False'))
self.assertEqual(obs, [1.1, 2.2, False])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], float)
self.assertIsInstance(obs[-1], bool)
def test_list_float_or_bool_with_bad_mix_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Float | Bool], ('1.1', '2.2', 'peanut'))
def test_set_float_or_bool_with_float_value(self):
obs = parse_primitive(Set[Float | Bool], ('1.1', '2.2', '3.3'))
self.assertEqual(obs, {1.1, 2.2, 3.3})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), float)
def test_set_float_or_bool_with_bool_value(self):
obs = parse_primitive(Set[Float | Bool], ('True', 'False', 'True'))
self.assertEqual(obs, {True, False})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), bool)
def test_list_float_or_str_with_float_value(self):
obs = parse_primitive(List[Float | Str], ('1.1', '2.2', '3.3'))
self.assertEqual(obs, [1.1, 2.2, 3.3])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], float)
def test_list_float_or_str_with_str_value(self):
obs = parse_primitive(List[Float | Str], ('peanut', 'the', 'dog'))
self.assertEqual(obs, ['peanut', 'the', 'dog'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
def test_list_float_or_str_with_mixed_value_variant_a(self):
obs = parse_primitive(List[Float | Str], ('peanut', '2.2', '3.3'))
self.assertEqual(obs, ['peanut', 2.2, 3.3])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
self.assertIsInstance(obs[1], float)
def test_list_float_or_str_with_mixed_value_variant_b(self):
obs = parse_primitive(List[Float | Str], ('1.1', '2.2', 'dog'))
self.assertEqual(obs, [1.1, 2.2, 'dog'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], float)
self.assertIsInstance(obs[-1], str)
def test_set_float_or_str_with_float_value(self):
obs = parse_primitive(Set[Float | Str], ('1.1', '2.2', '3.3'))
self.assertEqual(obs, {1.1, 2.2, 3.3})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), float)
def test_set_float_or_str_with_str_value(self):
obs = parse_primitive(Set[Float | Str], ('peanut', 'the', 'dog'))
self.assertEqual(obs, {'peanut', 'the', 'dog'})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), str)
def test_list_bool_or_str_with_bool_value(self):
obs = parse_primitive(List[Bool | Str], ('True', 'False', 'True'))
self.assertEqual(obs, [True, False, True])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], bool)
def test_list_bool_or_str_with_str_value(self):
obs = parse_primitive(List[Bool | Str], ('peanut', 'the', 'dog'))
self.assertEqual(obs, ['peanut', 'the', 'dog'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
def test_list_bool_or_str_with_mixed_value_variant_a(self):
obs = parse_primitive(List[Bool | Str], ('True', 'the', 'dog'))
self.assertEqual(obs, [True, 'the', 'dog'])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], bool)
self.assertIsInstance(obs[-1], str)
def test_list_bool_or_str_with_mixed_value_variant_b(self):
obs = parse_primitive(List[Bool | Str], ('peanut', 'the', 'True'))
self.assertEqual(obs, ['peanut', 'the', True])
self.assertIsInstance(obs, list)
self.assertIsInstance(obs[0], str)
self.assertIsInstance(obs[-1], bool)
def test_set_bool_or_str_with_bool_value(self):
obs = parse_primitive(Set[Bool | Str], ('True', 'False', 'True'))
self.assertEqual(obs, {True, False})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), bool)
def test_set_bool_or_str_with_str_value(self):
obs = parse_primitive(Set[Bool | Str], ('peanut', 'the', 'dog'))
self.assertEqual(obs, {'peanut', 'the', 'dog'})
self.assertIsInstance(obs, set)
self.assertIsInstance(obs.pop(), str)
class TestParsePrimitiveCollectionsComplex(unittest.TestCase):
def test_list_int_bool_or_list_float_with_bool_int_value(self):
obs = parse_primitive(List[Int | Bool] | List[Float],
('1', '2', 'True', 'False'))
self.assertEqual(obs, [1, 2, True, False])
def test_list_int_bool_or_list_float_with_float_value(self):
obs = parse_primitive(List[Int | Bool] | List[Float],
('1.1', '2.2', '3.3', '4.4'))
self.assertEqual(obs, [1.1, 2.2, 3.3, 4.4])
def test_list_int_bool_or_list_float_with_bad_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Int | Bool] | List[Float],
('1', '2.2', 'True', 'False'))
def test_list_int_str_or_list_float_with_str_int_value(self):
obs = parse_primitive(List[Int | Str] | List[Float],
('1', '2', 'peanut', 'the'))
self.assertEqual(obs, [1, 2, 'peanut', 'the'])
def test_list_int_str_or_list_float_with_float_value(self):
obs = parse_primitive(List[Int | Str] | List[Float],
('1.1', '2.2', '3.3', '4.4'))
self.assertEqual(obs, [1.1, 2.2, 3.3, 4.4])
def test_list_int_str_or_list_float_str_with_float_value(self):
obs = parse_primitive(List[Int | Str] | List[Float | Str],
('1.1', '2.2', '3.3', '4.4'))
self.assertEqual(obs, [1.1, 2.2, 3.3, 4.4])
def test_list_int_str_or_list_float_str_bool_with_float_value(self):
obs = parse_primitive(List[Int | Str] | List[Float | Str | Bool],
('1.1', '2.2', '3.3', '4.4'))
self.assertEqual(obs, [1.1, 2.2, 3.3, 4.4])
def test_list_int_str_or_list_float_str_bool_with_float_str_value(self):
obs = parse_primitive(List[Int | Str] | List[Float | Str | Bool],
('1.1', '2.2', 'the', 'peanut'))
self.assertEqual(obs, [1.1, 2.2, 'the', 'peanut'])
def test_list_int_str_or_list_float_str_bool_with_float_bool_value(self):
obs = parse_primitive(List[Int | Str] | List[Float | Str | Bool],
('1.1', '2.2', 'True', 'False'))
self.assertEqual(obs, [1.1, 2.2, True, False])
def test_list_int_str_or_list_float_with_mixed_value(self):
obs = parse_primitive(List[Int | Str] | List[Float],
('1.1', '2', 'True', 'peanut'))
self.assertEqual(obs, ['1.1', 2, 'True', 'peanut'])
def test_list_float_bool_or_list_str_with_float_bool_value(self):
obs = parse_primitive(List[Float | Bool] | List[Int],
('1', '2', 'True', 'False'))
self.assertEqual(obs, [1, 2, True, False])
def test_list_float_bool_or_list_str_with_int_value(self):
obs = parse_primitive(List[Float | Bool] | List[Int],
('1', '2', '3', '4'))
self.assertEqual(obs, [1, 2, 3, 4])
def test_list_float_bool_or_list_str_with_bad_value(self):
with self.assertRaisesRegex(ValueError, 'Could not coerce'):
parse_primitive(List[Float | Bool] | List[Int],
('1', '2.2', 'True', 'peanut'))
def test_set_int_bool_or_list_float_with_bool_int_value(self):
obs = parse_primitive(Set[Int | Bool] | Set[Float],
('1', '2', 'True', 'False'))
self.assertEqual(obs, {1, 2, True, False})
if __name__ == '__main__':
unittest.main()
|
py | 1a47e019784e19783e653e3a9a766edd055672f4 | #!/usr/bin/env python
"""Tests client actions related to administrating the client."""
import os
import psutil
import requests
from grr import config
from grr.client import comms
from grr.client.client_actions import admin
from grr.lib import flags
from grr.lib import rdfvalue
from grr.lib import stats
from grr.lib import utils
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import protodict as rdf_protodict
from grr.test_lib import client_test_lib
from grr.test_lib import test_lib
class ConfigActionTest(client_test_lib.EmptyActionTest):
"""Tests the client actions UpdateConfiguration and GetConfiguration."""
def setUp(self):
super(ConfigActionTest, self).setUp()
# These tests change the config so we preserve state.
self.config_stubber = test_lib.PreserveConfig()
self.config_stubber.Start()
def tearDown(self):
super(ConfigActionTest, self).tearDown()
self.config_stubber.Stop()
def testUpdateConfiguration(self):
"""Test that we can update the config."""
# A unique name on the filesystem for the writeback.
self.config_file = os.path.join(self.temp_dir, "ConfigActionTest.yaml")
# In a real client, the writeback location should be set to something real,
# but for this test we make it the same as the config file.
config.CONFIG.SetWriteBack(self.config_file)
# Make sure the file is gone
self.assertRaises(IOError, open, self.config_file)
location = ["http://www.example1.com/", "http://www.example2.com/"]
request = rdf_protodict.Dict()
request["Client.server_urls"] = location
request["Client.foreman_check_frequency"] = 3600
result = self.RunAction(admin.UpdateConfiguration, request)
self.assertEqual(result, [])
self.assertEqual(config.CONFIG["Client.foreman_check_frequency"], 3600)
# Test the config file got written.
data = open(self.config_file, "rb").read()
self.assertTrue("server_urls: {0}".format(",".join(location)) in data)
self.urls = []
# Now test that our location was actually updated.
def FakeUrlOpen(url=None, data=None, **_):
self.urls.append(url)
response = requests.Response()
response.status_code = 200
response._content = data
return response
with utils.Stubber(requests, "request", FakeUrlOpen):
client_context = comms.GRRHTTPClient(worker=MockClientWorker())
client_context.MakeRequest("")
# Since the request is successful we only connect to one location.
self.assertTrue(location[0] in self.urls[0])
def testUpdateConfigBlacklist(self):
"""Tests that disallowed fields are not getting updated."""
with test_lib.ConfigOverrider({
"Client.server_urls": ["http://something.com/"],
"Client.server_serial_number": 1
}):
location = ["http://www.example.com"]
request = rdf_protodict.Dict()
request["Client.server_urls"] = location
request["Client.server_serial_number"] = 10
self.RunAction(admin.UpdateConfiguration, request)
# Location can be set.
self.assertEqual(config.CONFIG["Client.server_urls"], location)
# But the server serial number can not be updated.
self.assertEqual(config.CONFIG["Client.server_serial_number"], 1)
def testGetConfig(self):
"""Check GetConfig client action works."""
# Use UpdateConfig to generate a config.
location = ["http://example.com/"]
request = rdf_protodict.Dict()
request["Client.server_urls"] = location
request["Client.foreman_check_frequency"] = 3600
self.RunAction(admin.UpdateConfiguration, request)
# Check that our GetConfig actually gets the real data.
self.RunAction(admin.GetConfiguration)
self.assertEqual(config.CONFIG["Client.foreman_check_frequency"], 3600)
self.assertEqual(config.CONFIG["Client.server_urls"], location)
class MockStatsCollector(object):
"""Mock stats collector for GetClientStatsActionTest."""
# First value in every tuple is a timestamp (as if it was returned by
# time.time()).
cpu_samples = [
(rdfvalue.RDFDatetime().FromSecondsFromEpoch(100), 0.1, 0.1, 10.0),
(rdfvalue.RDFDatetime().FromSecondsFromEpoch(110), 0.1, 0.2, 15.0),
(rdfvalue.RDFDatetime().FromSecondsFromEpoch(120), 0.1, 0.3, 20.0)
] # pyformat: disable
io_samples = [(rdfvalue.RDFDatetime().FromSecondsFromEpoch(100), 100, 100),
(rdfvalue.RDFDatetime().FromSecondsFromEpoch(110), 200, 200),
(rdfvalue.RDFDatetime().FromSecondsFromEpoch(120), 300, 300)]
class MockClientWorker(object):
"""Mock client worker for GetClientStatsActionTest."""
def __init__(self):
self.stats_collector = MockStatsCollector()
class GetClientStatsActionTest(client_test_lib.EmptyActionTest):
"""Test GetClientStats client action."""
def setUp(self):
super(GetClientStatsActionTest, self).setUp()
self.old_boot_time = psutil.boot_time
psutil.boot_time = lambda: 100
def tearDown(self):
super(GetClientStatsActionTest, self).tearDown()
psutil.boot_time = self.old_boot_time
def testReturnsAllDataByDefault(self):
"""Checks that stats collection works."""
stats.STATS.RegisterCounterMetric("grr_client_received_bytes")
stats.STATS.IncrementCounter("grr_client_received_bytes", 1566)
stats.STATS.RegisterCounterMetric("grr_client_sent_bytes")
stats.STATS.IncrementCounter("grr_client_sent_bytes", 2000)
results = self.RunAction(
admin.GetClientStats,
grr_worker=MockClientWorker(),
arg=rdf_client.GetClientStatsRequest())
response = results[0]
self.assertEqual(response.bytes_received, 1566)
self.assertEqual(response.bytes_sent, 2000)
self.assertEqual(len(response.cpu_samples), 3)
for i in range(3):
self.assertEqual(
response.cpu_samples[i].timestamp,
rdfvalue.RDFDatetime().FromSecondsFromEpoch(100 + i * 10))
self.assertAlmostEqual(response.cpu_samples[i].user_cpu_time, 0.1)
self.assertAlmostEqual(response.cpu_samples[i].system_cpu_time,
0.1 * (i + 1))
self.assertAlmostEqual(response.cpu_samples[i].cpu_percent, 10.0 + 5 * i)
self.assertEqual(len(response.io_samples), 3)
for i in range(3):
self.assertEqual(
response.io_samples[i].timestamp,
rdfvalue.RDFDatetime().FromSecondsFromEpoch(100 + i * 10))
self.assertEqual(response.io_samples[i].read_bytes, 100 * (i + 1))
self.assertEqual(response.io_samples[i].write_bytes, 100 * (i + 1))
self.assertEqual(response.boot_time, long(100 * 1e6))
def testFiltersDataPointsByStartTime(self):
start_time = rdfvalue.RDFDatetime().FromSecondsFromEpoch(117)
results = self.RunAction(
admin.GetClientStats,
grr_worker=MockClientWorker(),
arg=rdf_client.GetClientStatsRequest(start_time=start_time))
response = results[0]
self.assertEqual(len(response.cpu_samples), 1)
self.assertEqual(response.cpu_samples[0].timestamp,
rdfvalue.RDFDatetime().FromSecondsFromEpoch(120))
self.assertEqual(len(response.io_samples), 1)
self.assertEqual(response.io_samples[0].timestamp,
rdfvalue.RDFDatetime().FromSecondsFromEpoch(120))
def testFiltersDataPointsByEndTime(self):
end_time = rdfvalue.RDFDatetime().FromSecondsFromEpoch(102)
results = self.RunAction(
admin.GetClientStats,
grr_worker=MockClientWorker(),
arg=rdf_client.GetClientStatsRequest(end_time=end_time))
response = results[0]
self.assertEqual(len(response.cpu_samples), 1)
self.assertEqual(response.cpu_samples[0].timestamp,
rdfvalue.RDFDatetime().FromSecondsFromEpoch(100))
self.assertEqual(len(response.io_samples), 1)
self.assertEqual(response.io_samples[0].timestamp,
rdfvalue.RDFDatetime().FromSecondsFromEpoch(100))
def testFiltersDataPointsByStartAndEndTimes(self):
start_time = rdfvalue.RDFDatetime().FromSecondsFromEpoch(109)
end_time = rdfvalue.RDFDatetime().FromSecondsFromEpoch(113)
results = self.RunAction(
admin.GetClientStats,
grr_worker=MockClientWorker(),
arg=rdf_client.GetClientStatsRequest(
start_time=start_time, end_time=end_time))
response = results[0]
self.assertEqual(len(response.cpu_samples), 1)
self.assertEqual(response.cpu_samples[0].timestamp,
rdfvalue.RDFDatetime().FromSecondsFromEpoch(110))
self.assertEqual(len(response.io_samples), 1)
self.assertEqual(response.io_samples[0].timestamp,
rdfvalue.RDFDatetime().FromSecondsFromEpoch(110))
def main(argv):
test_lib.main(argv)
if __name__ == "__main__":
flags.StartMain(main)
|
py | 1a47e16bbc1c9a27bb44a250b95b6dc46f70cbad | # ======================================================================== #
#
# Copyright (c) 2017 - 2020 scVAE authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ======================================================================== #
import numpy
from scvae.data.sparse import sparsity
from scvae.data.utilities import standard_deviation
MAXIMUM_NUMBER_OF_VALUES_FOR_NORMAL_STATISTICS_COMPUTATION = 5e8
def summary_statistics(x, name="", tolerance=1e-3, skip_sparsity=False):
batch_size = None
if x.size > MAXIMUM_NUMBER_OF_VALUES_FOR_NORMAL_STATISTICS_COMPUTATION:
batch_size = 1000
x_mean = x.mean()
x_std = standard_deviation(x, ddof=1, batch_size=batch_size)
x_min = x.min()
x_max = x.max()
x_dispersion = x_std**2 / x_mean
if skip_sparsity:
x_sparsity = numpy.nan
else:
x_sparsity = sparsity(x, tolerance=tolerance, batch_size=batch_size)
statistics = {
"name": name,
"mean": x_mean,
"standard deviation": x_std,
"minimum": x_min,
"maximum": x_max,
"dispersion": x_dispersion,
"sparsity": x_sparsity
}
return statistics
def format_summary_statistics(statistics_sets, name="Data set"):
if not isinstance(statistics_sets, list):
statistics_sets = [statistics_sets]
name_width = max(
[len(name)]
+ [len(statistics_set["name"]) for statistics_set in statistics_sets]
)
table_heading = " ".join([
"{:{}}".format(name, name_width),
" mean ", "std. dev. ", "dispersion",
" minimum ", " maximum ", "sparsity"
])
table_rows = [table_heading]
for statistics_set in statistics_sets:
table_row_parts = [
"{:{}}".format(statistics_set["name"], name_width),
"{:<9.5g}".format(statistics_set["mean"]),
"{:<9.5g}".format(statistics_set["standard deviation"]),
"{:<9.5g}".format(statistics_set["dispersion"]),
"{:<11.5g}".format(statistics_set["minimum"]),
"{:<11.5g}".format(statistics_set["maximum"]),
"{:<7.5g}".format(statistics_set["sparsity"]),
]
table_row = " ".join(table_row_parts)
table_rows.append(table_row)
table = "\n".join(table_rows)
return table
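# Illustrative usage (a sketch; the array values and names below are
# assumptions, not part of scVAE):
#
#     x = numpy.random.poisson(1.0, size=(100, 20))
#     statistics = summary_statistics(x, name="example")
#     print(format_summary_statistics(statistics, name="Data set"))
#
# This prints the table heading followed by one aligned row per statistics
# set passed in.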
|
py | 1a47e1847565ef50632f11f2b4b0ff97bac7713a | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('gitrepo', '0002_gitbranchtrailentry_order'),
('commandrepo', '0004_commandgroupentry_user'),
('bluesteel', '0007_remove_bluesteellayoutentry_archive'),
]
operations = [
migrations.CreateModel(
name='BenchmarkDefinitionEntry',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(default=b'Default benchmark name', max_length=128)),
('revision', models.IntegerField(default=0)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('command_set', models.ForeignKey(related_name='benchmark_command_set', to='commandrepo.CommandSetEntry')),
('layout', models.ForeignKey(related_name='benchmark_layout', to='bluesteel.BluesteelLayoutEntry')),
('project', models.ForeignKey(related_name='benchmark_project', to='bluesteel.BluesteelProjectEntry')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='BenchmarkExecutionEntry',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('invalidated', models.BooleanField(default=False)),
('revision_target', models.IntegerField(default=-1)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('commit', models.ForeignKey(related_name='benchmark_exec_commit', to='gitrepo.GitCommitEntry')),
('definition', models.ForeignKey(related_name='benchmark_exec_definition', to='benchmark.BenchmarkDefinitionEntry')),
('report', models.ForeignKey(related_name='benchmark_exec_command_set', to='commandrepo.CommandSetEntry')),
],
options={
},
bases=(models.Model,),
),
]
|
py | 1a47e24ae3fd1a293895e53787f8520009e603f6 | """Linear covariance function."""
from typing import Optional
import numpy as np
import probnum.utils as _utils
from probnum.typing import IntArgType, ScalarArgType
from ._kernel import Kernel
_InputType = np.ndarray
class Linear(Kernel[_InputType]):
"""Linear kernel.
Linear covariance function defined by :math:`k(x_0, x_1) = x_0^\\top x_1 + c`.
Parameters
----------
input_dim :
Input dimension of the kernel.
constant
Constant offset :math:`c`.
See Also
--------
Polynomial : Polynomial covariance function.
Examples
--------
>>> import numpy as np
>>> from probnum.kernels import Linear
>>> K = Linear(input_dim=2)
>>> K(np.array([[1, 2], [2, 3]]))
array([[ 5., 8.],
[ 8., 13.]])
"""
def __init__(self, input_dim: IntArgType, constant: ScalarArgType = 0.0):
self.constant = _utils.as_numpy_scalar(constant)
super().__init__(input_dim=input_dim, output_dim=1)
def __call__(self, x0: _InputType, x1: Optional[_InputType] = None) -> np.ndarray:
x0, x1, kernshape = self._check_and_reshape_inputs(x0, x1)
# Compute kernel matrix
if x1 is None:
x1 = x0
kernmat = x0 @ x1.T + self.constant
return Kernel._reshape_kernelmatrix(kernmat, newshape=kernshape)
|
py | 1a47e28f14ee13e50dff68ccf84da36eeaac209f | # -*- coding: utf-8 -*-
"""
Copyright 2021 Tianshu AI Platform. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=============================================================
"""
from django.test import TestCase
import json
import random
class TestScalarRequest(TestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
def init_test(self):
if getattr(self, "category", None) is None:
_init_res = self.client.get("/api/init", format="json")
self.assertEqual(_init_res.status_code, 200)
_init_json = json.loads(_init_res.content)
self.assertEqual(_init_json["code"], 200)
_cate_res = self.client.get("/api/getCategory", format="json")
self.category = json.loads(_cate_res.content)["data"]
def test_get_scalar(self):
self.init_test()
_run = ""
_tag = ""
for k in self.category.keys():
if "scalar" in self.category[k].keys():
_run = k
_tag = random.choice(list(self.category[k]['scalar'].keys()))
break
try:
assert _run != "" and _tag != "", "There is no scalar data in test logs."
except AssertionError as e:
import logging
logging.error(str(e))
return
res = self.client.get("/api/scalar", {'run': _run, 'tag': _tag})
_json = json.loads(res.content)
self.assertEqual(_json["code"], 200)
|
py | 1a47e29ebbb28dda2529e147a2e24f1c7e0235d9 | from flask import render_template
from flask.ext.login import login_required
from .views import frontend
@frontend.route('/about/')
@login_required
def about():
nav = 'about'
return render_template('frontend/about.html', **locals())
|
py | 1a47e2a5629eb6d50125cf769f5fb845dd9a365f | '''
Module implementing bucket sort.
'''
from pyalgo.sort.insertion_sort import insertion_sort
def bucket_sort(arr: list):
    """Sort values in the half-open interval [0, 1) using bucket sort."""
    buckets = []
    slot_num = 10
    # Create the empty buckets.
    for i in range(slot_num):
        buckets.append([])
    # Distribute each value into a bucket; values are assumed to lie in
    # [0, 1), so int(slot_num * j) is a valid bucket index.
    for j in arr:
        index_b = int(slot_num * j)
        buckets[index_b].append(j)
    # Sort each bucket individually.
    for i in range(slot_num):
        buckets[i] = insertion_sort(buckets[i])
    # Concatenate the sorted buckets back into the input list.
    k = 0
    for i in range(slot_num):
        for j in range(len(buckets[i])):
            arr[k] = buckets[i][j]
            k += 1
    return arr
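# Illustrative usage (a sketch, not part of the original module; the input
# values are made up and must lie in [0, 1)). The expected result assumes
# pyalgo's insertion_sort returns the sorted bucket, as used above:
#
#     >>> bucket_sort([0.42, 0.32, 0.33, 0.52, 0.37, 0.47, 0.51])
#     [0.32, 0.33, 0.37, 0.42, 0.47, 0.51, 0.52]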
'''
PyAlgo
Devansh Singh, 2021
''' |
py | 1a47e2ebdc10eb6f704cce98c5f84d234612f658 |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import maximum_paths
import multipath
import dampening
import table_map
import graceful_restart
class af_common_cmds_holder(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-common-def - based on the path /routing-system/router/router-bgp/address-family/ipv4/ipv4-unicast/default-vrf/af-common-cmds-holder. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__maximum_paths','__multipath','__always_propagate','__default_information_originate','__rib_route_limit','__client_to_client_reflection','__dampening','__default_metric','__next_hop_enable_default','__table_map','__update_time','__graceful_restart',)
_yang_name = 'af-common-cmds-holder'
_rest_name = ''
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__client_to_client_reflection = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="client-to-client-reflection", rest_name="client-to-client-reflection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'cli-run-template': u'$(.?:no client-to-client-reflection\n)', u'info': u'Configure client to client route reflection'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__dampening = YANGDynClass(base=dampening.dampening, is_container='container', presence=False, yang_name="dampening", rest_name="dampening", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable route-flap dampening', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__graceful_restart = YANGDynClass(base=graceful_restart.graceful_restart, is_container='container', presence=False, yang_name="graceful-restart", rest_name="graceful-restart", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enables the BGP graceful restart capability', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__default_metric = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4294967295']}), is_leaf=True, yang_name="default-metric", rest_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set metric of redistributed routes'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='default-metric-number', is_config=True)
self.__maximum_paths = YANGDynClass(base=maximum_paths.maximum_paths, is_container='container', presence=False, yang_name="maximum-paths", rest_name="maximum-paths", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Forward packets over multiple paths', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__next_hop_enable_default = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="next-hop-enable-default", rest_name="next-hop-enable-default", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable default route for BGP next-hop lookup'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__always_propagate = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="always-propagate", rest_name="always-propagate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Allow readvertisement of best BGP routes not in IP Forwarding table'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__update_time = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..30']}), is_leaf=True, yang_name="update-time", rest_name="update-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure igp route update interval'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='update-time-type', is_config=True)
self.__default_information_originate = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="default-information-originate", rest_name="default-information-originate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Originate Default Information'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
self.__table_map = YANGDynClass(base=table_map.table_map, is_container='container', presence=False, yang_name="table-map", rest_name="table-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Map external entry attributes into routing table', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__multipath = YANGDynClass(base=multipath.multipath, is_container='container', presence=False, yang_name="multipath", rest_name="multipath", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable multipath for ibgp or ebgp neighbors only', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
self.__rib_route_limit = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4294967295']}), is_leaf=True, yang_name="rib-route-limit", rest_name="rib-route-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Limit BGP rib count in routing table'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='rib-route-number', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'routing-system', u'router', u'router-bgp', u'address-family', u'ipv4', u'ipv4-unicast', u'default-vrf', u'af-common-cmds-holder']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'router', u'bgp', u'address-family', u'ipv4', u'unicast']
def _get_maximum_paths(self):
"""
Getter method for maximum_paths, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/maximum_paths (container)
"""
return self.__maximum_paths
def _set_maximum_paths(self, v, load=False):
"""
Setter method for maximum_paths, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/maximum_paths (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_maximum_paths is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_maximum_paths() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=maximum_paths.maximum_paths, is_container='container', presence=False, yang_name="maximum-paths", rest_name="maximum-paths", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Forward packets over multiple paths', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """maximum_paths must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=maximum_paths.maximum_paths, is_container='container', presence=False, yang_name="maximum-paths", rest_name="maximum-paths", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Forward packets over multiple paths', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__maximum_paths = t
if hasattr(self, '_set'):
self._set()
def _unset_maximum_paths(self):
self.__maximum_paths = YANGDynClass(base=maximum_paths.maximum_paths, is_container='container', presence=False, yang_name="maximum-paths", rest_name="maximum-paths", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Forward packets over multiple paths', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_multipath(self):
"""
Getter method for multipath, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/multipath (container)
"""
return self.__multipath
def _set_multipath(self, v, load=False):
"""
Setter method for multipath, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/multipath (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_multipath is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_multipath() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=multipath.multipath, is_container='container', presence=False, yang_name="multipath", rest_name="multipath", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable multipath for ibgp or ebgp neighbors only', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """multipath must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=multipath.multipath, is_container='container', presence=False, yang_name="multipath", rest_name="multipath", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable multipath for ibgp or ebgp neighbors only', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__multipath = t
if hasattr(self, '_set'):
self._set()
def _unset_multipath(self):
self.__multipath = YANGDynClass(base=multipath.multipath, is_container='container', presence=False, yang_name="multipath", rest_name="multipath", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable multipath for ibgp or ebgp neighbors only', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_always_propagate(self):
"""
Getter method for always_propagate, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/always_propagate (empty)
"""
return self.__always_propagate
def _set_always_propagate(self, v, load=False):
"""
Setter method for always_propagate, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/always_propagate (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_always_propagate is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_always_propagate() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="always-propagate", rest_name="always-propagate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Allow readvertisement of best BGP routes not in IP Forwarding table'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """always_propagate must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="always-propagate", rest_name="always-propagate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Allow readvertisement of best BGP routes not in IP Forwarding table'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__always_propagate = t
if hasattr(self, '_set'):
self._set()
def _unset_always_propagate(self):
self.__always_propagate = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="always-propagate", rest_name="always-propagate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Allow readvertisement of best BGP routes not in IP Forwarding table'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_default_information_originate(self):
"""
Getter method for default_information_originate, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/default_information_originate (empty)
"""
return self.__default_information_originate
def _set_default_information_originate(self, v, load=False):
"""
Setter method for default_information_originate, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/default_information_originate (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_default_information_originate is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_default_information_originate() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="default-information-originate", rest_name="default-information-originate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Originate Default Information'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """default_information_originate must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="default-information-originate", rest_name="default-information-originate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Originate Default Information'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__default_information_originate = t
if hasattr(self, '_set'):
self._set()
def _unset_default_information_originate(self):
self.__default_information_originate = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="default-information-originate", rest_name="default-information-originate", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Originate Default Information'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_rib_route_limit(self):
"""
Getter method for rib_route_limit, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/rib_route_limit (rib-route-number)
"""
return self.__rib_route_limit
def _set_rib_route_limit(self, v, load=False):
"""
Setter method for rib_route_limit, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/rib_route_limit (rib-route-number)
If this variable is read-only (config: false) in the
source YANG file, then _set_rib_route_limit is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_rib_route_limit() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4294967295']}), is_leaf=True, yang_name="rib-route-limit", rest_name="rib-route-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Limit BGP rib count in routing table'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='rib-route-number', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """rib_route_limit must be of a type compatible with rib-route-number""",
'defined-type': "brocade-bgp:rib-route-number",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4294967295']}), is_leaf=True, yang_name="rib-route-limit", rest_name="rib-route-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Limit BGP rib count in routing table'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='rib-route-number', is_config=True)""",
})
self.__rib_route_limit = t
if hasattr(self, '_set'):
self._set()
def _unset_rib_route_limit(self):
self.__rib_route_limit = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4294967295']}), is_leaf=True, yang_name="rib-route-limit", rest_name="rib-route-limit", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Limit BGP rib count in routing table'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='rib-route-number', is_config=True)
def _get_client_to_client_reflection(self):
"""
Getter method for client_to_client_reflection, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/client_to_client_reflection (empty)
"""
return self.__client_to_client_reflection
def _set_client_to_client_reflection(self, v, load=False):
"""
Setter method for client_to_client_reflection, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/client_to_client_reflection (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_client_to_client_reflection is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_client_to_client_reflection() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="client-to-client-reflection", rest_name="client-to-client-reflection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'cli-run-template': u'$(.?:no client-to-client-reflection\n)', u'info': u'Configure client to client route reflection'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """client_to_client_reflection must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="client-to-client-reflection", rest_name="client-to-client-reflection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'cli-run-template': u'$(.?:no client-to-client-reflection\n)', u'info': u'Configure client to client route reflection'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__client_to_client_reflection = t
if hasattr(self, '_set'):
self._set()
def _unset_client_to_client_reflection(self):
self.__client_to_client_reflection = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="client-to-client-reflection", rest_name="client-to-client-reflection", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'cli-run-template': u'$(.?:no client-to-client-reflection\n)', u'info': u'Configure client to client route reflection'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_dampening(self):
"""
Getter method for dampening, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/dampening (container)
"""
return self.__dampening
def _set_dampening(self, v, load=False):
"""
Setter method for dampening, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/dampening (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_dampening is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_dampening() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=dampening.dampening, is_container='container', presence=False, yang_name="dampening", rest_name="dampening", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable route-flap dampening', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """dampening must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=dampening.dampening, is_container='container', presence=False, yang_name="dampening", rest_name="dampening", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable route-flap dampening', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__dampening = t
if hasattr(self, '_set'):
self._set()
def _unset_dampening(self):
self.__dampening = YANGDynClass(base=dampening.dampening, is_container='container', presence=False, yang_name="dampening", rest_name="dampening", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable route-flap dampening', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_default_metric(self):
"""
Getter method for default_metric, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/default_metric (default-metric-number)
"""
return self.__default_metric
def _set_default_metric(self, v, load=False):
"""
Setter method for default_metric, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/default_metric (default-metric-number)
If this variable is read-only (config: false) in the
source YANG file, then _set_default_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_default_metric() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4294967295']}), is_leaf=True, yang_name="default-metric", rest_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set metric of redistributed routes'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='default-metric-number', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """default_metric must be of a type compatible with default-metric-number""",
'defined-type': "brocade-bgp:default-metric-number",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4294967295']}), is_leaf=True, yang_name="default-metric", rest_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set metric of redistributed routes'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='default-metric-number', is_config=True)""",
})
self.__default_metric = t
if hasattr(self, '_set'):
self._set()
def _unset_default_metric(self):
self.__default_metric = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..4294967295']}), is_leaf=True, yang_name="default-metric", rest_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Set metric of redistributed routes'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='default-metric-number', is_config=True)
def _get_next_hop_enable_default(self):
"""
Getter method for next_hop_enable_default, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/next_hop_enable_default (empty)
"""
return self.__next_hop_enable_default
def _set_next_hop_enable_default(self, v, load=False):
"""
Setter method for next_hop_enable_default, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/next_hop_enable_default (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_next_hop_enable_default is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_next_hop_enable_default() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="next-hop-enable-default", rest_name="next-hop-enable-default", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable default route for BGP next-hop lookup'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """next_hop_enable_default must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="next-hop-enable-default", rest_name="next-hop-enable-default", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable default route for BGP next-hop lookup'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)""",
})
self.__next_hop_enable_default = t
if hasattr(self, '_set'):
self._set()
def _unset_next_hop_enable_default(self):
self.__next_hop_enable_default = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="next-hop-enable-default", rest_name="next-hop-enable-default", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Enable default route for BGP next-hop lookup'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='empty', is_config=True)
def _get_table_map(self):
"""
Getter method for table_map, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/table_map (container)
"""
return self.__table_map
def _set_table_map(self, v, load=False):
"""
Setter method for table_map, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/table_map (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_table_map is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_table_map() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=table_map.table_map, is_container='container', presence=False, yang_name="table-map", rest_name="table-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Map external entry attributes into routing table', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """table_map must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=table_map.table_map, is_container='container', presence=False, yang_name="table-map", rest_name="table-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Map external entry attributes into routing table', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__table_map = t
if hasattr(self, '_set'):
self._set()
def _unset_table_map(self):
self.__table_map = YANGDynClass(base=table_map.table_map, is_container='container', presence=False, yang_name="table-map", rest_name="table-map", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Map external entry attributes into routing table', u'cli-incomplete-no': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
def _get_update_time(self):
"""
Getter method for update_time, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/update_time (update-time-type)
"""
return self.__update_time
def _set_update_time(self, v, load=False):
"""
Setter method for update_time, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/update_time (update-time-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_update_time is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_update_time() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..30']}), is_leaf=True, yang_name="update-time", rest_name="update-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure igp route update interval'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='update-time-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """update_time must be of a type compatible with update-time-type""",
'defined-type': "brocade-bgp:update-time-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..30']}), is_leaf=True, yang_name="update-time", rest_name="update-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure igp route update interval'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='update-time-type', is_config=True)""",
})
self.__update_time = t
if hasattr(self, '_set'):
self._set()
def _unset_update_time(self):
self.__update_time = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..30']}), is_leaf=True, yang_name="update-time", rest_name="update-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Configure igp route update interval'}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='update-time-type', is_config=True)
def _get_graceful_restart(self):
"""
Getter method for graceful_restart, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/graceful_restart (container)
"""
return self.__graceful_restart
def _set_graceful_restart(self, v, load=False):
"""
Setter method for graceful_restart, mapped from YANG variable /routing_system/router/router_bgp/address_family/ipv4/ipv4_unicast/default_vrf/af_common_cmds_holder/graceful_restart (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_graceful_restart is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_graceful_restart() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=graceful_restart.graceful_restart, is_container='container', presence=False, yang_name="graceful-restart", rest_name="graceful-restart", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enables the BGP graceful restart capability', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """graceful_restart must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=graceful_restart.graceful_restart, is_container='container', presence=False, yang_name="graceful-restart", rest_name="graceful-restart", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enables the BGP graceful restart capability', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)""",
})
self.__graceful_restart = t
if hasattr(self, '_set'):
self._set()
def _unset_graceful_restart(self):
self.__graceful_restart = YANGDynClass(base=graceful_restart.graceful_restart, is_container='container', presence=False, yang_name="graceful-restart", rest_name="graceful-restart", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enables the BGP graceful restart capability', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp', defining_module='brocade-bgp', yang_type='container', is_config=True)
maximum_paths = __builtin__.property(_get_maximum_paths, _set_maximum_paths)
multipath = __builtin__.property(_get_multipath, _set_multipath)
always_propagate = __builtin__.property(_get_always_propagate, _set_always_propagate)
default_information_originate = __builtin__.property(_get_default_information_originate, _set_default_information_originate)
rib_route_limit = __builtin__.property(_get_rib_route_limit, _set_rib_route_limit)
client_to_client_reflection = __builtin__.property(_get_client_to_client_reflection, _set_client_to_client_reflection)
dampening = __builtin__.property(_get_dampening, _set_dampening)
default_metric = __builtin__.property(_get_default_metric, _set_default_metric)
next_hop_enable_default = __builtin__.property(_get_next_hop_enable_default, _set_next_hop_enable_default)
table_map = __builtin__.property(_get_table_map, _set_table_map)
update_time = __builtin__.property(_get_update_time, _set_update_time)
graceful_restart = __builtin__.property(_get_graceful_restart, _set_graceful_restart)
_pyangbind_elements = {'maximum_paths': maximum_paths, 'multipath': multipath, 'always_propagate': always_propagate, 'default_information_originate': default_information_originate, 'rib_route_limit': rib_route_limit, 'client_to_client_reflection': client_to_client_reflection, 'dampening': dampening, 'default_metric': default_metric, 'next_hop_enable_default': next_hop_enable_default, 'table_map': table_map, 'update_time': update_time, 'graceful_restart': graceful_restart, }
|
py | 1a47e39627518baf94951b59f688fc5bc269037e | import unittest
import serve
import argparse
class ServeTest(unittest.TestCase):
def test_str2bool(self):
self.assertTrue(serve._str2bool('True'))
self.assertTrue(serve._str2bool('TRUE'))
self.assertTrue(serve._str2bool('true'))
self.assertTrue(serve._str2bool('T'))
self.assertTrue(serve._str2bool('t'))
self.assertTrue(serve._str2bool('trUe'))
self.assertTrue(serve._str2bool('1'))
self.assertTrue(serve._str2bool('Yes'))
self.assertTrue(serve._str2bool('YES'))
self.assertTrue(serve._str2bool('yes'))
self.assertTrue(serve._str2bool('yeS'))
self.assertTrue(serve._str2bool('Y'))
self.assertTrue(serve._str2bool('y'))
self.assertFalse(serve._str2bool('False'))
self.assertFalse(serve._str2bool('FALSE'))
self.assertFalse(serve._str2bool('false'))
self.assertFalse(serve._str2bool('F'))
self.assertFalse(serve._str2bool('f'))
self.assertFalse(serve._str2bool('faLse'))
self.assertFalse(serve._str2bool('0'))
self.assertFalse(serve._str2bool('No'))
self.assertFalse(serve._str2bool('NO'))
self.assertFalse(serve._str2bool('no'))
self.assertFalse(serve._str2bool('nO'))
self.assertFalse(serve._str2bool('N'))
self.assertFalse(serve._str2bool('n'))
with self.assertRaises(argparse.ArgumentTypeError):
serve._str2bool('')
with self.assertRaises(argparse.ArgumentTypeError):
serve._str2bool('asd')
with self.assertRaises(TypeError):
serve._str2bool(True)
with self.assertRaises(TypeError):
serve._str2bool(False)
with self.assertRaises(TypeError):
serve._str2bool(1)
with self.assertRaises(TypeError):
serve._str2bool(0)
def test_is_filename_allowed(self):
self.assertTrue(serve._is_filename_allowed('a.txt'))
self.assertTrue(serve._is_filename_allowed('a.TXT'))
self.assertTrue(serve._is_filename_allowed('a.txT'))
self.assertTrue(serve._is_filename_allowed('b.pdf'))
self.assertTrue(serve._is_filename_allowed('c.png'))
self.assertTrue(serve._is_filename_allowed('d.jpg'))
self.assertTrue(serve._is_filename_allowed('e.jpeg'))
self.assertTrue(serve._is_filename_allowed('f.gif'))
self.assertTrue(serve._is_filename_allowed('çöşığüé§âİ.gif'))
self.assertTrue(serve._is_filename_allowed('asd.qwe.jpeg'))
self.assertFalse(serve._is_filename_allowed('zxc.gİf'))
self.assertFalse(serve._is_filename_allowed(''))
self.assertFalse(serve._is_filename_allowed('asd'))
self.assertFalse(serve._is_filename_allowed('.'))
self.assertFalse(serve._is_filename_allowed('.txt'))
self.assertFalse(serve._is_filename_allowed('/\\:*?<>|.txt'))
self.assertFalse(serve._is_filename_allowed('q. '))
self.assertFalse(serve._is_filename_allowed(' . '))
self.assertFalse(serve._is_filename_allowed(' .pdf'))
self.assertFalse(serve._is_filename_allowed('a. jpg'))
self.assertFalse(serve._is_filename_allowed('qwe.docx'))
with self.assertRaises(TypeError):
serve._is_filename_allowed(1)
with self.assertRaises(TypeError):
serve._is_filename_allowed(False)
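# A minimal reference sketch (added for clarity, not part of the original test
# module) of a _str2bool that would satisfy the assertions above. The real
# implementation lives in serve.py, which is not shown here, so treat this as
# an assumption rather than the actual code under test.
def _str2bool_reference(value):
    if not isinstance(value, str):
        raise TypeError('expected a string, got %r' % (value,))
    if value.lower() in ('true', 't', 'yes', 'y', '1'):
        return True
    if value.lower() in ('false', 'f', 'no', 'n', '0'):
        return False
    raise argparse.ArgumentTypeError('boolean value expected')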
if __name__ == '__main__':
unittest.main()
|
py | 1a47e4754ce00b17106a734a309153c51f486fb7 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This module contains Google Compute Engine operators.
"""
from copy import deepcopy
from typing import Dict
from json_merge_patch import merge
from googleapiclient.errors import HttpError
from airflow import AirflowException
from airflow.contrib.hooks.gcp_compute_hook import GceHook
from airflow.contrib.utils.gcp_field_sanitizer import GcpBodyFieldSanitizer
from airflow.contrib.utils.gcp_field_validator import GcpBodyFieldValidator
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class GceBaseOperator(BaseOperator):
"""
Abstract base operator for Google Compute Engine operators to inherit from.
"""
@apply_defaults
def __init__(self,
zone,
resource_id,
project_id=None,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
self.project_id = project_id
self.zone = zone
self.resource_id = resource_id
self.gcp_conn_id = gcp_conn_id
self.api_version = api_version
self._validate_inputs()
self._hook = GceHook(gcp_conn_id=self.gcp_conn_id, api_version=self.api_version)
super().__init__(*args, **kwargs)
def _validate_inputs(self):
if self.project_id == '':
raise AirflowException("The required parameter 'project_id' is missing")
if not self.zone:
raise AirflowException("The required parameter 'zone' is missing")
if not self.resource_id:
raise AirflowException("The required parameter 'resource_id' is missing")
def execute(self, context):
pass
class GceInstanceStartOperator(GceBaseOperator):
"""
Starts an instance in Google Compute Engine.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:GceInstanceStartOperator`
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param project_id: Optional, Google Cloud Platform Project ID where the Compute
Engine Instance exists. If set to None or missing, the default project_id from the GCP connection is
used.
:type project_id: str
:param gcp_conn_id: Optional, The connection ID used to connect to Google Cloud
Platform. Defaults to 'google_cloud_default'.
:type gcp_conn_id: str
:param api_version: Optional, API version used (for example v1 - or beta). Defaults
to v1.
:type api_version: str
"""
# [START gce_instance_start_template_fields]
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
# [END gce_instance_start_template_fields]
@apply_defaults
def __init__(self,
zone,
resource_id,
project_id=None,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super().__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
return self._hook.start_instance(zone=self.zone,
resource_id=self.resource_id,
project_id=self.project_id)
class GceInstanceStopOperator(GceBaseOperator):
"""
Stops an instance in Google Compute Engine.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:GceInstanceStopOperator`
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param project_id: Optional, Google Cloud Platform Project ID where the Compute
Engine Instance exists. If set to None or missing, the default project_id from the GCP connection is
used.
:type project_id: str
:param gcp_conn_id: Optional, The connection ID used to connect to Google Cloud
Platform. Defaults to 'google_cloud_default'.
:type gcp_conn_id: str
:param api_version: Optional, API version used (for example v1 - or beta). Defaults
to v1.
:type api_version: str
"""
# [START gce_instance_stop_template_fields]
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
# [END gce_instance_stop_template_fields]
@apply_defaults
def __init__(self,
zone,
resource_id,
project_id=None,
gcp_conn_id='google_cloud_default',
api_version='v1',
*args, **kwargs):
super().__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def execute(self, context):
self._hook.stop_instance(zone=self.zone,
resource_id=self.resource_id,
project_id=self.project_id)
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION = [
dict(name="machineType", regexp="^.+$"),
]
class GceSetMachineTypeOperator(GceBaseOperator):
"""
Changes the machine type for a stopped instance to the machine type specified in
the request.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:GceSetMachineTypeOperator`
:param zone: Google Cloud Platform zone where the instance exists.
:type zone: str
:param resource_id: Name of the Compute Engine instance resource.
:type resource_id: str
:param body: Body required by the Compute Engine setMachineType API, as described in
https://cloud.google.com/compute/docs/reference/rest/v1/instances/setMachineType#request-body
:type body: dict
:param project_id: Optional, Google Cloud Platform Project ID where the Compute
Engine Instance exists. If set to None or missing, the default project_id from the GCP connection
is used.
:type project_id: str
:param gcp_conn_id: Optional, The connection ID used to connect to Google Cloud
Platform. Defaults to 'google_cloud_default'.
:type gcp_conn_id: str
:param api_version: Optional, API version used (for example v1 - or beta). Defaults
to v1.
:type api_version: str
:param validate_body: Optional, If set to False, body validation is not performed.
        Defaults to True.
:type validate_body: bool
"""
# [START gce_instance_set_machine_type_template_fields]
template_fields = ('project_id', 'zone', 'resource_id', 'gcp_conn_id', 'api_version')
# [END gce_instance_set_machine_type_template_fields]
@apply_defaults
def __init__(self,
zone,
resource_id,
body,
project_id=None,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body = body
self._field_validator = None
if validate_body:
self._field_validator = GcpBodyFieldValidator(
SET_MACHINE_TYPE_VALIDATION_SPECIFICATION, api_version=api_version)
super().__init__(
project_id=project_id, zone=zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body)
def execute(self, context):
self._validate_all_body_fields()
return self._hook.set_machine_type(zone=self.zone,
resource_id=self.resource_id,
body=self.body,
project_id=self.project_id)
GCE_INSTANCE_TEMPLATE_VALIDATION_PATCH_SPECIFICATION = [
dict(name="name", regexp="^.+$"),
dict(name="description", optional=True),
dict(name="properties", type='dict', optional=True, fields=[
dict(name="description", optional=True),
dict(name="tags", optional=True, fields=[
dict(name="items", optional=True)
]),
dict(name="machineType", optional=True),
dict(name="canIpForward", optional=True),
dict(name="networkInterfaces", optional=True), # not validating deeper
dict(name="disks", optional=True), # not validating the array deeper
dict(name="metadata", optional=True, fields=[
dict(name="fingerprint", optional=True),
dict(name="items", optional=True),
dict(name="kind", optional=True),
]),
dict(name="serviceAccounts", optional=True), # not validating deeper
dict(name="scheduling", optional=True, fields=[
dict(name="onHostMaintenance", optional=True),
dict(name="automaticRestart", optional=True),
dict(name="preemptible", optional=True),
dict(name="nodeAffinitites", optional=True), # not validating deeper
]),
dict(name="labels", optional=True),
dict(name="guestAccelerators", optional=True), # not validating deeper
dict(name="minCpuPlatform", optional=True),
]),
]
GCE_INSTANCE_TEMPLATE_FIELDS_TO_SANITIZE = [
"kind",
"id",
"name",
"creationTimestamp",
"properties.disks.sha256",
"properties.disks.kind",
"properties.disks.sourceImageEncryptionKey.sha256",
"properties.disks.index",
"properties.disks.licenses",
"properties.networkInterfaces.kind",
"properties.networkInterfaces.accessConfigs.kind",
"properties.networkInterfaces.name",
"properties.metadata.kind",
"selfLink"
]
class GceInstanceTemplateCopyOperator(GceBaseOperator):
"""
Copies the instance template, applying specified changes.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:GceInstanceTemplateCopyOperator`
:param resource_id: Name of the Instance Template
:type resource_id: str
:param body_patch: Patch to the body of instanceTemplates object following rfc7386
PATCH semantics. The body_patch content follows
https://cloud.google.com/compute/docs/reference/rest/v1/instanceTemplates
Name field is required as we need to rename the template,
all the other fields are optional. It is important to follow PATCH semantics
- arrays are replaced fully, so if you need to update an array you should
provide the whole target array as patch element.
:type body_patch: dict
:param project_id: Optional, Google Cloud Platform Project ID where the Compute
Engine Instance exists. If set to None or missing, the default project_id from the GCP connection
is used.
:type project_id: str
:param request_id: Optional, unique request_id that you might add to achieve
full idempotence (for example when client call times out repeating the request
with the same request id will not create a new instance template again).
It should be in UUID format as defined in RFC 4122.
:type request_id: str
:param gcp_conn_id: Optional, The connection ID used to connect to Google Cloud
Platform. Defaults to 'google_cloud_default'.
:type gcp_conn_id: str
:param api_version: Optional, API version used (for example v1 - or beta). Defaults
to v1.
:type api_version: str
:param validate_body: Optional, If set to False, body validation is not performed.
        Defaults to True.
:type validate_body: bool
"""
# [START gce_instance_template_copy_operator_template_fields]
template_fields = ('project_id', 'resource_id', 'request_id',
'gcp_conn_id', 'api_version')
# [END gce_instance_template_copy_operator_template_fields]
@apply_defaults
def __init__(self,
resource_id,
body_patch,
project_id=None,
request_id=None,
gcp_conn_id='google_cloud_default',
api_version='v1',
validate_body=True,
*args, **kwargs):
self.body_patch = body_patch
self.request_id = request_id
self._field_validator = None
if 'name' not in self.body_patch:
raise AirflowException("The body '{}' should contain at least "
"name for the new operator in the 'name' field".
format(body_patch))
if validate_body:
self._field_validator = GcpBodyFieldValidator(
GCE_INSTANCE_TEMPLATE_VALIDATION_PATCH_SPECIFICATION, api_version=api_version)
self._field_sanitizer = GcpBodyFieldSanitizer(
GCE_INSTANCE_TEMPLATE_FIELDS_TO_SANITIZE)
super().__init__(
project_id=project_id, zone='global', resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _validate_all_body_fields(self):
if self._field_validator:
self._field_validator.validate(self.body_patch)
def execute(self, context):
self._validate_all_body_fields()
try:
            # Idempotence check (sort of) - we want to check if the new template
            # has already been created and, if it has, we assume it was created by a
            # previous run of the CopyTemplate operator - we do not check whether the
            # content of the template is as expected. Templates are immutable, so we
            # cannot update them anyway, and deleting/recreating is not worth the
            # hassle, especially as we cannot delete a template that is already used
            # by an Instance Group Manager. We assume success if the template is
            # simply present.
existing_template = self._hook.get_instance_template(
resource_id=self.body_patch['name'], project_id=self.project_id)
self.log.info(
"The %s template already existed. It was likely created by previous run of the operator. "
"Assuming success.",
existing_template
)
return existing_template
except HttpError as e:
# We actually expect to get 404 / Not Found here as the template should
# not yet exist
if not e.resp.status == 404:
raise e
old_body = self._hook.get_instance_template(resource_id=self.resource_id,
project_id=self.project_id)
new_body = deepcopy(old_body)
self._field_sanitizer.sanitize(new_body)
new_body = merge(new_body, self.body_patch)
self.log.info("Calling insert instance template with updated body: %s", new_body)
self._hook.insert_instance_template(body=new_body,
request_id=self.request_id,
project_id=self.project_id)
return self._hook.get_instance_template(resource_id=self.body_patch['name'],
project_id=self.project_id)
class GceInstanceGroupManagerUpdateTemplateOperator(GceBaseOperator):
"""
Patches the Instance Group Manager, replacing source template URL with the
destination one. API V1 does not have update/patch operations for Instance
Group Manager, so you must use beta or newer API version. Beta is the default.
.. seealso::
For more information on how to use this operator, take a look at the guide:
:ref:`howto/operator:GceInstanceGroupManagerUpdateTemplateOperator`
:param resource_id: Name of the Instance Group Manager
:type resource_id: str
:param zone: Google Cloud Platform zone where the Instance Group Manager exists.
:type zone: str
:param source_template: URL of the template to replace.
:type source_template: str
:param destination_template: URL of the target template.
:type destination_template: str
:param project_id: Optional, Google Cloud Platform Project ID where the Compute
Engine Instance exists. If set to None or missing, the default project_id from the GCP connection is
used.
:type project_id: str
:param request_id: Optional, unique request_id that you might add to achieve
full idempotence (for example when client call times out repeating the request
with the same request id will not create a new instance template again).
It should be in UUID format as defined in RFC 4122.
:type request_id: str
:param gcp_conn_id: Optional, The connection ID used to connect to Google Cloud
Platform. Defaults to 'google_cloud_default'.
:type gcp_conn_id: str
    :param update_policy: Optional, the update policy (updatePolicy) to apply when
        patching the Instance Group Manager.
    :type update_policy: dict
    :param api_version: Optional, API version used (for example beta). Defaults
        to beta. Note that v1 is not supported by this operator.
    :type api_version: str
"""
# [START gce_igm_update_template_operator_template_fields]
template_fields = ('project_id', 'resource_id', 'zone', 'request_id',
'source_template', 'destination_template',
'gcp_conn_id', 'api_version')
# [END gce_igm_update_template_operator_template_fields]
@apply_defaults
def __init__(self,
resource_id,
zone,
source_template,
destination_template,
project_id=None,
update_policy=None,
request_id=None,
gcp_conn_id='google_cloud_default',
api_version='beta',
*args, **kwargs):
self.zone = zone
self.source_template = source_template
self.destination_template = destination_template
self.request_id = request_id
self.update_policy = update_policy
self._change_performed = False
if api_version == 'v1':
raise AirflowException("Api version v1 does not have update/patch "
"operations for Instance Group Managers. Use beta"
" api version or above")
super().__init__(
project_id=project_id, zone=self.zone, resource_id=resource_id,
gcp_conn_id=gcp_conn_id, api_version=api_version, *args, **kwargs)
def _possibly_replace_template(self, dictionary: Dict) -> None:
if dictionary.get('instanceTemplate') == self.source_template:
dictionary['instanceTemplate'] = self.destination_template
self._change_performed = True
def execute(self, context):
old_instance_group_manager = self._hook.get_instance_group_manager(
zone=self.zone, resource_id=self.resource_id, project_id=self.project_id)
patch_body = {}
if 'versions' in old_instance_group_manager:
patch_body['versions'] = old_instance_group_manager['versions']
if 'instanceTemplate' in old_instance_group_manager:
patch_body['instanceTemplate'] = old_instance_group_manager['instanceTemplate']
if self.update_policy:
patch_body['updatePolicy'] = self.update_policy
self._possibly_replace_template(patch_body)
if 'versions' in patch_body:
for version in patch_body['versions']:
self._possibly_replace_template(version)
if self._change_performed or self.update_policy:
self.log.info(
"Calling patch instance template with updated body: %s",
patch_body)
return self._hook.patch_instance_group_manager(
zone=self.zone, resource_id=self.resource_id,
body=patch_body, request_id=self.request_id,
project_id=self.project_id)
else:
# Idempotence achieved
return True
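# Illustrative usage of the operators above inside a DAG (added for clarity; the
# snippet is a sketch kept as comments on purpose, and 'example-project',
# 'europe-west1-b' and 'my-instance' are made-up identifiers, not values from
# this module):
#
#     start_instance = GceInstanceStartOperator(
#         task_id='gce_instance_start',
#         project_id='example-project',
#         zone='europe-west1-b',
#         resource_id='my-instance',
#     )
#     stop_instance = GceInstanceStopOperator(
#         task_id='gce_instance_stop',
#         project_id='example-project',
#         zone='europe-west1-b',
#         resource_id='my-instance',
#     )
#     start_instance >> stop_instance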
|
py | 1a47e4e33227dfefadbdece6ad54439c9f7883d1 | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cv2
import numpy as np
def decode_image(im_file, im_info):
"""read rgb image
Args:
im_file (str|np.ndarray): input can be image path or np.ndarray
im_info (dict): info of image
Returns:
im (np.ndarray): processed image (np.ndarray)
im_info (dict): info of processed image
"""
if isinstance(im_file, str):
with open(im_file, 'rb') as f:
im_read = f.read()
data = np.frombuffer(im_read, dtype='uint8')
im = cv2.imdecode(data, 1) # BGR mode, but need RGB mode
im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
else:
im = im_file
im_info['im_shape'] = np.array(im.shape[:2], dtype=np.float32)
im_info['scale_factor'] = np.array([1., 1.], dtype=np.float32)
return im, im_info
class Resize(object):
"""resize image by target_size and max_size
Args:
target_size (int): the target size of image
keep_ratio (bool): whether keep_ratio or not, default true
interp (int): method of resize
"""
def __init__(self, target_size, keep_ratio=True, interp=cv2.INTER_LINEAR):
if isinstance(target_size, int):
target_size = [target_size, target_size]
self.target_size = target_size
self.keep_ratio = keep_ratio
self.interp = interp
def __call__(self, im, im_info):
"""
Args:
im (np.ndarray): image (np.ndarray)
im_info (dict): info of image
Returns:
im (np.ndarray): processed image (np.ndarray)
im_info (dict): info of processed image
"""
assert len(self.target_size) == 2
assert self.target_size[0] > 0 and self.target_size[1] > 0
im_channel = im.shape[2]
im_scale_y, im_scale_x = self.generate_scale(im)
im = cv2.resize(
im,
None,
None,
fx=im_scale_x,
fy=im_scale_y,
interpolation=self.interp)
im_info['im_shape'] = np.array(im.shape[:2]).astype('float32')
im_info['scale_factor'] = np.array(
[im_scale_y, im_scale_x]).astype('float32')
return im, im_info
def generate_scale(self, im):
"""
Args:
im (np.ndarray): image (np.ndarray)
Returns:
im_scale_x: the resize ratio of X
im_scale_y: the resize ratio of Y
"""
origin_shape = im.shape[:2]
im_c = im.shape[2]
if self.keep_ratio:
im_size_min = np.min(origin_shape)
im_size_max = np.max(origin_shape)
target_size_min = np.min(self.target_size)
target_size_max = np.max(self.target_size)
im_scale = float(target_size_min) / float(im_size_min)
if np.round(im_scale * im_size_max) > target_size_max:
im_scale = float(target_size_max) / float(im_size_max)
im_scale_x = im_scale
im_scale_y = im_scale
else:
resize_h, resize_w = self.target_size
im_scale_y = resize_h / float(origin_shape[0])
im_scale_x = resize_w / float(origin_shape[1])
return im_scale_y, im_scale_x
class NormalizeImage(object):
"""normalize image
Args:
mean (list): im - mean
std (list): im / std
        is_scale (bool): whether to divide the image by 255 before normalizing
"""
def __init__(self, mean, std, is_scale=True):
self.mean = mean
self.std = std
self.is_scale = is_scale
def __call__(self, im, im_info):
"""
Args:
im (np.ndarray): image (np.ndarray)
im_info (dict): info of image
Returns:
im (np.ndarray): processed image (np.ndarray)
im_info (dict): info of processed image
"""
im = im.astype(np.float32, copy=False)
mean = np.array(self.mean)[np.newaxis, np.newaxis, :]
std = np.array(self.std)[np.newaxis, np.newaxis, :]
if self.is_scale:
im = im / 255.0
im -= mean
im /= std
return im, im_info
class Permute(object):
"""permute image
Args:
to_bgr (bool): whether convert RGB to BGR
channel_first (bool): whether convert HWC to CHW
"""
    def __init__(self):
super(Permute, self).__init__()
def __call__(self, im, im_info):
"""
Args:
im (np.ndarray): image (np.ndarray)
im_info (dict): info of image
Returns:
im (np.ndarray): processed image (np.ndarray)
im_info (dict): info of processed image
"""
im = im.transpose((2, 0, 1)).copy()
return im, im_info
class PadStride(object):
""" padding image for model with FPN, instead PadBatch(pad_to_stride) in original config
Args:
stride (bool): model with FPN need image shape % stride == 0
"""
def __init__(self, stride=0):
self.coarsest_stride = stride
def __call__(self, im, im_info):
"""
Args:
im (np.ndarray): image (np.ndarray)
im_info (dict): info of image
Returns:
im (np.ndarray): processed image (np.ndarray)
im_info (dict): info of processed image
"""
coarsest_stride = self.coarsest_stride
if coarsest_stride <= 0:
return im, im_info
im_c, im_h, im_w = im.shape
pad_h = int(np.ceil(float(im_h) / coarsest_stride) * coarsest_stride)
pad_w = int(np.ceil(float(im_w) / coarsest_stride) * coarsest_stride)
padding_im = np.zeros((im_c, pad_h, pad_w), dtype=np.float32)
padding_im[:, :im_h, :im_w] = im
return padding_im, im_info
def preprocess(im, preprocess_ops):
# process image by preprocess_ops
im_info = {
'scale_factor': np.array(
[1., 1.], dtype=np.float32),
'im_shape': None,
}
im, im_info = decode_image(im, im_info)
for operator in preprocess_ops:
im, im_info = operator(im, im_info)
return im, im_info
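# Minimal usage sketch of the preprocessing pipeline above (added for clarity,
# not part of the original module). The mean/std values are the common ImageNet
# defaults and are an assumption, not values mandated by this file.
if __name__ == '__main__':
    dummy = np.zeros((480, 640, 3), dtype='uint8')  # stand-in for a decoded RGB image
    ops = [
        Resize(target_size=[640, 640], keep_ratio=True),
        NormalizeImage(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
        Permute(),
        PadStride(stride=32),
    ]
    out_im, out_info = preprocess(dummy, ops)
    print(out_im.shape, out_info['scale_factor'])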
|
py | 1a47e749f2012288da3445eda3dfd2715c4cbfa8 | """
Adapted from AllenNLP:
https://github.com/allenai/allennlp/tree/v0.6.1/allennlp/common
Functions and exceptions for checking that
AllenNLP and its models are configured correctly.
"""
from torch import cuda
from dadmatools.flair.utils import logging
logger = logging.init_logger() # pylint: disable=invalid-name
class ConfigurationError(Exception):
"""
The exception raised by any AllenNLP object when it's misconfigured
(e.g. missing properties, invalid properties, unknown properties).
"""
def __init__(self, message):
super(ConfigurationError, self).__init__()
self.message = message
def __str__(self):
return repr(self.message)
def log_pytorch_version_info():
import torch
logger.info("Pytorch version: %s", torch.__version__)
def check_dimensions_match(dimension_1: int,
dimension_2: int,
dim_1_name: str,
dim_2_name: str) -> None:
if dimension_1 != dimension_2:
raise ConfigurationError(f"{dim_1_name} must match {dim_2_name}, but got {dimension_1} "
f"and {dimension_2} instead")
def check_for_gpu(device_id: int):
if device_id is not None and device_id >= cuda.device_count():
raise ConfigurationError("Experiment specified a GPU but none is available;"
" if you want to run on CPU use the override"
" 'trainer.cuda_device=-1' in the json config file.")
|
py | 1a47e79a79cc03db13322e7e2a64f5a5e8a618b3 | import requests, logging
import pytest, json
from settings import TEST_DATA, DEPLOYMENTS
from suite.resources_utils import (
wait_before_test,
create_items_from_yaml,
wait_before_test,
get_file_contents,
get_service_endpoint,
)
from suite.custom_resources_utils import (
create_crd_from_yaml,
delete_crd,
)
from suite.vs_vsr_resources_utils import(
delete_virtual_server,
create_virtual_server_from_yaml,
patch_virtual_server_from_yaml,
patch_v_s_route_from_yaml,
create_v_s_route_from_yaml,
delete_v_s_route,
)
from suite.policy_resources_utils import(
create_policy_from_yaml,
delete_policy,
read_policy,
)
from suite.ap_resources_utils import (
create_ap_usersig_from_yaml,
delete_ap_usersig,
delete_and_create_ap_policy_from_yaml,
read_ap_custom_resource,
create_ap_logconf_from_yaml,
create_ap_policy_from_yaml,
delete_ap_policy,
delete_ap_logconf,
create_ap_waf_policy_from_yaml,
)
from suite.yaml_utils import get_first_ingress_host_from_yaml, get_name_from_yaml
ap_pol_name = ""
log_name = ""
std_vs_src = f"{TEST_DATA}/ap-waf/standard/virtual-server.yaml"
waf_spec_vs_src = f"{TEST_DATA}/ap-waf/virtual-server-waf-spec.yaml"
waf_route_vs_src = f"{TEST_DATA}/ap-waf/virtual-server-waf-route.yaml"
waf_subroute_vsr_src = f"{TEST_DATA}/ap-waf/virtual-server-route-waf-subroute.yaml"
waf_pol_default_src = f"{TEST_DATA}/ap-waf/policies/waf-default.yaml"
waf_pol_dataguard_src = f"{TEST_DATA}/ap-waf/policies/waf-dataguard.yaml"
ap_policy_uds = "dataguard-alarm-uds"
uds_crd_resource = f"{TEST_DATA}/ap-waf/ap-ic-uds.yaml"
valid_resp_addr = "Server address:"
valid_resp_name = "Server name:"
invalid_resp_title = "Request Rejected"
invalid_resp_body = "The requested URL was rejected. Please consult with your administrator."
@pytest.fixture(scope="class")
def appprotect_setup(request, kube_apis, test_namespace) -> None:
"""
    Deploy a simple application and all the AppProtect (dataguard-alarm) resources under test in one namespace.
    :param request: pytest fixture
    :param kube_apis: client apis
    :param test_namespace: namespace used for the test resources
"""
print("------------------------- Deploy logconf -----------------------------")
src_log_yaml = f"{TEST_DATA}/ap-waf/logconf.yaml"
global log_name
log_name = create_ap_logconf_from_yaml(kube_apis.custom_objects, src_log_yaml, test_namespace)
print("------------------------- Create UserSig CRD resource-----------------------------")
usersig_name = create_ap_usersig_from_yaml(
kube_apis.custom_objects, uds_crd_resource, test_namespace
)
print(f"------------------------- Deploy dataguard-alarm appolicy ---------------------------")
src_pol_yaml = f"{TEST_DATA}/ap-waf/{ap_policy_uds}.yaml"
global ap_pol_name
ap_pol_name = create_ap_policy_from_yaml(kube_apis.custom_objects, src_pol_yaml, test_namespace)
def fin():
print("Clean up:")
delete_ap_policy(kube_apis.custom_objects, ap_pol_name, test_namespace)
delete_ap_usersig(kube_apis.custom_objects, usersig_name, test_namespace)
delete_ap_logconf(kube_apis.custom_objects, log_name, test_namespace)
request.addfinalizer(fin)
def assert_ap_crd_info(ap_crd_info, policy_name) -> None:
"""
Assert fields in AppProtect policy documents
:param ap_crd_info: CRD output from k8s API
:param policy_name:
"""
assert ap_crd_info["kind"] == "APPolicy"
assert ap_crd_info["metadata"]["name"] == policy_name
assert ap_crd_info["spec"]["policy"]["enforcementMode"] == "blocking"
assert (
ap_crd_info["spec"]["policy"]["blocking-settings"]["violations"][0]["name"]
== "VIOL_DATA_GUARD"
)
def assert_invalid_responses(response) -> None:
"""
Assert responses when policy config is blocking requests
:param response: Response
"""
assert invalid_resp_title in response.text
assert invalid_resp_body in response.text
assert response.status_code == 200
def assert_valid_responses(response) -> None:
"""
Assert responses when policy config is allowing requests
:param response: Response
"""
assert valid_resp_name in response.text
assert valid_resp_addr in response.text
assert response.status_code == 200
@pytest.mark.skip_for_nginx_oss
@pytest.mark.appprotect
@pytest.mark.parametrize(
"crd_ingress_controller_with_ap, virtual_server_setup",
[
(
{
"type": "complete",
"extra_args": [
f"-enable-custom-resources",
f"-enable-leader-election=false",
f"-enable-app-protect",
],
},
{"example": "ap-waf", "app_type": "simple",},
)
],
indirect=True,
)
class TestAppProtectWAFPolicyVS:
def restore_default_vs(self, kube_apis, virtual_server_setup) -> None:
"""
Restore VirtualServer without policy spec
"""
delete_virtual_server(
kube_apis.custom_objects, virtual_server_setup.vs_name, virtual_server_setup.namespace
)
create_virtual_server_from_yaml(
kube_apis.custom_objects, std_vs_src, virtual_server_setup.namespace
)
wait_before_test()
@pytest.mark.smoke
@pytest.mark.parametrize(
"vs_src, waf",
[
(waf_spec_vs_src, waf_pol_default_src),
(waf_spec_vs_src, waf_pol_dataguard_src),
(waf_route_vs_src, waf_pol_default_src),
(waf_route_vs_src, waf_pol_dataguard_src),
],
)
def test_ap_waf_policy_block(
self,
kube_apis,
crd_ingress_controller_with_ap,
virtual_server_setup,
appprotect_setup,
test_namespace,
vs_src,
waf,
):
"""
Test waf policy when enabled with default and dataguard-alarm AP Policies
"""
print(f"Create waf policy")
if waf == waf_pol_dataguard_src:
create_ap_waf_policy_from_yaml(
kube_apis.custom_objects,
waf,
test_namespace,
test_namespace,
True,
False,
ap_pol_name,
log_name,
"syslog:server=127.0.0.1:514",
)
elif waf == waf_pol_default_src:
pol_name = create_policy_from_yaml(kube_apis.custom_objects, waf, test_namespace)
else:
pytest.fail(f"Invalid argument")
wait_before_test()
print(f"Patch vs with policy: {vs_src}")
patch_virtual_server_from_yaml(
kube_apis.custom_objects,
virtual_server_setup.vs_name,
vs_src,
virtual_server_setup.namespace,
)
wait_before_test()
ap_crd_info = read_ap_custom_resource(
kube_apis.custom_objects, test_namespace, "appolicies", ap_policy_uds
)
assert_ap_crd_info(ap_crd_info, ap_policy_uds)
wait_before_test(120)
print(
"----------------------- Send request with embedded malicious script----------------------"
)
response1 = requests.get(
virtual_server_setup.backend_1_url + "</script>",
headers={"host": virtual_server_setup.vs_host},
)
print(response1.text)
print(
"----------------------- Send request with blocked keyword in UDS----------------------"
)
response2 = requests.get(
virtual_server_setup.backend_1_url,
headers={"host": virtual_server_setup.vs_host},
data="kic",
)
print(response2.text)
delete_policy(kube_apis.custom_objects, "waf-policy", test_namespace)
self.restore_default_vs(kube_apis, virtual_server_setup)
assert_invalid_responses(response1)
if waf == waf_pol_dataguard_src:
assert_invalid_responses(response2)
elif waf == waf_pol_default_src:
assert_valid_responses(response2)
else:
pytest.fail(f"Invalid arguments")
@pytest.mark.parametrize(
"vs_src, waf",
[(waf_spec_vs_src, waf_pol_dataguard_src), (waf_route_vs_src, waf_pol_dataguard_src),],
)
def test_ap_waf_policy_allow(
self,
kube_apis,
crd_ingress_controller_with_ap,
virtual_server_setup,
appprotect_setup,
test_namespace,
vs_src,
waf,
):
"""
Test waf policy when disabled
"""
print(f"Create waf policy")
create_ap_waf_policy_from_yaml(
kube_apis.custom_objects,
waf,
test_namespace,
test_namespace,
False,
False,
ap_pol_name,
log_name,
"syslog:server=127.0.0.1:514",
)
wait_before_test()
print(f"Patch vs with policy: {vs_src}")
patch_virtual_server_from_yaml(
kube_apis.custom_objects,
virtual_server_setup.vs_name,
vs_src,
virtual_server_setup.namespace,
)
wait_before_test()
ap_crd_info = read_ap_custom_resource(
kube_apis.custom_objects, test_namespace, "appolicies", ap_policy_uds
)
assert_ap_crd_info(ap_crd_info, ap_policy_uds)
wait_before_test(120)
print(
"----------------------- Send request with embedded malicious script----------------------"
)
response1 = requests.get(
virtual_server_setup.backend_1_url + "</script>",
headers={"host": virtual_server_setup.vs_host},
)
print(response1.text)
print(
"----------------------- Send request with blocked keyword in UDS----------------------"
)
response2 = requests.get(
virtual_server_setup.backend_1_url,
headers={"host": virtual_server_setup.vs_host},
data="kic",
)
print(response2.text)
delete_policy(kube_apis.custom_objects, "waf-policy", test_namespace)
self.restore_default_vs(kube_apis, virtual_server_setup)
assert_valid_responses(response1)
assert_valid_responses(response2)
@pytest.mark.flaky(max_runs=3)
def test_ap_waf_policy_logs(
self,
kube_apis,
crd_ingress_controller_with_ap,
virtual_server_setup,
appprotect_setup,
test_namespace,
):
"""
Test waf policy logs
"""
src_syslog_yaml = f"{TEST_DATA}/ap-waf/syslog.yaml"
log_loc = f"/var/log/messages"
create_items_from_yaml(kube_apis, src_syslog_yaml, test_namespace)
syslog_dst = f"syslog-svc.{test_namespace}"
syslog_pod = kube_apis.v1.list_namespaced_pod(test_namespace).items[-1].metadata.name
print(f"Create waf policy")
create_ap_waf_policy_from_yaml(
kube_apis.custom_objects,
waf_pol_dataguard_src,
test_namespace,
test_namespace,
True,
True,
ap_pol_name,
log_name,
f"syslog:server={syslog_dst}:514",
)
wait_before_test()
print(f"Patch vs with policy: {waf_spec_vs_src}")
patch_virtual_server_from_yaml(
kube_apis.custom_objects,
virtual_server_setup.vs_name,
waf_spec_vs_src,
virtual_server_setup.namespace,
)
wait_before_test()
ap_crd_info = read_ap_custom_resource(
kube_apis.custom_objects, test_namespace, "appolicies", ap_policy_uds
)
assert_ap_crd_info(ap_crd_info, ap_policy_uds)
wait_before_test(120)
print(
"----------------------- Send request with embedded malicious script----------------------"
)
response = requests.get(
virtual_server_setup.backend_1_url + "</script>",
headers={"host": virtual_server_setup.vs_host},
)
print(response.text)
log_contents = ""
retry = 0
while "ASM:attack_type" not in log_contents and retry <= 30:
log_contents = get_file_contents(
kube_apis.v1, log_loc, syslog_pod, test_namespace
)
retry += 1
wait_before_test(1)
print(f"Security log not updated, retrying... #{retry}")
delete_policy(kube_apis.custom_objects, "waf-policy", test_namespace)
self.restore_default_vs(kube_apis, virtual_server_setup)
assert_invalid_responses(response)
assert (
f'ASM:attack_type="Non-browser Client,Abuse of Functionality,Cross Site Scripting (XSS)"'
in log_contents
)
assert f'severity="Critical"' in log_contents
assert f'request_status="blocked"' in log_contents
assert f'outcome="REJECTED"' in log_contents
@pytest.mark.skip_for_nginx_oss
@pytest.mark.appprotect
@pytest.mark.parametrize(
"crd_ingress_controller_with_ap, v_s_route_setup",
[
(
{
"type": "complete",
"extra_args": [
f"-enable-custom-resources",
f"-enable-leader-election=false",
f"-enable-app-protect",
],
},
{"example": "virtual-server-route"},
)
],
indirect=True,
)
class TestAppProtectWAFPolicyVSR:
def restore_default_vsr(self, kube_apis, v_s_route_setup) -> None:
"""
Function to revert vsr deployments to standard state
"""
patch_src_m = f"{TEST_DATA}/virtual-server-route/route-multiple.yaml"
patch_v_s_route_from_yaml(
kube_apis.custom_objects,
v_s_route_setup.route_m.name,
patch_src_m,
v_s_route_setup.route_m.namespace,
)
wait_before_test()
@pytest.mark.parametrize(
"ap_enable",
[
True,
# False
],
)
def test_ap_waf_policy_block(
self,
kube_apis,
crd_ingress_controller_with_ap,
v_s_route_setup,
appprotect_setup,
test_namespace,
ap_enable,
):
"""
Test if WAF policy is working with VSR deployments
"""
req_url = f"http://{v_s_route_setup.public_endpoint.public_ip}:{v_s_route_setup.public_endpoint.port}"
print(f"Create waf policy")
create_ap_waf_policy_from_yaml(
kube_apis.custom_objects,
waf_pol_dataguard_src,
v_s_route_setup.route_m.namespace,
test_namespace,
ap_enable,
ap_enable,
ap_pol_name,
log_name,
"syslog:server=127.0.0.1:514",
)
wait_before_test()
print(f"Patch vsr with policy: {waf_subroute_vsr_src}")
patch_v_s_route_from_yaml(
kube_apis.custom_objects,
v_s_route_setup.route_m.name,
waf_subroute_vsr_src,
v_s_route_setup.route_m.namespace,
)
wait_before_test()
ap_crd_info = read_ap_custom_resource(
kube_apis.custom_objects, test_namespace, "appolicies", ap_policy_uds
)
assert_ap_crd_info(ap_crd_info, ap_policy_uds)
wait_before_test(120)
response = requests.get(
f"{req_url}{v_s_route_setup.route_m.paths[0]}+'</script>'",
headers={"host": v_s_route_setup.vs_host},
)
print(response.text)
delete_policy(kube_apis.custom_objects, "waf-policy", v_s_route_setup.route_m.namespace)
self.restore_default_vsr(kube_apis, v_s_route_setup)
if ap_enable == True:
assert_invalid_responses(response)
elif ap_enable == False:
assert_valid_responses(response)
else:
pytest.fail(f"Invalid arguments")
|
py | 1a47e7e664927afbe20855d138cd55f9d5baefaf | # type: ignore
import time
from robomaster import robot, logger, logging, sensor # noqa
import patch_ftp # noqa
def data_info(self):
return self._cmd_id, self._direct, self._flag, self._distance
sensor.TofSubject.data_info = data_info
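# The assignment above is a deliberate monkey patch of the robomaster SDK: it
# replaces TofSubject.data_info at runtime so that the distance subscription
# callback receives the raw (cmd_id, direct, flag, distance) tuple.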
def cb(msg):
print(msg)
def main():
logger.setLevel(logging.ERROR)
ep_robot = robot.Robot()
ep_robot.initialize(conn_type="sta")
ep_robot.chassis.drive_speed(z=30)
ep_robot.sensor.sub_distance(freq=5, callback=cb)
time.sleep(10)
ep_robot.sensor.unsub_distance()
ep_robot.chassis.drive_speed(z=0)
ep_robot.close()
if __name__ == '__main__':
main()
|
py | 1a47e8502fa5e02a310c17728fc6554cd84e2bce | """Automated data download and IO."""
# Builtins
import glob
import os
import gzip
import bz2
import hashlib
import shutil
import zipfile
import sys
import math
import logging
from functools import partial, wraps
import time
import fnmatch
import urllib.request
import urllib.error
from urllib.parse import urlparse
import socket
import multiprocessing
from netrc import netrc
import ftplib
import ssl
import tarfile
# External libs
import pandas as pd
import numpy as np
from shapely.ops import transform as shp_trafo
from shapely.ops import unary_union
import shapely.geometry as shpg
import requests
# Optional libs
try:
import geopandas as gpd
except ImportError:
pass
try:
import salem
from salem import wgs84
except ImportError:
pass
try:
import rasterio
try:
# rasterio V > 1.0
from rasterio.merge import merge as merge_tool
except ImportError:
from rasterio.tools.merge import merge as merge_tool
except ImportError:
pass
try:
ModuleNotFoundError
except NameError:
ModuleNotFoundError = ImportError
# Locals
import oggm.cfg as cfg
from oggm.exceptions import (InvalidParamsError, NoInternetException,
DownloadVerificationFailedException,
DownloadCredentialsMissingException,
HttpDownloadError, HttpContentTooShortError,
InvalidDEMError, FTPSDownloadError)
# Module logger
logger = logging.getLogger('.'.join(__name__.split('.')[:-1]))
# Github repository and commit hash/branch name/tag name on that repository
# The given commit will be downloaded from github and used as source for
# all sample data
SAMPLE_DATA_GH_REPO = 'OGGM/oggm-sample-data'
SAMPLE_DATA_COMMIT = '18210326c4a212bd75fe21ba5355571e02938ff9'
GDIR_URL = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.1/'
DEMO_GDIR_URL = 'https://cluster.klima.uni-bremen.de/~oggm/demo_gdirs/'
DEMS_GDIR_URL = 'https://cluster.klima.uni-bremen.de/data/gdirs/dems_v0/'
CMIP5_URL = 'https://cluster.klima.uni-bremen.de/~nicolas/cmip5-ng/'
CHECKSUM_URL = 'https://cluster.klima.uni-bremen.de/data/downloads.sha256.hdf'
CHECKSUM_VALIDATION_URL = CHECKSUM_URL + '.sha256'
# Web mercator proj constants
WEB_N_PIX = 256
WEB_EARTH_RADUIS = 6378137.
DEM_SOURCES = ['GIMP', 'ARCTICDEM', 'RAMP', 'TANDEM', 'AW3D30', 'MAPZEN',
'DEM3', 'ASTER', 'SRTM', 'REMA', 'ALASKA', 'COPDEM', 'NASADEM']
_RGI_METADATA = dict()
DEM3REG = {
'ISL': [-25., -13., 63., 67.], # Iceland
'SVALBARD': [9., 35.99, 75., 84.],
'JANMAYEN': [-10., -7., 70., 72.],
'FJ': [36., 68., 79., 90.], # Franz Josef Land
'FAR': [-8., -6., 61., 63.], # Faroer
'BEAR': [18., 20., 74., 75.], # Bear Island
'SHL': [-3., 0., 60., 61.], # Shetland
# Antarctica tiles as UTM zones, large files
'01-15': [-180., -91., -90, -60.],
'16-30': [-91., -1., -90., -60.],
'31-45': [-1., 89., -90., -60.],
'46-60': [89., 189., -90., -60.],
# Greenland tiles
'GL-North': [-72., -11., 76., 84.],
'GL-West': [-62., -42., 64., 76.],
'GL-South': [-52., -40., 59., 64.],
'GL-East': [-42., -17., 64., 76.]
}
# Function
tuple2int = partial(np.array, dtype=np.int64)
lock = None
def mkdir(path, reset=False):
"""Checks if directory exists and if not, create one.
Parameters
----------
reset: erase the content of the directory if exists
Returns
-------
the path
"""
if reset and os.path.exists(path):
shutil.rmtree(path)
try:
os.makedirs(path)
except FileExistsError:
pass
return path
def del_empty_dirs(s_dir):
"""Delete empty directories."""
b_empty = True
for s_target in os.listdir(s_dir):
s_path = os.path.join(s_dir, s_target)
if os.path.isdir(s_path):
if not del_empty_dirs(s_path):
b_empty = False
else:
b_empty = False
if b_empty:
os.rmdir(s_dir)
return b_empty
def findfiles(root_dir, endswith):
"""Finds all files with a specific ending in a directory
Parameters
----------
root_dir : str
        The directory to search for
    endswith : str
        The file ending (e.g. '.hgt')
Returns
-------
the list of files
"""
out = []
for dirpath, dirnames, filenames in os.walk(root_dir):
for filename in [f for f in filenames if f.endswith(endswith)]:
out.append(os.path.join(dirpath, filename))
return out
def get_lock():
"""Get multiprocessing lock."""
global lock
if lock is None:
# Global Lock
if cfg.PARAMS.get('use_mp_spawn', False):
lock = multiprocessing.get_context('spawn').Lock()
else:
lock = multiprocessing.Lock()
return lock
def get_dl_verify_data(section):
"""Returns a pandas DataFrame with all known download object hashes.
    The returned DataFrame maps cache_obj_name (without the section prefix)
    to int(size) and bytes(sha256).
"""
verify_key = 'dl_verify_data_' + section
if cfg.DATA.get(verify_key) is not None:
return cfg.DATA[verify_key]
verify_file_path = os.path.join(cfg.CACHE_DIR, 'downloads.sha256.hdf')
def verify_file():
"""Check the hash file's own hash"""
logger.info('Checking the download verification file checksum...')
try:
with requests.get(CHECKSUM_VALIDATION_URL) as req:
req.raise_for_status()
verify_file_sha256 = req.text.split(maxsplit=1)[0]
verify_file_sha256 = bytearray.fromhex(verify_file_sha256)
except Exception as e:
verify_file_sha256 = None
logger.warning('Failed getting verification checksum: ' + repr(e))
if os.path.isfile(verify_file_path) and verify_file_sha256:
sha256 = hashlib.sha256()
with open(verify_file_path, 'rb') as f:
for b in iter(lambda: f.read(0xFFFF), b''):
sha256.update(b)
if sha256.digest() != verify_file_sha256:
logger.warning('%s changed or invalid, deleting.'
% (verify_file_path))
os.remove(verify_file_path)
if not np.any(['dl_verify_data_' in k for k in cfg.DATA.keys()]):
# We check the hash file only once per session
# no need to do it at each call
verify_file()
if not os.path.isfile(verify_file_path):
logger.info('Downloading %s to %s...'
% (CHECKSUM_URL, verify_file_path))
with requests.get(CHECKSUM_URL, stream=True) as req:
if req.status_code == 200:
mkdir(os.path.dirname(verify_file_path))
with open(verify_file_path, 'wb') as f:
for b in req.iter_content(chunk_size=0xFFFF):
if b:
f.write(b)
logger.info('Done downloading.')
verify_file()
if not os.path.isfile(verify_file_path):
logger.warning('Downloading and verifying checksums failed.')
return pd.DataFrame()
try:
data = pd.read_hdf(verify_file_path, key=section)
except KeyError:
data = pd.DataFrame()
cfg.DATA[verify_key] = data
return data
def _call_dl_func(dl_func, cache_path):
"""Helper so the actual call to downloads can be overridden
"""
return dl_func(cache_path)
def _cached_download_helper(cache_obj_name, dl_func, reset=False):
"""Helper function for downloads.
Takes care of checking if the file is already cached.
Only calls the actual download function when no cached version exists.
"""
cache_dir = cfg.PATHS['dl_cache_dir']
cache_ro = cfg.PARAMS['dl_cache_readonly']
# A lot of logic below could be simplified but it's also not too important
wd = cfg.PATHS.get('working_dir')
if wd:
# this is for real runs
fb_cache_dir = os.path.join(wd, 'cache')
check_fb_dir = False
else:
        # Nothing has been set up yet, which is bad - find a place to write.
        # This should only happen on a read-only cluster, but still.
wd = os.environ.get('OGGM_WORKDIR')
if wd is not None and os.path.isdir(wd):
fb_cache_dir = os.path.join(wd, 'cache')
else:
fb_cache_dir = os.path.join(cfg.CACHE_DIR, 'cache')
check_fb_dir = True
if not cache_dir:
# Defaults to working directory: it must be set!
if not cfg.PATHS['working_dir']:
raise InvalidParamsError("Need a valid PATHS['working_dir']!")
cache_dir = fb_cache_dir
cache_ro = False
fb_path = os.path.join(fb_cache_dir, cache_obj_name)
if not reset and os.path.isfile(fb_path):
return fb_path
cache_path = os.path.join(cache_dir, cache_obj_name)
if not reset and os.path.isfile(cache_path):
return cache_path
if cache_ro:
if check_fb_dir:
# Add a manual check that we are caching sample data download
if 'oggm-sample-data' not in fb_path:
raise InvalidParamsError('Attempting to download something '
'with invalid global settings.')
cache_path = fb_path
if not cfg.PARAMS['has_internet']:
raise NoInternetException("Download required, but "
"`has_internet` is False.")
mkdir(os.path.dirname(cache_path))
try:
cache_path = _call_dl_func(dl_func, cache_path)
except BaseException:
if os.path.exists(cache_path):
os.remove(cache_path)
raise
return cache_path
def _verified_download_helper(cache_obj_name, dl_func, reset=False):
"""Helper function for downloads.
Verifies the size and hash of the downloaded file against the included
list of known static files.
Uses _cached_download_helper to perform the actual download.
"""
path = _cached_download_helper(cache_obj_name, dl_func, reset)
try:
dl_verify = cfg.PARAMS['dl_verify']
except KeyError:
dl_verify = True
if dl_verify and path and cache_obj_name not in cfg.DL_VERIFIED:
cache_section, cache_path = cache_obj_name.split('/', 1)
data = get_dl_verify_data(cache_section)
if cache_path not in data.index:
logger.info('No known hash for %s' % cache_obj_name)
cfg.DL_VERIFIED[cache_obj_name] = True
else:
# compute the hash
sha256 = hashlib.sha256()
with open(path, 'rb') as f:
for b in iter(lambda: f.read(0xFFFF), b''):
sha256.update(b)
sha256 = sha256.digest()
size = os.path.getsize(path)
# check
data = data.loc[cache_path]
if data['size'] != size or bytes(data['sha256']) != sha256:
err = '%s failed to verify!\nis: %s %s\nexpected: %s %s' % (
path, size, sha256.hex(), data[0], data[1].hex())
raise DownloadVerificationFailedException(msg=err, path=path)
logger.info('%s verified successfully.' % path)
cfg.DL_VERIFIED[cache_obj_name] = True
return path
def _requests_urlretrieve(url, path, reporthook, auth=None, timeout=None):
"""Implements the required features of urlretrieve on top of requests
"""
chunk_size = 128 * 1024
chunk_count = 0
with requests.get(url, stream=True, auth=auth, timeout=timeout) as r:
if r.status_code != 200:
raise HttpDownloadError(r.status_code, url)
r.raise_for_status()
size = r.headers.get('content-length') or -1
size = int(size)
if reporthook:
reporthook(chunk_count, chunk_size, size)
with open(path, 'wb') as f:
for chunk in r.iter_content(chunk_size=chunk_size):
if not chunk:
continue
f.write(chunk)
chunk_count += 1
if reporthook:
reporthook(chunk_count, chunk_size, size)
if chunk_count * chunk_size < size:
raise HttpContentTooShortError()
def _classic_urlretrieve(url, path, reporthook, auth=None, timeout=None):
"""Thin wrapper around pythons urllib urlretrieve
"""
ourl = url
if auth:
u = urlparse(url)
if '@' not in u.netloc:
netloc = auth[0] + ':' + auth[1] + '@' + u.netloc
url = u._replace(netloc=netloc).geturl()
old_def_timeout = socket.getdefaulttimeout()
if timeout is not None:
socket.setdefaulttimeout(timeout)
try:
urllib.request.urlretrieve(url, path, reporthook)
except urllib.error.HTTPError as e:
raise HttpDownloadError(e.code, ourl)
except urllib.error.ContentTooShortError as e:
raise HttpContentTooShortError()
finally:
socket.setdefaulttimeout(old_def_timeout)
class ImplicitFTPTLS(ftplib.FTP_TLS):
""" FTP_TLS subclass that automatically wraps sockets in SSL to support
implicit FTPS.
Taken from https://stackoverflow.com/a/36049814
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._sock = None
@property
def sock(self):
"""Return the socket."""
return self._sock
@sock.setter
def sock(self, value):
"""When modifying the socket, ensure that it is ssl wrapped."""
if value is not None and not isinstance(value, ssl.SSLSocket):
value = self.context.wrap_socket(value)
self._sock = value
def _ftps_retrieve(url, path, reporthook, auth=None, timeout=None):
""" Wrapper around ftplib to download from FTPS server
"""
if not auth:
raise DownloadCredentialsMissingException('No authentication '
'credentials given!')
upar = urlparse(url)
# Decide if Implicit or Explicit FTPS is used based on the port in url
if upar.port == 990:
ftps = ImplicitFTPTLS()
elif upar.port == 21:
ftps = ftplib.FTP_TLS()
try:
# establish ssl connection
ftps.connect(host=upar.hostname, port=upar.port, timeout=timeout)
ftps.login(user=auth[0], passwd=auth[1])
ftps.prot_p()
logger.info('Established connection %s' % upar.hostname)
# meta for progress bar size
count = 0
total = ftps.size(upar.path)
bs = 12*1024
def _ftps_progress(data):
outfile.write(data)
nonlocal count
count += 1
reporthook(count, count*bs, total)
with open(path, 'wb') as outfile:
ftps.retrbinary('RETR ' + upar.path, _ftps_progress, blocksize=bs)
except (ftplib.error_perm, socket.timeout, socket.gaierror) as err:
raise FTPSDownloadError(err)
finally:
ftps.close()
def _get_url_cache_name(url):
"""Returns the cache name for any given url.
"""
res = urlparse(url)
return res.netloc.split(':', 1)[0] + res.path
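# For example (illustrative only, not from the original source):
#   _get_url_cache_name('https://cluster.klima.uni-bremen.de/data/file.nc')
#   returns 'cluster.klima.uni-bremen.de/data/file.nc'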
def oggm_urlretrieve(url, cache_obj_name=None, reset=False,
reporthook=None, auth=None, timeout=None):
"""Wrapper around urlretrieve, to implement our caching logic.
    Instead of accepting a destination path, it decides where to store the file
and returns the local path.
auth is expected to be either a tuple of ('username', 'password') or None.
"""
if cache_obj_name is None:
cache_obj_name = _get_url_cache_name(url)
def _dlf(cache_path):
logger.info("Downloading %s to %s..." % (url, cache_path))
try:
_requests_urlretrieve(url, cache_path, reporthook, auth, timeout)
except requests.exceptions.InvalidSchema:
if 'ftps://' in url:
_ftps_retrieve(url, cache_path, reporthook, auth, timeout)
else:
_classic_urlretrieve(url, cache_path, reporthook, auth,
timeout)
return cache_path
return _verified_download_helper(cache_obj_name, _dlf, reset)
def _progress_urlretrieve(url, cache_name=None, reset=False,
auth=None, timeout=None):
"""Downloads a file, returns its local path, and shows a progressbar."""
try:
from progressbar import DataTransferBar, UnknownLength
pbar = [None]
def _upd(count, size, total):
if pbar[0] is None:
pbar[0] = DataTransferBar()
if pbar[0].max_value is None:
if total > 0:
pbar[0].start(total)
else:
pbar[0].start(UnknownLength)
pbar[0].update(min(count * size, total))
sys.stdout.flush()
res = oggm_urlretrieve(url, cache_obj_name=cache_name, reset=reset,
reporthook=_upd, auth=auth, timeout=timeout)
try:
pbar[0].finish()
except BaseException:
pass
return res
except (ImportError, ModuleNotFoundError):
return oggm_urlretrieve(url, cache_obj_name=cache_name,
reset=reset, auth=auth, timeout=timeout)
def aws_file_download(aws_path, cache_name=None, reset=False):
with get_lock():
return _aws_file_download_unlocked(aws_path, cache_name, reset)
def _aws_file_download_unlocked(aws_path, cache_name=None, reset=False):
"""Download a file from the AWS drive s3://astgtmv2/
**Note:** you need AWS credentials for this to work.
Parameters
----------
aws_path: path relative to s3://astgtmv2/
"""
while aws_path.startswith('/'):
aws_path = aws_path[1:]
if cache_name is not None:
cache_obj_name = cache_name
else:
cache_obj_name = 'astgtmv2/' + aws_path
def _dlf(cache_path):
raise NotImplementedError("Downloads from AWS are no longer supported")
return _verified_download_helper(cache_obj_name, _dlf, reset)
def file_downloader(www_path, retry_max=5, cache_name=None,
reset=False, auth=None, timeout=None):
"""A slightly better downloader: it tries more than once."""
local_path = None
retry_counter = 0
while retry_counter <= retry_max:
# Try to download
try:
retry_counter += 1
local_path = _progress_urlretrieve(www_path, cache_name=cache_name,
reset=reset, auth=auth,
timeout=timeout)
# if no error, exit
break
except HttpDownloadError as err:
# This works well for py3
if err.code == 404 or err.code == 300:
# Ok so this *should* be an ocean tile
return None
elif err.code >= 500 and err.code < 600:
logger.info("Downloading %s failed with HTTP error %s, "
"retrying in 10 seconds... %s/%s" %
(www_path, err.code, retry_counter, retry_max))
time.sleep(10)
continue
else:
raise
except HttpContentTooShortError as err:
logger.info("Downloading %s failed with ContentTooShortError"
" error %s, retrying in 10 seconds... %s/%s" %
(www_path, err.code, retry_counter, retry_max))
time.sleep(10)
continue
except DownloadVerificationFailedException as err:
if (cfg.PATHS['dl_cache_dir'] and
err.path.startswith(cfg.PATHS['dl_cache_dir']) and
cfg.PARAMS['dl_cache_readonly']):
if not cache_name:
cache_name = _get_url_cache_name(www_path)
cache_name = "GLOBAL_CACHE_INVALID/" + cache_name
retry_counter -= 1
logger.info("Global cache for %s is invalid!")
else:
try:
os.remove(err.path)
except FileNotFoundError:
pass
logger.info("Downloading %s failed with "
"DownloadVerificationFailedException\n %s\n"
"The file might have changed or is corrupted. "
"File deleted. Re-downloading... %s/%s" %
(www_path, err.msg, retry_counter, retry_max))
continue
except requests.ConnectionError as err:
if err.args[0].__class__.__name__ == 'MaxRetryError':
# if request tried often enough we don't have to do this
# this error does happen for not existing ASTERv3 files
return None
else:
# in other cases: try again
logger.info("Downloading %s failed with ConnectionError, "
"retrying in 10 seconds... %s/%s" %
(www_path, retry_counter, retry_max))
time.sleep(10)
continue
except FTPSDownloadError as err:
logger.info("Downloading %s failed with FTPSDownloadError"
" error: '%s', retrying in 10 seconds... %s/%s" %
(www_path, err.orgerr, retry_counter, retry_max))
time.sleep(10)
continue
# See if we managed (fail is allowed)
if not local_path or not os.path.exists(local_path):
logger.warning('Downloading %s failed.' % www_path)
return local_path
def locked_func(func):
"""To decorate a function that needs to be locked for multiprocessing"""
@wraps(func)
def wrapper(*args, **kwargs):
with get_lock():
return func(*args, **kwargs)
return wrapper
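# Example of how the decorator above is meant to be used (illustrative only;
# `_refresh_index` is a hypothetical name, not a function from this module):
#
#     @locked_func
#     def _refresh_index(path):
#         ...  # touches shared on-disk state, so it must hold the global lock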
def file_extractor(file_path):
"""For archives with only one file inside extract the file to tmpdir."""
filename, file_extension = os.path.splitext(file_path)
# Second one for tar.gz files
f2, ex2 = os.path.splitext(filename)
if ex2 == '.tar':
filename, file_extension = f2, '.tar.gz'
bname = os.path.basename(file_path)
# This is to give a unique name to the tmp file
hid = hashlib.md5(file_path.encode()).hexdigest()[:7] + '_'
# extract directory
tmpdir = cfg.PATHS['tmp_dir']
mkdir(tmpdir)
# Check output extension
def _check_ext(f):
_, of_ext = os.path.splitext(f)
if of_ext not in ['.nc', '.tif']:
raise InvalidParamsError('Extracted file extension not recognized'
': {}'.format(of_ext))
return of_ext
if file_extension == '.zip':
with zipfile.ZipFile(file_path) as zf:
members = zf.namelist()
if len(members) != 1:
raise RuntimeError('Cannot extract multiple files')
o_name = hid + members[0]
o_path = os.path.join(tmpdir, o_name)
of_ext = _check_ext(o_path)
if not os.path.exists(o_path):
logger.info('Extracting {} to {}...'.format(bname, o_path))
with open(o_path, 'wb') as f:
f.write(zf.read(members[0]))
elif file_extension == '.gz':
# Gzip files cannot be inspected. It's always only one file
# Decide on its name
o_name = hid + os.path.basename(filename)
o_path = os.path.join(tmpdir, o_name)
of_ext = _check_ext(o_path)
if not os.path.exists(o_path):
logger.info('Extracting {} to {}...'.format(bname, o_path))
with gzip.GzipFile(file_path) as zf:
with open(o_path, 'wb') as outfile:
for line in zf:
outfile.write(line)
elif file_extension == '.bz2':
# bzip2 files cannot be inspected. It's always only one file
# Decide on its name
o_name = hid + os.path.basename(filename)
o_path = os.path.join(tmpdir, o_name)
of_ext = _check_ext(o_path)
if not os.path.exists(o_path):
logger.info('Extracting {} to {}...'.format(bname, o_path))
with bz2.open(file_path) as zf:
with open(o_path, 'wb') as outfile:
for line in zf:
outfile.write(line)
elif file_extension in ['.tar.gz', '.tar']:
with tarfile.open(file_path) as zf:
members = zf.getmembers()
if len(members) != 1:
raise RuntimeError('Cannot extract multiple files')
o_name = hid + members[0].name
o_path = os.path.join(tmpdir, o_name)
of_ext = _check_ext(o_path)
if not os.path.exists(o_path):
logger.info('Extracting {} to {}...'.format(bname, o_path))
with open(o_path, 'wb') as f:
f.write(zf.extractfile(members[0]).read())
else:
raise InvalidParamsError('Extension not recognized: '
'{}'.format(file_extension))
# Be sure we don't overfill the folder
cfg.get_lru_handler(tmpdir, ending=of_ext).append(o_path)
return o_path
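# Illustrative example (assumption; the archive name is a placeholder): only
# .zip, .gz, .bz2, .tar and .tar.gz archives holding exactly one .nc or .tif
# file are accepted, and the extracted copy lands in cfg.PATHS['tmp_dir']:
#
#     dem_path = file_extractor('/path/to/srtm_39_03.tif.zip')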
def download_with_authentication(wwwfile, key):
""" Uses credentials from a local .netrc file to download files
This is function is currently used for TanDEM-X and ASTER
Parameters
----------
wwwfile : str
path to the file to download
key : str
the machine to to look at in the .netrc file
Returns
-------
"""
# Check the cache first. Use dummy download function to assure nothing is
# tried to be downloaded without credentials:
def _always_none(foo):
return None
cache_obj_name = _get_url_cache_name(wwwfile)
dest_file = _verified_download_helper(cache_obj_name, _always_none)
# Grab auth parameters
if not dest_file:
authfile = os.path.expanduser('~/.netrc')
if not os.path.isfile(authfile):
raise DownloadCredentialsMissingException(
'{} does not exist. Add the necessary credentials for {} with '
'`oggm_netrc_credentials`. You may have to register at the '
'respective service first.'.format(authfile, key))
try:
netrc(authfile).authenticators(key)[0]
except TypeError:
raise DownloadCredentialsMissingException(
'Credentials for {} are not in {}. Add them with '
'`oggm_netrc_credentials`.'.format(key, authfile))
dest_file = file_downloader(
wwwfile, auth=(netrc(authfile).authenticators(key)[0],
netrc(authfile).authenticators(key)[2]))
return dest_file
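# Illustrative call (assumption; the URL is a placeholder): downloading a
# TanDEM-X tile with credentials stored in ~/.netrc under the machine entry
# 'geoservice.dlr.de', as done by _download_tandem_file_unlocked below:
#
#     f = download_with_authentication('https://download.geoservice.dlr.de/'
#                                      'TDM90/files/N46/E010/tile.zip',
#                                      'geoservice.dlr.de')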
def download_oggm_files():
with get_lock():
return _download_oggm_files_unlocked()
def _download_oggm_files_unlocked():
"""Checks if the demo data is already on the cache and downloads it."""
zip_url = 'https://github.com/%s/archive/%s.zip' % \
(SAMPLE_DATA_GH_REPO, SAMPLE_DATA_COMMIT)
odir = os.path.join(cfg.CACHE_DIR)
sdir = os.path.join(cfg.CACHE_DIR,
'oggm-sample-data-%s' % SAMPLE_DATA_COMMIT)
# download only if necessary
if not os.path.exists(sdir):
ofile = file_downloader(zip_url)
with zipfile.ZipFile(ofile) as zf:
zf.extractall(odir)
assert os.path.isdir(sdir)
# list of files for output
out = dict()
for root, directories, filenames in os.walk(sdir):
for filename in filenames:
if filename in out:
# This was a stupid thing, and should not happen
# TODO: duplicates in sample data...
k = os.path.join(os.path.basename(root), filename)
assert k not in out
out[k] = os.path.join(root, filename)
else:
out[filename] = os.path.join(root, filename)
return out
def _download_srtm_file(zone):
with get_lock():
return _download_srtm_file_unlocked(zone)
def _download_srtm_file_unlocked(zone):
"""Checks if the srtm data is in the directory and if not, download it.
"""
# extract directory
tmpdir = cfg.PATHS['tmp_dir']
mkdir(tmpdir)
outpath = os.path.join(tmpdir, 'srtm_' + zone + '.tif')
# check if extracted file exists already
if os.path.exists(outpath):
return outpath
# Did we download it yet?
wwwfile = ('http://srtm.csi.cgiar.org/wp-content/uploads/files/srtm_5x5/'
'TIFF/srtm_' + zone + '.zip')
dest_file = file_downloader(wwwfile)
# None means we tried hard but we couldn't find it
if not dest_file:
return None
# ok we have to extract it
if not os.path.exists(outpath):
with zipfile.ZipFile(dest_file) as zf:
zf.extractall(tmpdir)
# See if we're good, don't overfill the tmp directory
assert os.path.exists(outpath)
cfg.get_lru_handler(tmpdir).append(outpath)
return outpath
def _download_nasadem_file(zone):
with get_lock():
return _download_nasadem_file_unlocked(zone)
def _download_nasadem_file_unlocked(zone):
"""Checks if the NASADEM data is in the directory and if not, download it.
"""
# extract directory
tmpdir = cfg.PATHS['tmp_dir']
mkdir(tmpdir)
wwwfile = ('https://e4ftl01.cr.usgs.gov/MEASURES/NASADEM_HGT.001/'
'2000.02.11/NASADEM_HGT_{}.zip'.format(zone))
demfile = '{}.hgt'.format(zone)
outpath = os.path.join(tmpdir, demfile)
# check if extracted file exists already
if os.path.exists(outpath):
return outpath
# Did we download it yet?
dest_file = file_downloader(wwwfile)
# None means we tried hard but we couldn't find it
if not dest_file:
return None
# ok we have to extract it
if not os.path.exists(outpath):
with zipfile.ZipFile(dest_file) as zf:
zf.extract(demfile, path=tmpdir)
# See if we're good, don't overfill the tmp directory
assert os.path.exists(outpath)
cfg.get_lru_handler(tmpdir).append(outpath)
return outpath
def _download_tandem_file(zone):
with get_lock():
return _download_tandem_file_unlocked(zone)
def _download_tandem_file_unlocked(zone):
"""Checks if the tandem data is in the directory and if not, download it.
"""
# extract directory
tmpdir = cfg.PATHS['tmp_dir']
mkdir(tmpdir)
bname = zone.split('/')[-1] + '_DEM.tif'
wwwfile = ('https://download.geoservice.dlr.de/TDM90/files/'
'{}.zip'.format(zone))
outpath = os.path.join(tmpdir, bname)
# check if extracted file exists already
if os.path.exists(outpath):
return outpath
dest_file = download_with_authentication(wwwfile, 'geoservice.dlr.de')
# That means we tried hard but we couldn't find it
if not dest_file:
return None
elif not zipfile.is_zipfile(dest_file):
# If the TanDEM-X tile does not exist, an invalid file is created.
# See https://github.com/OGGM/oggm/issues/893 for more details
return None
# ok we have to extract it
if not os.path.exists(outpath):
with zipfile.ZipFile(dest_file) as zf:
for fn in zf.namelist():
if 'DEM/' + bname in fn:
break
with open(outpath, 'wb') as fo:
fo.write(zf.read(fn))
# See if we're good, don't overfill the tmp directory
assert os.path.exists(outpath)
cfg.get_lru_handler(tmpdir).append(outpath)
return outpath
def _download_dem3_viewpano(zone):
with get_lock():
return _download_dem3_viewpano_unlocked(zone)
def _download_dem3_viewpano_unlocked(zone):
"""Checks if the DEM3 data is in the directory and if not, download it.
"""
# extract directory
tmpdir = cfg.PATHS['tmp_dir']
mkdir(tmpdir)
outpath = os.path.join(tmpdir, zone + '.tif')
extract_dir = os.path.join(tmpdir, 'tmp_' + zone)
mkdir(extract_dir, reset=True)
# check if extracted file exists already
if os.path.exists(outpath):
return outpath
# OK, so see if downloaded already
# some files have a newer version 'v2'
if zone in ['R33', 'R34', 'R35', 'R36', 'R37', 'R38', 'Q32', 'Q33', 'Q34',
'Q35', 'Q36', 'Q37', 'Q38', 'Q39', 'Q40', 'P31', 'P32', 'P33',
'P34', 'P35', 'P36', 'P37', 'P38', 'P39', 'P40']:
ifile = 'http://viewfinderpanoramas.org/dem3/' + zone + 'v2.zip'
elif zone in DEM3REG.keys():
# We prepared these files as tif already
ifile = ('https://cluster.klima.uni-bremen.de/~oggm/dem/'
'DEM3_MERGED/{}.tif'.format(zone))
return file_downloader(ifile)
else:
ifile = 'http://viewfinderpanoramas.org/dem3/' + zone + '.zip'
dfile = file_downloader(ifile)
# None means we tried hard but we couldn't find it
if not dfile:
return None
# ok we have to extract it
with zipfile.ZipFile(dfile) as zf:
zf.extractall(extract_dir)
# Serious issue: sometimes, if a southern hemisphere URL is queried for
# download and there is none, a NH zip file is downloaded.
# Example: http://viewfinderpanoramas.org/dem3/SN29.zip yields N29!
# BUT: There are southern hemisphere files that download properly. However,
# the unzipped folder has the file name of
# the northern hemisphere file. Some checks if correct file exists:
if len(zone) == 4 and zone.startswith('S'):
zonedir = os.path.join(extract_dir, zone[1:])
else:
zonedir = os.path.join(extract_dir, zone)
globlist = glob.glob(os.path.join(zonedir, '*.hgt'))
# take care of the special file naming cases
if zone in DEM3REG.keys():
globlist = glob.glob(os.path.join(extract_dir, '*', '*.hgt'))
if not globlist:
# Final resort
globlist = (findfiles(extract_dir, '.hgt') or
findfiles(extract_dir, '.HGT'))
if not globlist:
raise RuntimeError("We should have some files here, but we don't")
# merge the single HGT files (can be a bit inefficient, because not every
# single file might be exactly within extent...)
rfiles = [rasterio.open(s) for s in globlist]
dest, output_transform = merge_tool(rfiles)
profile = rfiles[0].profile
if 'affine' in profile:
profile.pop('affine')
profile['transform'] = output_transform
profile['height'] = dest.shape[1]
profile['width'] = dest.shape[2]
profile['driver'] = 'GTiff'
with rasterio.open(outpath, 'w', **profile) as dst:
dst.write(dest)
for rf in rfiles:
rf.close()
# delete original files to spare disk space
for s in globlist:
os.remove(s)
del_empty_dirs(tmpdir)
# See if we're good, don't overfill the tmp directory
assert os.path.exists(outpath)
cfg.get_lru_handler(tmpdir).append(outpath)
return outpath
def _download_aster_file(zone):
with get_lock():
return _download_aster_file_unlocked(zone)
def _download_aster_file_unlocked(zone):
"""Checks if the ASTER data is in the directory and if not, download it.
"""
# extract directory
tmpdir = cfg.PATHS['tmp_dir']
mkdir(tmpdir)
wwwfile = ('https://e4ftl01.cr.usgs.gov/ASTER_B/ASTT/ASTGTM.003/'
'2000.03.01/{}.zip'.format(zone))
outpath = os.path.join(tmpdir, zone + '_dem.tif')
# check if extracted file exists already
if os.path.exists(outpath):
return outpath
# download from NASA Earthdata with credentials
dest_file = download_with_authentication(wwwfile, 'urs.earthdata.nasa.gov')
# That means we tried hard but we couldn't find it
if not dest_file:
return None
# ok we have to extract it
if not os.path.exists(outpath):
with zipfile.ZipFile(dest_file) as zf:
zf.extractall(tmpdir)
# See if we're good, don't overfill the tmp directory
assert os.path.exists(outpath)
cfg.get_lru_handler(tmpdir).append(outpath)
return outpath
def _download_topo_file_from_cluster(fname):
with get_lock():
return _download_topo_file_from_cluster_unlocked(fname)
def _download_topo_file_from_cluster_unlocked(fname):
"""Checks if the special topo data is in the directory and if not,
download it from the cluster.
"""
# extract directory
tmpdir = cfg.PATHS['tmp_dir']
mkdir(tmpdir)
outpath = os.path.join(tmpdir, fname)
url = 'https://cluster.klima.uni-bremen.de/data/dems/'
url += fname + '.zip'
dfile = file_downloader(url)
if not os.path.exists(outpath):
logger.info('Extracting ' + fname + '.zip to ' + outpath + '...')
with zipfile.ZipFile(dfile) as zf:
zf.extractall(tmpdir)
# See if we're good, don't overfill the tmp directory
assert os.path.exists(outpath)
cfg.get_lru_handler(tmpdir).append(outpath)
return outpath
def _download_copdem_file(cppfile, tilename):
with get_lock():
return _download_copdem_file_unlocked(cppfile, tilename)
def _download_copdem_file_unlocked(cppfile, tilename):
"""Checks if Copernicus DEM file is in the directory, if not download it.
cppfile : name of the tarfile to download
tilename : name of folder and tif file within the cppfile
"""
# extract directory
tmpdir = cfg.PATHS['tmp_dir']
mkdir(tmpdir)
# tarfiles are extracted in directories per each tile
fpath = '{0}_DEM.tif'.format(tilename)
demfile = os.path.join(tmpdir, fpath)
# check if extracted file exists already
if os.path.exists(demfile):
return demfile
# Did we download it yet?
ftpfile = ('ftps://cdsdata.copernicus.eu:990/' +
'datasets/COP-DEM_GLO-90-DGED/2019_1/' +
cppfile)
dest_file = download_with_authentication(ftpfile,
'spacedata.copernicus.eu')
# None means we tried hard but we couldn't find it
if not dest_file:
return None
# ok we have to extract it
if not os.path.exists(demfile):
tiffile = os.path.join(tilename, 'DEM', fpath)
with tarfile.open(dest_file) as tf:
tmember = tf.getmember(tiffile)
# do not extract the full path of the file
tmember.name = os.path.basename(tf.getmember(tiffile).name)
tf.extract(tmember, tmpdir)
# See if we're good, don't overfill the tmp directory
assert os.path.exists(demfile)
cfg.get_lru_handler(tmpdir).append(demfile)
return demfile
def _download_aw3d30_file(zone):
with get_lock():
return _download_aw3d30_file_unlocked(zone)
def _download_aw3d30_file_unlocked(fullzone):
"""Checks if the AW3D30 data is in the directory and if not, download it.
"""
# extract directory
tmpdir = cfg.PATHS['tmp_dir']
mkdir(tmpdir)
# tarfiles are extracted in directories per each tile
tile = fullzone.split('/')[1]
demfile = os.path.join(tmpdir, tile, tile + '_AVE_DSM.tif')
# check if extracted file exists already
if os.path.exists(demfile):
return demfile
# Did we download it yet?
ftpfile = ('ftp://ftp.eorc.jaxa.jp/pub/ALOS/ext1/AW3D30/release_v1804/'
+ fullzone + '.tar.gz')
try:
dest_file = file_downloader(ftpfile, timeout=180)
except urllib.error.URLError:
# This error is raised if file is not available, could be water
return None
# None means we tried hard but we couldn't find it
if not dest_file:
return None
# ok we have to extract it
if not os.path.exists(demfile):
from oggm.utils import robust_tar_extract
dempath = os.path.dirname(demfile)
robust_tar_extract(dest_file, dempath)
# See if we're good, don't overfill the tmp directory
assert os.path.exists(demfile)
# this tarfile contains several files
for file in os.listdir(dempath):
cfg.get_lru_handler(tmpdir).append(os.path.join(dempath, file))
return demfile
def _download_mapzen_file(zone):
with get_lock():
return _download_mapzen_file_unlocked(zone)
def _download_mapzen_file_unlocked(zone):
"""Checks if the mapzen data is in the directory and if not, download it.
"""
bucket = 'elevation-tiles-prod'
prefix = 'geotiff'
url = 'http://s3.amazonaws.com/%s/%s/%s' % (bucket, prefix, zone)
# That's all
return file_downloader(url, timeout=180)
def _get_centerline_lonlat(gdir):
"""Quick n dirty solution to write the centerlines as a shapefile"""
cls = gdir.read_pickle('centerlines')
olist = []
for j, cl in enumerate(cls[::-1]):
mm = 1 if j == 0 else 0
gs = gpd.GeoSeries()
gs['RGIID'] = gdir.rgi_id
gs['LE_SEGMENT'] = np.rint(np.max(cl.dis_on_line) * gdir.grid.dx)
gs['MAIN'] = mm
tra_func = partial(gdir.grid.ij_to_crs, crs=wgs84)
gs['geometry'] = shp_trafo(tra_func, cl.line)
olist.append(gs)
return olist
def get_prepro_gdir(rgi_version, rgi_id, border, prepro_level, base_url=None):
with get_lock():
return _get_prepro_gdir_unlocked(rgi_version, rgi_id, border,
prepro_level, base_url=base_url)
def _get_prepro_gdir_unlocked(rgi_version, rgi_id, border, prepro_level,
base_url=None):
# Prepro URL
if base_url is None:
base_url = GDIR_URL
if not base_url.endswith('/'):
base_url += '/'
url = base_url
url += 'RGI{}/'.format(rgi_version)
url += 'b_{:03d}/'.format(border)
url += 'L{:d}/'.format(prepro_level)
url += '{}/{}.tar' .format(rgi_id[:8], rgi_id[:11])
tar_base = file_downloader(url)
if tar_base is None:
raise RuntimeError('Could not find file at ' + url)
return tar_base
def srtm_zone(lon_ex, lat_ex):
"""Returns a list of SRTM zones covering the desired extent.
"""
# SRTM are sorted in tiles of 5 degrees
srtm_x0 = -180.
srtm_y0 = 60.
srtm_dx = 5.
srtm_dy = -5.
# quick n dirty solution to be sure that we will cover the whole range
mi, ma = np.min(lon_ex), np.max(lon_ex)
# int() to avoid Deprec warning:
lon_ex = np.linspace(mi, ma, int(np.ceil((ma - mi) + 3)))
mi, ma = np.min(lat_ex), np.max(lat_ex)
# int() to avoid Deprec warning
lat_ex = np.linspace(mi, ma, int(np.ceil((ma - mi) + 3)))
zones = []
for lon in lon_ex:
for lat in lat_ex:
dx = lon - srtm_x0
dy = lat - srtm_y0
assert dy < 0
zx = np.ceil(dx / srtm_dx)
zy = np.ceil(dy / srtm_dy)
zones.append('{:02.0f}_{:02.0f}'.format(zx, zy))
return list(sorted(set(zones)))
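# Worked example (illustrative): a point in the Alps at about 10.5°E, 46.8°N
# falls on the 5x5 degree CGIAR tile '39_03' (zx = ceil(190.5/5) = 39,
# zy = ceil(-13.2/-5) = 3):
#
#     srtm_zone([10.5, 10.5], [46.8, 46.8])
#     # -> ['39_03']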
def _tandem_path(lon_tile, lat_tile):
# OK we have a proper tile now
# First folder level is sorted from S to N
level_0 = 'S' if lat_tile < 0 else 'N'
level_0 += '{:02d}'.format(abs(lat_tile))
# Second folder level is sorted from W to E, but in 10 steps
level_1 = 'W' if lon_tile < 0 else 'E'
level_1 += '{:03d}'.format(divmod(abs(lon_tile), 10)[0] * 10)
# Level 2 is formating, but depends on lat
level_2 = 'W' if lon_tile < 0 else 'E'
if abs(lat_tile) <= 60:
level_2 += '{:03d}'.format(abs(lon_tile))
elif abs(lat_tile) <= 80:
level_2 += '{:03d}'.format(divmod(abs(lon_tile), 2)[0] * 2)
else:
level_2 += '{:03d}'.format(divmod(abs(lon_tile), 4)[0] * 4)
# Final path
out = (level_0 + '/' + level_1 + '/' +
'TDM1_DEM__30_{}{}'.format(level_0, level_2))
return out
def tandem_zone(lon_ex, lat_ex):
"""Returns a list of TanDEM-X zones covering the desired extent.
"""
# Files are one by one tiles, so lets loop over them
# For higher lats they are stored in steps of 2 and 4. My code below
# is probably giving more files than needed but better safe than sorry
lat_tiles = np.arange(np.floor(lat_ex[0]), np.ceil(lat_ex[1]+1e-9),
dtype=int)
zones = []
for lat in lat_tiles:
if abs(lat) < 60:
l0 = np.floor(lon_ex[0])
l1 = np.floor(lon_ex[1])
elif abs(lat) < 80:
l0 = divmod(lon_ex[0], 2)[0] * 2
l1 = divmod(lon_ex[1], 2)[0] * 2
elif abs(lat) < 90:
l0 = divmod(lon_ex[0], 4)[0] * 4
l1 = divmod(lon_ex[1], 4)[0] * 4
lon_tiles = np.arange(l0, l1+1, dtype=int)
for lon in lon_tiles:
zones.append(_tandem_path(lon, lat))
return list(sorted(set(zones)))
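# Worked example (illustrative) of the tile path naming in _tandem_path for a
# tile at 10°E, 46°N (below 60° latitude, so full 1° longitude resolution):
#
#     _tandem_path(10, 46)
#     # -> 'N46/E010/TDM1_DEM__30_N46E010'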
def _aw3d30_path(lon_tile, lat_tile):
# OK we have a proper tile now
# Folders are sorted with N E S W in 5 degree steps
# But in N and E the lower boundary is indicated
# e.g. N060 contains N060 - N064
# e.g. E000 contains E000 - E004
# but S and W indicate the upper boundary:
# e.g. S010 contains S006 - S010
# e.g. W095 contains W091 - W095
# get letters
ns = 'S' if lat_tile < 0 else 'N'
ew = 'W' if lon_tile < 0 else 'E'
# get lat/lon
lon = abs(5 * np.floor(lon_tile/5))
lat = abs(5 * np.floor(lat_tile/5))
folder = '%s%.3d%s%.3d' % (ns, lat, ew, lon)
filename = '%s%.3d%s%.3d' % (ns, abs(lat_tile), ew, abs(lon_tile))
# Final path
out = folder + '/' + filename
return out
def aw3d30_zone(lon_ex, lat_ex):
"""Returns a list of AW3D30 zones covering the desired extent.
"""
# Files are one by one tiles, so lets loop over them
lon_tiles = np.arange(np.floor(lon_ex[0]), np.ceil(lon_ex[1]+1e-9),
dtype=int)
lat_tiles = np.arange(np.floor(lat_ex[0]), np.ceil(lat_ex[1]+1e-9),
dtype=int)
zones = []
for lon in lon_tiles:
for lat in lat_tiles:
zones.append(_aw3d30_path(lon, lat))
return list(sorted(set(zones)))
def _extent_to_polygon(lon_ex, lat_ex, to_crs=None):
if lon_ex[0] == lon_ex[1] and lat_ex[0] == lat_ex[1]:
out = shpg.Point(lon_ex[0], lat_ex[0])
else:
x = [lon_ex[0], lon_ex[1], lon_ex[1], lon_ex[0], lon_ex[0]]
y = [lat_ex[0], lat_ex[0], lat_ex[1], lat_ex[1], lat_ex[0]]
out = shpg.Polygon(np.array((x, y)).T)
if to_crs is not None:
out = salem.transform_geometry(out, to_crs=to_crs)
return out
def arcticdem_zone(lon_ex, lat_ex):
"""Returns a list of Arctic-DEM zones covering the desired extent.
"""
gdf = gpd.read_file(get_demo_file('ArcticDEM_Tile_Index_Rel7_by_tile.shp'))
p = _extent_to_polygon(lon_ex, lat_ex, to_crs=gdf.crs)
gdf = gdf.loc[gdf.intersects(p)]
return gdf.tile.values if len(gdf) > 0 else []
def rema_zone(lon_ex, lat_ex):
"""Returns a list of REMA-DEM zones covering the desired extent.
"""
gdf = gpd.read_file(get_demo_file('REMA_Tile_Index_Rel1.1.shp'))
p = _extent_to_polygon(lon_ex, lat_ex, to_crs=gdf.crs)
gdf = gdf.loc[gdf.intersects(p)]
return gdf.tile.values if len(gdf) > 0 else []
def alaska_dem_zone(lon_ex, lat_ex):
"""Returns a list of Alaska-DEM zones covering the desired extent.
"""
gdf = gpd.read_file(get_demo_file('Alaska_albers_V3_tiles.shp'))
p = _extent_to_polygon(lon_ex, lat_ex, to_crs=gdf.crs)
gdf = gdf.loc[gdf.intersects(p)]
return gdf.tile.values if len(gdf) > 0 else []
def copdem_zone(lon_ex, lat_ex):
"""Returns a list of Copernicus DEM tarfile and tilename tuples
"""
# path to the lookup shapefiles
gdf = gpd.read_file(get_demo_file('RGI60_COPDEM_lookup.shp'))
# intersect with lat lon extents
p = _extent_to_polygon(lon_ex, lat_ex, to_crs=gdf.crs)
gdf = gdf.loc[gdf.intersects(p)]
# COPDEM is global, if we miss any tile it is worth an error
if (len(gdf) == 0) or (not unary_union(gdf.geometry).contains(p)):
raise InvalidDEMError('Could not find all necessary Copernicus DEM '
'tiles. This should not happen in a global DEM. '
'Check the RGI-CopernicusDEM lookup shapefile '
'for this particular glacier!')
flist = []
for _, g in gdf.iterrows():
cpp = g['CPP File']
eop = g['Eop Id']
eop = eop.split(':')[-2]
assert 'Copernicus' in eop
flist.append((cpp, eop))
return flist
def dem3_viewpano_zone(lon_ex, lat_ex):
"""Returns a list of DEM3 zones covering the desired extent.
http://viewfinderpanoramas.org/Coverage%20map%20viewfinderpanoramas_org3.htm
"""
for _f in DEM3REG.keys():
if (np.min(lon_ex) >= DEM3REG[_f][0]) and \
(np.max(lon_ex) <= DEM3REG[_f][1]) and \
(np.min(lat_ex) >= DEM3REG[_f][2]) and \
(np.max(lat_ex) <= DEM3REG[_f][3]):
# test some weird inset files in Antarctica
if (np.min(lon_ex) >= -91.) and (np.max(lon_ex) <= -90.) and \
(np.min(lat_ex) >= -72.) and (np.max(lat_ex) <= -68.):
return ['SR15']
elif (np.min(lon_ex) >= -47.) and (np.max(lon_ex) <= -43.) and \
(np.min(lat_ex) >= -61.) and (np.max(lat_ex) <= -60.):
return ['SP23']
elif (np.min(lon_ex) >= 162.) and (np.max(lon_ex) <= 165.) and \
(np.min(lat_ex) >= -68.) and (np.max(lat_ex) <= -66.):
return ['SQ58']
# test some rogue Greenland tiles as well
elif (np.min(lon_ex) >= -72.) and (np.max(lon_ex) <= -66.) and \
(np.min(lat_ex) >= 76.) and (np.max(lat_ex) <= 80.):
return ['T19']
elif (np.min(lon_ex) >= -72.) and (np.max(lon_ex) <= -66.) and \
(np.min(lat_ex) >= 80.) and (np.max(lat_ex) <= 83.):
return ['U19']
elif (np.min(lon_ex) >= -66.) and (np.max(lon_ex) <= -60.) and \
(np.min(lat_ex) >= 80.) and (np.max(lat_ex) <= 83.):
return ['U20']
elif (np.min(lon_ex) >= -60.) and (np.max(lon_ex) <= -54.) and \
(np.min(lat_ex) >= 80.) and (np.max(lat_ex) <= 83.):
return ['U21']
elif (np.min(lon_ex) >= -54.) and (np.max(lon_ex) <= -48.) and \
(np.min(lat_ex) >= 80.) and (np.max(lat_ex) <= 83.):
return ['U22']
elif (np.min(lon_ex) >= -25.) and (np.max(lon_ex) <= -13.) and \
(np.min(lat_ex) >= 63.) and (np.max(lat_ex) <= 67.):
return ['ISL']
else:
return [_f]
# if the tile doesn't have a special name, its name can be found like this:
# corrected SRTMs are sorted in tiles of 6 deg longitude and 4 deg latitude
srtm_x0 = -180.
srtm_y0 = 0.
srtm_dx = 6.
srtm_dy = 4.
# quick n dirty solution to be sure that we will cover the whole range
mi, ma = np.min(lon_ex), np.max(lon_ex)
# TODO: Fabien, find out what Johannes wanted with this +3
# +3 is just for the number to become still a bit larger
# int() to avoid Deprec warning
lon_ex = np.linspace(mi, ma, int(np.ceil((ma - mi) / srtm_dy) + 3))
mi, ma = np.min(lat_ex), np.max(lat_ex)
# int() to avoid Deprec warning
lat_ex = np.linspace(mi, ma, int(np.ceil((ma - mi) / srtm_dx) + 3))
zones = []
for lon in lon_ex:
for lat in lat_ex:
dx = lon - srtm_x0
dy = lat - srtm_y0
zx = np.ceil(dx / srtm_dx)
# convert number to letter
zy = chr(int(abs(dy / srtm_dy)) + ord('A'))
if lat >= 0:
zones.append('%s%02.0f' % (zy, zx))
else:
zones.append('S%s%02.0f' % (zy, zx))
return list(sorted(set(zones)))
def aster_zone(lon_ex, lat_ex):
"""Returns a list of ASTGTMV3 zones covering the desired extent.
ASTER v3 tiles are 1 degree x 1 degree
N50 contains 50 to 50.9
E10 contains 10 to 10.9
S70 contains -69.99 to -69.0
W20 contains -19.99 to -19.0
"""
# adding small buffer for unlikely case where one lon/lat_ex == xx.0
lons = np.arange(np.floor(lon_ex[0]-1e-9), np.ceil(lon_ex[1]+1e-9))
lats = np.arange(np.floor(lat_ex[0]-1e-9), np.ceil(lat_ex[1]+1e-9))
zones = []
for lat in lats:
# north or south?
ns = 'S' if lat < 0 else 'N'
for lon in lons:
# east or west?
ew = 'W' if lon < 0 else 'E'
filename = 'ASTGTMV003_{}{:02.0f}{}{:03.0f}'.format(ns, abs(lat),
ew, abs(lon))
zones.append(filename)
return list(sorted(set(zones)))
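# Worked example (illustrative): a small extent around 10.2°E / 46.2°N maps to
# a single 1x1 degree ASTER v3 tile:
#
#     aster_zone([10.1, 10.4], [46.2, 46.3])
#     # -> ['ASTGTMV003_N46E010']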
def nasadem_zone(lon_ex, lat_ex):
"""Returns a list of NASADEM zones covering the desired extent.
NASADEM tiles are 1 degree x 1 degree
N50 contains 50 to 50.9
E10 contains 10 to 10.9
S70 contains -69.99 to -69.0
W20 contains -19.99 to -19.0
"""
# adding small buffer for unlikely case where one lon/lat_ex == xx.0
lons = np.arange(np.floor(lon_ex[0]-1e-9), np.ceil(lon_ex[1]+1e-9))
lats = np.arange(np.floor(lat_ex[0]-1e-9), np.ceil(lat_ex[1]+1e-9))
zones = []
for lat in lats:
# north or south?
ns = 's' if lat < 0 else 'n'
for lon in lons:
# east or west?
ew = 'w' if lon < 0 else 'e'
filename = '{}{:02.0f}{}{:03.0f}'.format(ns, abs(lat), ew,
abs(lon))
zones.append(filename)
return list(sorted(set(zones)))
def mapzen_zone(lon_ex, lat_ex, dx_meter=None, zoom=None):
"""Returns a list of AWS mapzen zones covering the desired extent.
For mapzen one has to specify the level of detail (zoom) one wants. The
best way in OGGM is to specify dx_meter of the underlying map and OGGM
will decide which zoom level works best.
"""
if dx_meter is None and zoom is None:
raise InvalidParamsError('Need either zoom level or dx_meter.')
bottom, top = lat_ex
left, right = lon_ex
ybound = 85.0511
if bottom <= -ybound:
bottom = -ybound
if top <= -ybound:
top = -ybound
if bottom > ybound:
bottom = ybound
if top > ybound:
top = ybound
if right >= 180:
right = 179.999
if left >= 180:
left = 179.999
if dx_meter:
# Find out the zoom so that we are close to the desired accuracy
lat = np.max(np.abs([bottom, top]))
zoom = int(np.ceil(math.log2((math.cos(lat * math.pi / 180) *
2 * math.pi * WEB_EARTH_RADUIS) /
(WEB_N_PIX * dx_meter))))
# According to this we should just always stay above 10 (sorry)
# https://github.com/tilezen/joerd/blob/master/docs/data-sources.md
zoom = 10 if zoom < 10 else zoom
# Code from planetutils
size = 2 ** zoom
xt = lambda x: int((x + 180.0) / 360.0 * size)
yt = lambda y: int((1.0 - math.log(math.tan(math.radians(y)) +
(1 / math.cos(math.radians(y))))
/ math.pi) / 2.0 * size)
tiles = []
for x in range(xt(left), xt(right) + 1):
for y in range(yt(top), yt(bottom) + 1):
tiles.append('/'.join(map(str, [zoom, x, str(y) + '.tif'])))
return tiles
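# Worked example (illustrative) of the zoom heuristic above, assuming the usual
# web-mercator constants (earth radius ~6378137 m, 256 px tiles): for a map
# resolution dx_meter=50 at ~46° latitude,
# cos(46°) * 2*pi*6378137 / (256 * 50) ≈ 2175 and ceil(log2(2175)) = 12,
# so tiles of the form '12/<x>/<y>.tif' would be requested.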
def get_demo_file(fname):
"""Returns the path to the desired OGGM-sample-file.
If Sample data is not cached it will be downloaded from
https://github.com/OGGM/oggm-sample-data
Parameters
----------
fname : str
Filename of the desired OGGM-sample-file
Returns
-------
str
Absolute path to the desired file.
"""
d = download_oggm_files()
if fname in d:
return d[fname]
else:
return None
def get_wgms_files():
"""Get the path to the default WGMS-RGI link file and the data dir.
Returns
-------
(file, dir) : paths to the files
"""
download_oggm_files()
sdir = os.path.join(cfg.CACHE_DIR,
'oggm-sample-data-%s' % SAMPLE_DATA_COMMIT,
'wgms')
datadir = os.path.join(sdir, 'mbdata')
assert os.path.exists(datadir)
outf = os.path.join(sdir, 'rgi_wgms_links_20200415.csv')
outf = pd.read_csv(outf, dtype={'RGI_REG': object})
return outf, datadir
def get_geodetic_files(geodetic_folder_path=None, geodetic_filename=None):
"""Get the path to the combined geodetic and WGMS-RGI link file and the data dir.
Returns
-------
(file, dir) : paths to the files
"""
download_oggm_files()
sdir = geodetic_folder_path
datadir = os.path.join(sdir, 'mbdata_with_geo')
assert os.path.exists(datadir)
outf = os.path.join(sdir, geodetic_filename)
outf = pd.read_csv(outf, dtype={'RGI_REG': object})
return outf, datadir
def get_glathida_file():
"""Get the path to the default GlaThiDa-RGI link file.
Returns
-------
file : paths to the file
"""
# Roll our own
download_oggm_files()
sdir = os.path.join(cfg.CACHE_DIR,
'oggm-sample-data-%s' % SAMPLE_DATA_COMMIT,
'glathida')
outf = os.path.join(sdir, 'rgi_glathida_links.csv')
assert os.path.exists(outf)
return outf
def get_rgi_dir(version=None, reset=False):
"""Path to the RGI directory.
If the RGI files are not present, download them.
Parameters
----------
version : str
'5', '6', defaults to None (linking to the one specified in cfg.PARAMS)
reset : bool
If True, deletes the RGI directory first and downloads the data
Returns
-------
str
path to the RGI directory
"""
with get_lock():
return _get_rgi_dir_unlocked(version=version, reset=reset)
def _get_rgi_dir_unlocked(version=None, reset=False):
rgi_dir = cfg.PATHS['rgi_dir']
if version is None:
version = cfg.PARAMS['rgi_version']
if len(version) == 1:
version += '0'
# Be sure the user gave a sensible path to the RGI dir
if not rgi_dir:
raise InvalidParamsError('The RGI data directory has to be'
'specified explicitly.')
rgi_dir = os.path.abspath(os.path.expanduser(rgi_dir))
rgi_dir = os.path.join(rgi_dir, 'RGIV' + version)
mkdir(rgi_dir, reset=reset)
if version == '50':
dfile = 'http://www.glims.org/RGI/rgi50_files/rgi50.zip'
elif version == '60':
dfile = 'http://www.glims.org/RGI/rgi60_files/00_rgi60.zip'
elif version == '61':
dfile = 'https://cluster.klima.uni-bremen.de/data/rgi/rgi_61.zip'
elif version == '62':
dfile = 'https://cluster.klima.uni-bremen.de/~oggm/rgi/rgi62.zip'
test_file = os.path.join(rgi_dir,
'*_rgi*{}_manifest.txt'.format(version))
if len(glob.glob(test_file)) == 0:
# if not there download it
ofile = file_downloader(dfile, reset=reset)
# Extract root
with zipfile.ZipFile(ofile) as zf:
zf.extractall(rgi_dir)
# Extract subdirs
pattern = '*_rgi{}_*.zip'.format(version)
for root, dirs, files in os.walk(cfg.PATHS['rgi_dir']):
for filename in fnmatch.filter(files, pattern):
zfile = os.path.join(root, filename)
with zipfile.ZipFile(zfile) as zf:
ex_root = zfile.replace('.zip', '')
mkdir(ex_root)
zf.extractall(ex_root)
# delete the zipfile after success
os.remove(zfile)
if len(glob.glob(test_file)) == 0:
raise RuntimeError('Could not find a manifest file in the RGI '
'directory: ' + rgi_dir)
return rgi_dir
def get_rgi_region_file(region, version=None, reset=False):
"""Path to the RGI region file.
If the RGI files are not present, download them.
Parameters
----------
region : str
from '01' to '19'
version : str
'5', '6', defaults to None (linking to the one specified in cfg.PARAMS)
reset : bool
If True, deletes the RGI directory first and downloads the data
Returns
-------
str
path to the RGI shapefile
"""
rgi_dir = get_rgi_dir(version=version, reset=reset)
f = list(glob.glob(rgi_dir + "/*/{}_*.shp".format(region)))
assert len(f) == 1
return f[0]
def get_rgi_glacier_entities(rgi_ids, version=None):
"""Get a list of glacier outlines selected from their RGI IDs.
Will download RGI data if not present.
Parameters
----------
rgi_ids : list of str
the glaciers you want the outlines for
version : str
the rgi version
Returns
-------
geopandas.GeoDataFrame
containing the desired RGI glacier outlines
"""
regions = [s.split('-')[1].split('.')[0] for s in rgi_ids]
if version is None:
version = rgi_ids[0].split('-')[0][-2:]
selection = []
for reg in sorted(np.unique(regions)):
sh = gpd.read_file(get_rgi_region_file(reg, version=version))
selection.append(sh.loc[sh.RGIId.isin(rgi_ids)])
# Make a new dataframe of those
selection = pd.concat(selection)
selection.crs = sh.crs # for geolocalisation
if len(selection) != len(rgi_ids):
raise RuntimeError('Could not find all RGI ids')
return selection
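# Illustrative usage (assumption: RGI v6 files are available or downloadable):
#
#     gdf = get_rgi_glacier_entities(['RGI60-11.00897'])  # Hintereisferner
#     # one-row GeoDataFrame read from the RGI region file '11'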
def get_rgi_intersects_dir(version=None, reset=False):
"""Path to the RGI directory containing the intersect files.
If the files are not present, download them.
Parameters
----------
version : str
'5', '6', defaults to None (linking to the one specified in cfg.PARAMS)
reset : bool
If True, deletes the intersects before redownloading them
Returns
-------
str
path to the directory
"""
with get_lock():
return _get_rgi_intersects_dir_unlocked(version=version, reset=reset)
def _get_rgi_intersects_dir_unlocked(version=None, reset=False):
rgi_dir = cfg.PATHS['rgi_dir']
if version is None:
version = cfg.PARAMS['rgi_version']
if len(version) == 1:
version += '0'
# Be sure the user gave a sensible path to the RGI dir
if not rgi_dir:
raise InvalidParamsError('The RGI data directory has to be'
'specified explicitly.')
rgi_dir = os.path.abspath(os.path.expanduser(rgi_dir))
mkdir(rgi_dir)
dfile = 'https://cluster.klima.uni-bremen.de/data/rgi/'
dfile += 'RGI_V{}_Intersects.zip'.format(version)
if version == '62':
dfile = ('https://cluster.klima.uni-bremen.de/~oggm/rgi/'
'rgi62_Intersects.zip')
odir = os.path.join(rgi_dir, 'RGI_V' + version + '_Intersects')
if reset and os.path.exists(odir):
shutil.rmtree(odir)
# A lot of code for backwards compat (sigh...)
if version in ['50', '60']:
test_file = os.path.join(odir, 'Intersects_OGGM_Manifest.txt')
if not os.path.exists(test_file):
# if not there download it
ofile = file_downloader(dfile, reset=reset)
# Extract root
with zipfile.ZipFile(ofile) as zf:
zf.extractall(odir)
if not os.path.exists(test_file):
raise RuntimeError('Could not find a manifest file in the RGI '
'directory: ' + odir)
else:
test_file = os.path.join(odir, '*ntersect*anifest.txt')
if len(glob.glob(test_file)) == 0:
# if not there download it
ofile = file_downloader(dfile, reset=reset)
# Extract root
with zipfile.ZipFile(ofile) as zf:
zf.extractall(odir)
# Extract subdirs
pattern = '*_rgi{}_*.zip'.format(version)
for root, dirs, files in os.walk(cfg.PATHS['rgi_dir']):
for filename in fnmatch.filter(files, pattern):
zfile = os.path.join(root, filename)
with zipfile.ZipFile(zfile) as zf:
ex_root = zfile.replace('.zip', '')
mkdir(ex_root)
zf.extractall(ex_root)
# delete the zipfile after success
os.remove(zfile)
if len(glob.glob(test_file)) == 0:
raise RuntimeError('Could not find a manifest file in the RGI '
'directory: ' + odir)
return odir
def get_rgi_intersects_region_file(region=None, version=None, reset=False):
"""Path to the RGI regional intersect file.
If the RGI files are not present, download them.
Parameters
----------
region : str
from '00' to '19', with '00' being the global file (deprecated).
From RGI version '61' onwards, please use `get_rgi_intersects_entities`
with a list of glaciers instead of relying on the global file.
version : str
'5', '6', '61'... defaults the one specified in cfg.PARAMS
reset : bool
If True, deletes the intersect file before redownloading it
Returns
-------
str
path to the RGI intersects shapefile
"""
if version is None:
version = cfg.PARAMS['rgi_version']
if len(version) == 1:
version += '0'
rgi_dir = get_rgi_intersects_dir(version=version, reset=reset)
if region == '00':
if version in ['50', '60']:
version = 'AllRegs'
region = '*'
else:
raise InvalidParamsError("From RGI version 61 onwards, please use "
"get_rgi_intersects_entities() instead.")
f = list(glob.glob(os.path.join(rgi_dir, "*", '*intersects*' + region +
'_rgi*' + version + '*.shp')))
assert len(f) == 1
return f[0]
def get_rgi_intersects_entities(rgi_ids, version=None):
"""Get a list of glacier intersects selected from their RGI IDs.
Parameters
----------
rgi_ids: list of str
list of rgi_ids you want to look for intersections for
version: str
'5', '6', '61'... defaults the one specified in cfg.PARAMS
Returns
-------
geopandas.GeoDataFrame
with the selected intersects
"""
if version is None:
version = cfg.PARAMS['rgi_version']
if len(version) == 1:
version += '0'
regions = [s.split('-')[1].split('.')[0] for s in rgi_ids]
selection = []
for reg in sorted(np.unique(regions)):
sh = gpd.read_file(get_rgi_intersects_region_file(reg,
version=version))
selection.append(sh.loc[sh.RGIId_1.isin(rgi_ids) |
sh.RGIId_2.isin(rgi_ids)])
# Make a new dataframe of those
selection = pd.concat(selection)
selection.crs = sh.crs # for geolocalisation
return selection
def is_dem_source_available(source, lon_ex, lat_ex):
"""Checks if a DEM source is available for your purpose.
This is only a very rough check! It doesn't mean that the data really is
available, but at least it's worth a try.
Parameters
----------
source : str, required
the source you want to check for
lon_ex : tuple or int, required
a (min_lon, max_lon) tuple delimiting the requested area longitudes
lat_ex : tuple or int, required
a (min_lat, max_lat) tuple delimiting the requested area latitudes
Returns
-------
True or False
"""
from oggm.utils import tolist
lon_ex = tolist(lon_ex, length=2)
lat_ex = tolist(lat_ex, length=2)
def _in_grid(grid_json, lon, lat):
i, j = cfg.DATA['dem_grids'][grid_json].transform(lon, lat,
maskout=True)
return np.all(~ (i.mask | j.mask))
if source == 'GIMP':
return _in_grid('gimpdem_90m_v01.1.json', lon_ex, lat_ex)
elif source == 'ARCTICDEM':
return _in_grid('arcticdem_mosaic_100m_v3.0.json', lon_ex, lat_ex)
elif source == 'RAMP':
return _in_grid('AntarcticDEM_wgs84.json', lon_ex, lat_ex)
elif source == 'REMA':
return _in_grid('REMA_100m_dem.json', lon_ex, lat_ex)
elif source == 'ALASKA':
return _in_grid('Alaska_albers_V3.json', lon_ex, lat_ex)
elif source == 'TANDEM':
return True
elif source == 'AW3D30':
return np.min(lat_ex) > -60
elif source == 'MAPZEN':
return True
elif source == 'DEM3':
return True
elif source == 'ASTER':
return True
elif source == 'SRTM':
return np.max(np.abs(lat_ex)) < 60
elif source == 'COPDEM':
return True
elif source == 'NASADEM':
return (np.min(lat_ex) > -56) and (np.max(lat_ex) < 60)
elif source == 'USER':
return True
elif source is None:
return True
def default_dem_source(lon_ex, lat_ex, rgi_region=None, rgi_subregion=None):
"""Current default DEM source at a given location.
Parameters
----------
lon_ex : tuple or int, required
a (min_lon, max_lon) tuple delimiting the requested area longitudes
lat_ex : tuple or int, required
a (min_lat, max_lat) tuple delimiting the requested area latitudes
rgi_region : str, optional
the RGI region number (required for the GIMP DEM)
rgi_subregion : str, optional
the RGI subregion str (useful for RGI Reg 19)
Returns
-------
the chosen DEM source
"""
from oggm.utils import tolist
lon_ex = tolist(lon_ex, length=2)
lat_ex = tolist(lat_ex, length=2)
# GIMP is in polar stereographic, not easy to test if glacier is on the map
# It would be possible with a salem grid but this is a bit more expensive
# Instead, we are just asking RGI for the region
if rgi_region is not None and int(rgi_region) == 5:
return 'GIMP'
# ARCTIC DEM is not yet automatized
# If we have to automatise this one day, we should use the shapefile
# of the tiles, and then check for RGI region:
# use_without_check = ['03', '05', '06', '07', '09']
# to_test_on_shape = ['01', '02', '04', '08']
# Antarctica
if rgi_region is not None and int(rgi_region) == 19:
if rgi_subregion is None:
raise InvalidParamsError('Must specify subregion for Antarctica')
if rgi_subregion in ['19-01', '19-02', '19-03', '19-04', '19-05']:
# special case for some distant islands
return 'DEM3'
return 'RAMP'
# In high latitudes and an exceptional region in Eastern Russia, DEM3
# exceptional test for eastern russia:
if ((np.min(lat_ex) < -60.) or (np.max(lat_ex) > 60.) or
(np.min(lat_ex) > 59 and np.min(lon_ex) > 170)):
return 'DEM3'
# Everywhere else SRTM
return 'SRTM'
def get_topo_file(lon_ex, lat_ex, rgi_region=None, rgi_subregion=None,
dx_meter=None, zoom=None, source=None):
"""Path(s) to the DEM file(s) covering the desired extent.
If the needed files for covering the extent are not present, download them.
By default it will use SRTM for [60°S; 60°N], GIMP for Greenland,
RAMP for Antarctica, and a corrected DEM3 (viewfinderpanoramas.org)
elsewhere.
A user-specified data source can be given with the ``source`` keyword.
Parameters
----------
lon_ex : tuple or int, required
a (min_lon, max_lon) tuple delimiting the requested area longitudes
lat_ex : tuple or int, required
a (min_lat, max_lat) tuple delimiting the requested area latitudes
rgi_region : str, optional
the RGI region number (required for the GIMP DEM)
rgi_subregion : str, optional
the RGI subregion str (useful for RGI Reg 19)
dx_meter : float, required for source='MAPZEN'
the resolution of the glacier map (to decide the zoom level of mapzen)
zoom : int, optional
if you know the zoom already (for MAPZEN only)
source : str or list of str, optional
Name of specific DEM source. See gis.define_glacier_region for details
Returns
-------
tuple: (list with path(s) to the DEM file(s), data source str)
"""
from oggm.utils import tolist
lon_ex = tolist(lon_ex, length=2)
lat_ex = tolist(lat_ex, length=2)
if source is not None and not isinstance(source, str):
# check all user options
for s in source:
demf, source_str = get_topo_file(lon_ex, lat_ex,
rgi_region=rgi_region,
rgi_subregion=rgi_subregion,
source=s)
if demf[0]:
return demf, source_str
# Did the user specify a specific DEM file?
if 'dem_file' in cfg.PATHS and os.path.isfile(cfg.PATHS['dem_file']):
source = 'USER' if source is None else source
if source == 'USER':
return [cfg.PATHS['dem_file']], source
# Some logic to decide which source to take if unspecified
if source is None:
source = default_dem_source(lon_ex, lat_ex, rgi_region=rgi_region,
rgi_subregion=rgi_subregion)
if source not in DEM_SOURCES:
raise InvalidParamsError('`source` must be one of '
'{}'.format(DEM_SOURCES))
# OK go
files = []
if source == 'GIMP':
_file = _download_topo_file_from_cluster('gimpdem_90m_v01.1.tif')
files.append(_file)
if source == 'ARCTICDEM':
zones = arcticdem_zone(lon_ex, lat_ex)
for z in zones:
with get_lock():
url = 'https://cluster.klima.uni-bremen.de/~oggm/'
url += 'dem/ArcticDEM_100m_v3.0/'
url += '{}_100m_v3.0/{}_100m_v3.0_reg_dem.tif'.format(z, z)
files.append(file_downloader(url))
if source == 'RAMP':
_file = _download_topo_file_from_cluster('AntarcticDEM_wgs84.tif')
files.append(_file)
if source == 'ALASKA':
zones = alaska_dem_zone(lon_ex, lat_ex)
for z in zones:
with get_lock():
url = 'https://cluster.klima.uni-bremen.de/~oggm/'
url += 'dem/Alaska_albers_V3/'
url += '{}_Alaska_albers_V3/'.format(z)
url += '{}_Alaska_albers_V3.tif'.format(z)
files.append(file_downloader(url))
if source == 'REMA':
zones = rema_zone(lon_ex, lat_ex)
for z in zones:
with get_lock():
url = 'https://cluster.klima.uni-bremen.de/~oggm/'
url += 'dem/REMA_100m_v1.1/'
url += '{}_100m_v1.1/{}_100m_v1.1_reg_dem.tif'.format(z, z)
files.append(file_downloader(url))
if source == 'TANDEM':
zones = tandem_zone(lon_ex, lat_ex)
for z in zones:
files.append(_download_tandem_file(z))
if source == 'AW3D30':
zones = aw3d30_zone(lon_ex, lat_ex)
for z in zones:
files.append(_download_aw3d30_file(z))
if source == 'MAPZEN':
zones = mapzen_zone(lon_ex, lat_ex, dx_meter=dx_meter, zoom=zoom)
for z in zones:
files.append(_download_mapzen_file(z))
if source == 'ASTER':
zones = aster_zone(lon_ex, lat_ex)
for z in zones:
files.append(_download_aster_file(z))
if source == 'DEM3':
zones = dem3_viewpano_zone(lon_ex, lat_ex)
for z in zones:
files.append(_download_dem3_viewpano(z))
if source == 'SRTM':
zones = srtm_zone(lon_ex, lat_ex)
for z in zones:
files.append(_download_srtm_file(z))
if source == 'COPDEM':
filetuple = copdem_zone(lon_ex, lat_ex)
for cpp, eop in filetuple:
files.append(_download_copdem_file(cpp, eop))
if source == 'NASADEM':
zones = nasadem_zone(lon_ex, lat_ex)
for z in zones:
files.append(_download_nasadem_file(z))
# filter for None (e.g. oceans)
files = [s for s in files if s]
if files:
return files, source
else:
raise InvalidDEMError('Source: {2} no topography file available for '
'extent lat:{0}, lon:{1}!'.
format(lat_ex, lon_ex, source))
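# Illustrative usage (assumption: no user DEM file is set in cfg.PATHS): for a
# small extent in the Alps, default_dem_source() picks SRTM and a single
# GeoTIFF path is returned:
#
#     dem_files, dem_source = get_topo_file([10.1, 10.3], [46.2, 46.4])
#     # dem_source == 'SRTM'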
def get_cmip5_file(filename, reset=False):
"""Download a global CMIP5 file.
List of files: https://cluster.klima.uni-bremen.de/~nicolas/cmip5-ng/
Parameters
----------
filename : str
the file to download, e.g 'pr_ann_ACCESS1-3_rcp85_r1i1p1_g025.nc'
or 'tas_ann_ACCESS1-3_rcp45_r1i1p1_g025.nc'
reset : bool
force re-download of an existing file
Returns
-------
the path to the netCDF file
"""
prefix = filename.split('_')[0]
dfile = CMIP5_URL + prefix + '/' + filename
return file_downloader(dfile, reset=reset)
def get_ref_mb_glaciers_candidates(rgi_version=None):
"""Reads in the WGMS list of glaciers with available MB data.
Can be found afterwards (and extended) in cfg.DATA['RGIXX_ref_ids'].
"""
if rgi_version is None:
rgi_version = cfg.PARAMS['rgi_version']
if len(rgi_version) == 2:
# We might change this one day
rgi_version = rgi_version[:1]
key = 'RGI{}0_ref_ids'.format(rgi_version)
if key not in cfg.DATA:
flink, _ = get_wgms_files()
cfg.DATA[key] = flink['RGI{}0_ID'.format(rgi_version)].tolist()
return cfg.DATA[key]
def get_ref_mb_glaciers(gdirs):
"""Get the list of glaciers we have valid mass balance measurements for.
To be valid glaciers must have more than 5 years of measurements and
be land terminating. Therefore, the list depends on the time period of the
baseline climate data and this method selects them out of a list
of potential candidates (`gdirs` arg).
Parameters
----------
gdirs : list of :py:class:`oggm.GlacierDirectory` objects
list of glaciers to check for valid reference mass balance data
Returns
-------
ref_gdirs : list of :py:class:`oggm.GlacierDirectory` objects
list of those glaciers with valid reference mass balance data
See Also
--------
get_ref_mb_glaciers_candidates
"""
# Get the links
ref_ids = get_ref_mb_glaciers_candidates(gdirs[0].rgi_version)
# We remove tidewater glaciers and glaciers with < 5 years
ref_gdirs = []
for g in gdirs:
if g.rgi_id not in ref_ids or g.is_tidewater:
continue
try:
mbdf = g.get_ref_mb_data()
if len(mbdf) >= 5:
ref_gdirs.append(g)
except RuntimeError as e:
if 'Please process some climate data before call' in str(e):
raise
return ref_gdirs
def get_ref_mb_glaciers_candidates_geodetic(rgi_version=None, folder_path=None,filename=None):
"""Reads in the WGMS list of glaciers with available MB data.
Can be found afterwards (and extended) in cdf.DATA['RGIXX_ref_ids'].
"""
if rgi_version is None:
rgi_version = cfg.PARAMS['rgi_version']
if len(rgi_version) == 2:
# We might change this one day
rgi_version = rgi_version[:1]
key = 'RGI{}0_ref_ids'.format(rgi_version)
if key not in cfg.DATA:
flink, _ = get_geodetic_files(geodetic_folder_path=folder_path, geodetic_filename=filename)
cfg.DATA[key] = flink['RGI{}0_ID'.format(rgi_version)].tolist()
return cfg.DATA[key]
def get_ref_mb_glaciers_geodetic(gdirs,temp_geodetic_folder_path=None,temp_geodetic_filename=None):
"""Get the list of glaciers we have valid mass balance measurements for.
To be valid glaciers must have more than 5 years of measurements and
be land terminating. Therefore, the list depends on the time period of the
baseline climate data and this method selects them out of a list
of potential candidates (`gdirs` arg).
Parameters
----------
gdirs : list of :py:class:`oggm.GlacierDirectory` objects
list of glaciers to check for valid reference mass balance data
Returns
-------
ref_gdirs : list of :py:class:`oggm.GlacierDirectory` objects
list of those glaciers with valid reference mass balance data
See Also
--------
get_ref_mb_glaciers_candidates
"""
# Get the links
ref_ids = get_ref_mb_glaciers_candidates_geodetic(gdirs[0].rgi_version,folder_path=temp_geodetic_folder_path,filename=temp_geodetic_filename)
# We remove tidewater glaciers and glaciers with < 5 years
ref_gdirs = []
for g in gdirs:
if g.rgi_id not in ref_ids or g.is_tidewater:
continue
try:
mbdf = g.get_ref_mb_data_geodetic(folder_path=temp_geodetic_folder_path,filename=temp_geodetic_filename)
if len(mbdf) >= 5:
ref_gdirs.append(g)
except RuntimeError as e:
if 'Please process some climate data before call' in str(e):
raise
return ref_gdirs
|
py | 1a47e87dce986706596558832bb7e11eb542cac8 | import calendar
from processing.lifestore_tables import lifestore_products, lifestore_sales, lifestore_searches
from processing.filters_df import Filters
class Service(Filters):
"""
Clase que agrupa diversas operaciones de consulta de los datos de LifeStore, para apoyar
el análisis de información.
Esta clase es hija de la clase Filters, por lo que tiene acceso a sus métodos.
"""
# General methods
def count_sales(self, id_product: int or None = None, start_date: str or None = None,
end_date: str or None = None, score_min: int or None = None,
score_max: int or None = None, refund_status: bool or None = None) -> int:
"""
Consulta la tabla de ventas con una serie de filtros de producto y/o tiempo, extrayendo
solo las filas de interés. Mediante un conteo de las ventas que cumplen con las condiciones
del caso, se determina el número de ventas.
Args:
id_product (int or None, optional): Id de producto. Defaults to None.
start_date (str or None, optional): Fecha de inicio de periodo de ventas considerado. Defaults to None.
end_date (str or None, optional): Fecha de inicio de periodo de ventas considerado. Defaults to None.
score_min (int or None, optional): Calificación mínima de venta. Defaults to None.
score_max (int or None, optional): Calificación máxima de venta. Defaults to None.
refund_status (bool or None, optional): Ventas devueltas (True) o no devueltas (False). Defaults to None.
Returns:
int: Número de ventas que tuvo el caso solicitado.
"""
# Get a sub-table of the sales table with the rows that satisfy the given filters
sales_df = self._Filters__filter_sales_df(id_product = id_product, start_date = start_date,
end_date = end_date, refund = refund_status,
score_min = score_min, score_max = score_max)
# Count sales via the number of rows of the filtered table
sales_number = len(sales_df)
return sales_number
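# Illustrative usage sketch (assumption: Service/Filters can be instantiated
# without arguments; product id 1 is hypothetical): 2020 sales that were not
# refunded.
#
#     service = Service()
#     n = service.count_sales(id_product=1, start_date='2020-01-01',
#                             end_date='2020-12-31', refund_status=False)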
def calculate_income(self, id_product: int or None = None, start_date: str or None = None,
end_date: str or None = None, score_min: int or None = None,
score_max: int or None = None, refund_status: bool or None = None) -> int:
"""
Obtiene el total de ingresos para un producto especifico o todos los productos
que cumplan con los filtros en las entradas.
Args:
id_product (int or None, optional): Id de producto. Defaults to None.
start_date (str or None, optional): Fecha de inicio de periodo de ventas considerado. Defaults to None.
end_date (str or None, optional): Fecha de inicio de periodo de ventas considerado. Defaults to None.
score_min (int or None, optional): Calificación mínima de venta. Defaults to None.
score_max (int or None, optional): Calificación máxima de venta. Defaults to None.
refund_status (bool or None, optional): Ventas devueltas (True) o no devueltas (False). Defaults to None.
Returns:
int: Total de ingresos para caso solicitado.
"""
income = 0
# If an id_product was included in the request, compute the income for that product only
if id_product is not None:
product_sales = self.count_sales(id_product, start_date, end_date, score_min,
score_max, refund_status)
income = product_sales * lifestore_products["price"][lifestore_products["id_product"] == id_product].item()
# If no id_product was given, iterate over each id and add up the income of every product
else:
for row in lifestore_products.iterrows():
# Get the product id
id_product = row[1]['id_product']
# Count the product's sales
product_sales = self.count_sales(id_product, start_date, end_date, score_min,
score_max, refund_status)
# Add the product's income to the total income
income += product_sales * lifestore_products["price"][lifestore_products["id_product"] == id_product].item()
return income
def count_searches(self, id_product: int or None):
"""
Consulta la tabla de búsquedas con un filtro de producto y cuenta la cantidad
de búsquedas que cumplan con las condiciones indicadas.
Args:
id_product (int or None, optional): Id de producto. Defaults to None.
Returns:
int: número de búsquedas que tuvo el caso solicitado.
"""
# Get the filtered searches table. If there is no filter, the full table is returned
searches_df = self._Filters__filter_searches_df(id_product = id_product)
# Count searches
searches_number = len(searches_df)
return searches_number
# Time-related methods
def get_year_sales(self, year:int, id_product: int or None = None, refund_status: bool or None = None) -> int:
"""
Obtiene el número de ventas anuales. Cuenta con algunos filtros opcionales que permiten
considerar únicamente un producto o descartar las ventas que terminaron en devolución.
Args:
year (int): Año seleccionado.
id_product (int or None, optional): Id de producto. Defaults to None.
refund_status (bool or None, optional): Filtro de devoluciones. Defaults to None.
Returns:
int: Número de ventas anuales.
"""
# Define the start-of-year and end-of-year dates for the filters
year_start = f"{year}-01-01"
year_end = f"{year}-12-31"
# Use count_sales to get the yearly sales data with the given filters
sales_number = self.count_sales(start_date=year_start, end_date=year_end,
id_product=id_product, refund_status=refund_status)
return sales_number
def get_year_income(self, year:int, id_product: int or None = None, refund_status: bool or None = None) -> int:
"""
Obtiene los ingresos anuales. Cuenta con algunos filtros opcionales que permiten
considerar únicamente un producto o descartar las ventas que terminaron en devolución.
Args:
year (int): Año seleccionado.
id_product (int or None, optional): Id de producto. Defaults to None.
refund_status (bool or None, optional): Filtro de devoluciones. Defaults to None.
Returns:
int: Ingresos anuales.
"""
# Define the start-of-year and end-of-year dates for the filters
year_start = f"{year}-01-01"
year_end = f"{year}-12-31"
# Use calculate_income to get the yearly income data with the given filters
income = self.calculate_income(start_date=year_start, end_date=year_end,
id_product=id_product, refund_status=refund_status)
return income
def get_monthly_sales(self, year: int, id_product: int or None = None, refund_status: bool or None = None) -> dict:
"""
Obtiene el número de ventas de cada mes. Cuenta con algunos filtros opcionales que
permiten considerar únicamente un producto o descartar las ventas que terminaron en devolución.
Las ventas de cada mes se organizan en un diccionario.
Args:
year (int): Año seleccionado.
id_product (int or None, optional): Id de producto. Defaults to None.
refund_status (bool or None, optional): Filtro de devoluciones. Defaults to None.
Returns:
dict: Número de ventas de cada mes.
"""
# Initialize variables
sales_dict = {1:0, 2:0, 3:0, 4:0, 5:0, 6:0, 7:0, 8:0, 9:0, 10:0, 11:0, 12:0} # month: sales_number_of_month
# For loop to get the number of sales of each month
for month in range(1,13):
# Get the number of days in the month to know its last day
month_days = calendar.monthrange(year, month)[1]
# Define the start and end dates of the month for the filters
month_start = f"{year}-{month:02d}-01"
month_end = f"{year}-{month:02d}-{month_days:02d}"
# Get the sales of that month
sales_dict[month] = self.count_sales(start_date=month_start, end_date=month_end,
id_product=id_product, refund_status=refund_status)
return sales_dict
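# Illustrative usage sketch (assumption: 'service' is a Service instance):
# monthly sales of 2020 across all products, keyed by month number 1-12.
#
#     monthly = service.get_monthly_sales(2020)
#     # -> {1: ..., 2: ..., ..., 12: ...}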
def get_monthly_income(self, year: int, id_product: int or None = None, refund_status: bool or None = None) -> dict:
"""
Obtiene los ingresos mensuales. Cuenta con algunos filtros opcionales que permiten
considerar únicamente un producto o descartar las ventas que terminaron en devolución.
Las ventas de cada mes se organizan en un diccionario.
Args:
year (int): Año seleccionado.
id_product (int or None, optional): Id de producto. Defaults to None.
refund_status (bool or None, optional): Filtro de devoluciones. Defaults to None.
Returns:
dict: Ingresos de cada mes.
"""
# Initialize variables
income_dict = {1:0, 2:0, 3:0, 4:0, 5:0, 6:0, 7:0, 8:0, 9:0, 10:0, 11:0, 12:0} # month: income_of_month
# For loop to get the income of each month
for month in range(1,13):
# Get the number of days in the month to know its last day
month_days = calendar.monthrange(year, month)[1]
# Define the start and end dates of the month for the filters
month_start = f"{year}-{month:02d}-01"
month_end = f"{year}-{month:02d}-{month_days:02d}"
# Get the income of that month
income_dict[month] = self.calculate_income(start_date=month_start, end_date=month_end,
id_product=id_product, refund_status=refund_status)
return income_dict
def get_products_sales(self, refund_status: bool or None = None,
start_date: str or None = None, end_date: str or None = None) -> dict:
"""
Gets the number of sales of each product in the store.
Sales can be filtered by dates.
Sales that ended in a refund can be included or omitted.
Args:
refund_status (bool or None, optional): Refunded sales (True) or non-refunded sales (False). Defaults to None.
start_date (str or None, optional): Start date of the sales period considered. Defaults to None.
end_date (str or None, optional): End date of the sales period considered. Defaults to None.
Returns:
dict: Sales per product. The key of each element is the product id,
while the value is the number of sales.
"""
# Initialize variables
products_sales = {}
# Loop over each distinct product in the products table
for row in lifestore_products.iterrows():
# Get the id
id_product = row[1]['id_product']
# Count the product's sales
sales_number = self.count_sales(id_product=id_product, refund_status=refund_status,
start_date=start_date, end_date=end_date)
# Store the result in the dictionary
products_sales[id_product] = sales_number
return products_sales
def get_products_searches(self) -> dict:
"""
Gets the number of searches for each product in the store.
Returns:
dict: Searches per product. The key of each element is the product id,
while the value is the number of searches.
"""
# Initialize variables
products_searches = {}
# Loop over each distinct product in the products table
for row in lifestore_products.iterrows():
# Get the id
id_product = row[1]['id_product']
# Count the product's searches
searches = self.count_searches(id_product=id_product)
# Store the result in the dictionary
products_searches[id_product] = searches
return products_searches
def get_product_name(self, id_product: int) -> str:
""" Gets the product name from the lifestore_products table.
Args:
id_product (int): Product ID.
Returns:
str: Product name.
"""
name = lifestore_products.loc[lifestore_products['id_product']== id_product, 'name'].item()
return name
def get_product_stock(self, id_product: int) -> int:
"""
Gets the number of units of a product in inventory,
from the lifestore_products table.
Args:
id_product (int): Product ID.
Returns:
int: Units of the product in inventory.
"""
stock = lifestore_products.loc[lifestore_products['id_product']== id_product, 'stock'].item()
return stock
def get_product_grades(self, reviews_weight:float = 0.6, refunds_weight:float = 0.4,
start_date: str or None = None, end_date: str or None = None) -> dict:
"""
Computes the rating of the products sold, considering the customer scores
given to each sale and the number of refunds each product has.
To combine both factors a weight is assigned to each one. These weights
satisfy the following expression:
reviews_weight + refunds_weight = 1
Args:
reviews_weight (float, optional): Weight of customer scores. Defaults to 0.6.
refunds_weight (float, optional): Weight of refunds. Defaults to 0.4.
start_date (str or None, optional): Start date of the sales period considered. Defaults to None.
end_date (str or None, optional): End date of the sales period considered. Defaults to None.
Returns:
dict: Rating of each product.
"""
product_grades = {}
# Loop over each distinct product in the products table
for row in lifestore_products.iterrows():
# Get the id
id_product = row[1]['id_product']
# For that id, get the product's sales table
sales_df = self._Filters__filter_sales_df(id_product = id_product, start_date = start_date,
end_date = end_date)
# Determine the total number of sales
total_sales = len(sales_df)
# If there are sales, review the product's scores; otherwise the grade is N.D. (not available)
if total_sales > 0:
# Case: there are sales
# Get the average customer review score
reviews_mean = sales_df["score"].mean()
# Normalize the review score to a value between 0 and 1
reviews_normalized = (reviews_mean-1)/(5-1)
# Count refunds
total_refunds = len(sales_df[sales_df["refund"] > 0])
# Get the ratio of non-refunded sales to total sales of the product
refunds_pct = 1 - total_refunds/total_sales
# Compute the grade by weighting the reviews and the share of non-refunded sales
product_grades[id_product] = round((reviews_weight*reviews_normalized + refunds_weight*refunds_pct)*100, 2)
else:
# Case: no sales
product_grades[id_product] = 'N.D.'
return product_grades
def count_category_products(self) -> dict:
"""
Counts the existing products in each category.
Returns:
dict: Dictionary with the product count of each category. (category: amount_of_products)
"""
# Initialize variables
products_per_category = {}
# Identify the different categories in the products table
categories = lifestore_products.category.unique().tolist()
for category in categories:
# Filter the products table by the category
product_df = self._Filters__filter_products_df(category = category)
# Count the elements in the category's products table
products_per_category[category] = len(product_df)
return products_per_category
def get_category_sales(self, id_product: int or None = None, refund_status: bool or None = None,
start_date: str or None = None, end_date: str or None = None) -> dict:
""" Classifies the company's sales into the different product categories.
Args:
id_product (int or None, optional): Product id. Defaults to None.
refund_status (bool or None, optional): Refunded sales (True) or non-refunded sales (False). Defaults to None.
start_date (str or None, optional): Start date of the sales period considered. Defaults to None.
end_date (str or None, optional): End date of the sales period considered. Defaults to None.
Returns:
dict: Total sales per product category.
"""
# Initialize variables
sales_per_category = {}
# Get the sales of each product
product_sales = self.get_products_sales(refund_status=refund_status, start_date=start_date, end_date=end_date)
# Identify the different categories in the products table
categories = lifestore_products.category.unique().tolist()
for category in categories:
sales_number = 0
# Filter the products table by the category
product_df = self._Filters__filter_products_df(category = category)
# Add up the sales of each product in the filtered table
for row in product_df.iterrows():
# Get the id
id_product = row[1]['id_product']
# Add the product's sales to the category total
sales_number += product_sales[id_product]
# Store the category's sales total
sales_per_category[category] = sales_number
return sales_per_category
def get_category_income(self, id_product: int or None = None, refund_status: bool or None = None,
start_date: str or None = None, end_date: str or None = None) -> dict:
""" Classifies the company's income into the different product categories.
Args:
id_product (int or None, optional): Product id. Defaults to None.
refund_status (bool or None, optional): Refunded sales (True) or non-refunded sales (False). Defaults to None.
start_date (str or None, optional): Start date of the sales period considered. Defaults to None.
end_date (str or None, optional): End date of the sales period considered. Defaults to None.
Returns:
dict: Total income per product category.
"""
income_per_category = {}
# Identify the different categories in the products table
categories = lifestore_products.category.unique().tolist()
for category in categories:
income = 0
# Filter the products table by the category
product_df = self._Filters__filter_products_df(category = category)
# Add up the income of each product in the filtered table
for row in product_df.iterrows():
# Get the id
id_product = row[1]['id_product']
# Add the product's income to the category total
income += self.calculate_income(id_product= id_product, refund_status=refund_status,
start_date=start_date, end_date=end_date)
# Store the category's income total
income_per_category[category] = income
return income_per_category
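# Usage sketch (assuming an instance of the enclosing reporting class is available as `report`
# and that count_sales/calculate_income are defined earlier in the class):
# report.get_year_sales(2020) # total sales in 2020
# report.get_monthly_income(2020, refund_status=False) # {1: ..., ..., 12: ...}, refunded sales excluded
# report.get_category_income(start_date="2020-01-01", end_date="2020-06-30")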
|
py | 1a47e8c1ff04c444ddb2dba2004f1dac854b423b | """
post to api data from sanitized_reference_json/
python post_reference_to_api.py
update okta_token only
python post_reference_to_api.py -a
keys that exist in data
2021-05-25 21:16:53,372 - literature logger - INFO - key abstract
2021-05-25 21:16:53,372 - literature logger - INFO - key citation
2021-05-25 21:16:53,372 - literature logger - INFO - key datePublished
2021-05-25 21:16:53,373 - literature logger - INFO - key dateArrivedInPubmed
2021-05-25 21:16:53,373 - literature logger - INFO - key dateLastModified
2021-05-25 21:16:53,373 - literature logger - INFO - key keywords
2021-05-25 21:16:53,373 - literature logger - INFO - key crossReferences
2021-05-25 21:16:53,373 - literature logger - INFO - key title
2021-05-25 21:16:53,373 - literature logger - INFO - key tags
2021-05-25 21:16:53,373 - literature logger - INFO - key issueName
2021-05-25 21:16:53,373 - literature logger - INFO - key issueDate
2021-05-25 21:16:53,373 - literature logger - INFO - key MODReferenceType
2021-05-25 21:16:53,373 - literature logger - INFO - key pubMedType
2021-05-25 21:16:53,373 - literature logger - INFO - key meshTerms
2021-05-25 21:16:53,373 - literature logger - INFO - key allianceCategory
2021-05-25 21:16:53,373 - literature logger - INFO - key volume
2021-05-25 21:16:53,373 - literature logger - INFO - key authors
2021-05-25 21:16:53,373 - literature logger - INFO - key pages
2021-05-25 21:16:53,373 - literature logger - INFO - key publisher
2021-05-25 21:16:53,373 - literature logger - INFO - key resource
2021-05-25 21:16:53,373 - literature logger - INFO - key language
2021-05-25 21:16:53,373 - literature logger - INFO - key modResources
2021-05-25 21:16:53,373 - literature logger - INFO - key MODReferenceTypes
2021-05-25 21:16:53,373 - literature logger - INFO - key resourceAbbreviation
"""
# import requests
import argparse
import json
import logging
import logging.config
import re
from os import environ, listdir, path
from helper_file_processing import (
generate_cross_references_file,
load_ref_xref,
split_identifier,
)
from helper_post_to_api import (
generate_headers,
get_authentication_token,
process_api_request,
update_token,
)
log_file_path = path.join(path.dirname(path.abspath(__file__)), "../logging.conf")
logging.config.fileConfig(log_file_path)
logger = logging.getLogger("literature logger")
parser = argparse.ArgumentParser()
parser.add_argument("-a", "--authorization", action="store_true", help="update authorization token")
parser.add_argument("-f", "--file", action="store", help="take input from input file in full path")
parser.add_argument("-c", "--commandline", nargs="*", action="store", help="placeholder for process_single_pmid.py")
args = vars(parser.parse_args())
def camel_to_snake(name):
"""
:param name:
:return:
"""
return re.sub("([a-z0-9])([A-Z])", r"\1_\2", name).lower()
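# Example: camel_to_snake("tagName") == "tag_name"; used below to normalize tag names before posting.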
def post_references(input_file, check_file_flag): # noqa: C901
"""
:param input_file:
:param check_file_flag:
:return:
"""
api_port = environ.get("API_PORT")
# base_path = '/home/azurebrd/git/agr_literature_service_demo/src/xml_processing/'
base_path = environ.get("XML_PATH")
files_to_process = []
if input_file == "sanitized":
json_storage_path = base_path + "sanitized_reference_json/"
dir_list = listdir(json_storage_path)
for filename in dir_list:
# logger.info(filename)
if "REFERENCE_" in filename and ".REFERENCE_" not in filename:
# logger.info(filename)
files_to_process.append(json_storage_path + filename)
else:
files_to_process.append(input_file)
keys_to_remove = {"nlm", "primaryId", "modResources", "resourceAbbreviation"}
remap_keys = {"datePublished": "date_published", "dateArrivedInPubmed": "date_arrived_in_pubmed",
"dateLastModified": "date_last_modified", "crossReferences": "cross_references",
"issueName": "issue_name", "issueDate": "issue_date", "pubMedType": "pubmed_type",
"meshTerms": "mesh_terms", "allianceCategory": "category",
"MODReferenceType": "mod_reference_types", "MODReferenceTypes": "mod_reference_types",
"plainLanguageAbstract": "plain_language_abstract",
"pubmedAbstractLanguages": "pubmed_abstract_languages", "publicationStatus": "pubmed_publication_status"}
subkeys_to_remove = {}
remap_subkeys = {}
subkeys_to_remove["mesh_terms"] = {"referenceId"}
subkeys_to_remove["tags"] = {"referenceId"}
subkeys_to_remove["authors"] = {"referenceId", "firstinit", "firstInit", "crossReferences", "collectivename"}
remap_subkeys["mesh_terms"] = {}
remap_subkeys["mesh_terms"]["meshHeadingTerm"] = "heading_term"
remap_subkeys["mesh_terms"]["meshQualfierTerm"] = "qualifier_term"
remap_subkeys["mesh_terms"]["meshQualifierTerm"] = "qualifier_term"
remap_subkeys["mod_reference_types"] = {}
remap_subkeys["mod_reference_types"]["referenceType"] = "reference_type"
remap_subkeys["tags"] = {}
remap_subkeys["tags"]["tagName"] = "tag_name"
remap_subkeys["tags"]["tagSource"] = "tag_source"
remap_subkeys["cross_references"] = {}
remap_subkeys["cross_references"]["id"] = "curie"
remap_subkeys["authors"] = {}
remap_subkeys["authors"]["authorRank"] = "order"
remap_subkeys["authors"]["firstName"] = "first_name"
remap_subkeys["authors"]["lastName"] = "last_name"
remap_subkeys["authors"]["middleNames"] = "middle_names"
remap_subkeys["authors"]["firstname"] = "first_name"
remap_subkeys["authors"]["lastname"] = "last_name"
remap_subkeys["authors"]["middlenames"] = "middle_names"
remap_subkeys["authors"]["correspondingAuthor"] = "corresponding_author"
remap_subkeys["authors"]["firstAuthor"] = "first_author"
keys_found = set([])
# token = ''
# okta_file = base_path + 'okta_token'
# if path.isfile(okta_file):
# with open(okta_file, 'r') as okta_fh:
# token = okta_fh.read().replace('\n', '')
# okta_fh.close
# else:
# token = update_token()
token = get_authentication_token()
headers = generate_headers(token)
api_server = environ.get("API_SERVER", "localhost")
url = "http://" + api_server + ":" + api_port + "/reference/"
reference_primary_id_to_curie_file = base_path + "reference_primary_id_to_curie"
errors_in_posting_reference_file = base_path + "errors_in_posting_reference"
# previously loading from reference_primary_id_to_curie from past run of this script
# already_processed_primary_id = set()
# if check_file_flag == 'yes_file_check':
# if path.isfile(reference_primary_id_to_curie_file):
# with open(reference_primary_id_to_curie_file, 'r') as read_fh:
# for line in read_fh:
# line_data = line.split("\t")
# if line_data[0]:
# already_processed_primary_id.add(line_data[0].rstrip())
# read_fh.close
# this updates from resources in the database, and takes 4 seconds. if updating this script, comment it out after running it once
generate_cross_references_file("resource")
# this updates from references in the database, and takes 88 seconds. if updating this script, comment it out after running it once
generate_cross_references_file("reference")
xref_ref, ref_xref_valid, ref_xref_obsolete = load_ref_xref("resource")
resource_to_curie = {}
for prefix in xref_ref:
for identifier in xref_ref[prefix]:
xref_curie = prefix + ":" + identifier
resource_to_curie[xref_curie] = xref_ref[prefix][identifier]
# previously loading from resource_primary_id_to_curie from past run of post_resource_to_api
# resource_primary_id_to_curie_file = base_path + 'resource_primary_id_to_curie'
# if path.isfile(resource_primary_id_to_curie_file):
# with open(resource_primary_id_to_curie_file, 'r') as read_fh:
# for line in read_fh:
# line_data = line.rstrip().split("\t")
# if line_data[0]:
# resource_to_curie[line_data[0]] = line_data[1]
# read_fh.close
xref_ref, ref_xref_valid, ref_xref_obsolete = load_ref_xref("reference")
process_results = []
with open(reference_primary_id_to_curie_file, "a") as mapping_fh, open(errors_in_posting_reference_file, "a") as error_fh:
for filepath in sorted(files_to_process):
# only test one file for run
# if filepath != json_storage_path + 'REFERENCE_PUBMED_WB_1.json':
# continue
# logger.info("opening file\t%s", filepath)
f = open(filepath)
reference_data = json.load(f)
# counter = 0
for entry in reference_data:
# only take a couple of sample from each file for testing
# counter += 1
# if counter > 2:
# break
# output what we get from the file before converting for the API
# json_object = json.dumps(entry, indent=4)
# print(json_object)
primary_id = entry["primaryId"]
prefix, identifier, separator = split_identifier(primary_id)
if prefix in xref_ref:
if identifier in xref_ref[prefix]:
logger.info("%s\talready in", primary_id)
continue
# previously loading from reference_primary_id_to_curie from past run of this script
# if primary_id in already_processed_primary_id:
# continue
# if primary_id != 'PMID:9643811':
# continue
new_entry = {}
for key in entry:
keys_found.add(key)
# logger.info("key found\t%s\t%s", key, entry[key])
if key in remap_keys:
# logger.info("remap\t%s\t%s", key, remap_keys[key])
# this renames a key, but it can be accessed again in the for key loop, so sometimes a key is
# visited twice while another is skipped, so have to create a new dict to populate instead
# entry[remap_keys[key]] = entry.pop(key)
new_entry[remap_keys[key]] = entry[key]
elif key not in keys_to_remove:
new_entry[key] = entry[key]
for key in remap_subkeys:
if key in new_entry:
# logger.info("%s\t%s\t%s", primary_id, key, new_entry[key])
new_list = []
for sub_element in new_entry[key]:
new_sub_element = {}
for subkey in sub_element:
if subkey in remap_subkeys[key]:
new_sub_element[remap_subkeys[key][subkey]] = sub_element[subkey]
# logger.info('remap subkey\t%s\t%s', subkey, remap_subkeys[key][subkey])
elif key not in subkeys_to_remove or subkey not in subkeys_to_remove[key]:
new_sub_element[subkey] = sub_element[subkey]
new_list.append(new_sub_element)
new_entry[key] = new_list
# can only enter agr resource curie, if resource does not map to one, enter nothing
if "resource" in new_entry:
if new_entry["resource"] in resource_to_curie:
new_entry["resource"] = resource_to_curie[new_entry["resource"]]
else:
del new_entry["resource"]
if "category" in new_entry:
new_entry["category"] = (new_entry["category"].lower().replace(" ", "_"))
if "tags" in new_entry:
for sub_element in new_entry["tags"]:
if "tag_name" in sub_element:
sub_element["tag_name"] = camel_to_snake(sub_element["tag_name"])
if "authors" in new_entry:
for author in new_entry["authors"]:
if "orcid" in author:
# orcid field in json has just the identifier, need to add the prefix
if 'ORCID:' not in author['orcid']:
author['orcid'] = 'ORCID:' + author['orcid']
if 'cross_references' in new_entry:
new_entry['cross_references'] = list(filter(lambda x: "curie" in x and "NLM:" not in x['curie'] and "ISSN:" not in x["curie"], new_entry["cross_references"]))
# output what is sent to API after converting file data
# json_object = json.dumps(new_entry, indent=4)
# print(json_object)
# get rid of this if process_api_request works on a full run
# process_post_tuple = process_post(url, headers, new_entry, primary_id, mapping_fh, error_fh)
# headers = process_post_tuple[0]
# process_text = process_post_tuple[1]
# process_status_code = process_post_tuple[2]
# process_result = {}
# process_result['text'] = process_text
# process_result['status_code'] = process_status_code
# process_results.append(process_result)
api_response_tuple = process_api_request("POST", url, headers, new_entry, primary_id, None, None)
headers = api_response_tuple[0]
response_text = api_response_tuple[1]
response_status_code = api_response_tuple[2]
log_info = api_response_tuple[3]
response_dict = json.loads(response_text)
if log_info:
logger.info(log_info)
if response_status_code == 201:
response_dict = response_dict.replace('"', "")
logger.info("%s\t%s", primary_id, response_dict)
mapping_fh.write("%s\t%s\n" % (primary_id, response_dict))
else:
logger.info("api error %s primaryId %s message %s", str(response_status_code), primary_id, response_dict['detail'])
error_fh.write("api error %s primaryId %s message %s\n" % (str(response_status_code), primary_id, response_dict['detail']))
# if wanting to output keys in data for figuring out mapping
# for key in keys_found:
# logger.info("key %s", key)
mapping_fh.close()
error_fh.close()
return process_results
# get rid of this if process_api_request works on a full run
# def process_post(url, headers, new_entry, primary_id, mapping_fh, error_fh):
# """
#
# output the json getting posted to the API
# json_object = json.dumps(new_entry, indent = 4)
# print(json_object)
#
# :param url:
# :param headers:
# :param new_entry:
# :param primary_id:
# :param mapping_fh:
# :param error_fh:
# :return:
# """
#
# post_return = requests.post(url, headers=headers, json=new_entry)
# process_text = str(post_return.text)
# process_status_code = str(post_return.status_code)
# logger.info(primary_id + ' text ' + process_text)
# logger.info(primary_id + ' status_code ' + process_status_code)
#
# response_dict = {}
# try:
# response_dict = json.loads(post_return.text)
# except ValueError:
# logger.info("%s\tValueError", primary_id)
# error_fh.write("ERROR %s primaryId did not convert to json\n" % (primary_id))
# return headers, process_text, process_status_code
#
# if (post_return.status_code == 201):
# response_dict = response_dict.replace('"', '')
# logger.info("%s\t%s", primary_id, response_dict)
# mapping_fh.write("%s\t%s\n" % (primary_id, response_dict))
# elif (post_return.status_code == 401):
# logger.info("%s\texpired token", primary_id)
# mapping_fh.write("%s\t%s\n" % (primary_id, response_dict))
# token = update_token()
# headers = generate_headers(token)
# process_post_tuple = process_post(url, headers, new_entry, primary_id, mapping_fh, error_fh)
# headers = process_post_tuple[0]
# process_text = process_post_tuple[1]
# process_status_code = process_post_tuple[2]
# elif (post_return.status_code == 500):
# logger.info("%s\tFAILURE", primary_id)
# mapping_fh.write("%s\t%s\n" % (primary_id, response_dict))
# # if redoing a run and want to skip errors of data having already gone in
# # elif (post_return.status_code == 409):
# # continue
# else:
# logger.info("ERROR %s primaryId %s message %s", post_return.status_code, primary_id, response_dict['detail'])
# error_fh.write("ERROR %s primaryId %s message %s\n" % (post_return.status_code, primary_id, response_dict['detail']))
# return headers, process_text, process_status_code
if __name__ == "__main__":
"""
call main start function
"""
logger.info("Starting post_reference_to_api.py")
if args["authorization"]:
update_token()
elif args["commandline"]:
logger.info("placeholder for process_single_pmid.py")
elif args["file"]:
logger.info("placeholder for parse_pubmed_json_reference.py")
else:
post_references("sanitized", "yes_file_check")
logger.info("ending post_reference_to_api.py")
# pipenv run python post_reference_to_api.py
|
py | 1a47e97a38bcd35327c5d36bf7714784e8e6bc41 | # Generated by Django 2.2 on 2019-05-04 05:57
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('questions', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='answers',
name='answers_text',
),
migrations.AddField(
model_name='answers',
name='user',
field=models.ForeignKey(default=-1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
preserve_default=False,
),
migrations.AddField(
model_name='question',
name='number',
field=models.IntegerField(default=0),
),
]
|
py | 1a47e99595eaa348a07bfbcbbe6cd199a67d6e8a | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.13.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1ReplicaSetCondition(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'last_transition_time': 'datetime',
'message': 'str',
'reason': 'str',
'status': 'str',
'type': 'str'
}
attribute_map = {
'last_transition_time': 'lastTransitionTime',
'message': 'message',
'reason': 'reason',
'status': 'status',
'type': 'type'
}
def __init__(self, last_transition_time=None, message=None, reason=None, status=None, type=None):
"""
V1ReplicaSetCondition - a model defined in Swagger
"""
self._last_transition_time = None
self._message = None
self._reason = None
self._status = None
self._type = None
self.discriminator = None
if last_transition_time is not None:
self.last_transition_time = last_transition_time
if message is not None:
self.message = message
if reason is not None:
self.reason = reason
self.status = status
self.type = type
@property
def last_transition_time(self):
"""
Gets the last_transition_time of this V1ReplicaSetCondition.
The last time the condition transitioned from one status to another.
:return: The last_transition_time of this V1ReplicaSetCondition.
:rtype: datetime
"""
return self._last_transition_time
@last_transition_time.setter
def last_transition_time(self, last_transition_time):
"""
Sets the last_transition_time of this V1ReplicaSetCondition.
The last time the condition transitioned from one status to another.
:param last_transition_time: The last_transition_time of this V1ReplicaSetCondition.
:type: datetime
"""
self._last_transition_time = last_transition_time
@property
def message(self):
"""
Gets the message of this V1ReplicaSetCondition.
A human readable message indicating details about the transition.
:return: The message of this V1ReplicaSetCondition.
:rtype: str
"""
return self._message
@message.setter
def message(self, message):
"""
Sets the message of this V1ReplicaSetCondition.
A human readable message indicating details about the transition.
:param message: The message of this V1ReplicaSetCondition.
:type: str
"""
self._message = message
@property
def reason(self):
"""
Gets the reason of this V1ReplicaSetCondition.
The reason for the condition's last transition.
:return: The reason of this V1ReplicaSetCondition.
:rtype: str
"""
return self._reason
@reason.setter
def reason(self, reason):
"""
Sets the reason of this V1ReplicaSetCondition.
The reason for the condition's last transition.
:param reason: The reason of this V1ReplicaSetCondition.
:type: str
"""
self._reason = reason
@property
def status(self):
"""
Gets the status of this V1ReplicaSetCondition.
Status of the condition, one of True, False, Unknown.
:return: The status of this V1ReplicaSetCondition.
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""
Sets the status of this V1ReplicaSetCondition.
Status of the condition, one of True, False, Unknown.
:param status: The status of this V1ReplicaSetCondition.
:type: str
"""
if status is None:
raise ValueError("Invalid value for `status`, must not be `None`")
self._status = status
@property
def type(self):
"""
Gets the type of this V1ReplicaSetCondition.
Type of replica set condition.
:return: The type of this V1ReplicaSetCondition.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""
Sets the type of this V1ReplicaSetCondition.
Type of replica set condition.
:param type: The type of this V1ReplicaSetCondition.
:type: str
"""
if type is None:
raise ValueError("Invalid value for `type`, must not be `None`")
self._type = type
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1ReplicaSetCondition):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
py | 1a47e9c0513cff6a6b7125a4c8cde591cc70f48f | # -*- coding: utf-8 -*-
# ____ __ __ ___ _ _ _
# |_ /___ / _|/ _|/ __| (_)___ _ _| |_
# / // -_) _| _| (__| | / -_) ' \ _|
# /___\___|_| |_| \___|_|_\___|_||_\__|
#
"""Zeff Cloud training status."""
__author__ = """Lance Finn Helsten <[email protected]>"""
__copyright__ = """Copyright © 2019, Ziff, Inc. — All Rights Reserved"""
__license__ = """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import enum
import datetime
import logging
import json
class TrainingStatus(enum.Enum):
"""Model training status."""
unknown = "UNKNOWN"
queued = "QUEUED"
started = "STARTED"
progress = "PCT_COMPLETE"
complete = "COMPLETE"
def __str__(self):
"""Return a user appropriate name of this status."""
return self.name
def __repr__(self):
"""Return a representation of this status."""
return "<%s.%s>" % (self.__class__.__name__, self.name)
class TrainingSessionInfo:
"""Information about the current training session."""
def __init__(self, status_json):
"""Create a new training information.
:param status_json: The status JSON returned from a train
status request.
"""
self.__data = status_json
logging.debug("Training Session JSON: \n%s", self.__data_str())
def __data_str(self):
"""Return the data as a JSON formatted string."""
return json.dumps(self.__data, indent="\t", sort_keys=True)
@property
def status(self) -> TrainingStatus:
"""Return state of current training session."""
value = self.__data["status"]
return TrainingStatus(value if value is not None else "UNKNOWN")
@property
def progress(self) -> float:
"""Return progress, [0.0, 1.0], of current training session."""
value = self.__data["percentComplete"]
return float(value) if value is not None else 0.0
@property
def model_version(self) -> str:
"""Return model version of the current training session."""
value = self.__data["modelVersion"]
return str(value) if value is not None else "unknown"
@property
def model_location(self) -> str:
"""Return the URL to the model."""
value = self.__data["modelLocation"]
return str(value) if value is not None else "unknown"
@property
def created_timestamp(self) -> datetime.datetime:
"""Return the timestamp when this training session was created."""
value = self.__data["createdAt"]
if value is not None:
ret = datetime.datetime.fromisoformat(value)
else:
ret = datetime.datetime.min
return ret
@property
def updated_timestamp(self) -> datetime.datetime:
"""Return timestamp when current session status was last updated."""
value = self.__data["updatedAt"]
if value is not None:
ret = datetime.datetime.fromisoformat(value)
else:
ret = self.created_timestamp
return ret
|
py | 1a47ea12ebcb4ea268399d2c77e68a9e78da34aa | """
This module calls the setup_logging function and configures the root logger.
All loggers created afterwards inherit the YAML-defined configuration from this root logger.
Python treats a directory containing an __init__.py file as a package; the file runs implicitly on import.
"""
import logging
from logconfig.logconfig import setup_logging
from pathlib import Path
# make sure logfiles/ directory exists
p = Path.cwd() / "src/logfiles"
if not p.exists():
Path.mkdir(p)
# setuplogging function called from logconfig.py
setup_logging()
# Set the root logger level so module loggers inherit it
logging.getLogger().setLevel(logging.DEBUG)
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
logger.info("Root logger setup successful") |
py | 1a47ea388bcd9a4d8436f08d838311e8af763ab5 | import numpy as np
from environments.DeterministicMDP import DeterministicMDP
from spaces.DiscreteSpace import DiscreteSpace
class SharedLearningChain(DeterministicMDP):
def __init__(self, name, num_states, N):
# create the state and action space
self.inner_size = N
state_space = DiscreteSpace(N)
action_space = DiscreteSpace(3)
# one maps to 2
starting_state = 1
# specify the transition function
transition_func = np.zeros((N, 3), dtype=np.int32)
# iterate over and fill with the transitions
for i in range(N):
transition_func[i, 0] = i - 1
transition_func[i, 1] = i + 1
transition_func[i, 2] = 0
transition_func[0, 0] = 0
transition_func[N - 1, 1] = N - 1
transition_func[N - 1, 2] = N - 1
# now we define the reward function
reward_function = np.zeros((N, 3), dtype=np.float64)
for i in range(N - 1):
reward_function[i, 2] = -0.1
reward_function[0, 0] = -0.1
reward_function[0, 2] = -0.1
reward_function[1, 0] = -0.1
reward_function[N-2, 1] = 1
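# Net effect of the rewards above: moving right (action 1) from state N-2 yields the only
# +1 reward; resetting via action 2 from any state except the last costs -0.1, as does
# taking action 0 in states 0 or 1 (bumping against or falling back to the start).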
super().__init__(name, num_states, action_space, state_space, transition_func, reward_function, starting_state)
def get_name(self):
return "shared_chain"
|
py | 1a47ea7083b53be9b786e017eedf96ce5feb7c44 | #!/usr/bin/env python3
# Copyright (c) 2013-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Generate seeds.txt from Pieter's DNS seeder
#
import re
import sys
import dns.resolver
import collections
NSEEDS=512
MAX_SEEDS_PER_ASN=2
MIN_BLOCKS = 2000000
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
PATTERN_AGENT = re.compile(
r"^/Feathercoin:("
r"0.13.(0|1|2|3|99)|"
r"0.16.(0|1|2|3|4|99)|"
r"0.17.(0|1|99)|"
r"0.18.(0|1|2|3|4|99)|"
r")")
def parseline(line):
sline = line.split()
if len(sline) < 11:
return None
m = PATTERN_IPV4.match(sline[0])
sortkey = None
ip = None
if m is None:
m = PATTERN_IPV6.match(sline[0])
if m is None:
m = PATTERN_ONION.match(sline[0])
if m is None:
return None
else:
net = 'onion'
ipstr = sortkey = m.group(1)
port = int(m.group(2))
else:
net = 'ipv6'
if m.group(1) in ['::']: # Not interested in localhost
return None
ipstr = m.group(1)
sortkey = ipstr # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
port = int(m.group(2))
else:
# Do IPv4 sanity check
ip = 0
for i in range(0,4):
if int(m.group(i+2)) < 0 or int(m.group(i+2)) > 255:
return None
ip = ip + (int(m.group(i+2)) << (8*(3-i)))
if ip == 0:
return None
net = 'ipv4'
sortkey = ip
ipstr = m.group(1)
port = int(m.group(6))
# Skip bad results.
if sline[1] == 0:
return None
# Extract uptime %.
uptime30 = float(sline[7][:-1])
# Extract Unix timestamp of last success.
lastsuccess = int(sline[2])
# Extract protocol version.
version = int(sline[10])
# Extract user agent.
agent = sline[11][1:-1]
# Extract service flags.
service = int(sline[9], 16)
# Extract blocks.
blocks = int(sline[8])
# Construct result.
return {
'net': net,
'ip': ipstr,
'port': port,
'ipnum': ip,
'uptime': uptime30,
'lastsuccess': lastsuccess,
'version': version,
'agent': agent,
'service': service,
'blocks': blocks,
'sortkey': sortkey,
}
def dedup(ips):
'''deduplicate by address,port'''
d = {}
for ip in ips:
d[ip['ip'],ip['port']] = ip
return list(d.values())
def filtermultiport(ips):
'''Filter out hosts with more nodes per IP'''
hist = collections.defaultdict(list)
for ip in ips:
hist[ip['sortkey']].append(ip)
return [value[0] for (key,value) in list(hist.items()) if len(value)==1]
def lookup_asn(net, ip):
'''
Look up the ASN for an IP (4 or 6) address by querying cymru.com, or None
if it could not be found.
'''
try:
if net == 'ipv4':
ipaddr = ip
prefix = '.origin'
else: # http://www.team-cymru.com/IP-ASN-mapping.html
res = str() # 2001:4860:b002:23::68
for nb in ip.split(':')[:4]: # pick the first 4 nibbles
for c in nb.zfill(4): # right padded with '0'
res += c + '.' # 2001 4860 b002 0023
ipaddr = res.rstrip('.') # 2.0.0.1.4.8.6.0.b.0.0.2.0.0.2.3
prefix = '.origin6'
asn = int([x.to_text() for x in dns.resolver.query('.'.join(
reversed(ipaddr.split('.'))) + prefix + '.asn.cymru.com',
'TXT').response.answer][0].split('\"')[1].split(' ')[0])
return asn
except Exception:
sys.stderr.write('ERR: Could not resolve ASN for "' + ip + '"\n')
return None
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(ips, max_per_asn, max_per_net):
# Sift out ips by type
ips_ipv46 = [ip for ip in ips if ip['net'] in ['ipv4', 'ipv6']]
ips_onion = [ip for ip in ips if ip['net'] == 'onion']
# Filter IPv46 by ASN, and limit to max_per_net per network
result = []
net_count = collections.defaultdict(int)
asn_count = collections.defaultdict(int)
for ip in ips_ipv46:
if net_count[ip['net']] == max_per_net:
continue
asn = lookup_asn(ip['net'], ip['ip'])
if asn is None or asn_count[asn] == max_per_asn:
continue
asn_count[asn] += 1
net_count[ip['net']] += 1
result.append(ip)
# Add back Onions (up to max_per_net)
result.extend(ips_onion[0:max_per_net])
return result
def ip_stats(ips):
hist = collections.defaultdict(int)
for ip in ips:
if ip is not None:
hist[ip['net']] += 1
return '%6d %6d %6d' % (hist['ipv4'], hist['ipv6'], hist['onion'])
def main():
lines = sys.stdin.readlines()
ips = [parseline(line) for line in lines]
print('\x1b[7m IPv4 IPv6 Onion Pass \x1b[0m', file=sys.stderr)
print('%s Initial' % (ip_stats(ips)), file=sys.stderr)
# Skip entries with invalid address.
ips = [ip for ip in ips if ip is not None]
print('%s Skip entries with invalid address' % (ip_stats(ips)), file=sys.stderr)
# Skip duplicates (in case multiple seeds files were concatenated)
ips = dedup(ips)
print('%s After removing duplicates' % (ip_stats(ips)), file=sys.stderr)
# Enforce minimal number of blocks.
ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS]
print('%s Enforce minimal number of blocks' % (ip_stats(ips)), file=sys.stderr)
# Require service bit 1.
ips = [ip for ip in ips if (ip['service'] & 1) == 1]
print('%s Require service bit 1' % (ip_stats(ips)), file=sys.stderr)
# Require at least 50% 30-day uptime for clearnet, 10% for onion.
req_uptime = {
'ipv4': 50,
'ipv6': 50,
'onion': 10,
}
ips = [ip for ip in ips if ip['uptime'] > req_uptime[ip['net']]]
print('%s Require minimum uptime' % (ip_stats(ips)), file=sys.stderr)
# Require a known and recent user agent.
ips = [ip for ip in ips if PATTERN_AGENT.match(ip['agent'])]
print('%s Require a known and recent user agent' % (ip_stats(ips)), file=sys.stderr)
# Sort by availability (and use last success as tie breaker)
ips.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
# Filter out hosts with multiple bitcoin ports, these are likely abusive
ips = filtermultiport(ips)
print('%s Filter out hosts with multiple bitcoin ports' % (ip_stats(ips)), file=sys.stderr)
# Look up ASNs and limit results, both per ASN and globally.
ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
print('%s Look up ASNs and limit results per ASN and per net' % (ip_stats(ips)), file=sys.stderr)
# Sort the results by IP address (for deterministic output).
ips.sort(key=lambda x: (x['net'], x['sortkey']))
for ip in ips:
if ip['net'] == 'ipv6':
print('[%s]:%i' % (ip['ip'], ip['port']))
else:
print('%s:%i' % (ip['ip'], ip['port']))
if __name__ == '__main__':
main()
|
py | 1a47eb03c49a573f6b9b11e1cc07c441de069388 | import ipaddress
from sqlalchemy import types
class IPv4Network(types.TypeDecorator):
impl = types.TEXT
def process_bind_param(self, value, dialect):
return str(value) if value else None
def process_result_value(self, value, dialect):
return ipaddress.IPv4Network(value) if value else None
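# Usage sketch (hypothetical model, not part of this module): the decorator stores the value
# as TEXT and rebuilds an ipaddress.IPv4Network when loading rows, e.g.
#
# from sqlalchemy import Column, Integer
# class Subnet(Base): # Base assumed to be a declarative base defined elsewhere
# __tablename__ = "subnets"
# id = Column(Integer, primary_key=True)
# network = Column(IPv4Network()) # accepts/returns ipaddress.IPv4Network("10.0.0.0/24")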
|
py | 1a47eb351ca57e37076b5635ffe81c511fc4e12f | # -*- coding: utf-8 -*-
"""
Created on Thu Jun 9 08:24:52 2016
@author: WELG
"""
class Animal(object):
def __init__(self, age):
self.age = age
self.name = None
def get_age(self):
return self.age
def get_name(self):
return self.name
def set_age(self, newage):
self.age = newage
def set_name(self, newname=""):
self.name = newname
def __str__(self):
return "animal:" + str(self.name) + ":" + str(self.age)
class Cat(Animal):
def speak(self):
print("мяу”)
def __str__(self):
return "cat:" + str(self.name) + ":" + str(self.age)
class Person(Animal):
def __init__(self, name, age):
Animal.__init__(self, age)
Animal.set_name(self, name)
self.friends = []
def get_friends(self):
return self.friends
def add_friend(self, fname):
if fname not in self.friends:
self.friends.append(fname)
def speak(self):
print("hello”)
def age_diff(self, other):
# alternate way: diff = self.age - other.age
diff = self.get_age() - other.get_age()
if self.age > other.age:
print(self.name, "is", diff, "years older than", other.name)
else:
print(self.name, "is", -diff, "years younger than", other.name)
def __str__(self):
return "person:" + str(self.name) + ":" + str(self.age)
import random
class Student(Person):
def __init__(self, name, age, major=None):
Person.__init__(self, name, age)
self.major = major
def change_major(self, major):
self.major = major
def speak(self):
r = random.random()
if r < 0.25:
print("i have homework”)
elif 0.25 <= r < 0.5:
print("i need sleep”)
elif 0.5 <= r < 0.75:
print("i should eat”)
else:
print("i am watching tv”)
def __str__(self):
return "student:" + str(self.name) + ":" + str(self.age) + ":" + str(self.major)
class Rabbit(Animal):
tag = 1
def __init__(self, age, parent1=None, parent2=None):
Animal.__init__(self, age)
self.parent1 = parent1
self.parent2 = parent2
self.rid = Rabbit.tag
Rabbit.tag += 1
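# Example usage of the classes above (illustrative values): rabbit ids come from the shared
# class counter, so
# r1, r2 = Rabbit(3), Rabbit(4) # r1.rid == 1, r2.rid == 2
# p, s = Person("eric", 45), Student("alice", 20, "cs")
# p.age_diff(s) # prints: eric is 25 years older than alice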
|
py | 1a47eb381709dbf27a3da5710f669460a210e1f7 | from django.shortcuts import render, get_object_or_404, redirect
from django.views.generic import DetailView
from django.utils.translation import gettext as _
from froide.account.preferences import get_preferences_for_user
from froide.helper.utils import render_403
from ..models import FoiRequest, FoiEvent, FoiAttachment
from ..forms.preferences import request_page_tour_pref, message_received_tour_pref
from ..auth import can_read_foirequest, can_write_foirequest, check_foirequest_auth_code
def shortlink(request, obj_id, url_path=""):
foirequest = get_object_or_404(FoiRequest, pk=obj_id)
if not can_read_foirequest(foirequest, request):
return render_403(request)
url = foirequest.get_absolute_url()
if url_path:
url_path = url_path[1:]
return redirect(url + url_path)
def auth(request, obj_id, code):
foirequest = get_object_or_404(FoiRequest, pk=obj_id)
if check_foirequest_auth_code(foirequest, code):
request.session["pb_auth"] = code
return redirect(foirequest)
if can_read_foirequest(foirequest, request):
return redirect(foirequest)
return render_403(request)
def can_see_attachment(att, can_write):
if att.approved:
return True
if att.redacted_id and not can_write:
return False
if att.converted_id and not can_write:
return False
return True
def show_foirequest(
request, obj, template_name="foirequest/alpha/show.html", context=None, status=200
):
if context is None:
context = {}
context.update(get_foirequest_context(request, obj))
return render(request, template_name, context, status=status)
class FoiRequestView(DetailView):
queryset = FoiRequest.objects.select_related(
"public_body",
"jurisdiction",
"user",
"law",
).prefetch_related(
"tags",
)
template_name = "foirequest/alpha/show.html"
def get(self, request, *args, **kwargs):
self.object = self.get_object()
if not can_read_foirequest(self.object, self.request):
return render_403(self.request)
context = self.get_context_data(object=self.object)
return self.render_to_response(context)
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
obj = self.object
request = self.request
context.update(get_foirequest_context(request, obj))
return context
def get_foirequest_context(request, obj):
context = {}
all_attachments = FoiAttachment.objects.select_related("redacted").filter(
belongs_to__request=obj
)
can_write = can_write_foirequest(obj, request)
messages = obj.get_messages(with_tags=can_write)
for message in messages:
message.request = obj
message.all_attachments = [
a for a in all_attachments if a.belongs_to_id == message.id
]
# Preempt attribute access
for att in message.all_attachments:
att.belongs_to = message
message.listed_attachments = [
a
for a in all_attachments
if a.belongs_to_id == message.id and can_see_attachment(a, can_write)
]
message.hidden_attachments = [
a for a in message.listed_attachments if a.is_irrelevant
]
message.can_edit_attachments = bool(
[a for a in message.listed_attachments if a.can_edit]
)
message.approved_attachments = [
a
for a in message.listed_attachments
if a.approved and a not in message.hidden_attachments
]
message.unapproved_attachments = [
a
for a in message.listed_attachments
if not a.approved and a not in message.hidden_attachments
]
events = (
FoiEvent.objects.filter(request=obj)
.select_related("user", "request", "public_body")
.order_by("timestamp")
)
event_count = len(events)
last_index = event_count
for message in reversed(obj.messages):
message.events = [
ev for ev in events[:last_index] if ev.timestamp >= message.timestamp
]
last_index = last_index - len(message.events)
# TODO: remove active_tab
active_tab = "info"
if can_write:
active_tab = get_active_tab(obj, context)
context.update({"object": obj, "active_tab": active_tab, "preferences": {}})
if can_write:
preferences = get_preferences_for_user(
request.user, [request_page_tour_pref, message_received_tour_pref]
)
context.update({"preferences": preferences})
if (
obj.reply_received()
and not preferences["foirequest_messagereceived_tour"].value
):
context.update(
{"foirequest_messagereceived_tour": get_messagereceived_tour_data()}
)
elif not preferences["foirequest_requestpage_tour"].value:
context.update({"foirequest_requestpage_tour": get_requestpage_tour_data()})
return context
def get_active_tab(obj, context):
if "postal_reply_form" in context:
return "add-postal-reply"
elif "postal_message_form" in context:
return "add-postal-message"
elif "status_form" in context:
return "set-status"
elif "send_message_form" in context:
return "write-message"
elif "escalation_form" in context:
return "escalate"
if "active_tab" in context:
return context["active_tab"]
if obj.awaits_classification():
return "set-status"
elif obj.is_overdue() and obj.awaits_response():
return "write-message"
return "info"
def get_base_tour_data():
return {
"i18n": {
"done": _("👋 Goodbye!"),
"next": _("Next"),
"previous": _("Previous"),
"close": _("Close"),
"start": _("Next"),
}
}
def get_requestpage_tour_data():
return {
**get_base_tour_data(),
"steps": [
{
"element": "#infobox .info-box__header",
"popover": {
"title": _("Status of request"),
"description": _(
"""Here you can see the status your request. Below you can update the status of your request when you receive a response."""
),
},
},
{
"element": "#due-date",
"popover": {
"title": _("Deadline"),
"description": _(
"""This is the deadline for your request. If the public body has not replied by then, we will let you know, so you can send a reminder. You can also adjust the date if necessary."""
),
},
},
{
"element": "#share-links",
"popover": {
"title": _("Share links"),
"description": _(
"""Here are some quick links for you to share your request with others."""
),
},
},
{
"element": "#download-links",
"popover": {
"title": _("Download"),
"description": _(
"""You can download all messages of your request. The RSS link allows you to subscribe to the request in a feed reader."""
),
},
},
{
"element": "#correspondence-tab",
"popover": {
"title": _("Messages in this request"),
"description": _(
"""Below you find all messages that you sent and received in this request. When you receive a response it appears at the end and we let you know about it via email."""
),
},
},
{
"element": "#correspondence .alpha-message .alpha-message__head",
"popover": {
"title": _("Details of your message"),
"description": _(
"""This is your message. There's more information e.g. about the delivery status of your message when you click on the “Details” link."""
),
},
"position": "top-center",
},
{
"element": ".write-message-top-link",
"popover": {
"title": _("Need to reply or send a reminder?"),
"description": _(
"""This button takes you to the send message form."""
),
},
},
{
"element": ".upload-post-link",
"popover": {
"title": _("Got postal mail?"),
"description": _(
"""When you receive a letter, you can click this button and upload a scan or photo of the letter. You can redact parts of the letter with our tool before publishing it."""
),
},
},
{
"element": ".request-title",
"popover": {
"title": _("The end."),
"description": _(
"""That concludes this tour! We'll let you know via email if anything around your request changes."""
),
"position": "top-center",
},
},
],
}
def get_messagereceived_tour_data():
return {
**get_base_tour_data(),
"steps": [
{
"element": "#infobox .info-box__header",
"popover": {
"title": _("Status of request"),
"description": _(
"""After you read your replies you need to update the status of your request here below."""
),
},
},
{
"element": "#correspondence .alpha-message",
"popover": {
"title": _("Message toolbar"),
"description": _(
"""The “Redact” button allows you to redact the text of a message in case sensitive information is accidentally not automatically removed. The “Problem?” allows you to notify our moderation team, if you have a problem with a message."""
),
"position": "bottom-center",
},
},
{
"element": ".reply-form__wrap",
"popover": {
"title": _("Reply"),
"description": _(
"""At the bottom of the page you can send replies to the public body or start a mediation process with the mediation authority."""
),
"position": "top-center",
},
},
{
"element": "#request-summary",
"popover": {
"title": _("Got the information you asked for?"),
"description": _(
"""When you received documents, you can write a summary of what you have learned."""
),
},
},
{
"element": ".request-title",
"popover": {
"title": _("The end."),
"description": _("""That concludes this tour!"""),
"position": "top-center",
},
},
],
}
|
py | 1a47ebc41505825de94169038355bb9fc9dc6187 | """
Django settings for panelintegration project.
Generated by 'django-admin startproject' using Django 2.0.6.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
STATIC_DIR = os.path.join(BASE_DIR, 'static')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'm-2u7f2j_vs@7s*qc4(&%r+(ny#-9626zhsmd&_k*+xm&*ydth'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'panel_randomizer_app.apps.PanelintegrationAppConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'panel_integration.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'panel_integration.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LOCALE_PATHS = (
os.path.join(BASE_DIR, 'locale'),
)
LANGUAGE_CODE = 'en-GB'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
# settings for this particular app
APP_CONFIG = {
'BASE_URL': 'test',
'AES_SECRET': 'VD2AZZoXjRVQzF8K',
'HMAC_SECRET': 'UyCtJYf1DW2xrjsY',
'TEST_KEY':'123'
}
|
py | 1a47edb7f573b625277f3ff3bb47cd0eb7b49dbf | # for django < 3.2
default_app_config = "djstripe.apps.DjstripeAppConfig"
|
py | 1a47ee4a78ea33df371990a0fb2c0db80389a1a0 | """OpenFlow 1.3 OXM match fields.
Flow's match is very different from OF 1.0. Instead of always having all
fields, there's a variable list of match fields and each one is an Openflow
eXtended Match Type-Length-Value (OXM TLV) element.
This module provides high-level Python classes for OXM TLV fields in order to
make the OF 1.3 match fields easy to use and to be coded.
"""
from abc import ABC, abstractmethod
from pyof.foundation.basic_types import HWAddress, IPAddress
from pyof.v0x04.common.flow_match import OxmOfbMatchField, OxmTLV, VlanId
class MatchField(ABC):
"""Base class for match fields. Abstract OXM TLVs of python-openflow.
Just extend this class and you will be forced to define the required
low-level attributes and methods below:
* "name" attribute (field name to be displayed in JSON);
* "oxm_field" attribute (``OxmOfbMatchField`` enum);
* Method to return a pyof OxmTLV;
* Method to create an instance from an OxmTLV.
"""
def __init__(self, value):
"""Define match field value."""
self.value = value
@property
@classmethod
@abstractmethod
def name(cls):
"""Define a name to be displayed in JSON.
It can be overridden just by a class attribute.
"""
pass
@property
@classmethod
@abstractmethod
def oxm_field(cls):
"""Define this subclass ``OxmOfbMatchField`` value.
It can be overridden just by a class attribute.
"""
pass
@abstractmethod
def as_of_tlv(self):
"""Return a pyof OXM TLV instance."""
pass
@classmethod
@abstractmethod
def from_of_tlv(cls, tlv):
"""Return an instance from a pyof OXM TLV."""
pass
def __eq__(self, other):
"""Two objects are equal if their values are the same.
The oxm_field equality is checked indirectly when comparing whether
the objects are instances of the same class.
"""
return isinstance(other, self.__class__) and other.value == self.value
class MatchDLVLAN(MatchField):
"""Match for datalink VLAN ID."""
name = 'dl_vlan'
oxm_field = OxmOfbMatchField.OFPXMT_OFB_VLAN_VID
def as_of_tlv(self):
"""Return a pyof OXM TLV instance."""
value = self.value | VlanId.OFPVID_PRESENT
value_bytes = value.to_bytes(2, 'big')
return OxmTLV(oxm_field=self.oxm_field, oxm_value=value_bytes)
@classmethod
def from_of_tlv(cls, tlv):
"""Return an instance from a pyof OXM TLV."""
vlan_id = int.from_bytes(tlv.oxm_value, 'big') & 4095
return cls(vlan_id)
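    # Illustrative round trip (added sketch, assuming the pyof enums imported
    # above, where OFPVID_PRESENT is the OF 1.3 value 0x1000):
    # MatchDLVLAN(100).as_of_tlv() packs 100 | OFPVID_PRESENT into two bytes,
    # and MatchDLVLAN.from_of_tlv() masks with 4095 (0x0fff) to recover the
    # VLAN ID 100 again.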
class MatchDLVLANPCP(MatchField):
"""Match for VLAN Priority Code Point."""
name = 'dl_vlan_pcp'
oxm_field = OxmOfbMatchField.OFPXMT_OFB_VLAN_PCP
def as_of_tlv(self):
"""Return a pyof OXM TLV instance."""
value_bytes = self.value.to_bytes(1, 'big')
return OxmTLV(oxm_field=self.oxm_field, oxm_value=value_bytes)
@classmethod
def from_of_tlv(cls, tlv):
"""Return an instance from a pyof OXM TLV."""
priority = int.from_bytes(tlv.oxm_value, 'big')
return cls(priority)
class MatchDLSrc(MatchField):
"""Match for datalink source."""
name = 'dl_src'
oxm_field = OxmOfbMatchField.OFPXMT_OFB_ETH_SRC
def as_of_tlv(self):
"""Return a pyof OXM TLV instance."""
value_bytes = HWAddress(self.value).pack()
return OxmTLV(oxm_field=self.oxm_field, oxm_value=value_bytes)
@classmethod
def from_of_tlv(cls, tlv):
"""Return an instance from a pyof OXM TLV."""
hw_address = HWAddress()
hw_address.unpack(tlv.oxm_value)
addr_str = str(hw_address)
return cls(addr_str)
class MatchDLDst(MatchField):
"""Match for dataling destination."""
name = 'dl_dst'
oxm_field = OxmOfbMatchField.OFPXMT_OFB_ETH_DST
def as_of_tlv(self):
"""Return a pyof OXM TLV instance."""
value_bytes = HWAddress(self.value).pack()
return OxmTLV(oxm_field=self.oxm_field, oxm_value=value_bytes)
@classmethod
def from_of_tlv(cls, tlv):
"""Return an instance from a pyof OXM TLV."""
hw_address = HWAddress()
hw_address.unpack(tlv.oxm_value)
addr_str = str(hw_address)
return cls(addr_str)
class MatchDLType(MatchField):
"""Match for datalink type."""
name = 'dl_type'
oxm_field = OxmOfbMatchField.OFPXMT_OFB_ETH_TYPE
def as_of_tlv(self):
"""Return a pyof OXM TLV instance."""
value_bytes = self.value.to_bytes(2, 'big')
return OxmTLV(oxm_field=self.oxm_field, oxm_value=value_bytes)
@classmethod
def from_of_tlv(cls, tlv):
"""Return an instance from a pyof OXM TLV."""
        ethertype = int.from_bytes(tlv.oxm_value, 'big')
        return cls(ethertype)
class MatchNwSrc(MatchField):
"""Match for IPV4 source."""
name = 'nw_src'
oxm_field = OxmOfbMatchField.OFPXMT_OFB_IPV4_SRC
def as_of_tlv(self):
"""Return a pyof OXM TLV instance."""
value_bytes = IPAddress(self.value).pack()
return OxmTLV(oxm_field=self.oxm_field, oxm_value=value_bytes)
@classmethod
def from_of_tlv(cls, tlv):
"""Return an instance from a pyof OXM TLV."""
ip_address = IPAddress()
ip_address.unpack(tlv.oxm_value)
ip_str = str(ip_address)
return cls(ip_str)
class MatchNwDst(MatchField):
"""Match for IPV4 destination."""
name = 'nw_dst'
oxm_field = OxmOfbMatchField.OFPXMT_OFB_IPV4_DST
def as_of_tlv(self):
"""Return a pyof OXM TLV instance."""
value_bytes = IPAddress(self.value).pack()
return OxmTLV(oxm_field=self.oxm_field, oxm_value=value_bytes)
@classmethod
def from_of_tlv(cls, tlv):
"""Return an instance from a pyof OXM TLV."""
ip_address = IPAddress()
ip_address.unpack(tlv.oxm_value)
ip_str = str(ip_address)
return cls(ip_str)
class MatchNwProto(MatchField):
"""Match for IP protocol."""
name = 'nw_proto'
oxm_field = OxmOfbMatchField.OFPXMT_OFB_IP_PROTO
def as_of_tlv(self):
"""Return a pyof OXM TLV instance."""
value_bytes = self.value.to_bytes(1, 'big')
return OxmTLV(oxm_field=self.oxm_field, oxm_value=value_bytes)
@classmethod
def from_of_tlv(cls, tlv):
"""Return an instance from a pyof OXM TLV."""
        proto = int.from_bytes(tlv.oxm_value, 'big')
        return cls(proto)
class MatchInPort(MatchField):
"""Match for input port."""
name = 'in_port'
oxm_field = OxmOfbMatchField.OFPXMT_OFB_IN_PORT
def as_of_tlv(self):
"""Return a pyof OXM TLV instance."""
value_bytes = self.value.to_bytes(4, 'big')
return OxmTLV(oxm_field=self.oxm_field, oxm_value=value_bytes)
@classmethod
def from_of_tlv(cls, tlv):
"""Return an instance from a pyof OXM TLV."""
port = int.from_bytes(tlv.oxm_value, 'big')
return cls(port)
class MatchTCPSrc(MatchField):
"""Match for TCP source."""
name = 'tp_src'
oxm_field = OxmOfbMatchField.OFPXMT_OFB_TCP_SRC
def as_of_tlv(self):
"""Return a pyof OXM TLV instance."""
value_bytes = self.value.to_bytes(2, 'big')
return OxmTLV(oxm_field=self.oxm_field, oxm_value=value_bytes)
@classmethod
def from_of_tlv(cls, tlv):
"""Return an instance from a pyof OXM TLV."""
port = int.from_bytes(tlv.oxm_value, 'big')
return cls(port)
class MatchTCPDst(MatchField):
"""Match for TCP destination."""
name = 'tp_dst'
oxm_field = OxmOfbMatchField.OFPXMT_OFB_TCP_DST
def as_of_tlv(self):
"""Return a pyof OXM TLV instance."""
value_bytes = self.value.to_bytes(2, 'big')
return OxmTLV(oxm_field=self.oxm_field, oxm_value=value_bytes)
@classmethod
def from_of_tlv(cls, tlv):
"""Return an instance from a pyof OXM TLV."""
port = int.from_bytes(tlv.oxm_value, 'big')
return cls(port)
class MatchFieldFactory(ABC):
"""Create the correct MatchField subclass instance.
As OF 1.3 has many match fields and there are many ways to (un)pack their
OxmTLV.oxm_value, this class does all the work of finding the correct
MatchField class and instantiating the corresponding object.
"""
__classes = {}
@classmethod
def from_name(cls, name, value):
"""Return the proper object from name and value."""
field_class = cls._get_class(name)
if field_class:
return field_class(value)
@classmethod
def from_of_tlv(cls, tlv):
"""Return the proper object from a pyof OXM TLV."""
field_class = cls._get_class(tlv.oxm_field)
if field_class:
return field_class.from_of_tlv(tlv)
@classmethod
def _get_class(cls, name_or_field):
"""Return the proper object from field name or OxmTLV.oxm_field."""
if not cls.__classes:
cls._index_classes()
return cls.__classes.get(name_or_field)
@classmethod
def _index_classes(cls):
for subclass in MatchField.__subclasses__():
cls.__classes[subclass.name] = subclass
cls.__classes[subclass.oxm_field] = subclass
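# Illustrative use of the factory (added sketch, not part of the original
# module); both lookups rely only on the classes defined above:
#
#   field = MatchFieldFactory.from_name('dl_vlan', 100)   # -> MatchDLVLAN(100)
#   tlv = field.as_of_tlv()                               # pyof OxmTLV
#   same = MatchFieldFactory.from_of_tlv(tlv)             # -> MatchDLVLAN(100)
#   assert same == field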
|
py | 1a47ef87d6e3edb418bfe2dc97746dbdf214bb1b | """influxweb URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import include, path
urlpatterns = [
path('admin/', admin.site.urls),
path('manage/', include('management.urls')),
path('', include('login.urls')),
path('', include('mainweb.urls')),
]
|
py | 1a47f02896f49dd9bc07b5e854e1d4f9a0866b88 | import os.path
import config.basic
################################################################
# Configurations for processing
################################################################
# This is where all of the output files are stored
# Must be writable and have lots of free space...
#base_results_directory = "/home/fs01/lgs23/PALFA/results"
base_results_directory = "/mnt/data1/adam_dev/results"
# The following is the name of the scratch working directory
# basename on the individual processing nodes
base_working_directory = "/tmp"
# The following is the path where the temporary working directory
# should be created. This could be /dev/shm, or simply another
# directory on the worker node.
base_tmp_dir = "/tmp"
# Should not need to change the names of the zaplists...
zaplistdir = os.path.join(config.basic.pipelinedir, "lib", "zaplists")
default_zaplist = os.path.join(zaplistdir, "PALFA.zaplist")
# The following don't currently get used. They are placeholders.
num_cores = 1 # The number of cores to use/request for each job
use_hyperthreading = False # Whether or not to use HyperThreading
# Do only single-pulse search? Added by LGS
do_noaccel = True
import processing_check
processing_check.processing.populate_configs(locals())
processing_check.processing.check_sanity()
|
py | 1a47f37e9ef7f5986a1acc6418f49552fee2e5cd | import setuptools
from distutils.core import Extension
with open("README.md") as f:
long_description = f.read()
with open("./src/viztracer/__init__.py") as f:
for line in f.readlines():
if line.startswith("__version__"):
# __version__ = "0.9"
delim = '"' if '"' in line else "'"
version = line.split(delim)[1]
break
else:
print("Can't find version! Stop Here!")
exit(1)
setuptools.setup(
name="viztracer",
version=version,
author="Tian Gao",
author_email="[email protected]",
description="A debugging and profiling tool that can trace and visualize python code execution",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/gaogaotiantian/viztracer",
packages=setuptools.find_packages("src"),
package_dir={"": "src"},
package_data={
"viztracer": [
"html/*.js",
"html/*.css",
"html/*.html"
]
},
ext_modules=[
Extension(
"viztracer.snaptrace",
sources=[
"src/viztracer/modules/util.c",
"src/viztracer/modules/snaptrace.c"
],
extra_link_args=["-lpthread"]
)
],
classifiers=[
"Development Status :: 4 - Beta",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: MacOS",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Topic :: Software Development :: Quality Assurance",
"Topic :: Software Development :: Bug Tracking",
"Topic :: System :: Logging"
],
python_requires=">=3.6",
extras_require={
"full": ["rich", "orjson"]
},
entry_points={
"console_scripts": [
"viztracer = viztracer:main",
"vizviewer = viztracer:viewer_main",
"vdb = viztracer:sim_main"
]
},
)
|
py | 1a47f383899b46d0ea3778b6ca2dadee3e7a4f4f |
from st2actions.runners.pythonrunner import Action
import requests
__all__ = [
'NetboxBaseAction'
]
class NetboxBaseAction(Action):
"""Base Action for all Netbox API based actions
"""
def __init__(self, config):
super(NetboxBaseAction, self).__init__(config)
def get(self, endpoint_uri, **kwargs):
"""Make a get request to the API URI passed in
"""
self.logger.debug("Calling base get with kwargs: {}".format(kwargs))
if self.config['use_https']:
url = 'https://'
else:
url = 'http://'
url = url + self.config['hostname'] + endpoint_uri
headers = {
'Authorization': 'Token ' + self.config['api_token'],
'Accept': 'application/json'
}
        # transform `id__in` if present
if kwargs.get('id__in'):
kwargs['id__in'] = ','.join(kwargs['id__in'])
self.logger.debug('id__in transformed to {}'.format(kwargs['id__in']))
r = requests.get(url, verify=self.config['ssl_verify'], headers=headers, params=kwargs)
return {'raw': r.json()}
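    # Hypothetical subclass sketch (added for illustration; the endpoint path
    # is an assumption, not part of the original action):
    #
    #   class DeviceGetAction(NetboxBaseAction):
    #       def run(self, **kwargs):
    #           return self.get('/api/dcim/devices/', **kwargs)
    #
    # Calling it with id__in=['1', '2'] would be joined to '1,2' by the
    # transform above before the request is sent.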
|
py | 1a47f42b4b3edd09d32678d775d981474271004b | #!/usr/bin/python
# -*- coding: utf-8 -*-
r"""
This bot uses external filtering programs for munging text.
For example:
python pwb.py piper -filter:"tr A-Z a-z" -page:Wikipedia:Sandbox
Would lower case the article with tr(1).
Multiple -filter commands can be specified:
python pwb.py piper -filter:cat -filter:"tr A-Z a-z" -filter:"tr a-z A-Z" \
-page:Wikipedia:Sandbox
Would pipe the article text through cat(1) (NOOP) and then lower case
it with tr(1) and upper case it again with tr(1).
The following parameters are supported:
-always Always commit changes without asking you to accept them
-filter: Filter the article text through this program, can be
given multiple times to filter through multiple programs in
the order which they are given
The following generators and filters are supported:
¶ms;
"""
#
# (C) Pywikibot team, 2008-2019
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, division, unicode_literals
import os
import pipes
import tempfile
import pywikibot
from pywikibot import pagegenerators
from pywikibot.bot import (MultipleSitesBot, ExistingPageBot,
NoRedirectPageBot, AutomaticTWSummaryBot)
from pywikibot.tools import UnicodeType
# This is required for the text that is shown when you run this script
# with the parameter -help.
docuReplacements = {'¶ms;': pagegenerators.parameterHelp} # noqa: N816
class PiperBot(MultipleSitesBot, ExistingPageBot, NoRedirectPageBot,
AutomaticTWSummaryBot):
"""Bot for munging text using external filtering programs."""
summary_key = 'piper-edit-summary'
def __init__(self, generator, **kwargs):
"""
Initializer.
@param generator: The page generator that determines on which pages
to work on.
@type generator: generator
"""
self.availableOptions.update({
'filters': [],
})
super(PiperBot, self).__init__(generator=generator, **kwargs)
@property
def summary_parameters(self):
"""Return the filter parameter."""
return {'filters': ', '.join(self.getOption('filters'))}
def pipe(self, program, text):
"""Pipe a given text through a given program.
@return: processed text after piping
@rtype: str
"""
if not isinstance(text, str): # py2-py3 compatibility
text = text.encode('utf-8')
pipe = pipes.Template()
pipe.append(str(program), '--') # py2-py3 compatibility
# Create a temporary filename to save the piped stuff to
temp_filename = '%s.%s' % (tempfile.mktemp(), 'txt')
with pipe.open(temp_filename, 'w') as file:
file.write(text)
# Now retrieve the munged text
with open(temp_filename, 'r') as file:
unicode_text = file.read()
if not isinstance(unicode_text, UnicodeType): # py2-py3 compatibility
unicode_text = unicode_text.decode('utf-8')
# clean up
os.unlink(temp_filename)
return unicode_text
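    # Note (added): the standard-library ``pipes`` module used above is
    # deprecated in recent Python versions. A rough equivalent of this method
    # using ``subprocess`` (a sketch, not the bot's actual implementation, and
    # limited to a single command rather than a shell pipeline) could be:
    #
    #   import shlex, subprocess
    #   result = subprocess.run(shlex.split(program), input=text,
    #                           capture_output=True, text=True, check=True)
    #   unicode_text = result.stdout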
def treat_page(self):
"""Load the given page, do some changes, and save it."""
# Load the page
text = self.current_page.text
# Munge!
for program in self.getOption('filters'):
text = self.pipe(program, text)
# only save if something was changed
self.put_current(text)
def main(*args):
"""Create and run a PiperBot instance from the given command arguments."""
local_args = pywikibot.handle_args(args)
# This factory is responsible for processing command line arguments
# that are also used by other scripts and that determine on which pages
# to work on.
gen_factory = pagegenerators.GeneratorFactory()
# The program to pipe stuff through
filters = []
options = {}
# Parse command line arguments
for arg in local_args:
option, sep, value = arg.partition(':')
if option == '-filter':
filters.append(value)
elif option == '-always':
options['always'] = True
else:
# check if a standard argument like
# -start:XYZ or -ref:Asdf was given.
gen_factory.handleArg(arg)
options['filters'] = filters
gen = gen_factory.getCombinedGenerator(preload=True)
if gen:
# The preloading generator is responsible for downloading multiple
# pages from the wiki simultaneously.
bot = PiperBot(gen, **options)
bot.run()
return True
else:
pywikibot.bot.suggest_help(missing_generator=True)
return False
if __name__ == '__main__':
main()
|
py | 1a47f445fec6645db96bfd08749845ce911c8a94 | class Solution:
"""
@param nums: A set of numbers
@return: A list of lists
"""
def subsets(self, nums):
# write your code here
if not nums: return [[]]
nums = sorted(nums)
res = []
self.helper(res, [], nums, 0)
return res
    def helper(self, res, part, nums, pos):
        # record the current partial subset, then extend it with each element
        # from position ``pos`` onwards and backtrack after the recursive call
        res.append(list(part))
        for i in range(pos, len(nums)):
            part.append(nums[i])
            self.helper(res, part, nums, i + 1)
            part.pop()
s=Solution()
print(s.subsets([1]))
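# Expected output of the driver above (added note): [[], [1]].  For a larger
# input such as [1, 2, 3] the helper enumerates all 8 subsets in DFS order:
# [[], [1], [1, 2], [1, 2, 3], [1, 3], [2], [2, 3], [3]].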
|
py | 1a47f62e5e1be19a4d70b71883e1f182e0f0d580 | n1 = float(input('qual a primeira nota do aluno?: '))
n2 = float(input('qual a segunda nota do aluno?: '))
media = (n1 + n2)/2
if media < 5.0:
print('a media do aluno foi {}, aluno REPROVADO!'.format(media))
elif media >=5 and media <=6.9:
print('a media do aluno foi {}, aluno em RECUPERAÇÃO!'.format(media))
elif media >= 7:
print('a meida do aluno foi {}, aluno APROVADO!'.format(media))
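# Example (added): grades 6 and 8 give media = 7.0 -> PASSED; 5 and 6 give
# media = 5.5 -> RECOVERY; 3 and 4 give media = 3.5 -> FAILED.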
|
py | 1a47f63b976658742f56b66ecd1806a052349903 | # pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary
_l = logging.getLogger(name=__name__)
lib = SimLibrary()
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
lib.set_library_names("winmm.dll")
prototypes = \
{
#
'joyConfigChanged': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["dwFlags"]),
#
'mciSendCommandA': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["mciId", "uMsg", "dwParam1", "dwParam2"]),
#
'mciSendCommandW': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["mciId", "uMsg", "dwParam1", "dwParam2"]),
#
'mciSendStringA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpstrCommand", "lpstrReturnString", "uReturnLength", "hwndCallback"]),
#
'mciSendStringW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpstrCommand", "lpstrReturnString", "uReturnLength", "hwndCallback"]),
#
'mciGetDeviceIDA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pszDevice"]),
#
'mciGetDeviceIDW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pszDevice"]),
#
'mciGetDeviceIDFromElementIDA': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["dwElementID", "lpstrType"]),
#
'mciGetDeviceIDFromElementIDW': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["dwElementID", "lpstrType"]),
#
'mciGetErrorStringA': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["mcierr", "pszText", "cchText"]),
#
'mciGetErrorStringW': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["mcierr", "pszText", "cchText"]),
#
'mciSetYieldProc': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["mciId", "dwYieldData"]), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["mciId", "fpYieldProc", "dwYieldData"]),
#
'mciGetCreatorTask': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["mciId"]),
#
'mciGetYieldProc': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypePointer(SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["mciId", "dwYieldData"]), offset=0), arg_names=["mciId", "pdwYieldData"]),
#
'mciGetDriverData': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), arg_names=["wDeviceID"]),
#
'mciLoadCommandResource': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hInstance", "lpResName", "wType"]),
#
'mciSetDriverData': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["wDeviceID", "dwData"]),
#
'mciDriverYield': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["wDeviceID"]),
#
'mciDriverNotify': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["hwndCallback", "wDeviceID", "uStatus"]),
#
'mciFreeCommandResource': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["wTable"]),
#
'CloseDriver': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["hDriver", "lParam1", "lParam2"]),
#
'OpenDriver': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["szDriverName", "szSectionName", "lParam2"]),
#
'SendDriverMessage': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["hDriver", "message", "lParam1", "lParam2"]),
#
'DrvGetModuleHandle': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["hDriver"]),
#
'GetDriverModuleHandle': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["hDriver"]),
#
'DefDriverProc': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["dwDriverIdentifier", "hdrvr", "uMsg", "lParam1", "lParam2"]),
#
'DriverCallback': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwCallback", "dwFlags", "hDevice", "dwMsg", "dwUser", "dwParam1", "dwParam2"]),
#
'mmDrvInstall': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["param0", "param1", "param2", "param3", "param4"]), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hDriver", "wszDrvEntry", "drvMessage", "wFlags"]),
#
'mmioStringToFOURCCA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["sz", "uFlags"]),
#
'mmioStringToFOURCCW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["sz", "uFlags"]),
#
'mmioInstallIOProcA': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpmmioinfo", "uMsg", "lParam1", "lParam2"]), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpmmioinfo", "uMsg", "lParam1", "lParam2"]), offset=0), arg_names=["fccIOProc", "pIOProc", "dwFlags"]),
#
'mmioInstallIOProcW': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpmmioinfo", "uMsg", "lParam1", "lParam2"]), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpmmioinfo", "uMsg", "lParam1", "lParam2"]), offset=0), arg_names=["fccIOProc", "pIOProc", "dwFlags"]),
#
'mmioOpenA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypePointer(SimStruct({"dwFlags": SimTypeInt(signed=False, label="UInt32"), "fccIOProc": SimTypeInt(signed=False, label="UInt32"), "pIOProc": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpmmioinfo", "uMsg", "lParam1", "lParam2"]), offset=0), "wErrorRet": SimTypeInt(signed=False, label="UInt32"), "htask": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "cchBuffer": SimTypeInt(signed=True, label="Int32"), "pchBuffer": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchNext": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchEndRead": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchEndWrite": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "lBufOffset": SimTypeInt(signed=True, label="Int32"), "lDiskOffset": SimTypeInt(signed=True, label="Int32"), "adwInfo": SimTypeFixedSizeArray(SimTypeInt(signed=False, label="UInt32"), 3), "dwReserved1": SimTypeInt(signed=False, label="UInt32"), "dwReserved2": SimTypeInt(signed=False, label="UInt32"), "hmmio": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="MMIOINFO", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["pszFileName", "pmmioinfo", "fdwOpen"]),
#
'mmioOpenW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimStruct({"dwFlags": SimTypeInt(signed=False, label="UInt32"), "fccIOProc": SimTypeInt(signed=False, label="UInt32"), "pIOProc": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpmmioinfo", "uMsg", "lParam1", "lParam2"]), offset=0), "wErrorRet": SimTypeInt(signed=False, label="UInt32"), "htask": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "cchBuffer": SimTypeInt(signed=True, label="Int32"), "pchBuffer": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchNext": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchEndRead": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchEndWrite": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "lBufOffset": SimTypeInt(signed=True, label="Int32"), "lDiskOffset": SimTypeInt(signed=True, label="Int32"), "adwInfo": SimTypeFixedSizeArray(SimTypeInt(signed=False, label="UInt32"), 3), "dwReserved1": SimTypeInt(signed=False, label="UInt32"), "dwReserved2": SimTypeInt(signed=False, label="UInt32"), "hmmio": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="MMIOINFO", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["pszFileName", "pmmioinfo", "fdwOpen"]),
#
'mmioRenameA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimStruct({"dwFlags": SimTypeInt(signed=False, label="UInt32"), "fccIOProc": SimTypeInt(signed=False, label="UInt32"), "pIOProc": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpmmioinfo", "uMsg", "lParam1", "lParam2"]), offset=0), "wErrorRet": SimTypeInt(signed=False, label="UInt32"), "htask": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "cchBuffer": SimTypeInt(signed=True, label="Int32"), "pchBuffer": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchNext": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchEndRead": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchEndWrite": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "lBufOffset": SimTypeInt(signed=True, label="Int32"), "lDiskOffset": SimTypeInt(signed=True, label="Int32"), "adwInfo": SimTypeFixedSizeArray(SimTypeInt(signed=False, label="UInt32"), 3), "dwReserved1": SimTypeInt(signed=False, label="UInt32"), "dwReserved2": SimTypeInt(signed=False, label="UInt32"), "hmmio": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="MMIOINFO", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["pszFileName", "pszNewFileName", "pmmioinfo", "fdwRename"]),
#
'mmioRenameW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"dwFlags": SimTypeInt(signed=False, label="UInt32"), "fccIOProc": SimTypeInt(signed=False, label="UInt32"), "pIOProc": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpmmioinfo", "uMsg", "lParam1", "lParam2"]), offset=0), "wErrorRet": SimTypeInt(signed=False, label="UInt32"), "htask": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "cchBuffer": SimTypeInt(signed=True, label="Int32"), "pchBuffer": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchNext": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchEndRead": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchEndWrite": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "lBufOffset": SimTypeInt(signed=True, label="Int32"), "lDiskOffset": SimTypeInt(signed=True, label="Int32"), "adwInfo": SimTypeFixedSizeArray(SimTypeInt(signed=False, label="UInt32"), 3), "dwReserved1": SimTypeInt(signed=False, label="UInt32"), "dwReserved2": SimTypeInt(signed=False, label="UInt32"), "hmmio": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="MMIOINFO", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["pszFileName", "pszNewFileName", "pmmioinfo", "fdwRename"]),
#
'mmioClose': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmmio", "fuClose"]),
#
'mmioRead': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="SByte"), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=True, label="Int32"), arg_names=["hmmio", "pch", "cch"]),
#
'mmioWrite': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=True, label="Int32"), arg_names=["hmmio", "pch", "cch"]),
#
'mmioSeek': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=True, label="Int32"), arg_names=["hmmio", "lOffset", "iOrigin"]),
#
'mmioGetInfo': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"dwFlags": SimTypeInt(signed=False, label="UInt32"), "fccIOProc": SimTypeInt(signed=False, label="UInt32"), "pIOProc": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpmmioinfo", "uMsg", "lParam1", "lParam2"]), offset=0), "wErrorRet": SimTypeInt(signed=False, label="UInt32"), "htask": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "cchBuffer": SimTypeInt(signed=True, label="Int32"), "pchBuffer": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchNext": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchEndRead": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchEndWrite": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "lBufOffset": SimTypeInt(signed=True, label="Int32"), "lDiskOffset": SimTypeInt(signed=True, label="Int32"), "adwInfo": SimTypeFixedSizeArray(SimTypeInt(signed=False, label="UInt32"), 3), "dwReserved1": SimTypeInt(signed=False, label="UInt32"), "dwReserved2": SimTypeInt(signed=False, label="UInt32"), "hmmio": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="MMIOINFO", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmmio", "pmmioinfo", "fuInfo"]),
#
'mmioSetInfo': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"dwFlags": SimTypeInt(signed=False, label="UInt32"), "fccIOProc": SimTypeInt(signed=False, label="UInt32"), "pIOProc": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpmmioinfo", "uMsg", "lParam1", "lParam2"]), offset=0), "wErrorRet": SimTypeInt(signed=False, label="UInt32"), "htask": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "cchBuffer": SimTypeInt(signed=True, label="Int32"), "pchBuffer": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchNext": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchEndRead": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchEndWrite": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "lBufOffset": SimTypeInt(signed=True, label="Int32"), "lDiskOffset": SimTypeInt(signed=True, label="Int32"), "adwInfo": SimTypeFixedSizeArray(SimTypeInt(signed=False, label="UInt32"), 3), "dwReserved1": SimTypeInt(signed=False, label="UInt32"), "dwReserved2": SimTypeInt(signed=False, label="UInt32"), "hmmio": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="MMIOINFO", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmmio", "pmmioinfo", "fuInfo"]),
#
'mmioSetBuffer': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmmio", "pchBuffer", "cchBuffer", "fuBuffer"]),
#
'mmioFlush': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmmio", "fuFlush"]),
#
'mmioAdvance': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"dwFlags": SimTypeInt(signed=False, label="UInt32"), "fccIOProc": SimTypeInt(signed=False, label="UInt32"), "pIOProc": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["lpmmioinfo", "uMsg", "lParam1", "lParam2"]), offset=0), "wErrorRet": SimTypeInt(signed=False, label="UInt32"), "htask": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "cchBuffer": SimTypeInt(signed=True, label="Int32"), "pchBuffer": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchNext": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchEndRead": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "pchEndWrite": SimTypePointer(SimTypeChar(label="SByte"), offset=0), "lBufOffset": SimTypeInt(signed=True, label="Int32"), "lDiskOffset": SimTypeInt(signed=True, label="Int32"), "adwInfo": SimTypeFixedSizeArray(SimTypeInt(signed=False, label="UInt32"), 3), "dwReserved1": SimTypeInt(signed=False, label="UInt32"), "dwReserved2": SimTypeInt(signed=False, label="UInt32"), "hmmio": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)}, name="MMIOINFO", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmmio", "pmmioinfo", "fuAdvance"]),
#
'mmioSendMessage': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), arg_names=["hmmio", "uMsg", "lParam1", "lParam2"]),
#
'mmioDescend': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"ckid": SimTypeInt(signed=False, label="UInt32"), "cksize": SimTypeInt(signed=False, label="UInt32"), "fccType": SimTypeInt(signed=False, label="UInt32"), "dwDataOffset": SimTypeInt(signed=False, label="UInt32"), "dwFlags": SimTypeInt(signed=False, label="UInt32")}, name="MMCKINFO", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"ckid": SimTypeInt(signed=False, label="UInt32"), "cksize": SimTypeInt(signed=False, label="UInt32"), "fccType": SimTypeInt(signed=False, label="UInt32"), "dwDataOffset": SimTypeInt(signed=False, label="UInt32"), "dwFlags": SimTypeInt(signed=False, label="UInt32")}, name="MMCKINFO", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmmio", "pmmcki", "pmmckiParent", "fuDescend"]),
#
'mmioAscend': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"ckid": SimTypeInt(signed=False, label="UInt32"), "cksize": SimTypeInt(signed=False, label="UInt32"), "fccType": SimTypeInt(signed=False, label="UInt32"), "dwDataOffset": SimTypeInt(signed=False, label="UInt32"), "dwFlags": SimTypeInt(signed=False, label="UInt32")}, name="MMCKINFO", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmmio", "pmmcki", "fuAscend"]),
#
'mmioCreateChunk': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"ckid": SimTypeInt(signed=False, label="UInt32"), "cksize": SimTypeInt(signed=False, label="UInt32"), "fccType": SimTypeInt(signed=False, label="UInt32"), "dwDataOffset": SimTypeInt(signed=False, label="UInt32"), "dwFlags": SimTypeInt(signed=False, label="UInt32")}, name="MMCKINFO", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmmio", "pmmcki", "fuCreate"]),
#
'sndPlaySoundA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pszSound", "fuSound"]),
#
'sndPlaySoundW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pszSound", "fuSound"]),
#
'PlaySoundA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pszSound", "hmod", "fdwSound"]),
#
'PlaySoundW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pszSound", "hmod", "fdwSound"]),
#
'waveOutGetNumDevs': SimTypeFunction([], SimTypeInt(signed=False, label="UInt32")),
#
'waveOutGetDevCapsA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "vDriverVersion": SimTypeInt(signed=False, label="UInt32"), "szPname": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 32), "dwFormats": SimTypeInt(signed=False, label="UInt32"), "wChannels": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16"), "dwSupport": SimTypeInt(signed=False, label="UInt32")}, name="WAVEOUTCAPSA", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uDeviceID", "pwoc", "cbwoc"]),
#
'waveOutGetDevCapsW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "vDriverVersion": SimTypeInt(signed=False, label="UInt32"), "szPname": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 32), "dwFormats": SimTypeInt(signed=False, label="UInt32"), "wChannels": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16"), "dwSupport": SimTypeInt(signed=False, label="UInt32")}, name="WAVEOUTCAPSW", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uDeviceID", "pwoc", "cbwoc"]),
#
'waveOutGetVolume': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo", "pdwVolume"]),
#
'waveOutSetVolume': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo", "dwVolume"]),
#
'waveOutGetErrorTextA': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["mmrError", "pszText", "cchText"]),
#
'waveOutGetErrorTextW': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["mmrError", "pszText", "cchText"]),
#
'waveOutOpen': SimTypeFunction([SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"wFormatTag": SimTypeShort(signed=False, label="UInt16"), "nChannels": SimTypeShort(signed=False, label="UInt16"), "nSamplesPerSec": SimTypeInt(signed=False, label="UInt32"), "nAvgBytesPerSec": SimTypeInt(signed=False, label="UInt32"), "nBlockAlign": SimTypeShort(signed=False, label="UInt16"), "wBitsPerSample": SimTypeShort(signed=False, label="UInt16"), "cbSize": SimTypeShort(signed=False, label="UInt16")}, name="WAVEFORMATEX", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="MIDI_WAVE_OPEN_TYPE")], SimTypeInt(signed=False, label="UInt32"), arg_names=["phwo", "uDeviceID", "pwfx", "dwCallback", "dwInstance", "fdwOpen"]),
#
'waveOutClose': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo"]),
#
'waveOutPrepareHeader': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"lpData": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "dwBufferLength": SimTypeInt(signed=False, label="UInt32"), "dwBytesRecorded": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "dwLoops": SimTypeInt(signed=False, label="UInt32"), "lpNext": SimTypePointer(SimTypeBottom(label="WAVEHDR"), offset=0), "reserved": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)}, name="WAVEHDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo", "pwh", "cbwh"]),
#
'waveOutUnprepareHeader': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"lpData": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "dwBufferLength": SimTypeInt(signed=False, label="UInt32"), "dwBytesRecorded": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "dwLoops": SimTypeInt(signed=False, label="UInt32"), "lpNext": SimTypePointer(SimTypeBottom(label="WAVEHDR"), offset=0), "reserved": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)}, name="WAVEHDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo", "pwh", "cbwh"]),
#
'waveOutWrite': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"lpData": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "dwBufferLength": SimTypeInt(signed=False, label="UInt32"), "dwBytesRecorded": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "dwLoops": SimTypeInt(signed=False, label="UInt32"), "lpNext": SimTypePointer(SimTypeBottom(label="WAVEHDR"), offset=0), "reserved": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)}, name="WAVEHDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo", "pwh", "cbwh"]),
#
'waveOutPause': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo"]),
#
'waveOutRestart': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo"]),
#
'waveOutReset': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo"]),
#
'waveOutBreakLoop': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo"]),
#
'waveOutGetPosition': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"wType": SimTypeInt(signed=False, label="UInt32"), "u": SimUnion({"ms": SimTypeInt(signed=False, label="UInt32"), "sample": SimTypeInt(signed=False, label="UInt32"), "cb": SimTypeInt(signed=False, label="UInt32"), "ticks": SimTypeInt(signed=False, label="UInt32"), "smpte": SimStruct({"hour": SimTypeChar(label="Byte"), "min": SimTypeChar(label="Byte"), "sec": SimTypeChar(label="Byte"), "frame": SimTypeChar(label="Byte"), "fps": SimTypeChar(label="Byte"), "dummy": SimTypeChar(label="Byte"), "pad": SimTypeFixedSizeArray(SimTypeChar(label="Byte"), 2)}, name="_smpte_e__Struct", pack=False, align=None), "midi": SimStruct({"songptrpos": SimTypeInt(signed=False, label="UInt32")}, name="_midi_e__Struct", pack=False, align=None)}, name="<anon>", label="None")}, name="MMTIME", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo", "pmmt", "cbmmt"]),
#
'waveOutGetPitch': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo", "pdwPitch"]),
#
'waveOutSetPitch': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo", "dwPitch"]),
#
'waveOutGetPlaybackRate': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo", "pdwRate"]),
#
'waveOutSetPlaybackRate': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo", "dwRate"]),
#
'waveOutGetID': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo", "puDeviceID"]),
#
'waveOutMessage': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwo", "uMsg", "dw1", "dw2"]),
#
'waveInGetNumDevs': SimTypeFunction([], SimTypeInt(signed=False, label="UInt32")),
#
'waveInGetDevCapsA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "vDriverVersion": SimTypeInt(signed=False, label="UInt32"), "szPname": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 32), "dwFormats": SimTypeInt(signed=False, label="UInt32"), "wChannels": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16")}, name="WAVEINCAPSA", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uDeviceID", "pwic", "cbwic"]),
#
'waveInGetDevCapsW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "vDriverVersion": SimTypeInt(signed=False, label="UInt32"), "szPname": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 32), "dwFormats": SimTypeInt(signed=False, label="UInt32"), "wChannels": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16")}, name="WAVEINCAPSW", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uDeviceID", "pwic", "cbwic"]),
#
'waveInGetErrorTextA': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["mmrError", "pszText", "cchText"]),
#
'waveInGetErrorTextW': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["mmrError", "pszText", "cchText"]),
#
'waveInOpen': SimTypeFunction([SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"wFormatTag": SimTypeShort(signed=False, label="UInt16"), "nChannels": SimTypeShort(signed=False, label="UInt16"), "nSamplesPerSec": SimTypeInt(signed=False, label="UInt32"), "nAvgBytesPerSec": SimTypeInt(signed=False, label="UInt32"), "nBlockAlign": SimTypeShort(signed=False, label="UInt16"), "wBitsPerSample": SimTypeShort(signed=False, label="UInt16"), "cbSize": SimTypeShort(signed=False, label="UInt16")}, name="WAVEFORMATEX", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="MIDI_WAVE_OPEN_TYPE")], SimTypeInt(signed=False, label="UInt32"), arg_names=["phwi", "uDeviceID", "pwfx", "dwCallback", "dwInstance", "fdwOpen"]),
#
'waveInClose': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwi"]),
#
'waveInPrepareHeader': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"lpData": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "dwBufferLength": SimTypeInt(signed=False, label="UInt32"), "dwBytesRecorded": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "dwLoops": SimTypeInt(signed=False, label="UInt32"), "lpNext": SimTypePointer(SimTypeBottom(label="WAVEHDR"), offset=0), "reserved": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)}, name="WAVEHDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwi", "pwh", "cbwh"]),
#
'waveInUnprepareHeader': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"lpData": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "dwBufferLength": SimTypeInt(signed=False, label="UInt32"), "dwBytesRecorded": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "dwLoops": SimTypeInt(signed=False, label="UInt32"), "lpNext": SimTypePointer(SimTypeBottom(label="WAVEHDR"), offset=0), "reserved": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)}, name="WAVEHDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwi", "pwh", "cbwh"]),
#
'waveInAddBuffer': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"lpData": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "dwBufferLength": SimTypeInt(signed=False, label="UInt32"), "dwBytesRecorded": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "dwLoops": SimTypeInt(signed=False, label="UInt32"), "lpNext": SimTypePointer(SimTypeBottom(label="WAVEHDR"), offset=0), "reserved": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)}, name="WAVEHDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwi", "pwh", "cbwh"]),
#
'waveInStart': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwi"]),
#
'waveInStop': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwi"]),
#
'waveInReset': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwi"]),
#
'waveInGetPosition': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"wType": SimTypeInt(signed=False, label="UInt32"), "u": SimUnion({"ms": SimTypeInt(signed=False, label="UInt32"), "sample": SimTypeInt(signed=False, label="UInt32"), "cb": SimTypeInt(signed=False, label="UInt32"), "ticks": SimTypeInt(signed=False, label="UInt32"), "smpte": SimStruct({"hour": SimTypeChar(label="Byte"), "min": SimTypeChar(label="Byte"), "sec": SimTypeChar(label="Byte"), "frame": SimTypeChar(label="Byte"), "fps": SimTypeChar(label="Byte"), "dummy": SimTypeChar(label="Byte"), "pad": SimTypeFixedSizeArray(SimTypeChar(label="Byte"), 2)}, name="_smpte_e__Struct", pack=False, align=None), "midi": SimStruct({"songptrpos": SimTypeInt(signed=False, label="UInt32")}, name="_midi_e__Struct", pack=False, align=None)}, name="<anon>", label="None")}, name="MMTIME", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwi", "pmmt", "cbmmt"]),
#
'waveInGetID': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwi", "puDeviceID"]),
#
'waveInMessage': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwi", "uMsg", "dw1", "dw2"]),
#
'midiOutGetNumDevs': SimTypeFunction([], SimTypeInt(signed=False, label="UInt32")),
#
'midiStreamOpen': SimTypeFunction([SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["phms", "puDeviceID", "cMidi", "dwCallback", "dwInstance", "fdwOpen"]),
#
'midiStreamClose': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hms"]),
#
'midiStreamProperty': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hms", "lppropdata", "dwProperty"]),
#
'midiStreamPosition': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"wType": SimTypeInt(signed=False, label="UInt32"), "u": SimUnion({"ms": SimTypeInt(signed=False, label="UInt32"), "sample": SimTypeInt(signed=False, label="UInt32"), "cb": SimTypeInt(signed=False, label="UInt32"), "ticks": SimTypeInt(signed=False, label="UInt32"), "smpte": SimStruct({"hour": SimTypeChar(label="Byte"), "min": SimTypeChar(label="Byte"), "sec": SimTypeChar(label="Byte"), "frame": SimTypeChar(label="Byte"), "fps": SimTypeChar(label="Byte"), "dummy": SimTypeChar(label="Byte"), "pad": SimTypeFixedSizeArray(SimTypeChar(label="Byte"), 2)}, name="_smpte_e__Struct", pack=False, align=None), "midi": SimStruct({"songptrpos": SimTypeInt(signed=False, label="UInt32")}, name="_midi_e__Struct", pack=False, align=None)}, name="<anon>", label="None")}, name="MMTIME", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hms", "lpmmt", "cbmmt"]),
#
'midiStreamOut': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"lpData": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "dwBufferLength": SimTypeInt(signed=False, label="UInt32"), "dwBytesRecorded": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "lpNext": SimTypePointer(SimTypeBottom(label="MIDIHDR"), offset=0), "reserved": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwOffset": SimTypeInt(signed=False, label="UInt32"), "dwReserved": SimTypeFixedSizeArray(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), 8)}, name="MIDIHDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hms", "pmh", "cbmh"]),
#
'midiStreamPause': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hms"]),
#
'midiStreamRestart': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hms"]),
#
'midiStreamStop': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hms"]),
#
'midiConnect': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmi", "hmo", "pReserved"]),
#
'midiDisconnect': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmi", "hmo", "pReserved"]),
#
'midiOutGetDevCapsA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "vDriverVersion": SimTypeInt(signed=False, label="UInt32"), "szPname": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 32), "wTechnology": SimTypeShort(signed=False, label="UInt16"), "wVoices": SimTypeShort(signed=False, label="UInt16"), "wNotes": SimTypeShort(signed=False, label="UInt16"), "wChannelMask": SimTypeShort(signed=False, label="UInt16"), "dwSupport": SimTypeInt(signed=False, label="UInt32")}, name="MIDIOUTCAPSA", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uDeviceID", "pmoc", "cbmoc"]),
#
'midiOutGetDevCapsW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "vDriverVersion": SimTypeInt(signed=False, label="UInt32"), "szPname": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 32), "wTechnology": SimTypeShort(signed=False, label="UInt16"), "wVoices": SimTypeShort(signed=False, label="UInt16"), "wNotes": SimTypeShort(signed=False, label="UInt16"), "wChannelMask": SimTypeShort(signed=False, label="UInt16"), "dwSupport": SimTypeInt(signed=False, label="UInt32")}, name="MIDIOUTCAPSW", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uDeviceID", "pmoc", "cbmoc"]),
#
'midiOutGetVolume': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmo", "pdwVolume"]),
#
'midiOutSetVolume': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmo", "dwVolume"]),
#
'midiOutGetErrorTextA': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["mmrError", "pszText", "cchText"]),
#
'midiOutGetErrorTextW': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["mmrError", "pszText", "cchText"]),
#
'midiOutOpen': SimTypeFunction([SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="MIDI_WAVE_OPEN_TYPE")], SimTypeInt(signed=False, label="UInt32"), arg_names=["phmo", "uDeviceID", "dwCallback", "dwInstance", "fdwOpen"]),
#
'midiOutClose': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmo"]),
#
'midiOutPrepareHeader': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"lpData": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "dwBufferLength": SimTypeInt(signed=False, label="UInt32"), "dwBytesRecorded": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "lpNext": SimTypePointer(SimTypeBottom(label="MIDIHDR"), offset=0), "reserved": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwOffset": SimTypeInt(signed=False, label="UInt32"), "dwReserved": SimTypeFixedSizeArray(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), 8)}, name="MIDIHDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmo", "pmh", "cbmh"]),
#
'midiOutUnprepareHeader': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"lpData": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "dwBufferLength": SimTypeInt(signed=False, label="UInt32"), "dwBytesRecorded": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "lpNext": SimTypePointer(SimTypeBottom(label="MIDIHDR"), offset=0), "reserved": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwOffset": SimTypeInt(signed=False, label="UInt32"), "dwReserved": SimTypeFixedSizeArray(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), 8)}, name="MIDIHDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmo", "pmh", "cbmh"]),
#
'midiOutShortMsg': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmo", "dwMsg"]),
#
'midiOutLongMsg': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"lpData": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "dwBufferLength": SimTypeInt(signed=False, label="UInt32"), "dwBytesRecorded": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "lpNext": SimTypePointer(SimTypeBottom(label="MIDIHDR"), offset=0), "reserved": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwOffset": SimTypeInt(signed=False, label="UInt32"), "dwReserved": SimTypeFixedSizeArray(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), 8)}, name="MIDIHDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmo", "pmh", "cbmh"]),
#
'midiOutReset': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmo"]),
#
'midiOutCachePatches': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeShort(signed=False, label="UInt16"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmo", "uBank", "pwpa", "fuCache"]),
#
'midiOutCacheDrumPatches': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeShort(signed=False, label="UInt16"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmo", "uPatch", "pwkya", "fuCache"]),
#
'midiOutGetID': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmo", "puDeviceID"]),
#
'midiOutMessage': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmo", "uMsg", "dw1", "dw2"]),
#
'midiInGetNumDevs': SimTypeFunction([], SimTypeInt(signed=False, label="UInt32")),
#
'midiInGetDevCapsA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "vDriverVersion": SimTypeInt(signed=False, label="UInt32"), "szPname": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 32), "dwSupport": SimTypeInt(signed=False, label="UInt32")}, name="MIDIINCAPSA", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uDeviceID", "pmic", "cbmic"]),
#
'midiInGetDevCapsW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "vDriverVersion": SimTypeInt(signed=False, label="UInt32"), "szPname": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 32), "dwSupport": SimTypeInt(signed=False, label="UInt32")}, name="MIDIINCAPSW", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uDeviceID", "pmic", "cbmic"]),
#
'midiInGetErrorTextA': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["mmrError", "pszText", "cchText"]),
#
'midiInGetErrorTextW': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["mmrError", "pszText", "cchText"]),
#
'midiInOpen': SimTypeFunction([SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="MIDI_WAVE_OPEN_TYPE")], SimTypeInt(signed=False, label="UInt32"), arg_names=["phmi", "uDeviceID", "dwCallback", "dwInstance", "fdwOpen"]),
#
'midiInClose': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmi"]),
#
'midiInPrepareHeader': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"lpData": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "dwBufferLength": SimTypeInt(signed=False, label="UInt32"), "dwBytesRecorded": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "lpNext": SimTypePointer(SimTypeBottom(label="MIDIHDR"), offset=0), "reserved": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwOffset": SimTypeInt(signed=False, label="UInt32"), "dwReserved": SimTypeFixedSizeArray(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), 8)}, name="MIDIHDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmi", "pmh", "cbmh"]),
#
'midiInUnprepareHeader': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"lpData": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "dwBufferLength": SimTypeInt(signed=False, label="UInt32"), "dwBytesRecorded": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "lpNext": SimTypePointer(SimTypeBottom(label="MIDIHDR"), offset=0), "reserved": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwOffset": SimTypeInt(signed=False, label="UInt32"), "dwReserved": SimTypeFixedSizeArray(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), 8)}, name="MIDIHDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmi", "pmh", "cbmh"]),
#
'midiInAddBuffer': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"lpData": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "dwBufferLength": SimTypeInt(signed=False, label="UInt32"), "dwBytesRecorded": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "lpNext": SimTypePointer(SimTypeBottom(label="MIDIHDR"), offset=0), "reserved": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwOffset": SimTypeInt(signed=False, label="UInt32"), "dwReserved": SimTypeFixedSizeArray(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), 8)}, name="MIDIHDR", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmi", "pmh", "cbmh"]),
#
'midiInStart': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmi"]),
#
'midiInStop': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmi"]),
#
'midiInReset': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmi"]),
#
'midiInGetID': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmi", "puDeviceID"]),
#
'midiInMessage': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmi", "uMsg", "dw1", "dw2"]),
#
'auxGetNumDevs': SimTypeFunction([], SimTypeInt(signed=False, label="UInt32")),
#
'auxGetDevCapsA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "vDriverVersion": SimTypeInt(signed=False, label="UInt32"), "szPname": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 32), "wTechnology": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16"), "dwSupport": SimTypeInt(signed=False, label="UInt32")}, name="AUXCAPSA", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uDeviceID", "pac", "cbac"]),
#
'auxGetDevCapsW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "vDriverVersion": SimTypeInt(signed=False, label="UInt32"), "szPname": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 32), "wTechnology": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16"), "dwSupport": SimTypeInt(signed=False, label="UInt32")}, name="AUXCAPSW", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uDeviceID", "pac", "cbac"]),
#
'auxSetVolume': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uDeviceID", "dwVolume"]),
#
'auxGetVolume': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["uDeviceID", "pdwVolume"]),
#
'auxOutMessage': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["uDeviceID", "uMsg", "dw1", "dw2"]),
#
'mixerGetNumDevs': SimTypeFunction([], SimTypeInt(signed=False, label="UInt32")),
#
'mixerGetDevCapsA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "vDriverVersion": SimTypeInt(signed=False, label="UInt32"), "szPname": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 32), "fdwSupport": SimTypeInt(signed=False, label="UInt32"), "cDestinations": SimTypeInt(signed=False, label="UInt32")}, name="MIXERCAPSA", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uMxId", "pmxcaps", "cbmxcaps"]),
#
'mixerGetDevCapsW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "vDriverVersion": SimTypeInt(signed=False, label="UInt32"), "szPname": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 32), "fdwSupport": SimTypeInt(signed=False, label="UInt32"), "cDestinations": SimTypeInt(signed=False, label="UInt32")}, name="MIXERCAPSW", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uMxId", "pmxcaps", "cbmxcaps"]),
#
'mixerOpen': SimTypeFunction([SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["phmx", "uMxId", "dwCallback", "dwInstance", "fdwOpen"]),
#
'mixerClose': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmx"]),
#
'mixerMessage': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmx", "uMsg", "dwParam1", "dwParam2"]),
#
'mixerGetLineInfoA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"cbStruct": SimTypeInt(signed=False, label="UInt32"), "dwDestination": SimTypeInt(signed=False, label="UInt32"), "dwSource": SimTypeInt(signed=False, label="UInt32"), "dwLineID": SimTypeInt(signed=False, label="UInt32"), "fdwLine": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwComponentType": SimTypeInt(signed=False, label="MIXERLINE_COMPONENTTYPE"), "cChannels": SimTypeInt(signed=False, label="UInt32"), "cConnections": SimTypeInt(signed=False, label="UInt32"), "cControls": SimTypeInt(signed=False, label="UInt32"), "szShortName": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 16), "szName": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 64), "Target": SimStruct({"dwType": SimTypeInt(signed=False, label="UInt32"), "dwDeviceID": SimTypeInt(signed=False, label="UInt32"), "wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "vDriverVersion": SimTypeInt(signed=False, label="UInt32"), "szPname": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 32)}, name="_Target_e__Struct", pack=False, align=None)}, name="MIXERLINEA", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmxobj", "pmxl", "fdwInfo"]),
#
'mixerGetLineInfoW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"cbStruct": SimTypeInt(signed=False, label="UInt32"), "dwDestination": SimTypeInt(signed=False, label="UInt32"), "dwSource": SimTypeInt(signed=False, label="UInt32"), "dwLineID": SimTypeInt(signed=False, label="UInt32"), "fdwLine": SimTypeInt(signed=False, label="UInt32"), "dwUser": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "dwComponentType": SimTypeInt(signed=False, label="MIXERLINE_COMPONENTTYPE"), "cChannels": SimTypeInt(signed=False, label="UInt32"), "cConnections": SimTypeInt(signed=False, label="UInt32"), "cControls": SimTypeInt(signed=False, label="UInt32"), "szShortName": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 16), "szName": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 64), "Target": SimStruct({"dwType": SimTypeInt(signed=False, label="UInt32"), "dwDeviceID": SimTypeInt(signed=False, label="UInt32"), "wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "vDriverVersion": SimTypeInt(signed=False, label="UInt32"), "szPname": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 32)}, name="_Target_e__Struct", pack=False, align=None)}, name="MIXERLINEW", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmxobj", "pmxl", "fdwInfo"]),
#
'mixerGetID': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmxobj", "puMxId", "fdwId"]),
#
'mixerGetLineControlsA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"cbStruct": SimTypeInt(signed=False, label="UInt32"), "dwLineID": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"dwControlID": SimTypeInt(signed=False, label="UInt32"), "dwControlType": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None"), "cControls": SimTypeInt(signed=False, label="UInt32"), "cbmxctrl": SimTypeInt(signed=False, label="UInt32"), "pamxctrl": SimTypePointer(SimStruct({"cbStruct": SimTypeInt(signed=False, label="UInt32"), "dwControlID": SimTypeInt(signed=False, label="UInt32"), "dwControlType": SimTypeInt(signed=False, label="UInt32"), "fdwControl": SimTypeInt(signed=False, label="UInt32"), "cMultipleItems": SimTypeInt(signed=False, label="UInt32"), "szShortName": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 16), "szName": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 64), "Bounds": SimUnion({"Anonymous1": SimStruct({"lMinimum": SimTypeInt(signed=True, label="Int32"), "lMaximum": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous1_e__Struct", pack=False, align=None), "Anonymous2": SimStruct({"dwMinimum": SimTypeInt(signed=False, label="UInt32"), "dwMaximum": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous2_e__Struct", pack=False, align=None), "dwReserved": SimTypeFixedSizeArray(SimTypeInt(signed=False, label="UInt32"), 6)}, name="<anon>", label="None"), "Metrics": SimUnion({"cSteps": SimTypeInt(signed=False, label="UInt32"), "cbCustomData": SimTypeInt(signed=False, label="UInt32"), "dwReserved": SimTypeFixedSizeArray(SimTypeInt(signed=False, label="UInt32"), 6)}, name="<anon>", label="None")}, name="MIXERCONTROLA", pack=False, align=None), offset=0)}, name="MIXERLINECONTROLSA", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmxobj", "pmxlc", "fdwControls"]),
#
'mixerGetLineControlsW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"cbStruct": SimTypeInt(signed=False, label="UInt32"), "dwLineID": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"dwControlID": SimTypeInt(signed=False, label="UInt32"), "dwControlType": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None"), "cControls": SimTypeInt(signed=False, label="UInt32"), "cbmxctrl": SimTypeInt(signed=False, label="UInt32"), "pamxctrl": SimTypePointer(SimStruct({"cbStruct": SimTypeInt(signed=False, label="UInt32"), "dwControlID": SimTypeInt(signed=False, label="UInt32"), "dwControlType": SimTypeInt(signed=False, label="UInt32"), "fdwControl": SimTypeInt(signed=False, label="UInt32"), "cMultipleItems": SimTypeInt(signed=False, label="UInt32"), "szShortName": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 16), "szName": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 64), "Bounds": SimUnion({"Anonymous1": SimStruct({"lMinimum": SimTypeInt(signed=True, label="Int32"), "lMaximum": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous1_e__Struct", pack=False, align=None), "Anonymous2": SimStruct({"dwMinimum": SimTypeInt(signed=False, label="UInt32"), "dwMaximum": SimTypeInt(signed=False, label="UInt32")}, name="_Anonymous2_e__Struct", pack=False, align=None), "dwReserved": SimTypeFixedSizeArray(SimTypeInt(signed=False, label="UInt32"), 6)}, name="<anon>", label="None"), "Metrics": SimUnion({"cSteps": SimTypeInt(signed=False, label="UInt32"), "cbCustomData": SimTypeInt(signed=False, label="UInt32"), "dwReserved": SimTypeFixedSizeArray(SimTypeInt(signed=False, label="UInt32"), 6)}, name="<anon>", label="None")}, name="MIXERCONTROLW", pack=False, align=None), offset=0)}, name="MIXERLINECONTROLSW", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmxobj", "pmxlc", "fdwControls"]),
#
'mixerGetControlDetailsA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"cbStruct": SimTypeInt(signed=False, label="UInt32"), "dwControlID": SimTypeInt(signed=False, label="UInt32"), "cChannels": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"hwndOwner": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "cMultipleItems": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None"), "cbDetails": SimTypeInt(signed=False, label="UInt32"), "paDetails": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="MIXERCONTROLDETAILS", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmxobj", "pmxcd", "fdwDetails"]),
#
'mixerGetControlDetailsW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"cbStruct": SimTypeInt(signed=False, label="UInt32"), "dwControlID": SimTypeInt(signed=False, label="UInt32"), "cChannels": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"hwndOwner": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "cMultipleItems": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None"), "cbDetails": SimTypeInt(signed=False, label="UInt32"), "paDetails": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="MIXERCONTROLDETAILS", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmxobj", "pmxcd", "fdwDetails"]),
#
'mixerSetControlDetails': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"cbStruct": SimTypeInt(signed=False, label="UInt32"), "dwControlID": SimTypeInt(signed=False, label="UInt32"), "cChannels": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"hwndOwner": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "cMultipleItems": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None"), "cbDetails": SimTypeInt(signed=False, label="UInt32"), "paDetails": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="MIXERCONTROLDETAILS", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hmxobj", "pmxcd", "fdwDetails"]),
#
'timeGetSystemTime': SimTypeFunction([SimTypePointer(SimStruct({"wType": SimTypeInt(signed=False, label="UInt32"), "u": SimUnion({"ms": SimTypeInt(signed=False, label="UInt32"), "sample": SimTypeInt(signed=False, label="UInt32"), "cb": SimTypeInt(signed=False, label="UInt32"), "ticks": SimTypeInt(signed=False, label="UInt32"), "smpte": SimStruct({"hour": SimTypeChar(label="Byte"), "min": SimTypeChar(label="Byte"), "sec": SimTypeChar(label="Byte"), "frame": SimTypeChar(label="Byte"), "fps": SimTypeChar(label="Byte"), "dummy": SimTypeChar(label="Byte"), "pad": SimTypeFixedSizeArray(SimTypeChar(label="Byte"), 2)}, name="_smpte_e__Struct", pack=False, align=None), "midi": SimStruct({"songptrpos": SimTypeInt(signed=False, label="UInt32")}, name="_midi_e__Struct", pack=False, align=None)}, name="<anon>", label="None")}, name="MMTIME", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["pmmt", "cbmmt"]),
#
'timeGetTime': SimTypeFunction([], SimTypeInt(signed=False, label="UInt32")),
#
'timeGetDevCaps': SimTypeFunction([SimTypePointer(SimStruct({"wPeriodMin": SimTypeInt(signed=False, label="UInt32"), "wPeriodMax": SimTypeInt(signed=False, label="UInt32")}, name="TIMECAPS", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["ptc", "cbtc"]),
#
'timeBeginPeriod': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uPeriod"]),
#
'timeEndPeriod': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uPeriod"]),
#
'joyGetPosEx': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "dwFlags": SimTypeInt(signed=False, label="UInt32"), "dwXpos": SimTypeInt(signed=False, label="UInt32"), "dwYpos": SimTypeInt(signed=False, label="UInt32"), "dwZpos": SimTypeInt(signed=False, label="UInt32"), "dwRpos": SimTypeInt(signed=False, label="UInt32"), "dwUpos": SimTypeInt(signed=False, label="UInt32"), "dwVpos": SimTypeInt(signed=False, label="UInt32"), "dwButtons": SimTypeInt(signed=False, label="UInt32"), "dwButtonNumber": SimTypeInt(signed=False, label="UInt32"), "dwPOV": SimTypeInt(signed=False, label="UInt32"), "dwReserved1": SimTypeInt(signed=False, label="UInt32"), "dwReserved2": SimTypeInt(signed=False, label="UInt32")}, name="JOYINFOEX", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["uJoyID", "pji"]),
#
'joyGetNumDevs': SimTypeFunction([], SimTypeInt(signed=False, label="UInt32")),
#
'joyGetDevCapsA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "szPname": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 32), "wXmin": SimTypeInt(signed=False, label="UInt32"), "wXmax": SimTypeInt(signed=False, label="UInt32"), "wYmin": SimTypeInt(signed=False, label="UInt32"), "wYmax": SimTypeInt(signed=False, label="UInt32"), "wZmin": SimTypeInt(signed=False, label="UInt32"), "wZmax": SimTypeInt(signed=False, label="UInt32"), "wNumButtons": SimTypeInt(signed=False, label="UInt32"), "wPeriodMin": SimTypeInt(signed=False, label="UInt32"), "wPeriodMax": SimTypeInt(signed=False, label="UInt32"), "wRmin": SimTypeInt(signed=False, label="UInt32"), "wRmax": SimTypeInt(signed=False, label="UInt32"), "wUmin": SimTypeInt(signed=False, label="UInt32"), "wUmax": SimTypeInt(signed=False, label="UInt32"), "wVmin": SimTypeInt(signed=False, label="UInt32"), "wVmax": SimTypeInt(signed=False, label="UInt32"), "wCaps": SimTypeInt(signed=False, label="UInt32"), "wMaxAxes": SimTypeInt(signed=False, label="UInt32"), "wNumAxes": SimTypeInt(signed=False, label="UInt32"), "wMaxButtons": SimTypeInt(signed=False, label="UInt32"), "szRegKey": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 32), "szOEMVxD": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 260)}, name="JOYCAPSA", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uJoyID", "pjc", "cbjc"]),
#
'joyGetDevCapsW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimStruct({"wMid": SimTypeShort(signed=False, label="UInt16"), "wPid": SimTypeShort(signed=False, label="UInt16"), "szPname": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 32), "wXmin": SimTypeInt(signed=False, label="UInt32"), "wXmax": SimTypeInt(signed=False, label="UInt32"), "wYmin": SimTypeInt(signed=False, label="UInt32"), "wYmax": SimTypeInt(signed=False, label="UInt32"), "wZmin": SimTypeInt(signed=False, label="UInt32"), "wZmax": SimTypeInt(signed=False, label="UInt32"), "wNumButtons": SimTypeInt(signed=False, label="UInt32"), "wPeriodMin": SimTypeInt(signed=False, label="UInt32"), "wPeriodMax": SimTypeInt(signed=False, label="UInt32"), "wRmin": SimTypeInt(signed=False, label="UInt32"), "wRmax": SimTypeInt(signed=False, label="UInt32"), "wUmin": SimTypeInt(signed=False, label="UInt32"), "wUmax": SimTypeInt(signed=False, label="UInt32"), "wVmin": SimTypeInt(signed=False, label="UInt32"), "wVmax": SimTypeInt(signed=False, label="UInt32"), "wCaps": SimTypeInt(signed=False, label="UInt32"), "wMaxAxes": SimTypeInt(signed=False, label="UInt32"), "wNumAxes": SimTypeInt(signed=False, label="UInt32"), "wMaxButtons": SimTypeInt(signed=False, label="UInt32"), "szRegKey": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 32), "szOEMVxD": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 260)}, name="JOYCAPSW", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uJoyID", "pjc", "cbjc"]),
#
'joyGetPos': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"wXpos": SimTypeInt(signed=False, label="UInt32"), "wYpos": SimTypeInt(signed=False, label="UInt32"), "wZpos": SimTypeInt(signed=False, label="UInt32"), "wButtons": SimTypeInt(signed=False, label="UInt32")}, name="JOYINFO", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["uJoyID", "pji"]),
#
'joyGetThreshold': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["uJoyID", "puThreshold"]),
#
'joyReleaseCapture': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uJoyID"]),
#
'joySetCapture': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["hwnd", "uJoyID", "uPeriod", "fChanged"]),
#
'joySetThreshold': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uJoyID", "uThreshold"]),
#
'mmTaskCreate': SimTypeFunction([SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeBottom(label="Void"), arg_names=["dwInst"]), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpfn", "lph", "dwInst"]),
#
'mmTaskBlock': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeBottom(label="Void"), arg_names=["h"]),
#
'mmTaskSignal': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["h"]),
#
'mmTaskYield': SimTypeFunction([], SimTypeBottom(label="Void")),
#
'mmGetCurrentTask': SimTypeFunction([], SimTypeInt(signed=False, label="UInt32")),
#
'timeSetEvent': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0)], SimTypeBottom(label="Void"), arg_names=["uTimerID", "uMsg", "dwUser", "dw1", "dw2"]), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uDelay", "uResolution", "fptc", "dwUser", "fuEvent"]),
#
'timeKillEvent': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="UInt32"), arg_names=["uTimerID"]),
}
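# Descriptive note (an assumption about how this table is consumed): the
# mapping above pairs winmm.dll export names with SimTypeFunction prototypes
# (argument types, return type, argument names); the call below registers it
# on the library object so simulated calls can be resolved with typed
# arguments.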
lib.set_prototypes(prototypes)
|
py | 1a47f68523cb146393f25f5521027b8ad27cdda2 | """
WSGI config for the Betting Website Project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import sys
import os
from django.core.wsgi import get_wsgi_application
# This allows easy placement of apps within the interior
# betting directory.
app_path = os.path.abspath(os.path.join(
os.path.dirname(os.path.abspath(__file__)), os.pardir))
sys.path.append(os.path.join(app_path, 'betting'))
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
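# A minimal sketch of such a wrapper, kept commented out like the placeholder
# above. The HealthCheckMiddleware name and the "/healthz" path are
# illustrative assumptions, not part of this project:
#
# class HealthCheckMiddleware:
#     """Answer a liveness probe directly, delegate everything else to Django."""
#
#     def __init__(self, app):
#         self.app = app
#
#     def __call__(self, environ, start_response):
#         if environ.get("PATH_INFO") == "/healthz":
#             start_response("200 OK", [("Content-Type", "text/plain")])
#             return [b"ok"]
#         return self.app(environ, start_response)
#
# application = HealthCheckMiddleware(application)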
|
py | 1a47f7328dccb75e71651bf5d3a1cbef9c1c0c74 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Tests for the SKA Dish simulator.
"""
import pkg_resources
import time
import pytest
from unittest import mock
from tango_simlib import tango_sim_generator
from ska_dish_master_mid.dish_master_behaviour import AzEl, OverrideDish, get_enum_str, set_enum
FGO_FILE_PATH = pkg_resources.resource_filename("ska_dish_master_mid", "dish_master.fgo")
JSON_FILE_PATH = pkg_resources.resource_filename("ska_dish_master_mid", "dish_master_SimDD.json")
class TestMpiDshModel:
@pytest.fixture(scope="function")
def provision_setup(self):
model = tango_sim_generator.configure_device_models(
[FGO_FILE_PATH, JSON_FILE_PATH], "test/nodb/mpidish"
)
return model["test/nodb/mpidish"], OverrideDish()
def test_update_desired_pointing_history(self, provision_setup):
"""Check the logic in get_new_pointing_coordinates and that the update gets
applied correctly
"""
        # Note: coords are sets of 3: [timestamp, azim, elev]
device_model, dish_override = provision_setup
now = time.time()
now_millisec = now * 1000.0
dish_override.desired_pointings = [[now_millisec + 10.0, 2.0, 3.0]]
desired_pointing_coordinates = [now_millisec + 40.0, 5.0, 6.0]
program_track_table_coordinates = [
now_millisec + 70.0,
8.0,
9.0,
now_millisec + 100.0,
11.0,
12.0,
]
# desiredPointing is newest, so must be used
dish_override.last_coordinate_update_timestamp = now - 5.0
device_model.sim_quantities["programTrackTable"].set_val(
program_track_table_coordinates, now - 3.0
)
device_model.sim_quantities["desiredPointing"].set_val(
desired_pointing_coordinates, now - 2.0
)
current_pointings = list(dish_override.desired_pointings)
dish_override.update_desired_pointing_history(device_model)
expected_pointings = current_pointings + [desired_pointing_coordinates]
assert dish_override.desired_pointings == expected_pointings
# programTrackTable is newest, so must be used
dish_override.last_coordinate_update_timestamp = now - 5.0
device_model.sim_quantities["desiredPointing"].set_val(
desired_pointing_coordinates, now - 3.0
)
device_model.sim_quantities["programTrackTable"].set_val(
program_track_table_coordinates, now - 2.0
)
current_pointings = list(dish_override.desired_pointings)
dish_override.update_desired_pointing_history(device_model)
expected_pointings = (
current_pointings
+ [program_track_table_coordinates[0:3]]
+ [program_track_table_coordinates[3:6]]
)
assert dish_override.desired_pointings == expected_pointings
# Neither is newest, so no update expected
current_pointings = list(dish_override.desired_pointings)
dish_override.last_coordinate_update_timestamp = now
device_model.sim_quantities["desiredPointing"].set_val(
desired_pointing_coordinates, now - 2.0
)
device_model.sim_quantities["programTrackTable"].set_val(
program_track_table_coordinates, now - 3.0
)
dish_override.update_desired_pointing_history(device_model)
assert dish_override.desired_pointings == current_pointings
device_model.sim_quantities["desiredPointing"].set_val(
desired_pointing_coordinates, now - 3.0
)
device_model.sim_quantities["programTrackTable"].set_val(
program_track_table_coordinates, now - 2.0
)
dish_override.update_desired_pointing_history(device_model)
assert dish_override.desired_pointings == current_pointings
# New updates, but timestamps in the past, so no update expected
desired_pointing_coordinates = [now_millisec - 40.0, 5.0, 6.0]
program_track_table_coordinates = [
now_millisec - 60.0,
8.0,
9.0,
now_millisec - 50.0,
10.0,
11.0,
]
dish_override.last_coordinate_update_timestamp = now - 10
device_model.sim_quantities["desiredPointing"].set_val(desired_pointing_coordinates, now)
device_model.sim_quantities["programTrackTable"].set_val(
program_track_table_coordinates, now - 1.0
)
dish_override.update_desired_pointing_history(device_model)
assert dish_override.desired_pointings == current_pointings
dish_override.last_coordinate_update_timestamp = now - 10
device_model.sim_quantities["desiredPointing"].set_val(
desired_pointing_coordinates, now - 1.0
)
device_model.sim_quantities["programTrackTable"].set_val(
program_track_table_coordinates, now
)
dish_override.update_desired_pointing_history(device_model)
assert dish_override.desired_pointings == current_pointings
def test_pointing_state_reports_track_when_on_target(self, provision_setup):
def _update_pointing_state(device_model, dish_override):
now = time.time()
# ensure dish is in allowed mode before requesting track
# track command will change pointing state to slew
set_enum(device_model.sim_quantities["dishMode"], "OPERATE", now)
dish_override.action_track(device_model)
# update pointing state to TRACK if dish is on target, otherwise report slew
dish_override.update_movement_attributes(device_model, now)
current_pointing_state = get_enum_str(device_model.sim_quantities["pointingState"])
return current_pointing_state
device_model, dish_override = provision_setup
# ensure pointing state reports TRACK for requested and
# actual position default values of AzEl(0, 30)
current_pointing_state = _update_pointing_state(device_model, dish_override)
assert current_pointing_state == "TRACK"
# ensure pointing state reports SLEW when the dish is not on target
dish_override.requested_position = AzEl(azim=10.0, elev=40.0)
current_pointing_state = _update_pointing_state(device_model, dish_override)
assert current_pointing_state == "SLEW"
# move the dish to the desired position and check that pointing state is TRACK
dish_override.actual_position = AzEl(azim=10.0, elev=40.0)
current_pointing_state = _update_pointing_state(device_model, dish_override)
assert current_pointing_state == "TRACK"
def test_achieved_pointing_changes_when_dish_is_stowing(self, provision_setup):
device_model, dish_override = provision_setup
# send the dish closer to the stow position
dish_override.requested_position = AzEl(azim=0.0, elev=82.0)
dish_override.actual_position = AzEl(azim=0.0, elev=82.0)
# record initial az, el before movement
initial_az = device_model.sim_quantities["achievedPointing"].last_val[1]
initial_el = device_model.sim_quantities["achievedPointing"].last_val[2]
# request stow mode and move the dish close to the stow position
dish_override.action_setstowmode(device_model, tango_dev=mock.Mock())
stow_position = dish_override.STOW_ELEV_POSITION
dish_far_from_target = True
last_time = time.time()
timeout = time.time() + 5 # 5 seconds from now
while dish_far_from_target:
start_time = time.time()
dish_override.pre_update(device_model, start_time, start_time - last_time)
last_time = start_time
current_el = device_model.sim_quantities["achievedPointing"].last_val[2]
dish_far_from_target = not (stow_position - current_el == pytest.approx(1, abs=1))
time.sleep(1)
if timeout < start_time:
raise Exception("Timeout occurred")
current_az = device_model.sim_quantities["achievedPointing"].last_val[1]
current_el = device_model.sim_quantities["achievedPointing"].last_val[2]
assert current_el != initial_el, "The stow command did not move the dish at all"
assert (
current_az == initial_az
), "The dish should only move in elevation to stow, azimuth movement detected"
assert stow_position - current_el == pytest.approx(
1, abs=1
), "Dish did not arrive at stow position"
|
py | 1a47f74f24af7dd65f3d8fdd6ca7a06f5b04b008 | def findDecision(obj): #obj[0]: Passanger, obj[1]: Weather, obj[2]: Time, obj[3]: Coupon, obj[4]: Coupon_validity, obj[5]: Gender, obj[6]: Age, obj[7]: Maritalstatus, obj[8]: Children, obj[9]: Education, obj[10]: Occupation, obj[11]: Income, obj[12]: Bar, obj[13]: Coffeehouse, obj[14]: Restaurant20to50, obj[15]: Direction_same, obj[16]: Distance
# {"feature": "Age", "instances": 23, "metric_value": 0.9986, "depth": 1}
if obj[6]>1:
# {"feature": "Children", "instances": 13, "metric_value": 0.8905, "depth": 2}
if obj[8]>0:
# {"feature": "Income", "instances": 7, "metric_value": 0.9852, "depth": 3}
if obj[11]<=3:
# {"feature": "Passanger", "instances": 4, "metric_value": 0.8113, "depth": 4}
if obj[0]>1:
return 'True'
elif obj[0]<=1:
return 'False'
else: return 'False'
elif obj[11]>3:
return 'False'
else: return 'False'
elif obj[8]<=0:
return 'True'
else: return 'True'
elif obj[6]<=1:
# {"feature": "Distance", "instances": 10, "metric_value": 0.8813, "depth": 2}
if obj[16]>1:
# {"feature": "Education", "instances": 6, "metric_value": 1.0, "depth": 3}
if obj[9]>1:
# {"feature": "Weather", "instances": 4, "metric_value": 0.8113, "depth": 4}
if obj[1]<=1:
return 'False'
elif obj[1]>1:
return 'True'
else: return 'True'
elif obj[9]<=1:
return 'True'
else: return 'True'
elif obj[16]<=1:
return 'False'
else: return 'False'
else: return 'False'
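# Hedged usage sketch (illustrative only, not part of the generated module): the
# 17-element feature vector below uses made-up ordinal encodings, not values taken
# from the original coupon dataset; it simply exercises one path through the tree.
if __name__ == "__main__":
    sample = [2, 1, 2, 3, 1, 0, 2, 1, 1, 2, 5, 4, 1, 2, 1, 0, 1]
    print(findDecision(sample))  # follows Age > 1 -> Children > 0 -> Income > 3 -> 'False'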
|
py | 1a47f857e863ecec8d2ea693db093f9750bd75ad | # -*- coding: utf-8 -*-
#
# Copyright 2017-2021 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Options for BigMLer retrain option
"""
def get_retrain_options(defaults=None):
"""Retrain-related options
"""
if defaults is None:
defaults = {}
options = {
# Resource ID
'--id': {
'dest': 'resource_id',
'default': defaults.get('resource_id', None),
'help': ("ID for the resource to be retrained.")},
# path to the data file to be added
'--add': {
'dest': 'add',
'default': defaults.get('add', None),
'help': ("Path to the data file to be added.")},
# maximum number of datasets to be used when retraining
'--window-size': {
'type': int,
'dest': 'window_size',
'default': defaults.get('window_size', -1),
'help': ("Maximum number of datasets to be used in retraining."
" When not set, the new dataset will be added to the"
" last one used.")}
}
return options
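# Hedged usage sketch (illustrative only, not part of BigMLer): each key returned
# above is a command-line flag and each value is a dict of argparse keyword
# arguments, so the options can be wired straight into a parser. The resource ID
# below is a made-up placeholder.
if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description="retrain options demo")
    for flag, kwargs in get_retrain_options().items():
        parser.add_argument(flag, **kwargs)
    args = parser.parse_args(["--id", "model/123abc", "--window-size", "3"])
    print(args.resource_id, args.add, args.window_size)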
|
py | 1a47f8a8efec1d153f10f8f9c3032f43da295aa9 | import json
from typing import Dict, List
from mach_nix.data.providers import WheelDependencyProvider, SdistDependencyProvider, NixpkgsDependencyProvider
from mach_nix.data.nixpkgs import NixpkgsIndex
from mach_nix.generators import ExpressionGenerator
from mach_nix.resolver import ResolvedPkg
def unindent(text: str, remove: int):
# removes indentation of text
# also strips leading newlines
return ''.join(map(lambda l: l[remove:], text.splitlines(keepends=True)))
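# Hedged illustration (added for clarity, not in the original source): with
# remove=4, unindent("    let\n      x = 1;\n", 4) returns "let\n  x = 1;\n",
# i.e. four leading characters are dropped from every line.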
class OverridesGenerator(ExpressionGenerator):
def __init__(
self,
py_ver,
nixpkgs: NixpkgsIndex,
pypi_fetcher_commit,
pypi_fetcher_sha256,
disable_checks,
*args,
**kwargs):
self.nixpkgs = nixpkgs
self.disable_checks = disable_checks
self.pypi_fetcher_commit = pypi_fetcher_commit
self.pypi_fetcher_sha256 = pypi_fetcher_sha256
self.py_ver_nix = py_ver.nix()
super(OverridesGenerator, self).__init__(*args, **kwargs)
def generate(self, reqs) -> str:
pkgs = self.resolver.resolve(reqs)
pkgs = dict(sorted(((p.name, p) for p in pkgs), key=lambda x: x[1].name))
return self._gen_python_env(pkgs)
def _gen_imports(self):
out = f"""
{{ pkgs, python, ... }}:
with builtins;
with pkgs.lib;
let
pypi_fetcher_src = builtins.fetchTarball {{
name = "nix-pypi-fetcher";
url = "https://github.com/DavHau/nix-pypi-fetcher/tarball/{self.pypi_fetcher_commit}";
# Hash obtained using `nix-prefetch-url --unpack <url>`
sha256 = "{self.pypi_fetcher_sha256}";
}};
pypiFetcher = import pypi_fetcher_src {{ inherit pkgs; }};
fetchPypi = pypiFetcher.fetchPypi;
fetchPypiWheel = pypiFetcher.fetchPypiWheel;
isPyModule = pkg:
isAttrs pkg && hasAttr "pythonModule" pkg;
normalizeName = name: (replaceStrings ["_"] ["-"] (toLower name));
depNamesOther = [
"depsBuildBuild"
"depsBuildBuildPropagated"
"nativeBuildInputs"
"propagatedNativeBuildInputs"
"depsBuildTarget"
"depsBuildTargetPropagated"
"depsHostHost"
"depsHostHostPropagated"
"depsTargetTarget"
"depsTargetTargetPropagated"
"checkInputs"
"installCheckInputs"
];
depNamesAll = depNamesOther ++ [
"propagatedBuildInputs"
"buildInputs"
];
updatePythonDepsRec = newPkgs: pkg:
if ! isPyModule pkg then pkg else
let
pname = normalizeName (get_pname pkg);
newP =
if newPkgs ? "${{pname}}" && pkg != newPkgs."${{pname}}" then
trace "Updated inherited nixpkgs dep ${{pname}} from ${{pkg.version}} to ${{newPkgs."${{pname}}".version}}"
newPkgs."${{pname}}"
else
pkg;
in
newP.overrideAttrs (old: mapAttrs (n: v:
if elem n depNamesAll then
map (p: updatePythonDepsRec newPkgs p) v
else v
) old);
override = pkg:
if hasAttr "overridePythonAttrs" pkg then
pkg.overridePythonAttrs
else
pkg.overrideAttrs;
nameMap = {{
pytorch = "torch";
}};
get_pname = pkg:
let
res = tryEval (
if pkg ? src.pname then
pkg.src.pname
else if pkg ? pname then
let pname = pkg.pname; in
if nameMap ? "${{pname}}" then nameMap."${{pname}}" else pname
else ""
);
in
toString res.value;
get_passthru = pypi_name: nix_name:
# if pypi_name is in nixpkgs, we must pick it, otherwise risk infinite recursion.
let
python_pkgs = python.pkgs;
pname = if hasAttr "${{pypi_name}}" python_pkgs then pypi_name else nix_name;
in
if hasAttr "${{pname}}" python_pkgs then
let result = (tryEval
(if isNull python_pkgs."${{pname}}" then
{{}}
else
python_pkgs."${{pname}}".passthru));
in
if result.success then result.value else {{}}
else {{}};
tests_on_off = enabled: pySelf: pySuper:
let
mod = {{
doCheck = enabled;
doInstallCheck = enabled;
}};
in
{{
buildPythonPackage = args: pySuper.buildPythonPackage ( args // {{
doCheck = enabled;
doInstallCheck = enabled;
}} );
buildPythonApplication = args: pySuper.buildPythonPackage ( args // {{
doCheck = enabled;
doInstallCheck = enabled;
}} );
}};
pname_passthru_override = pySelf: pySuper: {{
fetchPypi = args: (pySuper.fetchPypi args).overrideAttrs (oa: {{
passthru = {{ inherit (args) pname; }};
}});
}};
mergeOverrides = with pkgs.lib; foldl composeExtensions (self: super: {{}});
merge_with_overr = enabled: overr:
mergeOverrides [(tests_on_off enabled) pname_passthru_override overr];
"""
return unindent(out, 12)
def _gen_build_inputs(self, build_inputs_local, build_inputs_nixpkgs) -> str:
name = lambda n: f'python-self."{n}"' if '.' in n else n
build_inputs_str = ' '.join(
name(b) for b in sorted(build_inputs_local | build_inputs_nixpkgs))
return build_inputs_str
def _gen_prop_build_inputs(self, prop_build_inputs_local, prop_build_inputs_nixpkgs) -> str:
name = lambda n: f'python-self."{n}"' if '.' in n else n
prop_build_inputs_str = ' '.join(
name(b) for b in sorted(prop_build_inputs_local | prop_build_inputs_nixpkgs))
return prop_build_inputs_str
def _gen_overrideAttrs(
self, name, ver, circular_deps, nix_name, provider, build_inputs_str, prop_build_inputs_str,
keep_src=False):
out = f"""
"{name}" = override python-super.{nix_name} ( oldAttrs:
(mapAttrs (n: v: if elem n depNamesOther then map (dep: updatePythonDepsRec python-self dep) v else v ) oldAttrs) // {{
pname = "{name}";
version = "{ver}";
passthru = (get_passthru "{name}" "{nix_name}") // {{ provider = "{provider}"; }};
buildInputs = with python-self; (map (dep: updatePythonDepsRec python-self dep) (oldAttrs."buildInputs" or [])) ++ [ {build_inputs_str} ];
propagatedBuildInputs = with python-self; (map (dep: updatePythonDepsRec python-self dep) (oldAttrs."propagatedBuildInputs" or [])) ++ [ {prop_build_inputs_str} ];"""
if not keep_src:
out += f"""
src = fetchPypi "{name}" "{ver}";"""
if circular_deps:
out += f"""
pipInstallFlags = "--no-dependencies";"""
out += """
}
);\n"""
return unindent(out, 8)
def _gen_builPythonPackage(self, name, ver, circular_deps, nix_name, build_inputs_str, prop_build_inputs_str):
out = f"""
"{name}" = python-self.buildPythonPackage {{
pname = "{name}";
version = "{ver}";
src = fetchPypi "{name}" "{ver}";
passthru = (get_passthru "{name}" "{nix_name}") // {{ provider = "sdist"; }};"""
if circular_deps:
out += f"""
pipInstallFlags = "--no-dependencies";"""
if build_inputs_str.strip():
out += f"""
buildInputs = with python-self; [ {build_inputs_str} ];"""
if prop_build_inputs_str.strip():
out += f"""
propagatedBuildInputs = with python-self; [ {prop_build_inputs_str} ];"""
out += """
};\n"""
return unindent(out, 8)
def _gen_wheel_buildPythonPackage(self, name, ver, circular_deps, nix_name, prop_build_inputs_str, fname):
manylinux = "manylinux1 ++ " if 'manylinux' in fname else ''
# dontStrip added due to this bug - https://github.com/pypa/manylinux/issues/119
out = f"""
"{name}" = python-self.buildPythonPackage {{
pname = "{name}";
version = "{ver}";
src = fetchPypiWheel "{name}" "{ver}" "{fname}";
format = "wheel";
dontStrip = true;
passthru = (get_passthru "{name}" "{nix_name}") // {{ provider = "wheel"; }};"""
if circular_deps:
out += f"""
pipInstallFlags = "--no-dependencies";"""
if manylinux:
out += f"""
nativeBuildInputs = [ autoPatchelfHook ];
autoPatchelfIgnoreMissingDeps = true;"""
if prop_build_inputs_str.strip() or manylinux:
out += f"""
propagatedBuildInputs = with python-self; {manylinux}[ {prop_build_inputs_str} ];"""
out += """
};\n"""
return unindent(out, 8)
def _gen_overrides(self, pkgs: Dict[str, ResolvedPkg], overrides_keys):
pkg_names_str = "".join(
(f"ps.\"{name}\"\n{' ' * 14}"
for (name, pkg) in pkgs.items() if pkg.is_root))
check = json.dumps(not self.disable_checks)
out = f"""
select_pkgs = ps: [
{pkg_names_str.strip()}
];
overrides = manylinux1: autoPatchelfHook: merge_with_overr {check} (python-self: python-super: {{
"""
out = unindent(out, 10)
for pkg in pkgs.values():
if pkg.name not in overrides_keys:
continue
overlays_required = True
build_inputs_local = {b for b in pkg.build_inputs if b in overrides_keys}
build_inputs_nixpkgs = set(pkg.build_inputs) - build_inputs_local
prop_build_inputs_local = {b for b in pkg.prop_build_inputs if b in overrides_keys}
prop_build_inputs_nixpkgs = set(pkg.prop_build_inputs) - prop_build_inputs_local
# convert build inputs to string
build_inputs_str = self._gen_build_inputs(build_inputs_local, build_inputs_nixpkgs, ).strip()
# convert prop build inputs to string
prop_build_inputs_str = self._gen_prop_build_inputs(
prop_build_inputs_local, prop_build_inputs_nixpkgs).strip()
# SDIST
if isinstance(pkg.provider_info.provider, SdistDependencyProvider):
# generate package overlays either via `overrideAttrs` if package already exists in nixpkgs,
# or by creating it from scratch using `buildPythonPackage`
nix_name = self._get_ref_name(pkg.name, pkg.ver)
if self.nixpkgs.exists(pkg.name):
out += self._gen_overrideAttrs(
pkg.name,
pkg.provider_info.provider.deviated_version(pkg.name, pkg.ver),
pkg.removed_circular_deps,
nix_name,
'sdist',
build_inputs_str,
prop_build_inputs_str)
else:
out += self._gen_builPythonPackage(
pkg.name,
pkg.provider_info.provider.deviated_version(pkg.name, pkg.ver),
pkg.removed_circular_deps,
nix_name,
build_inputs_str,
prop_build_inputs_str)
# WHEEL
elif isinstance(pkg.provider_info.provider, WheelDependencyProvider):
out += self._gen_wheel_buildPythonPackage(
pkg.name,
pkg.provider_info.provider.deviated_version(pkg.name, pkg.ver),
pkg.removed_circular_deps,
self._get_ref_name(pkg.name, pkg.ver),
prop_build_inputs_str,
pkg.provider_info.wheel_fname)
# NIXPKGS
elif isinstance(pkg.provider_info.provider, NixpkgsDependencyProvider):
nix_name = self.nixpkgs.find_best_nixpkgs_candidate(pkg.name, pkg.ver)
out += self._gen_overrideAttrs(
pkg.name,
pkg.ver,
pkg.removed_circular_deps,
nix_name,
'nixpkgs',
build_inputs_str,
prop_build_inputs_str,
keep_src=True)
end_overlay_section = f"""
}});
"""
return out + unindent(end_overlay_section, 14)
def _get_ref_name(self, name, ver) -> str:
if self.nixpkgs.exists(name):
return self.nixpkgs.find_best_nixpkgs_candidate(name, ver)
return name
def _gen_python_env(self, pkgs: Dict[str, ResolvedPkg]):
overrides_keys = {p.name for p in pkgs.values()}
out = self._gen_imports() + self._gen_overrides(pkgs, overrides_keys)
python_with_packages = f"""
in
{{ inherit overrides select_pkgs; }}
"""
return out + unindent(python_with_packages, 12)
|
py | 1a47f974866d802115149ae5d57275fbc6b31752 | import unittest
import time
from datetime import datetime
from sqlalchemy.exc import IntegrityError
from app import mail
from flask import current_app, render_template
from flask_mail import Message  # the flask.ext.* import shim was removed; import the package directly
from email.mime.text import MIMEText
from app import create_app, db
from app.models import User, AnonymousUser, Role, Permission, Follow
from document_filder.email_context import *
def send_mail():
    # Build a throwaway test message; the subject prefix and sender come from the app config.
    msg = Message(app.config['FLASKY_MAIL_SUBJECT_PREFIX'] + ' ' + '123',
                  sender=app.config['FLASKY_MAIL_SENDER'], recipients=['[email protected]'])
    # `body` (and `html`) are expected to come from the star import of
    # document_filder.email_context above.
    print(body)
    # print(html)
    body1 = body
    text = "Wiadomość testowa"  # Polish for "test message"; exercises UTF-8 handling
    # Flask-Mail expects plain strings for body/html and encodes them itself,
    # so assign the text directly rather than wrapping it in a MIMEText part.
    msg.body = text
    msg.html = '123'
    with app.app_context():
        mail.send(msg)
app = create_app('default')
send_mail()
# app.run()
|
py | 1a47fa54c6f650701f2d5acf5ec9e60e16dcab49 | # from https://www.datacamp.com/community/tutorials/face-detection-python-opencv
# from https://github.com/parulnith/Face-Detection-in-Python-using-OpenCV
# Import the necessary libraries
import numpy as np
import cv2
import matplotlib.pyplot as plt
def convertToRGB(image):
return cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
def detect_faces(cascade, test_image, scaleFactor = 1.1):
# create a copy of the image to prevent any changes to the original one.
image_copy = test_image.copy()
#convert the test image to gray scale as opencv face detector expects gray images
gray_image = cv2.cvtColor(image_copy, cv2.COLOR_BGR2GRAY)
# Applying the haar classifier to detect faces
faces_rect = cascade.detectMultiScale(gray_image, scaleFactor=scaleFactor, minNeighbors=5)
for (x, y, w, h) in faces_rect:
cv2.rectangle(image_copy, (x, y), (x+w, y+h), (0, 255, 0), 2)
return image_copy
#loading image
test_image = cv2.imread('data/baby1.png')
#call the function to detect faces
haar_cascade_face = cv2.CascadeClassifier('data/haarcascades/haarcascade_frontalface_alt2.xml')
faces = detect_faces(haar_cascade_face, test_image)
#convert to RGB and display image
plt.imshow(convertToRGB(faces))
plt.show()
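# Hedged variation (illustrative only): detect_faces accepts an optional scaleFactor,
# so a caller could trade speed for recall and save the result instead of displaying
# it; the output path below is a made-up example.
# faces_strict = detect_faces(haar_cascade_face, test_image, scaleFactor=1.05)
# cv2.imwrite('data/baby1_faces.png', faces_strict)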
|
py | 1a47fa9f1d8cc46a7fe5b8657b20dea6de60b0bf | import traceback
from abc import ABCMeta, abstractmethod
class Protocol(object):
"""Backend for a specific browser-control protocol.
Each Protocol is composed of a set of ProtocolParts that implement
the APIs required for specific interactions. This reflects the fact
    that not all implementations will support exactly the same feature set.
Each ProtocolPart is exposed directly on the protocol through an accessor
attribute with a name given by its `name` property.
:param Executor executor: The Executor instance that's using this Protocol
:param Browser browser: The Browser using this protocol"""
__metaclass__ = ABCMeta
implements = []
def __init__(self, executor, browser):
self.executor = executor
self.browser = browser
for cls in self.implements:
name = cls.name
assert not hasattr(self, name)
setattr(self, name, cls(self))
@property
def logger(self):
""":returns: Current logger"""
return self.executor.logger
@property
def is_alive(self):
"""Is the browser connection still active
:returns: A boolean indicating whether the connection is still active."""
return True
def setup(self, runner):
"""Handle protocol setup, and send a message to the runner to indicate
success or failure."""
msg = None
try:
msg = "Failed to start protocol connection"
self.connect()
msg = None
for cls in self.implements:
getattr(self, cls.name).setup()
msg = "Post-connection steps failed"
self.after_connect()
except Exception:
if msg is not None:
self.logger.warning(msg)
self.logger.warning(traceback.format_exc())
raise
@abstractmethod
def connect(self):
"""Make a connection to the remote browser"""
pass
@abstractmethod
def after_connect(self):
"""Run any post-connection steps. This happens after the ProtocolParts are
        initialized so can depend on a fully-populated object."""
pass
def teardown(self):
"""Run cleanup steps after the tests are finished."""
for cls in self.implements:
getattr(self, cls.name).teardown()
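# Hedged sketch (added for illustration, not part of wptrunner): a concrete Protocol
# lists its ProtocolPart classes in `implements`; __init__ above then exposes each
# part under its `name`, so a hypothetical browser protocol might look like:
#
#     class ConsoleBasePart(BaseProtocolPart):
#         def execute_script(self, script, asynchronous=False):
#             self.logger.info("script: %s" % script)
#         def set_timeout(self, timeout): pass
#         def wait(self): pass
#         def set_window(self, handle): pass
#
#     class ConsoleProtocol(Protocol):
#         implements = [ConsoleBasePart]
#         def connect(self): pass
#         def after_connect(self): pass
#
# After `proto = ConsoleProtocol(executor, browser); proto.setup(runner)`, the part is
# reachable as `proto.base` because BaseProtocolPart.name == "base".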
class ProtocolPart(object):
"""Base class for all ProtocolParts.
:param Protocol parent: The parent protocol"""
__metaclass__ = ABCMeta
name = None
def __init__(self, parent):
self.parent = parent
@property
def logger(self):
""":returns: Current logger"""
return self.parent.logger
def setup(self):
"""Run any setup steps required for the ProtocolPart."""
pass
def teardown(self):
"""Run any teardown steps required for the ProtocolPart."""
pass
class BaseProtocolPart(ProtocolPart):
"""Generic bits of protocol that are required for multiple test types"""
__metaclass__ = ABCMeta
name = "base"
@abstractmethod
def execute_script(self, script, asynchronous=False):
"""Execute javascript in the current Window.
:param str script: The js source to execute. This is implicitly wrapped in a function.
:param bool asynchronous: Whether the script is asynchronous in the webdriver
sense i.e. whether the return value is the result of
the initial function call or if it waits for some callback.
:returns: The result of the script execution.
"""
pass
@abstractmethod
def set_timeout(self, timeout):
"""Set the timeout for script execution.
:param timeout: Script timeout in seconds"""
pass
@abstractmethod
def wait(self):
"""Wait indefinitely for the browser to close"""
pass
@property
def current_window(self):
"""Return a handle identifying the current top level browsing context
:returns: A protocol-specific handle"""
pass
@abstractmethod
def set_window(self, handle):
"""Set the top level browsing context to one specified by a given handle.
:param handle: A protocol-specific handle identifying a top level browsing
context."""
pass
class TestharnessProtocolPart(ProtocolPart):
"""Protocol part required to run testharness tests."""
__metaclass__ = ABCMeta
name = "testharness"
@abstractmethod
def load_runner(self, url_protocol):
"""Load the initial page used to control the tests.
:param str url_protocol: "https" or "http" depending on the test metadata.
"""
pass
@abstractmethod
def close_old_windows(self, url_protocol):
"""Close existing windows except for the initial runner window.
After calling this method there must be exactly one open window that
contains the initial runner page.
:param str url_protocol: "https" or "http" depending on the test metadata.
"""
pass
@abstractmethod
    def get_test_window(self, window_id, parent):
        """Get the window handle corresponding to the window containing the
currently active test.
:param window_id: A string containing the DOM name of the Window that
contains the test, or None.
:param parent: The handle of the runner window.
:returns: A protocol-specific window handle.
"""
pass
class PrefsProtocolPart(ProtocolPart):
"""Protocol part that allows getting and setting browser prefs."""
__metaclass__ = ABCMeta
name = "prefs"
@abstractmethod
def set(self, name, value):
"""Set the named pref to value.
:param name: A pref name of browser-specific type
:param value: A pref value of browser-specific type"""
pass
@abstractmethod
def get(self, name):
"""Get the current value of a named pref
:param name: A pref name of browser-specific type
:returns: A pref value of browser-specific type"""
pass
@abstractmethod
def clear(self, name):
"""Reset the value of a named pref back to the default.
:param name: A pref name of browser-specific type"""
pass
class StorageProtocolPart(ProtocolPart):
"""Protocol part for manipulating browser storage."""
__metaclass__ = ABCMeta
name = "storage"
@abstractmethod
def clear_origin(self, url):
"""Clear all the storage for a specified origin.
:param url: A url belonging to the origin"""
pass
class SelectorProtocolPart(ProtocolPart):
"""Protocol part for selecting elements on the page."""
__metaclass__ = ABCMeta
name = "select"
def element_by_selector(self, element_selector, frame="window"):
elements = self.elements_by_selector_and_frame(element_selector, frame)
frame_name = "window"
if (frame != "window"):
frame_name = frame.id
if len(elements) == 0:
raise ValueError("Selector '%s' in frame '%s' matches no elements" % (element_selector, frame_name))
elif len(elements) > 1:
raise ValueError("Selector '%s' in frame '%s' matches multiple elements" % (element_selector, frame_name))
return elements[0]
@abstractmethod
def elements_by_selector(self, selector):
"""Select elements matching a CSS selector
:param str selector: The CSS selector
:returns: A list of protocol-specific handles to elements"""
pass
@abstractmethod
def elements_by_selector_and_frame(self, element_selector, frame):
"""Select elements matching a CSS selector
:param str selector: The CSS selector
:returns: A list of protocol-specific handles to elements"""
pass
class ClickProtocolPart(ProtocolPart):
"""Protocol part for performing trusted clicks"""
__metaclass__ = ABCMeta
name = "click"
@abstractmethod
def element(self, element):
"""Perform a trusted click somewhere on a specific element.
:param element: A protocol-specific handle to an element."""
pass
class SendKeysProtocolPart(ProtocolPart):
"""Protocol part for performing trusted clicks"""
__metaclass__ = ABCMeta
name = "send_keys"
@abstractmethod
def send_keys(self, element, keys):
"""Send keys to a specific element.
:param element: A protocol-specific handle to an element.
:param keys: A protocol-specific handle to a string of input keys."""
pass
class GenerateTestReportProtocolPart(ProtocolPart):
"""Protocol part for generating test reports"""
__metaclass__ = ABCMeta
name = "generate_test_report"
@abstractmethod
def generate_test_report(self, message):
"""Generate a test report.
:param message: The message to be contained in the report."""
pass
class SetPermissionProtocolPart(ProtocolPart):
"""Protocol part for setting permissions"""
__metaclass__ = ABCMeta
name = "set_permission"
@abstractmethod
def set_permission(self, name, state, one_realm=False):
"""Set permission state.
:param name: The name of the permission to set.
:param state: The state to set the permission to.
:param one_realm: Whether to set the permission for only one realm."""
pass
class ActionSequenceProtocolPart(ProtocolPart):
"""Protocol part for performing trusted clicks"""
__metaclass__ = ABCMeta
name = "action_sequence"
@abstractmethod
def send_actions(self, actions):
"""Send a sequence of actions to the window.
:param actions: A protocol-specific handle to an array of actions."""
pass
class TestDriverProtocolPart(ProtocolPart):
"""Protocol part that implements the basic functionality required for
all testdriver-based tests."""
__metaclass__ = ABCMeta
name = "testdriver"
@abstractmethod
def send_message(self, message_type, status, message=None):
"""Send a testdriver message to the browser.
:param str message_type: The kind of the message.
:param str status: Either "failure" or "success" depending on whether the
previous command succeeded.
:param str message: Additional data to add to the message."""
pass
class AssertsProtocolPart(ProtocolPart):
"""ProtocolPart that implements the functionality required to get a count of non-fatal
assertions triggered"""
__metaclass__ = ABCMeta
name = "asserts"
@abstractmethod
def get(self):
"""Get a count of assertions since the last browser start"""
pass
class CoverageProtocolPart(ProtocolPart):
"""Protocol part for collecting per-test coverage data."""
__metaclass__ = ABCMeta
name = "coverage"
@abstractmethod
def reset(self):
"""Reset coverage counters"""
pass
@abstractmethod
def dump(self):
"""Dump coverage counters"""
pass
class VirtualAuthenticatorProtocolPart(ProtocolPart):
"""Protocol part for creating and manipulating virtual authenticators"""
__metaclass__ = ABCMeta
name = "virtual_authenticator"
@abstractmethod
def add_virtual_authenticator(self, config):
"""Add a virtual authenticator
:param config: The Authenticator Configuration"""
pass
@abstractmethod
def remove_virtual_authenticator(self, authenticator_id):
"""Remove a virtual authenticator
:param str authenticator_id: The ID of the authenticator to remove"""
pass
@abstractmethod
def add_credential(self, authenticator_id, credential):
"""Inject a credential onto an authenticator
:param str authenticator_id: The ID of the authenticator to add the credential to
:param credential: The credential to inject"""
pass
@abstractmethod
def get_credentials(self, authenticator_id):
"""Get the credentials stored in an authenticator
:param str authenticator_id: The ID of the authenticator
:returns: An array with the credentials stored on the authenticator"""
pass
@abstractmethod
def remove_credential(self, authenticator_id, credential_id):
"""Remove a credential stored in an authenticator
:param str authenticator_id: The ID of the authenticator
:param str credential_id: The ID of the credential"""
pass
@abstractmethod
def remove_all_credentials(self, authenticator_id):
"""Remove all the credentials stored in an authenticator
:param str authenticator_id: The ID of the authenticator"""
pass
@abstractmethod
def set_user_verified(self, authenticator_id, uv):
"""Sets the user verified flag on an authenticator
:param str authenticator_id: The ID of the authenticator
:param bool uv: the user verified flag"""
pass
|
py | 1a47fb57ba4b64a458a7f1da0b5228acd70f6868 | """Policy base classes without Parameterized."""
import tensorflow as tf
class Policy2:
"""
    Policy base class without Parameterized.
Args:
env_spec: Environment specification.
"""
def __init__(self, name, env_spec):
self._name = name
self._env_spec = env_spec
self._variable_scope = tf.VariableScope(reuse=False, name=name)
# Should be implemented by all policies
def get_action(self, observation):
"""Get action given observation."""
raise NotImplementedError
def get_actions(self, observations):
"""Get actions given observations."""
raise NotImplementedError
def reset(self, dones=None):
"""Reset policy."""
pass
@property
def name(self):
return self._name
@property
def vectorized(self):
"""
Boolean for vectorized.
Indicates whether the policy is vectorized. If True, it should
implement get_actions(), and support resetting
with multiple simultaneous states.
"""
return False
@property
def observation_space(self):
"""Observation space."""
return self._env_spec.observation_space
@property
def action_space(self):
"""Policy action space."""
return self._env_spec.action_space
@property
def env_spec(self):
"""Policy environment specification."""
return self._env_spec
@property
def recurrent(self):
"""Boolean indicating if the policy is recurrent."""
return False
def log_diagnostics(self, paths):
"""Log extra information per iteration based on the collected paths."""
pass
@property
def state_info_keys(self):
"""
State info keys.
Return keys for the information related to the policy's state when
taking an action.
:return:
"""
return [k for k, _ in self.state_info_specs]
@property
def state_info_specs(self):
"""
        State info specification.
Return keys and shapes for the information related to the policy's
state when taking an action.
:return:
"""
return list()
def terminate(self):
"""Clean up operation."""
pass
def get_trainable_vars(self):
"""Get trainable vars."""
return self._variable_scope.trainable_variables()
def get_global_vars(self):
"""Get global vars."""
return self._variable_scope.global_variables()
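# Hedged sketch (added for illustration, not part of the original file): a minimal
# concrete policy only has to supply get_action/get_actions; the action space comes
# from the spec object passed in, so a uniform-random policy could look like:
#
#     class UniformRandomPolicy(Policy2):
#         def get_action(self, observation):
#             return self.action_space.sample(), dict()
#         def get_actions(self, observations):
#             return [self.action_space.sample() for _ in observations], dict()
#
# Usage would be UniformRandomPolicy("uniform_random", env_spec) for some env_spec
# providing observation_space and action_space with a sample() method (an assumption,
# not guaranteed by this base class).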
class StochasticPolicy2(Policy2):
"""StochasticPolicy."""
@property
def distribution(self):
"""Distribution."""
raise NotImplementedError
def dist_info_sym(self, obs_var, state_info_vars, name="dist_info_sym"):
"""
Symbolic graph of the distribution.
Return the symbolic distribution information about the actions.
:param obs_var: symbolic variable for observations
:param state_info_vars: a dictionary whose values should contain
information about the state of the policy at
the time it received the observation
:return:
"""
raise NotImplementedError
def dist_info(self, obs, state_infos):
"""
Distribution info.
Return the distribution information about the actions.
        :param obs: observation values
        :param state_infos: a dictionary whose values should contain
information about the state of the policy at the time it received the
observation
:return:
"""
raise NotImplementedError
|