blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dcabca991e405722f266c26a79a6218eee88a8e5 | bcda330eab528871da7fe007851f9554f2e19e66 | /dfp/get_orders.py | 839da7a3b615c878b631f55ed7c0aa7da9f8bd90 | [
"MIT"
]
| permissive | gmalta/dfp-prebid-setup | 2b80cc6ac53240c8e2caec6abadb6df349ada6ae | d965f9a70e56a8444ecd80566028f09964b51d04 | refs/heads/master | 2023-08-19T02:02:30.265693 | 2021-09-22T19:47:17 | 2021-09-22T19:47:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,087 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from googleads import ad_manager
from dfp.client import get_client
logger = logging.getLogger(__name__)
def get_order_by_name(order_name):
    """
    Gets an order by name from DFP.

    Args:
        order_name (str): the name of the DFP order
    Returns:
        the first DFP order whose name matches exactly, or None
    """
    dfp_client = get_client()
    order_service = dfp_client.GetService('OrderService', version='v202108')

    # Bind the name as a typed PQL parameter instead of interpolating it
    # into the query string, so quoting/special characters are safe.
    query = 'WHERE name = :name'
    values = [{
        'key': 'name',
        'value': {
            'xsi_type': 'TextValue',
            'value': order_name
        }
    }]
    statement = ad_manager.FilterStatement(query, values)
    response = order_service.getOrdersByStatement(statement.ToStatement())

    # The SOAP response may omit 'results' entirely (or not support item
    # access) when nothing matched; treat any such failure as "not found".
    try:
        results = response['results']
    except (AttributeError, KeyError):
        results = []

    if not results:
        return None
    order = results[0]
    logger.info(u'Found an order with name "{name}".'.format(name=order['name']))
    return order
def get_all_orders(print_orders=False):
    """
    Logs all orders in DFP.

    Args:
        print_orders (bool): whether to print each order's name to stdout
    Returns:
        None
    """
    dfp_client = get_client()

    # Initialize appropriate service.
    order_service = dfp_client.GetService('OrderService', version='v202108')

    # Create a statement to select orders, paging through a suggested-size
    # chunk at a time until no further results come back.
    statement = ad_manager.FilterStatement()
    print('Getting all orders...')
    while True:
        response = order_service.getOrdersByStatement(statement.ToStatement())
        page = response['results'] if 'results' in response else []
        if not page:
            print('No additional orders found.')
            break
        if print_orders:
            for order in page:
                print(u'Found an order with name "{name}".'.format(
                    name=order['name']))
        statement.offset += ad_manager.SUGGESTED_PAGE_LIMIT
def main():
    """Script entry point: print the name of every order in the network."""
    get_all_orders(print_orders=True)


if __name__ == '__main__':
    main()
| [
"[email protected]"
]
| |
65837fe0fa38a44d96ddeb2ef1f74ed692cc0f06 | fa14db78eac812bed2acf5801f4cbae87753d5aa | /setup.py | 003a7ee178ac36569d02a6cea310d9bc66e5f341 | []
| no_license | rogerlew/dictset | 61abcf14313d29150d4e320c4a28fce73bcaa8ca | 97ece69bf4ceb265c36f6ea79f98b6553b9ccc1c | refs/heads/master | 2016-09-03T07:41:45.098506 | 2011-05-13T19:41:57 | 2011-05-13T19:41:57 | 32,486,726 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,422 | py | # Copyright (c) 2011, Roger Lew [see LICENSE.txt]
# This software is funded in part by NIH Grant P20 RR016454.
##from distutils.core import setup
from setuptools import setup
# Distribution metadata for the `dictset` package: a single pure-Python
# module (see py_modules below), published under the BSD license.
setup(name='dictset',
      version='0.3.1.2',
      description='A specialized Python container datatype for managing collections of sets.',
      author='Roger Lew',
      author_email='[email protected]',
      license = "BSD",
      # Trove classifiers: advertise stability, licensing, and the Python
      # versions (2.5-2.7 and 3.0-3.2) this release targets.
      classifiers=["Development Status :: 5 - Production/Stable",
                   "Intended Audience :: Developers",
                   "License :: OSI Approved :: BSD License",
                   "Natural Language :: English",
                   "Programming Language :: Python :: 2.5",
                   "Programming Language :: Python :: 2.6",
                   "Programming Language :: Python :: 2.7",
                   "Programming Language :: Python :: 3",
                   "Programming Language :: Python :: 3.0",
                   "Programming Language :: Python :: 3.1",
                   "Programming Language :: Python :: 3.2",
                   "Topic :: Scientific/Engineering :: Information Analysis",
                   "Topic :: Scientific/Engineering :: Mathematics",
                   "Topic :: Software Development :: Libraries :: Python Modules"],
      url='http://code.google.com/p/dictset/',
      py_modules=['dictset'],
      )

# Release reminder kept by the author as a no-op string statement.
"""setup.py sdist upload --identity="Roger Lew" --sign"""
| [
"[email protected]"
]
| |
e955f96713a44733da4b31bb06747fa8d3a6e015 | d844572e1a69dd08a31744754b7c336b93cef045 | /test.py | 1065e7de72fd207440b26b382caedb6316d2763f | [
"MIT"
]
| permissive | Kyria/EFTParser | 2ffe2a55777dc22367491f959830b66e5ce7683d | 59420dc69cc3083f81205d17d897ecb482b3aed2 | refs/heads/master | 2021-01-02T09:02:23.395417 | 2013-12-23T15:31:50 | 2013-12-23T15:31:50 | 15,377,000 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,376 | py | import unittest
from eftparser import EFTParser
class EFTParsingTestCase(unittest.TestCase):
    """Unit tests for EFTParser.parse on EFT-formatted ship fittings.

    Three fixtures are exercised: a plain fit (modules only), a fit that
    also lists drone-bay/cargo items with "Name xN" quantities, and a fit
    consisting solely of "[Empty ... slot]" placeholders.
    """

    def setUp(self):
        # EFT export with fitted modules only; some modules carry a charge
        # after the comma, e.g. "Tracking Computer II, Tracking Speed Script".
        self.standard_fit = """
[Naglfar, TestFit]
Siege Module II
6x2500mm Repeating Artillery I, Arch Angel EMP XL
6x2500mm Repeating Artillery I, Arch Angel EMP XL
Tracking Computer II, Tracking Speed Script
Tracking Computer II, Tracking Speed Script
Tracking Computer II, Optimal Range Script
Sensor Booster II, Scan Resolution Script
Sensor Booster II, Scan Resolution Script
Republic Fleet Target Painter
Capital Armor Repairer I
Gyrostabilizer II
Gyrostabilizer II
Gyrostabilizer II
Damage Control II
Energized Adaptive Nano Membrane II
Capital Projectile Ambit Extension II
Capital Trimark Armor Pump I
Capital Projectile Metastasis Adjuster I
"""
        # EFT export that additionally lists drones and cargo charges as
        # quantified "Name xN" lines at the end.
        self.fit_with_drones_and_ammunition = """
[Dominix, TestFit With Cargo And Drone bay]
Armor Explosive Hardener II
Pseudoelectron Containment Field I
1600mm Reinforced Steel Plates II
Armor Kinetic Hardener II
1600mm Reinforced Steel Plates II
Energized Adaptive Nano Membrane II
Armor Thermic Hardener II
Prototype 100MN Microwarpdrive I
Omnidirectional Tracking Link II
Omnidirectional Tracking Link II
Omnidirectional Tracking Link II
Heavy Capacitor Booster II, Navy Cap Booster 800
Heavy Unstable Power Fluctuator I
Drone Link Augmentor II
Drone Link Augmentor II
Drone Link Augmentor II
Drone Link Augmentor II
Heavy Unstable Power Fluctuator I
Large Trimark Armor Pump I
Large Trimark Armor Pump I
Large Trimark Armor Pump I
Garde II x5
Garde II x5
Bouncer II x5
Navy Cap Booster 800 x20
"""
        # EFT export where every slot is empty; the parser is expected to
        # skip "[Empty ... slot]" placeholder lines entirely.
        self.empty_fit = """
[Sample NonRealShip To Test, Empty Fit]
[Empty Subsystem slot]
[Empty Subsystem slot]
[Empty Subsystem slot]
[Empty Subsystem slot]
[Empty Subsystem slot]
[Empty Low slot]
[Empty Low slot]
[Empty Low slot]
[Empty Low slot]
[Empty Low slot]
[Empty Med slot]
[Empty Med slot]
[Empty Med slot]
[Empty Med slot]
[Empty Med slot]
[Empty Med slot]
[Empty High slot]
[Empty High slot]
[Empty High slot]
[Empty High slot]
[Empty High slot]
[Empty High slot]
[Empty High slot]
[Empty High slot]
[Empty Rig slot]
[Empty Rig slot]
[Empty Rig slot]
"""
        # NOTE(review): unused and set to an empty tuple -- presumably a
        # leftover placeholder; nothing in these tests reads it.
        self.parser = ()

    def test_parsing_standard_fit(self):
        """Every module/charge pair is extracted; no cargo/drone items."""
        fit = EFTParser.parse(self.standard_fit)
        self.assertEqual(fit['ship_type'] ,'Naglfar')
        self.assertEqual(fit['fit_name'] ,'TestFit')
        self.assertEqual(len(fit['modules']) , 18)
        self.assertEqual(fit['modules'][0]['name'] ,'Siege Module II')
        self.assertEqual(fit['modules'][0]['charge'] ,'')
        self.assertEqual(fit['modules'][1]['name'] ,'6x2500mm Repeating Artillery I')
        self.assertEqual(fit['modules'][1]['charge'] ,'Arch Angel EMP XL')
        self.assertEqual(fit['modules'][2]['name'] ,'6x2500mm Repeating Artillery I')
        self.assertEqual(fit['modules'][2]['charge'] ,'Arch Angel EMP XL')
        self.assertEqual(fit['modules'][3]['name'] ,'Tracking Computer II')
        self.assertEqual(fit['modules'][3]['charge'] ,'Tracking Speed Script')
        self.assertEqual(fit['modules'][4]['name'] ,'Tracking Computer II')
        self.assertEqual(fit['modules'][4]['charge'] ,'Tracking Speed Script')
        self.assertEqual(fit['modules'][5]['name'] ,'Tracking Computer II')
        self.assertEqual(fit['modules'][5]['charge'] ,'Optimal Range Script')
        self.assertEqual(fit['modules'][6]['name'] ,'Sensor Booster II')
        self.assertEqual(fit['modules'][6]['charge'] ,'Scan Resolution Script')
        self.assertEqual(fit['modules'][7]['name'] ,'Sensor Booster II')
        self.assertEqual(fit['modules'][7]['charge'] ,'Scan Resolution Script')
        self.assertEqual(fit['modules'][8]['name'] ,'Republic Fleet Target Painter')
        self.assertEqual(fit['modules'][8]['charge'] ,'')
        self.assertEqual(fit['modules'][9]['name'] ,'Capital Armor Repairer I')
        self.assertEqual(fit['modules'][9]['charge'] ,'')
        self.assertEqual(fit['modules'][10]['name'] ,'Gyrostabilizer II')
        self.assertEqual(fit['modules'][10]['charge'] ,'')
        self.assertEqual(fit['modules'][11]['name'] ,'Gyrostabilizer II')
        self.assertEqual(fit['modules'][11]['charge'] ,'')
        self.assertEqual(fit['modules'][12]['name'] ,'Gyrostabilizer II')
        self.assertEqual(fit['modules'][12]['charge'] ,'')
        self.assertEqual(fit['modules'][13]['name'] ,'Damage Control II')
        self.assertEqual(fit['modules'][13]['charge'] ,'')
        self.assertEqual(fit['modules'][14]['name'] ,'Energized Adaptive Nano Membrane II')
        self.assertEqual(fit['modules'][14]['charge'] ,'')
        self.assertEqual(fit['modules'][15]['name'] ,'Capital Projectile Ambit Extension II')
        self.assertEqual(fit['modules'][15]['charge'] ,'')
        self.assertEqual(fit['modules'][16]['name'] ,'Capital Trimark Armor Pump I')
        self.assertEqual(fit['modules'][16]['charge'] ,'')
        self.assertEqual(fit['modules'][17]['name'] ,'Capital Projectile Metastasis Adjuster I')
        self.assertEqual(fit['modules'][17]['charge'] ,'')
        self.assertEqual(len(fit['cargodrones']) , 0)

    def test_parsing_ammo_drone_fit(self):
        """Quantified "Name xN" lines land in cargodrones with their counts."""
        fit = EFTParser.parse(self.fit_with_drones_and_ammunition)
        self.assertEqual(fit['ship_type'] ,'Dominix')
        self.assertEqual(fit['fit_name'] ,'TestFit With Cargo And Drone bay')
        self.assertEqual(len(fit['modules']) , 21)
        self.assertEqual(fit['modules'][0]['name'] ,'Armor Explosive Hardener II')
        self.assertEqual(fit['modules'][0]['charge'] ,'')
        self.assertEqual(fit['modules'][1]['name'] ,'Pseudoelectron Containment Field I')
        self.assertEqual(fit['modules'][1]['charge'] ,'')
        self.assertEqual(fit['modules'][2]['name'] ,'1600mm Reinforced Steel Plates II')
        self.assertEqual(fit['modules'][2]['charge'] ,'')
        self.assertEqual(fit['modules'][3]['name'] ,'Armor Kinetic Hardener II')
        self.assertEqual(fit['modules'][3]['charge'] ,'')
        self.assertEqual(fit['modules'][4]['name'] ,'1600mm Reinforced Steel Plates II')
        self.assertEqual(fit['modules'][4]['charge'] ,'')
        self.assertEqual(fit['modules'][5]['name'] ,'Energized Adaptive Nano Membrane II')
        self.assertEqual(fit['modules'][5]['charge'] ,'')
        self.assertEqual(fit['modules'][6]['name'] ,'Armor Thermic Hardener II')
        self.assertEqual(fit['modules'][6]['charge'] ,'')
        self.assertEqual(fit['modules'][7]['name'] ,'Prototype 100MN Microwarpdrive I')
        self.assertEqual(fit['modules'][7]['charge'] ,'')
        self.assertEqual(fit['modules'][8]['name'] ,'Omnidirectional Tracking Link II')
        self.assertEqual(fit['modules'][8]['charge'] ,'')
        self.assertEqual(fit['modules'][9]['name'] ,'Omnidirectional Tracking Link II')
        self.assertEqual(fit['modules'][9]['charge'] ,'')
        self.assertEqual(fit['modules'][10]['name'] ,'Omnidirectional Tracking Link II')
        self.assertEqual(fit['modules'][10]['charge'] ,'')
        self.assertEqual(fit['modules'][11]['name'] ,'Heavy Capacitor Booster II')
        self.assertEqual(fit['modules'][11]['charge'] ,'Navy Cap Booster 800')
        self.assertEqual(fit['modules'][12]['name'] ,'Heavy Unstable Power Fluctuator I')
        self.assertEqual(fit['modules'][12]['charge'] ,'')
        self.assertEqual(fit['modules'][13]['name'] ,'Drone Link Augmentor II')
        self.assertEqual(fit['modules'][13]['charge'] ,'')
        self.assertEqual(fit['modules'][14]['name'] ,'Drone Link Augmentor II')
        self.assertEqual(fit['modules'][14]['charge'] ,'')
        self.assertEqual(fit['modules'][15]['name'] ,'Drone Link Augmentor II')
        self.assertEqual(fit['modules'][15]['charge'] ,'')
        self.assertEqual(fit['modules'][16]['name'] ,'Drone Link Augmentor II')
        self.assertEqual(fit['modules'][16]['charge'] ,'')
        self.assertEqual(fit['modules'][17]['name'] ,'Heavy Unstable Power Fluctuator I')
        self.assertEqual(fit['modules'][17]['charge'] ,'')
        self.assertEqual(fit['modules'][18]['name'] ,'Large Trimark Armor Pump I')
        self.assertEqual(fit['modules'][18]['charge'] ,'')
        self.assertEqual(fit['modules'][19]['name'] ,'Large Trimark Armor Pump I')
        self.assertEqual(fit['modules'][19]['charge'] ,'')
        self.assertEqual(fit['modules'][20]['name'] ,'Large Trimark Armor Pump I')
        self.assertEqual(fit['modules'][20]['charge'] ,'')
        self.assertEqual(len(fit['cargodrones']) , 4)
        self.assertEqual(fit['cargodrones'][0]['name'] ,'Garde II')
        self.assertEqual(fit['cargodrones'][0]['quantity'] ,5)
        self.assertEqual(fit['cargodrones'][1]['name'] ,'Garde II')
        self.assertEqual(fit['cargodrones'][1]['quantity'] ,5)
        self.assertEqual(fit['cargodrones'][2]['name'] ,'Bouncer II')
        self.assertEqual(fit['cargodrones'][2]['quantity'] ,5)
        self.assertEqual(fit['cargodrones'][3]['name'] ,'Navy Cap Booster 800')
        self.assertEqual(fit['cargodrones'][3]['quantity'] ,20)

    def test_parsing_empty_fit(self):
        """Empty-slot placeholders produce no modules and no cargo items."""
        fit = EFTParser.parse(self.empty_fit)
        self.assertEqual(fit['ship_type'] ,'Sample NonRealShip To Test')
        self.assertEqual(fit['fit_name'] ,'Empty Fit')
        self.assertEqual(len(fit['modules']) , 0)
        self.assertEqual(len(fit['cargodrones']) , 0)
def suite():
    """Build the test suite containing all EFT parser test cases."""
    cases = unittest.TestSuite()
    for method_name in ('test_parsing_standard_fit',
                        'test_parsing_ammo_drone_fit',
                        'test_parsing_empty_fit'):
        cases.addTest(EFTParsingTestCase(method_name))
    return cases


if __name__ == '__main__':
    unittest.TextTestRunner(verbosity=3).run(suite())
| [
"[email protected]"
]
| |
d6922a8c30675933f85e75cadc718b465e3f1613 | 9d3171d191914bb980f8fea2b895de79d9893db6 | /scikits/statsmodels/regression/tests/test_glsar_gretl.py | ce868ff7a5f89edae46b86e3484832ef3478e887 | [
"BSD-2-Clause",
"BSD-3-Clause"
]
| permissive | takluyver/statsmodels | 2d3ba11035501bd1e35f23daf27bca823eec2cb5 | 3f1aeba98cd4bc2f326f9c18c34e66c396be99cf | refs/heads/master | 2023-06-19T18:51:28.464440 | 2012-02-29T16:24:25 | 2012-02-29T16:24:25 | 3,585,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,803 | py | # -*- coding: utf-8 -*-
"""Tests of GLSAR and diagnostics against Gretl
Created on Thu Feb 02 21:15:47 2012
Author: Josef Perktold
License: BSD-3
"""
import numpy as np
from numpy.testing import assert_almost_equal, assert_equal, assert_approx_equal
from scikits.statsmodels.regression.linear_model import OLS, GLSAR
from scikits.statsmodels.tools.tools import add_constant
from scikits.statsmodels.datasets import macrodata
import scikits.statsmodels.sandbox.panel.sandwich_covariance as sw
import scikits.statsmodels.stats.diagnostic as smsdia
#import scikits.statsmodels.sandbox.stats.diagnostic as smsdia
import scikits.statsmodels.stats.outliers_influence as oi
def compare_ftest(contrast_res, other, decimal=(5,4)):
    """Check an f-test contrast result against Gretl reference values.

    Parameters
    ----------
    contrast_res : object
        result object exposing fvalue, pvalue, df_num and df_denom
    other : sequence
        reference values (fvalue, pvalue, df_num, df_denom, "f")
    decimal : tuple of int
        decimal precision used for the fvalue and pvalue comparisons
    """
    dec_f, dec_p = decimal
    assert_almost_equal(contrast_res.fvalue, other[0], decimal=dec_f)
    assert_almost_equal(contrast_res.pvalue, other[1], decimal=dec_p)
    assert_equal(contrast_res.df_num, other[2])
    assert_equal(contrast_res.df_denom, other[3])
    # the reference entry itself must be flagged as an f-test
    assert_equal("f", other[4])
class TestGLSARGretl(object):
    """Compare GLSAR and OLS results plus diagnostics against Gretl output.

    The hard-coded reference numbers below were produced by Gretl on the
    macrodata dataset; the triple-quoted string blocks inside test_all are
    the raw Gretl session output they were copied from, kept for reference.
    """

    def test_all(self):
        """Run every GLSAR/OLS-vs-Gretl comparison in one pass."""

        d = macrodata.load().data
        #import datasetswsm.greene as g
        #d = g.load('5-1')

        #growth rates
        gs_l_realinv = 400 * np.diff(np.log(d['realinv']))
        gs_l_realgdp = 400 * np.diff(np.log(d['realgdp']))

        #simple diff, not growthrate, I want heteroscedasticity later for testing
        endogd = np.diff(d['realinv'])
        exogd = add_constant(np.c_[np.diff(d['realgdp']), d['realint'][:-1]],
                             prepend=True)

        endogg = gs_l_realinv
        exogg = add_constant(np.c_[gs_l_realgdp, d['realint'][:-1]],prepend=True)

        res_ols = OLS(endogg, exogg).fit()
        #print res_ols.params

        # GLSAR with the rho value Gretl estimated (see session log below)
        mod_g1 = GLSAR(endogg, exogg, rho=-0.108136)
        res_g1 = mod_g1.fit()
        #print res_g1.params

        mod_g2 = GLSAR(endogg, exogg, rho=-0.108136) #-0.1335859) from R
        res_g2 = mod_g2.iterative_fit(maxiter=5)
        #print res_g2.params

        rho = -0.108136

        # coefficient std. error t-ratio p-value 95% CONFIDENCE INTERVAL
        partable = np.array([
            [-9.50990, 0.990456, -9.602, 3.65e-018, -11.4631, -7.55670], # ***
            [ 4.37040, 0.208146, 21.00, 2.93e-052, 3.95993, 4.78086], # ***
            [-0.579253, 0.268009, -2.161, 0.0319, -1.10777, -0.0507346]]) # **

        #Statistics based on the rho-differenced data:
        result_gretl_g1 = dict(
            endog_mean = ("Mean dependent var", 3.113973),
            endog_std = ("S.D. dependent var", 18.67447),
            ssr = ("Sum squared resid", 22530.90),
            mse_resid_sqrt = ("S.E. of regression", 10.66735),
            rsquared = ("R-squared", 0.676973),
            rsquared_adj = ("Adjusted R-squared", 0.673710),
            fvalue = ("F(2, 198)", 221.0475),
            f_pvalue = ("P-value(F)", 3.56e-51),
            resid_acf1 = ("rho", -0.003481),
            dw = ("Durbin-Watson", 1.993858))

        #fstatistic, p-value, df1, df2
        reset_2_3 = [5.219019, 0.00619, 2, 197, "f"]
        reset_2 = [7.268492, 0.00762, 1, 198, "f"]
        reset_3 = [5.248951, 0.023, 1, 198, "f"]
        #LM-statistic, p-value, df
        arch_4 = [7.30776, 0.120491, 4, "chi2"]

        #multicollinearity
        vif = [1.002, 1.002]
        cond_1norm = 6862.0664
        determinant = 1.0296049e+009
        reciprocal_condition_number = 0.013819244

        #Chi-square(2): test-statistic, pvalue, df
        normality = [20.2792, 3.94837e-005, 2]

        #tests
        res = res_g1 #with rho from Gretl

        #basic
        assert_almost_equal(res.params, partable[:,0], 4)
        assert_almost_equal(res.bse, partable[:,1], 6)
        assert_almost_equal(res.tvalues, partable[:,2], 2)

        assert_almost_equal(res.ssr, result_gretl_g1['ssr'][1], decimal=2)
        #assert_almost_equal(res.llf, result_gretl_g1['llf'][1], decimal=7) #not in gretl
        #assert_almost_equal(res.rsquared, result_gretl_g1['rsquared'][1], decimal=7) #FAIL
        #assert_almost_equal(res.rsquared_adj, result_gretl_g1['rsquared_adj'][1], decimal=7) #FAIL
        assert_almost_equal(np.sqrt(res.mse_resid), result_gretl_g1['mse_resid_sqrt'][1], decimal=5)
        assert_almost_equal(res.fvalue, result_gretl_g1['fvalue'][1], decimal=4)
        assert_approx_equal(res.f_pvalue, result_gretl_g1['f_pvalue'][1], significant=2)
        #assert_almost_equal(res.durbin_watson, result_gretl_g1['dw'][1], decimal=7) #TODO

        #arch
        #sm_arch = smsdia.acorr_lm(res.wresid**2, maxlag=4, autolag=None)
        sm_arch = smsdia.het_arch(res.wresid, maxlag=4)
        assert_almost_equal(sm_arch[0], arch_4[0], decimal=4)
        assert_almost_equal(sm_arch[1], arch_4[1], decimal=6)

        #tests
        res = res_g2 #with estimated rho

        #estimated lag coefficient
        assert_almost_equal(res.model.rho, rho, decimal=3)

        #basic
        assert_almost_equal(res.params, partable[:,0], 4)
        assert_almost_equal(res.bse, partable[:,1], 3)
        assert_almost_equal(res.tvalues, partable[:,2], 2)

        assert_almost_equal(res.ssr, result_gretl_g1['ssr'][1], decimal=2)
        #assert_almost_equal(res.llf, result_gretl_g1['llf'][1], decimal=7) #not in gretl
        #assert_almost_equal(res.rsquared, result_gretl_g1['rsquared'][1], decimal=7) #FAIL
        #assert_almost_equal(res.rsquared_adj, result_gretl_g1['rsquared_adj'][1], decimal=7) #FAIL
        assert_almost_equal(np.sqrt(res.mse_resid), result_gretl_g1['mse_resid_sqrt'][1], decimal=5)
        assert_almost_equal(res.fvalue, result_gretl_g1['fvalue'][1], decimal=0)
        assert_almost_equal(res.f_pvalue, result_gretl_g1['f_pvalue'][1], decimal=6)
        #assert_almost_equal(res.durbin_watson, result_gretl_g1['dw'][1], decimal=7) #TODO

        c = oi.reset_ramsey(res, degree=2)
        compare_ftest(c, reset_2, decimal=(2,4))
        c = oi.reset_ramsey(res, degree=3)
        compare_ftest(c, reset_2_3, decimal=(2,4))

        #arch
        #sm_arch = smsdia.acorr_lm(res.wresid**2, maxlag=4, autolag=None)
        sm_arch = smsdia.het_arch(res.wresid, maxlag=4)
        assert_almost_equal(sm_arch[0], arch_4[0], decimal=1)
        assert_almost_equal(sm_arch[1], arch_4[1], decimal=2)

        # Raw Gretl session output the reference numbers were copied from.
        '''
Performing iterative calculation of rho...
ITER RHO ESS
1 -0.10734 22530.9
2 -0.10814 22530.9
Model 4: Cochrane-Orcutt, using observations 1959:3-2009:3 (T = 201)
Dependent variable: ds_l_realinv
rho = -0.108136
coefficient std. error t-ratio p-value
-------------------------------------------------------------
const -9.50990 0.990456 -9.602 3.65e-018 ***
ds_l_realgdp 4.37040 0.208146 21.00 2.93e-052 ***
realint_1 -0.579253 0.268009 -2.161 0.0319 **
Statistics based on the rho-differenced data:
Mean dependent var 3.113973 S.D. dependent var 18.67447
Sum squared resid 22530.90 S.E. of regression 10.66735
R-squared 0.676973 Adjusted R-squared 0.673710
F(2, 198) 221.0475 P-value(F) 3.56e-51
rho -0.003481 Durbin-Watson 1.993858
'''
        '''
RESET test for specification (squares and cubes)
Test statistic: F = 5.219019,
with p-value = P(F(2,197) > 5.21902) = 0.00619
RESET test for specification (squares only)
Test statistic: F = 7.268492,
with p-value = P(F(1,198) > 7.26849) = 0.00762
RESET test for specification (cubes only)
Test statistic: F = 5.248951,
with p-value = P(F(1,198) > 5.24895) = 0.023:
'''
        '''
Test for ARCH of order 4
coefficient std. error t-ratio p-value
--------------------------------------------------------
alpha(0) 97.0386 20.3234 4.775 3.56e-06 ***
alpha(1) 0.176114 0.0714698 2.464 0.0146 **
alpha(2) -0.0488339 0.0724981 -0.6736 0.5014
alpha(3) -0.0705413 0.0737058 -0.9571 0.3397
alpha(4) 0.0384531 0.0725763 0.5298 0.5968
Null hypothesis: no ARCH effect is present
Test statistic: LM = 7.30776
with p-value = P(Chi-square(4) > 7.30776) = 0.120491:
'''
        '''
Variance Inflation Factors
Minimum possible value = 1.0
Values > 10.0 may indicate a collinearity problem
ds_l_realgdp 1.002
realint_1 1.002
VIF(j) = 1/(1 - R(j)^2), where R(j) is the multiple correlation coefficient
between variable j and the other independent variables
Properties of matrix X'X:
1-norm = 6862.0664
Determinant = 1.0296049e+009
Reciprocal condition number = 0.013819244
'''
        '''
Test for ARCH of order 4 -
Null hypothesis: no ARCH effect is present
Test statistic: LM = 7.30776
with p-value = P(Chi-square(4) > 7.30776) = 0.120491
Test of common factor restriction -
Null hypothesis: restriction is acceptable
Test statistic: F(2, 195) = 0.426391
with p-value = P(F(2, 195) > 0.426391) = 0.653468
Test for normality of residual -
Null hypothesis: error is normally distributed
Test statistic: Chi-square(2) = 20.2792
with p-value = 3.94837e-005:
'''

        #no idea what this is
        '''
Augmented regression for common factor test
OLS, using observations 1959:3-2009:3 (T = 201)
Dependent variable: ds_l_realinv
coefficient std. error t-ratio p-value
---------------------------------------------------------------
const -10.9481 1.35807 -8.062 7.44e-014 ***
ds_l_realgdp 4.28893 0.229459 18.69 2.40e-045 ***
realint_1 -0.662644 0.334872 -1.979 0.0492 **
ds_l_realinv_1 -0.108892 0.0715042 -1.523 0.1294
ds_l_realgdp_1 0.660443 0.390372 1.692 0.0923 *
realint_2 0.0769695 0.341527 0.2254 0.8219
Sum of squared residuals = 22432.8
Test of common factor restriction
Test statistic: F(2, 195) = 0.426391, with p-value = 0.653468
'''

        ################ with OLS, HAC errors

        #Model 5: OLS, using observations 1959:2-2009:3 (T = 202)
        #Dependent variable: ds_l_realinv
        #HAC standard errors, bandwidth 4 (Bartlett kernel)

        #coefficient std. error t-ratio p-value 95% CONFIDENCE INTERVAL
        #for confidence interval t(199, 0.025) = 1.972
        partable = np.array([
            [-9.48167, 1.17709, -8.055, 7.17e-014, -11.8029, -7.16049], # ***
            [4.37422, 0.328787, 13.30, 2.62e-029, 3.72587, 5.02258], #***
            [-0.613997, 0.293619, -2.091, 0.0378, -1.19300, -0.0349939]]) # **

        result_gretl_g1 = dict(
            endog_mean = ("Mean dependent var", 3.257395),
            endog_std = ("S.D. dependent var", 18.73915),
            ssr = ("Sum squared resid", 22799.68),
            mse_resid_sqrt = ("S.E. of regression", 10.70380),
            rsquared = ("R-squared", 0.676978),
            rsquared_adj = ("Adjusted R-squared", 0.673731),
            fvalue = ("F(2, 199)", 90.79971),
            f_pvalue = ("P-value(F)", 9.53e-29),
            llf = ("Log-likelihood", -763.9752),
            aic = ("Akaike criterion", 1533.950),
            bic = ("Schwarz criterion", 1543.875),
            hqic = ("Hannan-Quinn", 1537.966),
            resid_acf1 = ("rho", -0.107341),
            dw = ("Durbin-Watson", 2.213805))

        linear_logs = [1.68351, 0.430953, 2, "chi2"]
        #for logs: dropping 70 nan or incomplete observations, T=133
        #(res_ols.model.exog <=0).any(1).sum() = 69 ?not 70
        linear_squares = [7.52477, 0.0232283, 2, "chi2"]

        #Autocorrelation, Breusch-Godfrey test for autocorrelation up to order 4
        lm_acorr4 = [1.17928, 0.321197, 4, 195, "F"]
        lm2_acorr4 = [4.771043, 0.312, 4, "chi2"]
        acorr_ljungbox4 = [5.23587, 0.264, 4, "chi2"]

        #break
        cusum_Harvey_Collier = [0.494432, 0.621549, 198, "t"] #stats.t.sf(0.494432, 198)*2
        #see cusum results in files
        break_qlr = [3.01985, 0.1, 3, 196, "maxF"] #TODO check this, max at 2001:4
        break_chow = [13.1897, 0.00424384, 3, "chi2"] # break at 1984:1

        arch_4 = [3.43473, 0.487871, 4, "chi2"]

        normality = [23.962, 0.00001, 2, "chi2"]

        het_white = [33.503723, 0.000003, 5, "chi2"]
        het_breush_pagan = [1.302014, 0.521520, 2, "chi2"] #TODO: not available
        het_breush_pagan_konker = [0.709924, 0.701200, 2, "chi2"]

        reset_2_3 = [5.219019, 0.00619, 2, 197, "f"]
        reset_2 = [7.268492, 0.00762, 1, 198, "f"]
        reset_3 = [5.248951, 0.023, 1, 198, "f"] #not available

        cond_1norm = 5984.0525
        determinant = 7.1087467e+008
        reciprocal_condition_number = 0.013826504
        vif = [1.001, 1.001]

        names = 'date residual leverage influence DFFITS'.split()
        # NOTE(review): hard-coded absolute path to the author's machine --
        # the leverage/influence reference data cannot load anywhere else;
        # should be replaced with a path relative to the installed package.
        lev = np.genfromtxt("E:\Josef\eclipsegworkspace\statsmodels-git\statsmodels-josef\scikits\statsmodels\datasets\macrodata\leverage_influence_ols_nostars.txt",
                            skip_header=3, skip_footer=1,
                            converters={0:lambda s: s})
        lev.dtype.names = names

        res = res_ols #for easier copying

        cov_hac, bse_hac = sw.cov_hac_simple(res, nlags=4, use_correction=False)

        assert_almost_equal(res.params, partable[:,0], 5)
        assert_almost_equal(bse_hac, partable[:,1], 5)
        #TODO

        assert_almost_equal(res.ssr, result_gretl_g1['ssr'][1], decimal=2)
        #assert_almost_equal(res.llf, result_gretl_g1['llf'][1], decimal=7) #not in gretl
        assert_almost_equal(res.rsquared, result_gretl_g1['rsquared'][1], decimal=6) #FAIL
        assert_almost_equal(res.rsquared_adj, result_gretl_g1['rsquared_adj'][1], decimal=6) #FAIL
        assert_almost_equal(np.sqrt(res.mse_resid), result_gretl_g1['mse_resid_sqrt'][1], decimal=5)
        #f-value is based on cov_hac I guess
        #assert_almost_equal(res.fvalue, result_gretl_g1['fvalue'][1], decimal=0) #FAIL
        #assert_approx_equal(res.f_pvalue, result_gretl_g1['f_pvalue'][1], significant=1) #FAIL
        #assert_almost_equal(res.durbin_watson, result_gretl_g1['dw'][1], decimal=7) #TODO

        c = oi.reset_ramsey(res, degree=2)
        compare_ftest(c, reset_2, decimal=(6,5))
        c = oi.reset_ramsey(res, degree=3)
        compare_ftest(c, reset_2_3, decimal=(6,5))

        linear_sq = smsdia.linear_lm(res.resid, res.model.exog)
        assert_almost_equal(linear_sq[0], linear_squares[0], decimal=6)
        assert_almost_equal(linear_sq[1], linear_squares[1], decimal=7)

        hbpk = smsdia.het_breushpagan(res.resid, res.model.exog)
        assert_almost_equal(hbpk[0], het_breush_pagan_konker[0], decimal=6)
        assert_almost_equal(hbpk[1], het_breush_pagan_konker[1], decimal=6)

        hw = smsdia.het_white(res.resid, res.model.exog)
        assert_almost_equal(hw[:2], het_white[:2], 6)

        #arch
        #sm_arch = smsdia.acorr_lm(res.resid**2, maxlag=4, autolag=None)
        sm_arch = smsdia.het_arch(res.resid, maxlag=4)
        assert_almost_equal(sm_arch[0], arch_4[0], decimal=5)
        assert_almost_equal(sm_arch[1], arch_4[1], decimal=6)

        vif2 = [oi.variance_inflation_factor(res.model.exog, k) for k in [1,2]]

        infl = oi.Influence(res_ols)
        #print np.max(np.abs(lev['DFFITS'] - infl.dffits[0]))
        #print np.max(np.abs(lev['leverage'] - infl.hat_matrix_diag))
        #print np.max(np.abs(lev['influence'] - infl.influence)) #just added this based on Gretl

        #just rough test, low decimal in Gretl output,
        assert_almost_equal(lev['residual'], res.resid, decimal=3)
        assert_almost_equal(lev['DFFITS'], infl.dffits[0], decimal=3)
        assert_almost_equal(lev['leverage'], infl.hat_matrix_diag, decimal=3)
        assert_almost_equal(lev['influence'], infl.influence, decimal=4)
if __name__ == '__main__':
    # Run the Gretl comparison directly, without a test collector.
    t = TestGLSARGretl()
    t.test_all()
'''
Model 5: OLS, using observations 1959:2-2009:3 (T = 202)
Dependent variable: ds_l_realinv
HAC standard errors, bandwidth 4 (Bartlett kernel)
coefficient std. error t-ratio p-value
-------------------------------------------------------------
const -9.48167 1.17709 -8.055 7.17e-014 ***
ds_l_realgdp 4.37422 0.328787 13.30 2.62e-029 ***
realint_1 -0.613997 0.293619 -2.091 0.0378 **
Mean dependent var 3.257395 S.D. dependent var 18.73915
Sum squared resid 22799.68 S.E. of regression 10.70380
R-squared 0.676978 Adjusted R-squared 0.673731
F(2, 199) 90.79971 P-value(F) 9.53e-29
Log-likelihood -763.9752 Akaike criterion 1533.950
Schwarz criterion 1543.875 Hannan-Quinn 1537.966
rho -0.107341 Durbin-Watson 2.213805
QLR test for structural break -
Null hypothesis: no structural break
Test statistic: max F(3, 196) = 3.01985 at observation 2001:4
(10 percent critical value = 4.09)
Non-linearity test (logs) -
Null hypothesis: relationship is linear
Test statistic: LM = 1.68351
with p-value = P(Chi-square(2) > 1.68351) = 0.430953
Non-linearity test (squares) -
Null hypothesis: relationship is linear
Test statistic: LM = 7.52477
with p-value = P(Chi-square(2) > 7.52477) = 0.0232283
LM test for autocorrelation up to order 4 -
Null hypothesis: no autocorrelation
Test statistic: LMF = 1.17928
with p-value = P(F(4,195) > 1.17928) = 0.321197
CUSUM test for parameter stability -
Null hypothesis: no change in parameters
Test statistic: Harvey-Collier t(198) = 0.494432
with p-value = P(t(198) > 0.494432) = 0.621549
Chow test for structural break at observation 1984:1 -
Null hypothesis: no structural break
Asymptotic test statistic: Chi-square(3) = 13.1897
with p-value = 0.00424384
Test for ARCH of order 4 -
Null hypothesis: no ARCH effect is present
Test statistic: LM = 3.43473
with p-value = P(Chi-square(4) > 3.43473) = 0.487871:
#ANOVA
Analysis of Variance:
Sum of squares df Mean square
Regression 47782.7 2 23891.3
Residual 22799.7 199 114.571
Total 70582.3 201 351.156
R^2 = 47782.7 / 70582.3 = 0.676978
F(2, 199) = 23891.3 / 114.571 = 208.528 [p-value 1.47e-049]
#LM-test autocorrelation
Breusch-Godfrey test for autocorrelation up to order 4
OLS, using observations 1959:2-2009:3 (T = 202)
Dependent variable: uhat
coefficient std. error t-ratio p-value
------------------------------------------------------------
const 0.0640964 1.06719 0.06006 0.9522
ds_l_realgdp -0.0456010 0.217377 -0.2098 0.8341
realint_1 0.0511769 0.293136 0.1746 0.8616
uhat_1 -0.104707 0.0719948 -1.454 0.1475
uhat_2 -0.00898483 0.0742817 -0.1210 0.9039
uhat_3 0.0837332 0.0735015 1.139 0.2560
uhat_4 -0.0636242 0.0737363 -0.8629 0.3893
Unadjusted R-squared = 0.023619
Test statistic: LMF = 1.179281,
with p-value = P(F(4,195) > 1.17928) = 0.321
Alternative statistic: TR^2 = 4.771043,
with p-value = P(Chi-square(4) > 4.77104) = 0.312
Ljung-Box Q' = 5.23587,
with p-value = P(Chi-square(4) > 5.23587) = 0.264:
RESET test for specification (squares and cubes)
Test statistic: F = 5.219019,
with p-value = P(F(2,197) > 5.21902) = 0.00619
RESET test for specification (squares only)
Test statistic: F = 7.268492,
with p-value = P(F(1,198) > 7.26849) = 0.00762
RESET test for specification (cubes only)
Test statistic: F = 5.248951,
with p-value = P(F(1,198) > 5.24895) = 0.023
#heteroscedasticity White
White's test for heteroskedasticity
OLS, using observations 1959:2-2009:3 (T = 202)
Dependent variable: uhat^2
coefficient std. error t-ratio p-value
-------------------------------------------------------------
const 104.920 21.5848 4.861 2.39e-06 ***
ds_l_realgdp -29.7040 6.24983 -4.753 3.88e-06 ***
realint_1 -6.93102 6.95607 -0.9964 0.3203
sq_ds_l_realg 4.12054 0.684920 6.016 8.62e-09 ***
X2_X3 2.89685 1.38571 2.091 0.0379 **
sq_realint_1 0.662135 1.10919 0.5970 0.5512
Unadjusted R-squared = 0.165860
Test statistic: TR^2 = 33.503723,
with p-value = P(Chi-square(5) > 33.503723) = 0.000003:
#heteroscedasticity Breusch-Pagan (original)
Breusch-Pagan test for heteroskedasticity
OLS, using observations 1959:2-2009:3 (T = 202)
Dependent variable: scaled uhat^2
coefficient std. error t-ratio p-value
-------------------------------------------------------------
const 1.09468 0.192281 5.693 4.43e-08 ***
ds_l_realgdp -0.0323119 0.0386353 -0.8363 0.4040
realint_1 0.00410778 0.0512274 0.08019 0.9362
Explained sum of squares = 2.60403
Test statistic: LM = 1.302014,
with p-value = P(Chi-square(2) > 1.302014) = 0.521520
#heteroscedasticity Breusch-Pagan Koenker
Breusch-Pagan test for heteroskedasticity
OLS, using observations 1959:2-2009:3 (T = 202)
Dependent variable: scaled uhat^2 (Koenker robust variant)
coefficient std. error t-ratio p-value
------------------------------------------------------------
const 10.6870 21.7027 0.4924 0.6230
ds_l_realgdp -3.64704 4.36075 -0.8363 0.4040
realint_1 0.463643 5.78202 0.08019 0.9362
Explained sum of squares = 33174.2
Test statistic: LM = 0.709924,
with p-value = P(Chi-square(2) > 0.709924) = 0.701200
########## forecast
#forecast mean y
For 95% confidence intervals, t(199, 0.025) = 1.972
Obs ds_l_realinv prediction std. error 95% interval
2008:3 -7.134492 -17.177905 2.946312 -22.987904 - -11.367905
2008:4 -27.665860 -36.294434 3.036851 -42.282972 - -30.305896
2009:1 -70.239280 -44.018178 4.007017 -51.919841 - -36.116516
2009:2 -27.024588 -12.284842 1.427414 -15.099640 - -9.470044
2009:3 8.078897 4.483669 1.315876 1.888819 - 7.078520
Forecast evaluation statistics
Mean Error -3.7387
Mean Squared Error 218.61
Root Mean Squared Error 14.785
Mean Absolute Error 12.646
Mean Percentage Error -7.1173
Mean Absolute Percentage Error -43.867
Theil's U 0.4365
Bias proportion, UM 0.06394
Regression proportion, UR 0.13557
Disturbance proportion, UD 0.80049
#forecast actual y
For 95% confidence intervals, t(199, 0.025) = 1.972
Obs ds_l_realinv prediction std. error 95% interval
2008:3 -7.134492 -17.177905 11.101892 -39.070353 - 4.714544
2008:4 -27.665860 -36.294434 11.126262 -58.234939 - -14.353928
2009:1 -70.239280 -44.018178 11.429236 -66.556135 - -21.480222
2009:2 -27.024588 -12.284842 10.798554 -33.579120 - 9.009436
2009:3 8.078897 4.483669 10.784377 -16.782652 - 25.749991
Forecast evaluation statistics
Mean Error -3.7387
Mean Squared Error 218.61
Root Mean Squared Error 14.785
Mean Absolute Error 12.646
Mean Percentage Error -7.1173
Mean Absolute Percentage Error -43.867
Theil's U 0.4365
Bias proportion, UM 0.06394
Regression proportion, UR 0.13557
Disturbance proportion, UD 0.80049
'''
| [
"[email protected]"
]
| |
3c9255c8322cd490ba9c22476e6343bc1b6cc27b | 95fe42bbae799ef76d21af95d51807d0f6e29638 | /conf/settings.py | 5e6a56205a322686ea3cb3edd7e8ad75798da3ab | []
| no_license | PhyCosmos/Back-end | 8a8eda71a30eee3de7a58acb9829004db5664845 | 1826db6efaef5114267e8e684fc66f2316280259 | refs/heads/main | 2023-06-21T16:08:53.287483 | 2021-06-07T17:14:08 | 2021-06-07T17:14:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,394 | py | """
Django settings for conf project.
Generated by 'django-admin startproject' using Django 3.2.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# Read from the environment so the key never lives in version control.
SECRET_KEY = os.getenv("SECRET_KEY")
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
]
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "conf.urls"
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    },
]
WSGI_APPLICATION = "conf.wsgi.application"
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
# All connection credentials are taken from the environment (12-factor style).
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql",
        "NAME": os.getenv("DATABASE_NAME"),
        "USER": os.getenv("DATABASE_USER"),
        "PASSWORD": os.getenv("DATABASE_PASSWORD"),
        "HOST": os.getenv("DATABASE_HOST"),
        "PORT": os.getenv("DATABASE_PORT"),
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "Asia/Seoul"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = "/static/"
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
| [
"[email protected]"
]
| |
fb2ba7b1c482895b10125ce25e783e321ac8582f | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03543/s094616983.py | 009e1a2eac6a99783659ecb7fdeb03953450dca3 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 125 | py | N = str(input())
if N[0] == N[1] == N[2] or N[1] == N[2] == N[3]:
result = "Yes"
else:
result = "No"
print(result)
| [
"[email protected]"
]
| |
34a1c01f9276612864b317d7f6156216a0bbf4c8 | 723e52a46ef0b3b3927a842799fa8b99d7a678a6 | /generic_crud/serializers.py | dee90946baae87a30031eae803273dbc2f1bc630 | []
| no_license | MilanTagline2021/rest-api-views | aa2f17279405f839ad86f03e0eea034d82c6bbb5 | 5f1b7743d84ce754d4e69192fdb9f0526945e801 | refs/heads/master | 2023-08-21T02:27:37.917621 | 2021-10-21T10:59:38 | 2021-10-21T10:59:38 | 419,275,142 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219 | py | from rest_framework import serializers
from generic_crud.models import Student
class StudentSerializers(serializers.ModelSerializer):
    """DRF ModelSerializer exposing the basic columns of the Student model."""
    class Meta:
        # Serialize only the listed Student fields.
        model = Student
        fields = ['id','name','roll','city']
"[email protected]"
]
| |
55f888fe74738900a474126796c6ce311ab676a7 | 5d32d0e65aa3bfa677fd1b8c92569e07e9b82af1 | /Section 7 - Dictionaries/buildDictResults v2a.py | 2d965ef573e752a8df80252691d83a5278a6d5d0 | [
"CC0-1.0"
]
| permissive | pdst-lccs/lccs-python | b74ef2a02ac8ad2637f713fff5559f4e56c9827d | 95cb7ece05716521e9951d7a40de8fb20a88021f | refs/heads/master | 2023-05-28T00:46:57.313972 | 2023-05-22T10:16:43 | 2023-05-22T10:16:43 | 240,501,524 | 21 | 18 | null | null | null | null | UTF-8 | Python | false | false | 550 | py | # Event: LCCS Python Fundamental Skills Workshop
# Date: Dec 2018
# Author: Joe English, PDST
# eMail: [email protected]
# Purpose: A program to demonstrate how to build a dictionary
# Version 2a. A dictionary to store multiple results for a student
# Build one dictionary per student: a 'name' key plus one key per subject.
results = {}
name = input("Enter student name: ")
results['name'] = name
# Keep prompting for subject/mark pairs until a blank subject is entered.
while True:
    subject = input ("Enter subject name: ")
    if subject == "":
        break  # blank line ends data entry
    mark = input ("Enter percentage mark for "+subject+": ")
    results[subject] = mark  # NOTE: marks are stored as strings, not numbers
print(results)
| [
"[email protected]"
]
| |
3fdd0785bd5ebb9309a5ef62c389b7f7d145fada | 128dac2cee9a1022fafcc15a8d4903af678d88ee | /669. Trim a Binary Search Tree.py | 522cc40d1e528387b1417db51c99f82f3ecafa2c | []
| no_license | wangtaodd/LeetCodeSolutions | b998d9de408bc2e01b6bff11fcb315b453389bc8 | 364107c1a74b2fbe72cbf7076f38b9089f7017fb | refs/heads/master | 2021-09-11T19:21:10.223002 | 2018-04-11T11:18:32 | 2018-04-11T11:18:32 | 125,821,429 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,205 | py | """
Given a binary search tree and the lowest and highest boundaries as L and R, trim the tree so that all its elements lies in [L, R] (R >= L). You might need to change the root of the tree, so the result should return the new root of the trimmed binary search tree.
Example 1:
Input:
1
/ \
0 2
L = 1
R = 2
Output:
1
\
2
Example 2:
Input:
3
/ \
0 4
\
2
/
1
L = 1
R = 3
Output:
3
/
2
/
1
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def trimBST(self, root, L, R):
        """
        Trim the BST so that every remaining node value lies in [L, R],
        returning the (possibly new) root of the trimmed tree.

        :type root: TreeNode
        :type L: int
        :type R: int
        :rtype: TreeNode
        """
        if root is None:
            return None
        # A node below L has an entire left subtree below L as well, so the
        # trimmed result lives wholly in its right subtree (and symmetrically
        # for a node above R).
        if root.val < L:
            return self.trimBST(root.right, L, R)
        if root.val > R:
            return self.trimBST(root.left, L, R)
        # Node is in range: keep it and trim both subtrees in place.
        root.left = self.trimBST(root.left, L, R)
        root.right = self.trimBST(root.right, L, R)
        return root
"[email protected]"
]
| |
73f7a9ecc236f542ada437f4643ea62163cf9f9a | bb33e6be8316f35decbb2b81badf2b6dcf7df515 | /source/res/scripts/client/gui/clans/restrictions.py | 66ef025b31ed78f0d1115478c56bf6c0174e879a | []
| no_license | StranikS-Scan/WorldOfTanks-Decompiled | 999c9567de38c32c760ab72c21c00ea7bc20990c | d2fe9c195825ececc728e87a02983908b7ea9199 | refs/heads/1.18 | 2023-08-25T17:39:27.718097 | 2022-09-22T06:49:44 | 2022-09-22T06:49:44 | 148,696,315 | 103 | 39 | null | 2022-09-14T17:50:03 | 2018-09-13T20:49:11 | Python | UTF-8 | Python | false | false | 6,260 | py | # Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/clans/restrictions.py
import weakref
from constants import CLAN_MEMBER_FLAGS
from debug_utils import LOG_DEBUG, LOG_WARNING
from account_helpers import isOutOfWallet, isClanEnabled
from gui.clans.settings import error, success, CLIENT_CLAN_RESTRICTIONS as _CCR
from gui.clans.settings import isValidPattern
from helpers import dependency
from skeletons.gui.shared import IItemsCache
MAY_SEE_TREASURY = CLAN_MEMBER_FLAGS.LEADER | CLAN_MEMBER_FLAGS.VICE_LEADER | CLAN_MEMBER_FLAGS.TREASURER
class ClanMemberPermissions(object):
    """Permission queries for a clan member, driven by a BW role bit mask."""

    def __init__(self, bwRoleMask):
        self.__mask = bwRoleMask

    def __hasAny(self, flags):
        # True when at least one of the requested flag bits is set in the mask.
        return bool(self.__mask & flags)

    def canChangeSettings(self):
        return self.__hasAny(CLAN_MEMBER_FLAGS.MAY_CHANGE_SETTINGS)

    def canChangeRole(self):
        return self.__hasAny(CLAN_MEMBER_FLAGS.MAY_CHANGE_ROLE)

    def canActivateReserves(self):
        return self.__hasAny(CLAN_MEMBER_FLAGS.MAY_ACTIVATE_ORDER)

    def canEditRecruiterProfile(self):
        return self.__hasAny(CLAN_MEMBER_FLAGS.MAY_EDIT_RECRUIT_PROFILE)

    def canChangeCommander(self):
        return self.__hasAny(CLAN_MEMBER_FLAGS.MAY_CHANGE_COMMANDER)

    def canHandleClanInvites(self):
        return self.__hasAny(CLAN_MEMBER_FLAGS.MAY_HANDLE_INVITES)

    def canRemoveMembers(self):
        return self.__hasAny(CLAN_MEMBER_FLAGS.MAY_REMOVE_MEMBERS)

    def canRemoveClan(self):
        return self.__hasAny(CLAN_MEMBER_FLAGS.MAY_REMOVE_CLAN)

    def canTrade(self):
        # Treasury access is granted to any role folded into MAY_SEE_TREASURY
        # (leader, vice leader, treasurer).
        return self.__hasAny(MAY_SEE_TREASURY)

    def canExchangeMoney(self):
        return self.__hasAny(CLAN_MEMBER_FLAGS.MAY_EXCHANGE_MONEY)

    def canSendApplication(self):
        return self.isValidAccountType()

    def canRevokeApplication(self):
        # Not implemented by the backend; always denied.
        LOG_DEBUG('Application revoking is not supported')
        return False

    def canAcceptInvite(self):
        return self.isValidAccountType()

    def canDeclineInvite(self):
        return self.isValidAccountType()

    def canSeeClans(self):
        return True

    @dependency.replace_none_kwargs(itemsCache=IItemsCache)
    def isValidAccountType(self, itemsCache=None):
        # Without an items cache we fall back to a zero attribute mask.
        if itemsCache is None:
            attrs = 0
        else:
            attrs = itemsCache.items.stats.attributes
        return not (isOutOfWallet(attrs) and not isClanEnabled(attrs))
class DefaultClanMemberPermissions(ClanMemberPermissions):
    """Permissions for an account with no clan role: an all-zero role mask."""
    def __init__(self):
        super(DefaultClanMemberPermissions, self).__init__(0)
class BaseAccountClanLimits(object):
    """Deny-everything base for account-level clan action limits.

    Every check returns an error() result carrying _CCR.DEFAULT; subclasses
    override individual checks with real logic.
    """
    def canHandleClanInvites(self, clan):
        return error(_CCR.DEFAULT)
    def canSendApplication(self, clan):
        return error(_CCR.DEFAULT)
    def canRevokeApplication(self, clan):
        return error(_CCR.DEFAULT)
    def canAcceptApplication(self, clan):
        return error(_CCR.DEFAULT)
    def canDeclineApplication(self, clan):
        return error(_CCR.DEFAULT)
    def canSendInvite(self, clan):
        return error(_CCR.DEFAULT)
    def canRevokeInvite(self, clan):
        return error(_CCR.DEFAULT)
    def canAcceptInvite(self, clan):
        return error(_CCR.DEFAULT)
    def canDeclineInvite(self, clan):
        return error(_CCR.DEFAULT)
    def canSearchClans(self, pattern):
        return error(_CCR.DEFAULT)
    def canSeeTreasury(self, clan):
        return error(_CCR.DEFAULT)
class DefaultAccountClanLimits(BaseAccountClanLimits):
    # Deny-everything limits used when no account profile is available.
    pass
class AccountClanLimits(BaseAccountClanLimits):
    """Clan action limits evaluated against a live account profile.

    Holds only a weak proxy to the profile so the limits object does not
    keep the profile alive.
    """
    def __init__(self, profile):
        super(AccountClanLimits, self).__init__()
        self.__profile = weakref.proxy(profile)
    def canSeeTreasury(self, clan):
        # Treasury visibility follows the money-exchange permission.
        return self.__checkPermissions('canExchangeMoney', clan)
    def canSendApplication(self, clan):
        # The denial checks below run in priority order; the first failing
        # condition determines the error code reported to the UI.
        if self.__profile.isInClan():
            if self.__profile.getClanDbID() == clan.getDbID():
                return error(_CCR.OWN_CLAN)
            return error(_CCR.ALREADY_IN_CLAN)
        if self.__profile.hasClanInvite(clan.getDbID()):
            return error(_CCR.CLAN_INVITE_ALREADY_RECEIVED)
        if self.__profile.isClanApplicationSent(clan.getDbID()):
            return error(_CCR.CLAN_APPLICATION_ALREADY_SENT)
        if self.__profile.isInvitesLimitReached():
            return error(_CCR.SENT_INVITES_LIMIT_REACHED)
        if not clan.canAcceptsJoinRequests():
            return error(_CCR.CLAN_CONSCRIPTION_CLOSED)
        if not self.__profile.getPermissions(clan).isValidAccountType():
            return error(_CCR.FORBIDDEN_ACCOUNT_TYPE)
        if not clan.hasFreePlaces():
            return error(_CCR.CLAN_IS_FULL)
        return error(_CCR.CLAN_ENTER_COOLDOWN) if self.__profile.isInClanEnterCooldown() else self.__checkPermissions('canSendApplication', clan)
    def canRevokeApplication(self, clan):
        return self.__checkPermissions('canRevokeApplication', clan)
    def canHandleClanInvites(self, clan):
        return self.__checkPermissions('canHandleClanInvites', clan)
    def canAcceptApplication(self, clan):
        # Application handling is governed by the invite-handling permission.
        return self.__checkPermissions('canHandleClanInvites', clan)
    def canDeclineApplication(self, clan):
        return self.__checkPermissions('canHandleClanInvites', clan)
    def canSendInvite(self, clan):
        return self.__checkPermissions('canHandleClanInvites', clan)
    def canRevokeInvite(self, clan):
        return self.__checkPermissions('canHandleClanInvites', clan)
    def canAcceptInvite(self, clan):
        return self.__checkPermissions('canAcceptInvite', clan)
    def canDeclineInvite(self, clan):
        return self.__checkPermissions('canDeclineInvite', clan)
    def canSearchClans(self, pattern):
        return error(_CCR.SEARCH_PATTERN_INVALID) if not isValidPattern(pattern) else self.__checkPermissions('canSeeClans')
    def __checkPermissions(self, permName, clan=None):
        # Look up the named check on the profile's permissions object and
        # translate its boolean result into an error()/success() result.
        perms = self.__profile.getPermissions(clan)
        if not hasattr(perms, permName):
            LOG_WARNING('There is error while checking account clan permissions', clan, permName)
            return error(_CCR.DEFAULT)
        return error(_CCR.DEFAULT) if not getattr(perms, permName)() else success()
| [
"[email protected]"
]
| |
c8505627362494931737ca5085fe06aca508dd70 | 731c6170829acf912143f9e65d86f45bce9102ea | /test/test_device.py | caf9bb0a60e0bc05ed0e29dd1b6a4314f93db19f | []
| no_license | Oculus-Dei/Oculus-Dei | 9774eae334a86d8e2b4a91d6da889f546a695ee2 | cde8cec5898e11aa274c609e1d4106b6e51f7896 | refs/heads/master | 2021-01-19T06:36:32.709805 | 2016-07-31T17:30:10 | 2016-07-31T17:30:10 | 60,718,426 | 3 | 0 | null | 2016-07-12T17:01:53 | 2016-06-08T17:53:48 | Python | UTF-8 | Python | false | false | 175 | py | # encoding: utf-8
"""
Created by misaka-10032 ([email protected]).
TODO: purpose
"""
import ocd.device
def test_device():
    # Smoke test: a Device can be constructed without raising.
    d = ocd.device.Device()
    assert True
| [
"[email protected]"
]
| |
1284b29c7687c448925744f3184bd31b318594fd | e845f7f61ff76b3c0b8f4d8fd98f6192e48d542a | /djangocg/contrib/gis/db/models/fields.py | 55a33405216b7e3c6703d327ecdd148dc43ba484 | [
"BSD-3-Clause"
]
| permissive | timothyclemans/djangocg | fd150c028013cb5f53f5a3b4fdc960a07fdaaa78 | 52cf28e046523bceb5d436f8e6bf61e7d4ba6312 | refs/heads/master | 2021-01-18T13:20:13.636812 | 2012-08-31T23:38:14 | 2012-08-31T23:38:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,206 | py | from djangocg.db.models.fields import Field
from djangocg.db.models.sql.expressions import SQLEvaluator
from djangocg.utils.translation import ugettext_lazy as _
from djangocg.contrib.gis import forms
from djangocg.contrib.gis.db.models.proxy import GeometryProxy
from djangocg.contrib.gis.geometry.backend import Geometry, GeometryException
from djangocg.utils import six
# Local cache of the spatial_ref_sys table, which holds SRID data for each
# spatial database alias. This cache exists so that the database isn't queried
# for SRID info each time a distance query is constructed.
_srid_cache = {}
def get_srid_info(srid, connection):
    """
    Return the units, unit name, and spheroid WKT associated with the
    given SRID from the `spatial_ref_sys` (or equivalent) spatial database
    table for the given database connection.

    Results are memoized per database alias in the module-level
    `_srid_cache`, so the table is queried at most once per (alias, srid)
    pair. Returns (None, None, None) when the backend has no
    `spatial_ref_sys` table.
    """
    try:
        # The SpatialRefSys model for the spatial backend.
        SpatialRefSys = connection.ops.spatial_ref_sys()
    except NotImplementedError:
        # No `spatial_ref_sys` table in spatial backend (e.g., MySQL).
        return None, None, None
    # `_srid_cache` is only mutated, never rebound, so no `global` is needed.
    alias_cache = _srid_cache.setdefault(connection.alias, {})
    if srid not in alias_cache:
        # Use `SpatialRefSys` model to query for spatial reference info.
        sr = SpatialRefSys.objects.using(connection.alias).get(srid=srid)
        units, units_name = sr.units
        spheroid = SpatialRefSys.get_spheroid(sr.wkt)
        alias_cache[srid] = (units, units_name, spheroid)
    return alias_cache[srid]
class GeometryField(Field):
    "The base GIS field -- maps to the OpenGIS Specification Geometry type."
    # The OpenGIS Geometry name.
    geom_type = 'GEOMETRY'
    # Geodetic units.
    geodetic_units = ('Decimal Degree', 'degree')
    description = _("The base GIS field -- maps to the OpenGIS Specification Geometry type.")
    def __init__(self, verbose_name=None, srid=4326, spatial_index=True, dim=2,
                 geography=False, **kwargs):
        """
        The initialization function for geometry fields. Takes the following
        as keyword arguments:
        srid:
         The spatial reference system identifier, an OGC standard.
         Defaults to 4326 (WGS84).
        spatial_index:
         Indicates whether to create a spatial index.  Defaults to True.
         Set this instead of 'db_index' for geographic fields since index
         creation is different for geometry columns.
        dim:
         The number of dimensions for this geometry.  Defaults to 2.
        extent:
         Customize the extent, in a 4-tuple of WGS 84 coordinates, for the
         geometry field entry in the `USER_SDO_GEOM_METADATA` table.  Defaults
         to (-180.0, -90.0, 180.0, 90.0).
        tolerance:
         Define the tolerance, in meters, to use for the geometry field
         entry in the `USER_SDO_GEOM_METADATA` table.  Defaults to 0.05.
        """
        # Setting the index flag with the value of the `spatial_index` keyword.
        self.spatial_index = spatial_index
        # Setting the SRID and getting the units.  Unit information must be
        # easily available in the field instance for distance queries.
        self.srid = srid
        # Setting the dimension of the geometry field.
        self.dim = dim
        # Setting the verbose_name keyword argument with the positional
        # first parameter, so this works like normal fields.
        kwargs['verbose_name'] = verbose_name
        # Is this a geography rather than a geometry column?
        self.geography = geography
        # Oracle-specific private attributes for creating the entry in
        # `USER_SDO_GEOM_METADATA`
        self._extent = kwargs.pop('extent', (-180.0, -90.0, 180.0, 90.0))
        self._tolerance = kwargs.pop('tolerance', 0.05)
        super(GeometryField, self).__init__(**kwargs)
    # The following functions are used to get the units, their name, and
    # the spheroid corresponding to the SRID of the GeometryField.
    # All three are resolved lazily (on first access, per connection) via
    # `_get_srid_info`, which consults the module-level SRID cache.
    def _get_srid_info(self, connection):
        # Get attributes from `get_srid_info`.
        self._units, self._units_name, self._spheroid = get_srid_info(self.srid, connection)
    def spheroid(self, connection):
        if not hasattr(self, '_spheroid'):
            self._get_srid_info(connection)
        return self._spheroid
    def units(self, connection):
        if not hasattr(self, '_units'):
            self._get_srid_info(connection)
        return self._units
    def units_name(self, connection):
        if not hasattr(self, '_units_name'):
            self._get_srid_info(connection)
        return self._units_name
    ### Routines specific to GeometryField ###
    def geodetic(self, connection):
        """
        Returns true if this field's SRID corresponds with a coordinate
        system that uses non-projected units (e.g., latitude/longitude).
        """
        return self.units_name(connection) in self.geodetic_units
    def get_distance(self, value, lookup_type, connection):
        """
        Returns a distance number in units of the field.  For example, if
        `D(km=1)` was passed in and the units of the field were in meters,
        then 1000 would be returned.
        """
        return connection.ops.get_distance(self, value, lookup_type)
    def get_prep_value(self, value):
        """
        Spatial lookup values are either a parameter that is (or may be
        converted to) a geometry, or a sequence of lookup values that
        begins with a geometry.  This routine will setup the geometry
        value properly, and preserve any other lookup parameters before
        returning to the caller.
        """
        if isinstance(value, SQLEvaluator):
            return value
        elif isinstance(value, (tuple, list)):
            geom = value[0]
            seq_value = True
        else:
            geom = value
            seq_value = False
        # When the input is not a GEOS geometry, attempt to construct one
        # from the given string input.
        if isinstance(geom, Geometry):
            pass
        elif isinstance(geom, six.string_types) or hasattr(geom, '__geo_interface__'):
            try:
                geom = Geometry(geom)
            except GeometryException:
                raise ValueError('Could not create geometry from lookup value.')
        else:
            raise ValueError('Cannot use object with type %s for a geometry lookup parameter.' % type(geom).__name__)
        # Assigning the SRID value.
        geom.srid = self.get_srid(geom)
        if seq_value:
            lookup_val = [geom]
            lookup_val.extend(value[1:])
            return tuple(lookup_val)
        else:
            return geom
    def get_srid(self, geom):
        """
        Returns the default SRID for the given geometry, taking into account
        the SRID set for the field.  For example, if the input geometry
        has no SRID, then that of the field will be returned.
        """
        gsrid = geom.srid # SRID of given geometry.
        if gsrid is None or self.srid == -1 or (gsrid == -1 and self.srid != -1):
            return self.srid
        else:
            return gsrid
    ### Routines overloaded from Field ###
    def contribute_to_class(self, cls, name):
        """Attach this field to the model class and install the lazy proxy."""
        super(GeometryField, self).contribute_to_class(cls, name)
        # Setup for lazy-instantiated Geometry object.
        setattr(cls, self.attname, GeometryProxy(Geometry, self))
    def db_type(self, connection):
        """Return the backend-specific database column type for this field."""
        return connection.ops.geo_db_type(self)
    def formfield(self, **kwargs):
        """Return a forms.GeometryField configured from this model field."""
        defaults = {'form_class' : forms.GeometryField,
                    'null' : self.null,
                    'geom_type' : self.geom_type,
                    'srid' : self.srid,
                    }
        defaults.update(kwargs)
        return super(GeometryField, self).formfield(**defaults)
    def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
        """
        Prepare for the database lookup, and return any spatial parameters
        necessary for the query.  This includes wrapping any geometry
        parameters with a backend-specific adapter and formatting any distance
        parameters into the correct units for the coordinate system of the
        field.
        """
        if lookup_type in connection.ops.gis_terms:
            # special case for isnull lookup
            if lookup_type == 'isnull':
                return []
            # Populating the parameters list, and wrapping the Geometry
            # with the Adapter of the spatial backend.
            if isinstance(value, (tuple, list)):
                params = [connection.ops.Adapter(value[0])]
                if lookup_type in connection.ops.distance_functions:
                    # Getting the distance parameter in the units of the field.
                    params += self.get_distance(value[1:], lookup_type, connection)
                elif lookup_type in connection.ops.truncate_params:
                    # Lookup is one where SQL parameters aren't needed from the
                    # given lookup value.
                    pass
                else:
                    params += value[1:]
            elif isinstance(value, SQLEvaluator):
                params = []
            else:
                params = [connection.ops.Adapter(value)]
            return params
        else:
            raise ValueError('%s is not a valid spatial lookup for %s.' %
                             (lookup_type, self.__class__.__name__))
    def get_prep_lookup(self, lookup_type, value):
        # `isnull` takes a boolean; everything else goes through geometry prep.
        if lookup_type == 'isnull':
            return bool(value)
        else:
            return self.get_prep_value(value)
    def get_db_prep_save(self, value, connection):
        "Prepares the value for saving in the database."
        if value is None:
            return None
        else:
            return connection.ops.Adapter(self.get_prep_value(value))
    def get_placeholder(self, value, connection):
        """
        Returns the placeholder for the geometry column for the
        given value.
        """
        return connection.ops.get_geom_placeholder(self, value)
# The OpenGIS Geometry Type Fields
# Each subclass only narrows `geom_type` (the OGC type name used for the
# database column) and the human-readable `description`.
class PointField(GeometryField):
    geom_type = 'POINT'
    description = _("Point")
class LineStringField(GeometryField):
    geom_type = 'LINESTRING'
    description = _("Line string")
class PolygonField(GeometryField):
    geom_type = 'POLYGON'
    description = _("Polygon")
class MultiPointField(GeometryField):
    geom_type = 'MULTIPOINT'
    description = _("Multi-point")
class MultiLineStringField(GeometryField):
    geom_type = 'MULTILINESTRING'
    description = _("Multi-line string")
class MultiPolygonField(GeometryField):
    geom_type = 'MULTIPOLYGON'
    description = _("Multi polygon")
class GeometryCollectionField(GeometryField):
    geom_type = 'GEOMETRYCOLLECTION'
    description = _("Geometry collection")
| [
"[email protected]"
]
| |
61cf66905bc0a97d62d0b3be655a9527446f7069 | ad3e8de09b8a314f4a6d9a59c54a421805776dc5 | /chapter2_notebook.py | a82b3519e837bab532e53b551c2ce2e4337e76e6 | []
| no_license | georstef/Python_ObjectOrientedProgramming | 6401a96195238fcc1624c519984e37e0894f92a7 | 6c00f30b31e8754dd18aa45bb42033c4d15b46ae | refs/heads/master | 2020-12-24T17:25:58.577160 | 2015-11-08T19:01:50 | 2015-11-08T19:01:50 | 15,637,218 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,332 | py | import datetime
# global (singleton)
last_id = 0
class Note:
def __init__(self, memo, tags=''):
self.memo = memo
self.tags = tags
self.creation_date = datetime.date.today()
global last_id
last_id += 1
self.id = last_id
def match(self, filter):
'''
returns true if filter exists in memo or tags
'''
return (filter in self.memo) or (filter in self.tags)
class Notebook:
    """An ordered collection of Notes with lookup, edit, and search."""

    def __init__(self):
        self.notes = []

    def new_note(self, memo, tags=''):
        """Create a new Note and append it to the notebook."""
        self.notes.append(Note(memo, tags))

    def find_note(self, note_id):
        """Return the note whose id matches note_id, or None.

        Ids are compared as strings so callers may pass either an int or
        the raw string a user typed.
        """
        for note in self.notes:
            if str(note.id) == str(note_id):
                return note
        return None

    def modify_memo(self, note_id, memo):
        """Replace the memo of the given note; return True on success.

        Uses an explicit None check instead of catching AttributeError,
        which previously could also swallow unrelated attribute errors.
        """
        note = self.find_note(note_id)
        if note is None:
            print('Note not found.')
            return False
        note.memo = memo
        return True

    def modify_tags(self, note_id, tags):
        """Replace the tags of the given note; return True on success."""
        note = self.find_note(note_id)
        if note is None:
            print('Note not found.')
            return False
        note.tags = tags
        return True

    def search(self, filter):
        """Return all notes whose memo or tags contain *filter*."""
        return [note for note in self.notes if note.match(filter)]
| [
"[email protected]"
]
| |
2d5465cffaec39f1e38615d0dc066898635e8dfc | 7775a073201f568022bbb4ed3d04cb1639ae5e65 | /AtguiguShop/apps/trade/migrations/0001_initial.py | e2f1b9b40d2d911ffa6afe615e9928b8c1f80b8c | []
| no_license | a289237642/rest-api | 25db777fa1ca85fee77f86b8ae92d3656ce2ef40 | fd2c70245cb12212dcd2fd8899f789c3e41d1af2 | refs/heads/master | 2020-04-24T14:12:13.312539 | 2019-03-14T08:41:27 | 2019-03-14T08:41:27 | 172,012,413 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,218 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-06-18 11:28
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the trade app: OrderGoods, OrderInfo, ShopingCart.

    Auto-generated by `makemigrations`; the spellings `TRADE_SUCESS`,
    `TRADE_FINSHED` and `ShopingCart` mirror the model definitions and must
    not be "fixed" here independently of the models.
    """
    initial = True
    dependencies = [
        ('goods', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='OrderGoods',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('goods_num', models.IntegerField(default=0, verbose_name='商品数量')),
                ('add_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='添加时间')),
            ],
            options={
                'verbose_name_plural': '订单商品详情',
                'verbose_name': '订单商品详情',
            },
        ),
        migrations.CreateModel(
            name='OrderInfo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('order_sn', models.CharField(blank=True, max_length=30, null=True, unique=True, verbose_name='订单号')),
                ('trade_no', models.CharField(blank=True, max_length=100, null=True, unique=True, verbose_name='交易号')),
                ('pay_status', models.CharField(blank=True, choices=[('PAYING', '待支付'), ('TRADE_SUCESS', '支付成功'), ('TRADE_CLOSE', '支付关闭'), ('TRADE_FAIL', '支付失败'), ('TRADE_FINSHED', '交易结束')], default='PAYING', max_length=30, null=True, verbose_name='订单状态')),
                ('post_script', models.CharField(max_length=200, verbose_name='订单留言')),
                ('order_mount', models.FloatField(default=0.0, verbose_name='订单金额')),
                ('pay_time', models.DateTimeField(blank=True, null=True, verbose_name='支付时间')),
                ('signer_name', models.CharField(max_length=30, verbose_name='签收人')),
                ('signer_mobile', models.CharField(max_length=11, verbose_name='联系电话')),
                ('address', models.CharField(max_length=200, verbose_name='收货地址')),
                ('add_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='添加时间')),
            ],
            options={
                'verbose_name_plural': '订单',
                'verbose_name': '订单',
            },
        ),
        migrations.CreateModel(
            name='ShopingCart',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nums', models.IntegerField(default=0, verbose_name='商品数量')),
                ('add_time', models.DateTimeField(default=datetime.datetime.now, verbose_name='添加时间')),
                ('goods', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.Goods', verbose_name='商品')),
            ],
            options={
                'verbose_name_plural': '购物车',
                'verbose_name': '购物车',
            },
        ),
    ]
| [
"[email protected]"
]
| |
4c9ab3dc477a8746e0cb8fbe150a54692002b6d8 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5686275109552128_1/Python/SiaKateb/B.py | 6a3e7e9c2dee218a9b3e27a1dcc9f988f0fd6609 | []
| no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 286 | py | __author__ = 'siavash'
T = input()
for t in range(T):
D = input()
P = list(map(int, raw_input().split(" ")))
ans = max(P)
Z = 2
while Z < ans:
ans = min(ans, sum([(x - 1) // Z for x in P]) + Z)
Z += 1
print 'Case #{0}: {1}'.format(t + 1, ans) | [
"[email protected]"
]
| |
d2438001e91dfcd299d78bf2947f8b3fb2d7bbb2 | 340886f11b67cebed3b4ad62d1376790243c7ee9 | /manage.py | cb6636a15c3ca1678733c9d5b09c91769697c5eb | [
"CC-BY-3.0",
"MIT"
]
| permissive | heitorchang/students | f9f728bcc7af8d3582814f38000f269bb92a960d | ba5d6ca721d85aacb5f1563fff6c7d1c4b021d54 | refs/heads/master | 2020-08-08T01:52:01.031227 | 2020-02-03T17:44:25 | 2020-02-03T17:44:25 | 213,666,043 | 0 | 0 | NOASSERTION | 2020-06-05T23:35:40 | 2019-10-08T14:30:04 | JavaScript | UTF-8 | Python | false | false | 649 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Entry point for Django's administrative command-line utility."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'students.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Re-raise with a friendlier hint while chaining the original cause.
        message = (
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        )
        raise ImportError(message) from exc
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| [
"[email protected]"
]
| |
a0c388980789e8279aead795a5456b5d20c71d8c | 3bae1ed6460064f997264091aca0f37ac31c1a77 | /apps/cloud_api_generator/generatedServer/tasklets/disk/getXMLSchema/disk_getXMLSchema.py | 7fb7c7b36663e1883b9bbf6c12744ce01caf7615 | []
| no_license | racktivity/ext-pylabs-core | 04d96b80ac1942754257d59e91460c3a141f0a32 | 53d349fa6bee0ccead29afd6676979b44c109a61 | refs/heads/master | 2021-01-22T10:33:18.523799 | 2017-06-08T09:09:28 | 2017-06-08T09:09:28 | 54,314,984 | 0 | 0 | null | 2017-06-08T09:09:29 | 2016-03-20T11:55:01 | Python | UTF-8 | Python | false | false | 180 | py | __author__ = 'aserver'
__tags__ = 'disk', 'getXMLSchema'
__priority__= 3
def main(q, i, params, tags):
    """Tasklet entry point: the XML schema for 'disk' is empty."""
    params.update(result='')
def match(q, i, params, tags):
    # This tasklet applies to every request; no filtering is performed.
    return True
| [
"devnull@localhost"
]
| devnull@localhost |
adbdceca5ed229f3715c5b02449a90573cf11f36 | 673e829dda9583c8dd2ac8d958ba1dc304bffeaf | /data/multilingual/Latn.MCD/Serif_12/pdf_to_json_test_Latn.MCD_Serif_12.py | e2a6c3fafba9bcc5590f3fd798d4299201704f19 | [
"BSD-3-Clause"
]
| permissive | antoinecarme/pdf_to_json_tests | 58bab9f6ba263531e69f793233ddc4d33b783b7e | d57a024fde862e698d916a1178f285883d7a3b2f | refs/heads/master | 2021-01-26T08:41:47.327804 | 2020-02-27T15:54:48 | 2020-02-27T15:54:48 | 243,359,934 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 305 | py | import pdf_to_json as p2j
import json
url = "file:data/multilingual/Latn.MCD/Serif_12/udhr_Latn.MCD_Serif_12.pdf"

# Convert the PDF, keeping only hashes of embedded images, then pretty-print
# the resulting dictionary as UTF-8 JSON with sorted keys.
converter = p2j.pdf_to_json.pdf_to_json_converter()
converter.mImageHashOnly = True
result = converter.convert(url)
print(json.dumps(result, indent=4, ensure_ascii=False, sort_keys=True))
| [
"[email protected]"
]
| |
0d5726f152f9f29dc6f7a99fdac51cb48fd8542c | e6dab5aa1754ff13755a1f74a28a201681ab7e1c | /.parts/lib/django-1.5/django/core/files/locks.py | cbaa7b25d79ec5bfb418ecc45a01915b02922066 | []
| no_license | ronkagan/Euler_1 | 67679203a9510147320f7c6513eefd391630703e | 022633cc298475c4f3fd0c6e2bde4f4728713995 | refs/heads/master | 2021-01-06T20:45:52.901025 | 2014-09-06T22:34:16 | 2014-09-06T22:34:16 | 23,744,842 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 92 | py | /home/action/.parts/packages/googleappengine/1.9.4/lib/django-1.5/django/core/files/locks.py | [
"[email protected]"
]
| |
9bd9447b16887d8c853d33373cfe76d2145cc3ee | 1ba2f5282084f967aed5df2f614b79366ea8070c | /sort/bubble_sort/bubble_sort.py | bb27adb5aa11000f9146b1eae2954c22a32fddb3 | []
def bubble_sort_func(lists):
    """Sort *lists* in place (ascending) with bubble sort and return it.

    Handles the empty and single-element cases trivially (the outer range is
    empty).  An early-exit flag stops as soon as a full pass makes no swap,
    so an already-sorted list costs only one pass.
    """
    # At most length - 1 passes are needed.
    for i in range(1, len(lists)):
        swapped = False
        # After pass i the last i elements are in final position.
        for j in range(0, len(lists) - i):
            # Swap adjacent elements that are out of order.
            if lists[j] > lists[j + 1]:
                lists[j], lists[j + 1] = lists[j + 1], lists[j]
                swapped = True
        if not swapped:
            # No swaps in a whole pass: the list is sorted; stop early.
            break
    return lists
# User-defined length of the list.  int() is required: on Python 3, input()
# returns a string (range(0, SIZE) would raise TypeError), and on Python 2
# it replaces the unsafe eval behaviour of bare input() with a plain parse.
SIZE = int(input("Enter the number of elements "))
# Creating an empty list
LISTS = []
for k in range(0, SIZE):
    # Parse each entry so the comparisons inside the sort are numeric.
    element = int(input('Enter the number: '))
    # appending each user defined value into the list
    LISTS.append(element)
print("Sorted array is: ", bubble_sort_func(LISTS))
| [
"[email protected]"
]
| |
53d3bdc39fa2dc67b61a5ac0caf041498f56ccdb | 6206620e1a20c2d8847e63be974f90408c0cfa3c | /Advanced_python/polymorphism/method_overriding.py | 1bedabce0838dc15938ad1f2989ed30fdd8f7934 | []
class Parent:
    """Demo base class for the method-overriding example."""
    def properties(self):
        # Inherited unchanged by Child (not overridden there).
        print("10 lakh rs,2 Car")
    def mary(self):
        # Overridden in Child; the name is likely a typo for 'marry'.
        print("with raju")
class Child(Parent):
    """Overrides mary(); inherits properties() unchanged from Parent."""
    def mary(self):
        print("With Gopi")
# Dispatches to Child's override (prints "With Gopi"), not Parent's version.
c=Child()
c.mary()
"[email protected]"
]
| |
b8dbb70f4b3a26af37196a9a463f6061d6e2e864 | bfa4447ec5e92017aec95ee6d349d91b5733afca | /test/test_assign/files/otter-correct/student/tests/q1.py | bf7a73d9e1cf2f82d664f62d60f74fae56746002 | [
"BSD-3-Clause"
]
| permissive | ucbds-infra/otter-grader | 4020c14614fc62a93ce564c6b8ad88269defac97 | e6ece6b53ef2291f2724ff9965f09d910ad10e7e | refs/heads/master | 2023-08-23T22:46:15.793814 | 2023-08-18T21:53:52 | 2023-08-18T21:53:52 | 208,363,438 | 112 | 62 | BSD-3-Clause | 2023-09-12T00:01:41 | 2019-09-13T23:40:57 | Python | UTF-8 | Python | false | false | 526 | py | OK_FORMAT = True
test = { 'name': 'q1',
'points': 3.75,
'suites': [ { 'cases': [ {'code': '>>> isinstance(x, int)\nTrue', 'hidden': False, 'locked': False},
{'code': '>>> None\n', 'hidden': False, 'locked': False},
{'code': '>>> 0 < x < 100\nTrue', 'hidden': False, 'locked': False}],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'}]}
| [
"[email protected]"
]
| |
ed2e5a15b0308028be8c4dcf2f25b68e7a37e18a | 771b8d8b60783ed8181de344e418691bd2cf882d | /nb_hook/migrations/0001_initial.py | 18adea7e3bab930b9cade0de8bcf78deae8ae901 | []
| no_license | mozilla/mozilla_webhook_sync | 3c99eaa43e36e21b5a55e95c9e75c613fbef6aaa | 8e955e1f6416bbb4e04246f0bbc67acab6e73af3 | refs/heads/master | 2023-09-03T22:53:22.737607 | 2017-11-06T11:22:40 | 2017-11-06T11:22:40 | 66,299,225 | 0 | 0 | null | 2016-10-18T21:54:49 | 2016-08-22T18:48:53 | Python | UTF-8 | Python | false | false | 679 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-19 22:45
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the TestHook table with an
    auto id, a created_at timestamp defaulting to now, and optional text."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='TestHook',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now)),
                ('content', models.TextField(blank=True, null=True)),
            ],
        ),
    ]
| [
"[email protected]"
]
| |
7a0b281c0707828a0e8c7afce58a3cbce2546ca8 | faf793376991092615975a559c6bed4e093acc44 | /SECTION 25 first step with Django/180 to install Django.py | 0ce11cffde7701e2493656f21e382c11f53f4e95 | []
| no_license | jdiaz-dev/practicing-python | 2385f2541759cfc9ed221b62030c28e8cf6ddde4 | 139b7dd4332e9ab3dd73abee0308cff41f4657fe | refs/heads/master | 2023-04-05T06:13:53.590830 | 2023-03-19T16:06:00 | 2023-03-19T16:06:00 | 320,443,146 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 771 | py | """
--there are many ways to install Django
--first: podemos montar un entorno virual de Python, que permite saltar de una version a otra de python y saltar de otra version a otra de Django, esto esta bien; pero la instalación se complicaría, y no es necesario para una entorno local de desarrollo
--la instalación de Django se hace directamente en el sistema
command to instal Django:
py -m pip install Django==3.1.4
to check version of Django
py -m django --version
to check disponible commands
py manage.py help
to migrate the project
py manage.py migrate
--it generate a database sqlite with functionalities by default of django
to run server
py manage.py runserver
"""
| [
"[email protected]"
]
| |
6e1b092a7e6f85d6efca87805706d92718c98325 | 77311ad9622a7d8b88707d7cee3f44de7c8860cb | /res/scripts/client/gui/scaleform/daapi/view/battle/resource_points.py | 58a0117e13ee33a8d9d218c7496c89e9e8398f20 | []
| no_license | webiumsk/WOT-0.9.14-CT | 9b193191505a4560df4e872e022eebf59308057e | cfe0b03e511d02c36ce185f308eb48f13ecc05ca | refs/heads/master | 2021-01-10T02:14:10.830715 | 2016-02-14T11:59:59 | 2016-02-14T11:59:59 | 51,606,676 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 10,567 | py | # 2016.02.14 12:38:44 Střední Evropa (běžný čas)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/battle/resource_points.py
import weakref
import BigWorld
from CTFManager import g_ctfManager
from constants import RESOURCE_POINT_STATE
from gui.battle_control import g_sessionProvider
from gui.shared.utils.plugins import IPlugin
from gui.Scaleform.locale.FALLOUT import FALLOUT
from helpers import i18n, time_utils
from account_helpers.settings_core.SettingsCore import g_settingsCore
_CALLBACK_INDICATOR_NAME = 'battle.onLoadResourceIndicator'
_CALLBACK_PANEL_NAME = 'battle.onLoadResourcePointsPanel'
class _POINTS_STATE:
    """String state identifiers understood by the Flash resource-points UI."""
    FREEZE = 'freeze'
    COOLDOWN = 'cooldown'
    READY = 'ready'
    OWN_MINING = 'ownMining'
    ENEMY_MINING = 'enemyMining'
    OWN_MINING_FROZEN = 'ownMiningFrozen'
    ENEMY_MINING_FROZEN = 'enemyMiningFrozen'
    CONFLICT = 'conflict'
# Maps "is the capturing team an ally?" -> panel state id for captured points.
_CAPTURE_STATE_BY_TEAMS = {True: _POINTS_STATE.OWN_MINING,
 False: _POINTS_STATE.ENEMY_MINING}
# Same mapping for captured points whose capture is currently locked.
_CAPTURE_FROZEN_STATE_BY_TEAMS = {True: _POINTS_STATE.OWN_MINING_FROZEN,
 False: _POINTS_STATE.ENEMY_MINING_FROZEN}
class _ResourceIndicator(object):
    """Bridge to the Flash capture indicator shown while mining a point."""
    def __init__(self, plugin):
        # weakref proxies avoid ref-cycles with the owning plugin / Flash movie
        self.__plugin = weakref.proxy(plugin)
        self.__flashObject = weakref.proxy(plugin.parentObj.movie.resourceIndicator.instance)
    def init(self):
        """Localize the indicator texts and subscribe to settings changes."""
        disabledStr = i18n.makeString(FALLOUT.RESOURCEPOINTS_DISABLED_DESCR)
        miningStr = i18n.makeString(FALLOUT.RESOURCEPOINTS_MINING_DESCR)
        cooldownStr = i18n.makeString(FALLOUT.RESOURCEPOINTS_COOLDOWN_DESCR)
        self.flashObject.as_setTexts(cooldownStr, miningStr, disabledStr)
        g_settingsCore.onSettingsChanged += self.__onSettingsChanged
    def destroy(self):
        """Unsubscribe and drop proxies; the object must not be used after."""
        g_settingsCore.onSettingsChanged -= self.__onSettingsChanged
        self.__plugin = None
        self.__flashObject = None
        return
    def __onSettingsChanged(self, diff = None):
        # any settings change triggers a Flash-side refresh
        self.flashObject.as_onSettingsChanged()
    @property
    def flashObject(self):
        return self.__flashObject
    def show(self, pointIdx):
        # attach the indicator to the given resource point
        self.flashObject.as_show(pointIdx)
    def setFreeze(self, isFrozen, timeStr):
        # toggle the 'locked' overlay, with the remaining-time text
        self.flashObject.as_setFreeze(isFrozen, timeStr)
    def hide(self):
        self.flashObject.as_hide()
class _ResourcePointsPanel(object):
    """Bridge to the Flash resource-points panel; pushes per-point progress,
    state id and cooldown text built from g_ctfManager data."""
    def __init__(self, plugin):
        # weakref proxies avoid ref-cycles with the owning plugin / Flash movie
        self.__plugin = weakref.proxy(plugin)
        self.__flashObject = weakref.proxy(plugin.parentObj.movie.resourcePointsPanel.instance)
    def init(self):
        """Subscribe to settings changes and push the initial point list."""
        g_settingsCore.onSettingsChanged += self.__onSettingsChanged
        self.flashObject.as_init(self._makeData())
    def update(self):
        """Push a fresh snapshot of all resource points to Flash."""
        self.flashObject.as_updateData(self._makeData())
    def destroy(self):
        """Unsubscribe and drop proxies; the panel must not be used after."""
        g_settingsCore.onSettingsChanged -= self.__onSettingsChanged
        self.__plugin = None
        self.__flashObject = None
        return
    @property
    def flashObject(self):
        return self.__flashObject
    def _makeData(self):
        """Build the list of per-point dicts consumed by the Flash panel."""
        result = []
        arenaDP = g_sessionProvider.getArenaDP()
        for pointID, point in g_ctfManager.getResourcePoints():
            pointState = point['state']
            timeLeft = ''
            amount = point['amount']
            progress = float(amount) / point['totalAmount'] * 100
            if pointState == RESOURCE_POINT_STATE.FREE:
                state = _POINTS_STATE.READY
            elif pointState == RESOURCE_POINT_STATE.COOLDOWN:
                # a cooldown is ticking: ask the plugin to keep polling updates
                self.__plugin.setUpdateRequired(True)
                state = _POINTS_STATE.COOLDOWN
                timeDelta = max(0, point['cooldownTime'] - BigWorld.serverTime())
                timeLeft = time_utils.getTimeLeftFormat(timeDelta)
            elif pointState == RESOURCE_POINT_STATE.CAPTURED:
                # ally/enemy flavour of the 'mining' state
                state = _CAPTURE_STATE_BY_TEAMS[arenaDP.isAllyTeam(point['team'])]
            elif pointState == RESOURCE_POINT_STATE.CAPTURED_LOCKED:
                state = _CAPTURE_FROZEN_STATE_BY_TEAMS[arenaDP.isAllyTeam(point['team'])]
            elif pointState == RESOURCE_POINT_STATE.BLOCKED:
                # contested by both teams at once
                state = _POINTS_STATE.CONFLICT
            else:
                state = _POINTS_STATE.FREEZE
            result.append(self._makeItem(progress, state, amount, timeLeft))
        return result
    def _makeItem(self, progress, state, pointsCount, timeLeftStr):
        # shape of one row as expected by the Flash side
        return {'progress': progress,
         'state': state,
         'pointsCount': pointsCount,
         'timeLeftStr': timeLeftStr}
    def __onSettingsChanged(self, diff = None):
        # only the colour-blind toggle affects this panel's rendering
        if 'isColorBlind' in diff:
            self.flashObject.as_onSettingsChanged()
class ResourcePointsPlugin(IPlugin):
    """Battle-UI plugin wiring CTF resource-point events to the Flash panel
    and to the per-point capture indicator."""
    def __init__(self, parentObj):
        super(ResourcePointsPlugin, self).__init__(parentObj)
        # created lazily once Flash reports the components are loaded
        self.__resourceIndicator = None
        self.__resourcePointsPanel = None
        # BigWorld.callback handles for the 1 s polling loops (None = idle)
        self.__updateCallbackID = None
        self.__freezeCallbackID = None
        self.__updateRequired = False
        return
    def init(self):
        """Register Flash-load callbacks and subscribe to CTF events."""
        super(ResourcePointsPlugin, self).init()
        self._parentObj.addExternalCallback(_CALLBACK_INDICATOR_NAME, self.__onLoadResourceIndicator)
        self._parentObj.addExternalCallback(_CALLBACK_PANEL_NAME, self.__onLoadResourcePointsPanel)
        g_ctfManager.onResPointIsFree += self.__processUpdate
        g_ctfManager.onResPointCooldown += self.__processUpdate
        g_ctfManager.onResPointCaptured += self.__processUpdate
        g_ctfManager.onResPointCapturedLocked += self.__processUpdate
        g_ctfManager.onResPointBlocked += self.__processUpdate
        g_ctfManager.onResPointAmountChanged += self.__processUpdate
        g_ctfManager.onOwnVehicleInsideResPoint += self.__onOwnVehicleInside
        g_ctfManager.onOwnVehicleLockedForResPoint += self.__onOwnVehicleLocked
    def fini(self):
        """Mirror of init(): unregister callbacks and unsubscribe."""
        self._parentObj.removeExternalCallback(_CALLBACK_INDICATOR_NAME)
        self._parentObj.removeExternalCallback(_CALLBACK_PANEL_NAME)
        g_ctfManager.onResPointIsFree -= self.__processUpdate
        g_ctfManager.onResPointCooldown -= self.__processUpdate
        g_ctfManager.onResPointCaptured -= self.__processUpdate
        g_ctfManager.onResPointCapturedLocked -= self.__processUpdate
        g_ctfManager.onResPointBlocked -= self.__processUpdate
        g_ctfManager.onResPointAmountChanged -= self.__processUpdate
        g_ctfManager.onOwnVehicleInsideResPoint -= self.__onOwnVehicleInside
        g_ctfManager.onOwnVehicleLockedForResPoint -= self.__onOwnVehicleLocked
        super(ResourcePointsPlugin, self).fini()
    def start(self):
        """Ask the Flash movie to instantiate both UI components."""
        super(ResourcePointsPlugin, self).start()
        self._parentObj.movie.falloutItems.as_loadResourceIndicator()
        self._parentObj.movie.falloutItems.as_loadResourcePointsPanel()
    def stop(self):
        """Cancel timers and tear down both UI bridges."""
        self.__cancelUpdateCallback()
        self.__cancelFreezeCallback()
        if self.__resourceIndicator is not None:
            self.__resourceIndicator.destroy()
            self.__resourceIndicator = None
        if self.__resourcePointsPanel is not None:
            self.__resourcePointsPanel.destroy()
            self.__resourcePointsPanel = None
        super(ResourcePointsPlugin, self).stop()
        return
    def setUpdateRequired(self, updateRequired):
        # set by the panel while any point is on cooldown, to keep polling
        self.__updateRequired = updateRequired
    def __processUpdate(self, *args):
        # coalesce bursts of CTF events: while a refresh is pending, just flag
        if self.__updateCallbackID is not None:
            self.__updateRequired = True
        else:
            self.__update()
        return
    def __initUpdateCallback(self):
        # re-arm the 1 s refresh only while someone still needs updates
        if self.__updateRequired:
            self.__updateRequired = False
            self.__updateCallbackID = BigWorld.callback(1.0, self.__update)
        else:
            self.__updateCallbackID = None
        return
    def __cancelUpdateCallback(self):
        if self.__updateCallbackID is not None:
            BigWorld.cancelCallback(self.__updateCallbackID)
            self.__updateCallbackID = None
        return
    def __update(self):
        # refresh the panel (if loaded), then decide whether to keep polling
        if self.__resourcePointsPanel is not None:
            self.__resourcePointsPanel.update()
        self.__initUpdateCallback()
        return
    def __initFreezeCallback(self):
        # 1 s tick that refreshes the lock countdown on the indicator
        self.__freezeCallbackID = BigWorld.callback(1.0, self.__updateFreeze)
    def __cancelFreezeCallback(self):
        if self.__freezeCallbackID is not None:
            BigWorld.cancelCallback(self.__freezeCallbackID)
            self.__freezeCallbackID = None
        return
    def __updateFreeze(self):
        """Show the remaining lock time on the indicator and re-arm the tick."""
        lock = g_ctfManager.getResourcePointLock()
        if lock is not None:
            timeDelta = max(0, g_ctfManager.getResourcePointLock() - BigWorld.serverTime())
        else:
            timeDelta = 0
        timeStr = time_utils.getTimeLeftFormat(timeDelta)
        if self.__resourceIndicator is not None:
            self.__resourceIndicator.setFreeze(True, timeStr)
        self.__initFreezeCallback()
        return
    def __onLoadResourceIndicator(self, _):
        # Flash reported the indicator component is ready
        self.__resourceIndicator = _ResourceIndicator(self)
        self.__resourceIndicator.init()
        for pointID, point in g_ctfManager.getResourcePoints():
            if point['isPlayerCapture']:
                # re-show the point the player was already capturing
                self.__resourceIndicator.show(pointID)
                break
    def __onLoadResourcePointsPanel(self, _):
        # Flash reported the panel component is ready
        self.__resourcePointsPanel = _ResourcePointsPanel(self)
        self.__resourcePointsPanel.init()
        self.__processUpdate()
    def __onOwnVehicleInside(self, pointID):
        """Own vehicle entered (pointID set) or left (None) a resource point."""
        self.__cancelFreezeCallback()
        if pointID is None:
            if self.__resourceIndicator is not None:
                self.__resourceIndicator.setFreeze(False, '')
                self.__resourceIndicator.hide()
        else:
            if self.__resourcePointsPanel is not None:
                self.__resourcePointsPanel.update()
            if self.__resourceIndicator is not None:
                self.__resourceIndicator.show(pointID)
            if g_ctfManager.getResourcePointLock() is not None:
                self.__updateFreeze()
        return
    def __onOwnVehicleLocked(self, unlockTime):
        """Own vehicle became locked out (unlockTime set) or unlocked (None)."""
        self.__cancelFreezeCallback()
        if unlockTime is not None:
            self.__updateFreeze()
        elif self.__resourceIndicator is not None:
            self.__resourceIndicator.setFreeze(False, '')
        return
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\gui\scaleform\daapi\view\battle\resource_points.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.02.14 12:38:44 Střední Evropa (běžný čas)
| [
"[email protected]"
]
| |
4ce3a8efd95d236d73a447758148f57878a4bfdb | ebc7c4d82eed374060bf6bbc7df76930412ba26a | /plait/api.py | 98476b79c0cb686fc7f39bb0936a197961f50a2a | [
"MIT"
]
| permissive | dustinlacewell/plait | 6878b2124069373fd2cafdcf5ba1ca628bda64ec | b57bc353298401af41b286fbefa6120b236be102 | refs/heads/master | 2021-01-10T12:23:34.931115 | 2015-12-04T21:55:30 | 2015-12-04T21:55:30 | 45,318,025 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 980 | py | import inspect
from twisted.internet import reactor
from twisted.internet.threads import blockingCallFromThread as blockingCFT
from plait.task import thread_locals
# Raised by run(..., fail=True) when the remote command produced stderr
# output; carries .result (the CFTResult) and .error (caller file, lineno).
class RemoteCallError(Exception): pass
def run(cmd, fail=False):
    """
    Execute a command on the remote host.

    Blocks by calling into the main reactor thread. The result is a CFTResult
    object which will contain the stdout of the operation. It will also have
    a stderr attribute which if not empty indicates the remote command failed.

    :param cmd: shell command line to execute on the remote host
    :param fail: when True, raise RemoteCallError if the command failed
    """
    # per-task worker bound to the current thread by the task machinery
    worker = thread_locals.worker
    # block until result is available or main thread dies
    result = blockingCFT(reactor, worker.execFromThread, cmd)
    if result.failed and fail:
        exception = RemoteCallError(result.stderr)
        exception.result = result
        # Record the caller's (filename, lineno) so the failure can be
        # reported against the task source line that issued the command.
        stack = inspect.stack()[1]
        exception.error = stack[1], stack[2]
        raise exception
    return result
def sudo(cmd, *args, **kwargs):
    """
    Execute a command on the remote host under sudo.

    Extra positional/keyword arguments (notably ``fail=True``) are forwarded
    to :func:`run`; previously they were accepted but silently dropped, so
    ``sudo(cmd, fail=True)`` never raised on failure.
    """
    return run("sudo " + cmd, *args, **kwargs)
| [
"[email protected]"
]
| |
6f7783056afc32c59c56147ec14e1af860df6a49 | b48e2c61292ad9c6621bee95cd70265911d6d636 | /tests/test_marketstack/test_import.py | 8e369050c5f3d39e3e813414f40f454211bfeaca | [
"BSD-3-Clause"
]
| permissive | kvh/snapflow-stocks | 9f544769b14887338e35df7c6592a035c7bddd6c | 2531749d86b9ca8a47b3443605d6a9f69c219a03 | refs/heads/master | 2023-06-04T15:47:59.157490 | 2021-06-18T04:15:22 | 2021-06-18T04:15:22 | 320,654,176 | 1 | 0 | BSD-3-Clause | 2020-12-15T20:08:19 | 2020-12-11T18:33:51 | Python | UTF-8 | Python | false | false | 1,807 | py | import os
from snapflow import graph, produce
def ensure_api_key() -> str:
    """Return the Marketstack access key, preferring the
    MARKETSTACK_ACCESS_KEY environment variable and falling back to an
    interactive prompt when it is unset."""
    key = os.environ.get("MARKETSTACK_ACCESS_KEY")
    if key is None:
        key = input("Enter Marketstack access key: ")
    return key
def test_eod():
    """Integration test: importing end-of-day prices for AAPL via the
    Marketstack function yields at least 100 records (needs network + key)."""
    from snapflow_stocks import module as stocks
    api_key = ensure_api_key()
    g = graph()
    # Initial graph
    prices = g.create_node(
        stocks.functions.marketstack_import_eod_prices,
        params={"access_key": api_key, "tickers": ["AAPL"]},
    )
    blocks = produce(prices, execution_timelimit_seconds=1, modules=[stocks])
    records = blocks[0].as_records()
    assert len(records) >= 100
def test_tickers():
    """Integration test: importing NASDAQ (XNAS) tickers yields at least
    100 records (needs network + access key)."""
    from snapflow_stocks import module as stocks
    api_key = ensure_api_key()
    g = graph()
    # Initial graph
    tickers = g.create_node(
        stocks.functions.marketstack_import_tickers,
        params={"access_key": api_key, "exchanges": ["XNAS"]},
    )
    blocks = produce(tickers, execution_timelimit_seconds=1, modules=[stocks])
    records = blocks[0].as_records()
    assert len(records) >= 100
def test_tickers_into_eod():
    """Integration test: feed the imported ticker stream into the EOD price
    importer and expect at least 100 records (needs network + access key)."""
    from snapflow_stocks import module as stocks
    api_key = ensure_api_key()
    g = graph()
    # Initial graph
    tickers = g.create_node(
        stocks.functions.marketstack_import_tickers,
        params={"access_key": api_key, "exchanges": ["XNAS"]},
    )
    prices = g.create_node(
        stocks.functions.marketstack_import_eod_prices,
        params={"access_key": api_key},
        inputs={"tickers_input": tickers},
    )
    blocks = produce(prices, execution_timelimit_seconds=1, modules=[stocks])
    records = blocks[0].as_records()
    assert len(records) >= 100
if __name__ == "__main__":
test_tickers_into_eod()
| [
"[email protected]"
]
| |
020896c5168cdb397dd61350967710c264a45466 | ad3fd0595c60cd10130676203ca695781ba59fa5 | /RicardoFabbri18022013_2257_gml_fb/scripts/rdfFBEgoGML.py | 6c096e21a08746b169e5094ef97e3471bd11bb1c | [
"CC0-1.0"
]
| permissive | labmacambira/fbEgoGML | e3d5b499df5ad48613f5071cae8965a5b22b3315 | 3b725b0a6c1998788bcb9b6ebcc966ed4cb083fd | refs/heads/master | 2021-05-30T15:07:38.155574 | 2015-12-11T10:13:15 | 2015-12-11T10:13:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,911 | py | import social as S, percolation as P, os
import importlib
#importlib.reload(P.rdf)
importlib.reload(S)
importlib.reload(S.fb)
importlib.reload(S.fb.read)
importlib.reload(S.fb.gml2rdf)
c=P.utils.check
umbrella_dir="fbEgoGML/"
fpath="./publishing/fb4/"
dpath="../data/fb/gml/"
scriptpath=os.path.realpath(__file__)
fnames_=[
("AntonioAnzoategui18022013_182134.gml",None,"100003608428288","antonio.anzoateguifabbri"),
("BrunoMialich31012013_2126.gml",None,"10000045475708","bruno.mialich"),
("CalebLuporini13042013.gml",None,"1110305437","calebml"),
("CalebLuporini19022013.gml",None,"1110305437","calebml"),
("CamilaBatista23022014.gml",None,"100001707143512","camila.batista.3382"),
("DanielPenalva18022013.gml",None,"100000077490764","barthor.la.zule"),
# ("RafaelReinehr09042013_1148.gml",None,"814059950","reinehr"), #gml better
("GabiThume19022013_0440.gml",None,"100002011676407","gabithume"),
("GrahamForrest28012013.gml",None,0,0),
("LailaManuelle17012013_0258.gml",None,"1713144485","laila.manuelle"),
("LarissaAnzoategui20022013_0207.gml",None,"1760577842","larissa.chogui"),
("LuisCirne07032013.gml",None,"717903828","lufcirne"),
("MariliaMelloPisani10042013_0255.gml",None,"100000812625301","marilia.pisani"),
("Mirtes16052013.gml",None,0,0),
("PedroPauloRocha10032013.gml",None,"836944624","dpedropaulorocha"),
("PeterForrest28012013_1602.gml",None,"770029747","peter.forrest.18"), # ateh aqui ok
("RafaelReinehr09042013_1148.gml",None,"814059950","reinehr"), #gml better
("RamiroGiroldo20022013_0149.gml",None,"100001810878626","ramiro.giroldo"),
("RenatoFabbri03032013.gml",None,"781909429","renato.fabbri"),
("RenatoFabbri11072013.gml",None,"781909429","renato.fabbri"),
("RenatoFabbri18042013.gml",None,"781909429","renato.fabbri"),
("RenatoFabbri20012013.gml",None,"781909429","renato.fabbri"),
("RenatoFabbri29112012_0521.gml",None,"781909429","renato.fabbri"),
("RicardoFabbri18022013_2257.gml",None,"1011765","ricardofabbri"),
("RitaWu08042013.gml",None,"100009639240215",0),
("RonaldCosta12062013.gml",None,"1457302032","scherolt"),
("ThaisTeixeira19022013_062820.gml",None,"100001089120349","thais.t.fabbri"),
("VilsonVieira18022013.gml",None,"529899682","aut0mata"),
("ViniciusSampaio18022013_2050.gml",None,"529899682","sampaio.vinicius"),
]
c("largou")
for fnames in fnames_[22:]:
aa=S.fb.triplifyGML(dpath=dpath,
fname=fnames[0],
fnamei=None,
fpath=fpath,
scriptpath=scriptpath,
uid=fnames[2],
sid=fnames[3],
fb_link=None,
ego=True,
umbrella_dir=umbrella_dir)
| [
"[email protected]"
]
| |
b8ed560f29c16f3eb2bbbe66bd280997880c9fad | 6fa831a9ac84ab220aad1195365640cabf3eeb88 | /tools/calibrate/min_quad.py | 10eff68e71089a0ffedda59e1d153540a3f30fe1 | []
| no_license | d4niele/maia | 1e68faae43687a4d12f29fff7fe94424e9713da1 | 178087336a32856c5f46e364bf164dc1e229b59d | refs/heads/master | 2020-05-24T16:07:53.666392 | 2019-07-02T22:42:50 | 2019-07-02T22:42:50 | 187,349,363 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | py | import numpy as np
from matplotlib import pyplot as plt
# Fit and plot a straight-line trend through N random points.
N = 10
xs = np.random.random(N)
ys = np.random.random(N)
# deg=1 => least-squares linear fit; trend holds [slope, intercept]
trend = np.polyfit(xs,ys,1)
plt.plot(xs,ys,'o')
trendpoly = np.poly1d(trend)
# NOTE(review): there is no plt.show() call, so nothing is displayed when
# run non-interactively -- confirm whether an interactive backend is assumed.
plt.plot(xs,trendpoly(xs))
"[email protected]"
]
| |
767ade35eef66aad7a14b9a9d020379e637aa45e | fd379769378d129ae2f038a01f85391034491d61 | /python/inversions.py | 67c0c16bafa80a0b69c63aab4ce145dfca63c857 | [
"MIT"
]
| permissive | drusk/algorithms | 824593d489904f6efa3ccf2a44fab69aafec9cd4 | c8bdc1c1aff6386e37c023bf1f4984e5addbcab5 | refs/heads/master | 2021-01-10T19:33:45.092215 | 2013-12-17T19:59:54 | 2013-12-17T19:59:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 728 | py | """
Count inversions in a list of numbers.
"""
__author__ = "David Rusk <[email protected]>"
def main():
input_file = ("/home/drusk/Documents/Courses/online/algorithms1/"
"assignments/a1/IntegerArray.txt")
numbers = []
with open(input_file, "rb") as filehandle:
for line in filehandle.readlines():
numbers.append(int(line))
print "Read %d numbers." % len(numbers)
# Just implementing the naive way to test other code
inversions = 0
for index, num in enumerate(numbers):
for other_num in numbers[index + 1:]:
if other_num < num:
inversions += 1
print "Inversions: %d" % inversions
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
5de26745bb13ad2ae79516eb18e726a7104d168a | 51d0377511a5da902033fb9d80184db0e096fe2c | /30-case-studies-in-statistical-thinking/5-earthquakes-and-oil-mining-in-oklahoma/01-eda-plotting-earthquakes-over-time.py | fd64e420d5acd064b26dfb6c353d44084515012c | []
| no_license | sashakrasnov/datacamp | c28c6bda178163337baed646220b2f7dcc36047d | 759f4cec297883907e21118f24a3449d84c80761 | refs/heads/master | 2021-12-07T02:54:51.190672 | 2021-09-17T21:05:29 | 2021-09-17T21:05:29 | 157,093,632 | 6 | 5 | null | null | null | null | UTF-8 | Python | false | false | 1,291 | py | '''
EDA: Plotting earthquakes over time
Make a plot where the y-axis is the magnitude and the x-axis is the time of all earthquakes in Oklahoma between 1980 and the first half of 2017. Each dot in the plot represents a single earthquake. The time of the earthquakes, as decimal years, is stored in the Numpy array time, and the magnitudes in the Numpy array mags.
'''
import numpy as np
import pandas as pd
import dc_stat_think as dcst
import matplotlib.pyplot as plt
# Load Oklahoma quakes: '#' lines are comments, 'time' becomes the parsed
# DatetimeIndex, and only the magnitude column is kept alongside it.
df = pd.read_csv('../datasets/oklahoma_earthquakes_1950-2017.csv', comment='#', index_col='time', parse_dates=True, usecols=['time','mag'])
# Convert timestamps to decimal years: Unix seconds divided by one year
# (31556925.9747 s, the tropical year) and offset so 1970-01-01 -> 1970.0.
time = np.array([d.timestamp() / 31556925.9747 + 1970 for d in df['1980-01':'2017-06'].index.to_pydatetime()])
mags = df['1980-01':'2017-06'].mag.values
'''
INSTRUCTIONS
* Plot the magnitude (mags) versus time (time) using plt.plot() with keyword arguments marker='.' and linestyle='none'. Also use the keyword argument alpha=0.1 to make the points transparent to better visualize overlapping points.
* Label the x-axis 'time (year)', y-axis 'magnitude', and show the plot.
'''
# Plot time vs. magnitude
_ = plt.plot(time, mags, marker='.', linestyle='none', alpha=0.1)
# Label axes and show the plot
_ = plt.xlabel('time (year)')
_ = plt.ylabel('magnitude')
plt.show()
| [
"[email protected]"
]
| |
d2419cdf3a1b330e95dde1be399c672d76c2cdbf | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/JUNIPER-CFGMGMT-MIB.py | f042878a484686062400859bc4ddd98fa349579f | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 12,091 | py | #
# PySNMP MIB module JUNIPER-CFGMGMT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/JUNIPER-CFGMGMT-MIB
# Produced by pysmi-0.3.4 at Wed May 1 13:58:55 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection", "SingleValueConstraint")
jnxCmNotifications, jnxMibs = mibBuilder.importSymbols("JUNIPER-SMI", "jnxCmNotifications", "jnxMibs")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Gauge32, Unsigned32, ObjectIdentity, Counter32, NotificationType, iso, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, MibIdentifier, ModuleIdentity, Counter64, Integer32, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "Unsigned32", "ObjectIdentity", "Counter32", "NotificationType", "iso", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "MibIdentifier", "ModuleIdentity", "Counter64", "Integer32", "IpAddress")
DisplayString, DateAndTime, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "DateAndTime", "TextualConvention")
jnxCfgMgmt = ModuleIdentity((1, 3, 6, 1, 4, 1, 2636, 3, 18))
jnxCfgMgmt.setRevisions(('2003-11-19 00:00', '2003-10-24 00:00', '2002-05-10 00:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: jnxCfgMgmt.setRevisionsDescriptions(('Added Rescue Configuration Management.', 'Added JnxCmCfChgSource TEXTUAL-CONVENTION.', 'Initial revision.',))
if mibBuilder.loadTexts: jnxCfgMgmt.setLastUpdated('200310240000Z')
if mibBuilder.loadTexts: jnxCfgMgmt.setOrganization('Juniper Networks, Inc.')
if mibBuilder.loadTexts: jnxCfgMgmt.setContactInfo(' Juniper Technical Assistance Center Juniper Networks, Inc. 1194 N. Mathilda Avenue Sunnyvale, CA 94089 E-mail: [email protected]')
if mibBuilder.loadTexts: jnxCfgMgmt.setDescription('This MIB module defines objects used for managing the configuration of Juniper products.')
# pysmi-generated TEXTUAL-CONVENTION: enumerates which subsystem produced a
# configuration-change event (values 1-8 per the namedValues below).
class JnxCmCfChgSource(TextualConvention, Integer32):
    description = 'Identifies the source of config event.'
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))
    namedValues = NamedValues(("other", 1), ("cli", 2), ("junoscript", 3), ("synchronize", 4), ("snmp", 5), ("button", 6), ("autoinstall", 7), ("unknown", 8))
class JnxCmRescueCfgState(TextualConvention, Integer32):
description = 'Identifies the state of the rescue configuration.'
status = 'current'
subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
namedValues = NamedValues(("nonexistant", 1), ("updated", 2))
# --- Scalars summarising the most recent configuration change --------------
jnxCmCfgChg = MibIdentifier((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1))
jnxCmCfgChgLatestIndex = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmCfgChgLatestIndex.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChgLatestIndex.setDescription('The index in jnxCmCfgChgEventTable for the latest configuration change event.')
jnxCmCfgChgLatestTime = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1, 2), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmCfgChgLatestTime.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChgLatestTime.setDescription('The value of sysUpTime when the configuration was last changed. If the management subsystem was reset after the last configuration change, this object will return 0.')
jnxCmCfgChgLatestDate = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1, 3), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmCfgChgLatestDate.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChgLatestDate.setDescription('The date and time when the configuration was last changed.')
jnxCmCfgChgLatestSource = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1, 4), JnxCmCfChgSource()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmCfgChgLatestSource.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChgLatestSource.setDescription('The source of the configuration event.')
jnxCmCfgChgLatestUser = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmCfgChgLatestUser.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChgLatestUser.setDescription('The name of the logged in user. The length is zero if not available or not applicable.')
jnxCmCfgChgMaxEventEntries = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmCfgChgMaxEventEntries.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChgMaxEventEntries.setDescription('The maximum number of entries that can be held in jnxCmCfgChgEventTable.')
# --- Table of configuration change events, indexed by jnxCmCfgChgEventIndex
jnxCmCfgChgEventTable = MibTable((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1, 7), )
if mibBuilder.loadTexts: jnxCmCfgChgEventTable.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChgEventTable.setDescription('A table of configuration events on this router.')
jnxCmCfgChgEventEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1, 7, 1), ).setIndexNames((0, "JUNIPER-CFGMGMT-MIB", "jnxCmCfgChgEventIndex"))
if mibBuilder.loadTexts: jnxCmCfgChgEventEntry.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChgEventEntry.setDescription('Information about a configuration event on this router.')
jnxCmCfgChgEventIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1, 7, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: jnxCmCfgChgEventIndex.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChgEventIndex.setDescription('This object identifies a specific configuration change event. Monotonically increasing values will be assigned by the snmp subsystem to each event as it occurs. If the snmp subsystem is reset, these index values will be reset as well.')
jnxCmCfgChgEventTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1, 7, 1, 2), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmCfgChgEventTime.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChgEventTime.setDescription('The value of sysUpTime when the event occurred.')
jnxCmCfgChgEventDate = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1, 7, 1, 3), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmCfgChgEventDate.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChgEventDate.setDescription('The system date and time when the event occurred.')
jnxCmCfgChgEventSource = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1, 7, 1, 4), JnxCmCfChgSource()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmCfgChgEventSource.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChgEventSource.setDescription('The source of the configuration event.')
jnxCmCfgChgEventUser = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1, 7, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmCfgChgEventUser.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChgEventUser.setDescription('The name of the logged in user. The length is zero if not available or not applicable.')
jnxCmCfgChgEventLog = MibTableColumn((1, 3, 6, 1, 4, 1, 2636, 3, 18, 1, 7, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmCfgChgEventLog.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChgEventLog.setDescription('The log of the configuration event. The length is zero if not available.')
# --- Scalars describing the rescue configuration ---------------------------
jnxCmRescueChg = MibIdentifier((1, 3, 6, 1, 4, 1, 2636, 3, 18, 2))
jnxCmRescueChgTime = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 18, 2, 1), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmRescueChgTime.setStatus('current')
if mibBuilder.loadTexts: jnxCmRescueChgTime.setDescription('The value of sysUpTime when the rescue configuration was last changed. If the management subsystem was reset after the last configuration change, this object will return 0.')
jnxCmRescueChgDate = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 18, 2, 2), DateAndTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmRescueChgDate.setStatus('current')
if mibBuilder.loadTexts: jnxCmRescueChgDate.setDescription('The date and time when the rescue configuration was last changed.')
jnxCmRescueChgSource = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 18, 2, 3), JnxCmCfChgSource()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmRescueChgSource.setStatus('current')
if mibBuilder.loadTexts: jnxCmRescueChgSource.setDescription('The source of the rescue configuration event.')
jnxCmRescueChgUser = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 18, 2, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmRescueChgUser.setStatus('current')
if mibBuilder.loadTexts: jnxCmRescueChgUser.setDescription('The name of the logged in user. The length is zero if not available or not applicable.')
jnxCmRescueChgState = MibScalar((1, 3, 6, 1, 4, 1, 2636, 3, 18, 2, 5), JnxCmRescueCfgState()).setMaxAccess("readonly")
if mibBuilder.loadTexts: jnxCmRescueChgState.setStatus('current')
if mibBuilder.loadTexts: jnxCmRescueChgState.setDescription('The current state of the rescue configuration.')
# --- Notification definitions ----------------------------------------------
jnxCmNotificationsPrefix = MibIdentifier((1, 3, 6, 1, 4, 1, 2636, 4, 5, 0))
jnxCmCfgChange = NotificationType((1, 3, 6, 1, 4, 1, 2636, 4, 5, 0, 1)).setObjects(("JUNIPER-CFGMGMT-MIB", "jnxCmCfgChgEventTime"), ("JUNIPER-CFGMGMT-MIB", "jnxCmCfgChgEventDate"), ("JUNIPER-CFGMGMT-MIB", "jnxCmCfgChgEventSource"), ("JUNIPER-CFGMGMT-MIB", "jnxCmCfgChgEventUser"), ("JUNIPER-CFGMGMT-MIB", "jnxCmCfgChgEventLog"))
if mibBuilder.loadTexts: jnxCmCfgChange.setStatus('current')
if mibBuilder.loadTexts: jnxCmCfgChange.setDescription('Notification of a configuration management event as recorded in jnxCmCfgChgEventTable.')
jnxCmRescueChange = NotificationType((1, 3, 6, 1, 4, 1, 2636, 4, 5, 0, 2)).setObjects(("JUNIPER-CFGMGMT-MIB", "jnxCmRescueChgTime"), ("JUNIPER-CFGMGMT-MIB", "jnxCmRescueChgDate"), ("JUNIPER-CFGMGMT-MIB", "jnxCmRescueChgSource"), ("JUNIPER-CFGMGMT-MIB", "jnxCmRescueChgUser"), ("JUNIPER-CFGMGMT-MIB", "jnxCmRescueChgState"))
if mibBuilder.loadTexts: jnxCmRescueChange.setStatus('current')
if mibBuilder.loadTexts: jnxCmRescueChange.setDescription('Notification of the latest rescue configuration change.')
# --- Export all symbols so other MIB modules can import them ---------------
mibBuilder.exportSymbols("JUNIPER-CFGMGMT-MIB", jnxCmCfgChg=jnxCmCfgChg, jnxCmRescueChgSource=jnxCmRescueChgSource, jnxCfgMgmt=jnxCfgMgmt, jnxCmCfgChange=jnxCmCfgChange, jnxCmRescueChg=jnxCmRescueChg, jnxCmCfgChgLatestUser=jnxCmCfgChgLatestUser, jnxCmCfgChgEventDate=jnxCmCfgChgEventDate, jnxCmCfgChgEventLog=jnxCmCfgChgEventLog, jnxCmCfgChgLatestDate=jnxCmCfgChgLatestDate, jnxCmCfgChgLatestTime=jnxCmCfgChgLatestTime, jnxCmRescueChgTime=jnxCmRescueChgTime, jnxCmCfgChgLatestIndex=jnxCmCfgChgLatestIndex, jnxCmRescueChgUser=jnxCmRescueChgUser, jnxCmCfgChgMaxEventEntries=jnxCmCfgChgMaxEventEntries, jnxCmCfgChgEventTable=jnxCmCfgChgEventTable, jnxCmNotificationsPrefix=jnxCmNotificationsPrefix, jnxCmCfgChgEventIndex=jnxCmCfgChgEventIndex, jnxCmRescueChange=jnxCmRescueChange, jnxCmCfgChgEventEntry=jnxCmCfgChgEventEntry, jnxCmCfgChgEventSource=jnxCmCfgChgEventSource, jnxCmCfgChgEventUser=jnxCmCfgChgEventUser, jnxCmRescueChgDate=jnxCmRescueChgDate, jnxCmCfgChgEventTime=jnxCmCfgChgEventTime, JnxCmCfChgSource=JnxCmCfChgSource, PYSNMP_MODULE_ID=jnxCfgMgmt, JnxCmRescueCfgState=JnxCmRescueCfgState, jnxCmCfgChgLatestSource=jnxCmCfgChgLatestSource, jnxCmRescueChgState=jnxCmRescueChgState)
| [
"[email protected]"
]
| |
535a116d0212e296c28c00090f9b9a5cc0ff9eda | 562a11c9813039430fe313384e55b4cc6a27283a | /tests/test_drf_article.py | 1250dd1c7f51c1525fbae28799e011647ae67d80 | []
| no_license | 8area8/django-rest-framework-articles | c5e6daaf8681293d97d40490c5317a4cabcaa3a3 | 3422353d57a66d4a22d66aa6d48bc128069ce879 | refs/heads/master | 2020-06-05T19:48:10.121391 | 2019-06-18T12:44:33 | 2019-06-18T12:44:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 92 | py | from drf_article import __version__
def test_version():
    """The installed package version should match the pinned release number."""
    expected = '0.1.0'
    assert __version__ == expected
| [
"[email protected]"
]
| |
0a43b98ce7d6db87225964d9ba742eaf038b8d86 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02922/s743734416.py | 5689127034e497825c363cb031a305c095bb9ae7 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | a,b = list(map(int,input().split()))
def count_taps(a, b):
    """Return the minimum number of a-way splitters needed for b outlets.

    We start with a single outlet; each splitter consumes one outlet and
    provides ``a`` of them, a net gain of ``a - 1`` outlets per splitter.

    Parameters:
        a: outlets provided by one splitter (a >= 2).
        b: number of outlets required (b >= 1).
    """
    taps = 0
    outlets = 1
    while outlets < b:
        taps += 1
        outlets += a - 1
    return taps


if __name__ == "__main__":
    # Read "a b" from stdin only when run as a script, so the pure logic
    # above stays importable and testable.
    a, b = map(int, input().split())
    print(count_taps(a, b))
| [
"[email protected]"
]
| |
33cade64ce28179008d72828added401b0e8d66f | 5bc8468029a6e1dd0cc75d480a34e646bed64ea2 | /khabarkoi/asgi.py | 55dbeb1a0b29913165098f6c260591c4e0f53001 | []
| no_license | ziaurjoy/Basic-API | c1804749593fb8b0bab3df28550280e2ffa27999 | e7a65b4a0778cd21ff08b3a0a529358c3ef288a8 | refs/heads/master | 2021-05-19T10:18:43.941612 | 2020-09-07T14:20:25 | 2020-09-07T14:20:25 | 251,534,314 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 395 | py | """
ASGI config for khabarkoi project.

It exposes the ASGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""

import os

from django.core.asgi import get_asgi_application

# Point Django at the project settings before the application is created
# (setdefault keeps an externally supplied DJANGO_SETTINGS_MODULE intact).
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'khabarkoi.settings')

# The ASGI callable that an ASGI server imports and serves.
application = get_asgi_application()
| [
"[email protected]"
]
| |
091130c0662fd0844401f41341fb6248ddc1ebe2 | 2e4f1df3835ab1a0eba70a16800a836b05b82404 | /gst/get_group_info.py | c99105cf572285605dcf161865f88b2aa923f19c | [
"MIT"
]
| permissive | valohai/g-suite-tools | a90fbdaf29815ac76e2f6845c40e8fd421bca076 | cbf69606ed992646e652261187a34f983491db98 | refs/heads/master | 2022-12-10T14:14:14.391443 | 2021-02-08T20:36:36 | 2021-02-26T10:45:57 | 222,693,411 | 1 | 0 | MIT | 2022-12-08T06:54:43 | 2019-11-19T12:41:06 | Python | UTF-8 | Python | false | false | 1,411 | py | import argparse
import json
import sys
from typing import List
import tqdm
from gst.credentials import get_directory_read_client
from gst.utils import get_paginated
def main():
    """Fetch every group of a domain (with members) and optionally dump JSON."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--domain", required=True)
    parser.add_argument("--write-json", type=argparse.FileType(mode="w"))
    options = parser.parse_args()

    client = get_directory_read_client()
    groups = get_domain_groups(client, options.domain)
    print(f"{len(groups)} groups.")
    populate_groups_members(client, groups)

    if options.write_json:
        json.dump(groups, options.write_json)
        print(f"Wrote JSON to {options.write_json}", file=sys.stderr)
def populate_groups_members(directory_client, groups: List[dict]) -> None:
    """Fetch and attach the member list of every group in *groups*.

    The members of each group are stored in-place under its ``"_members"``
    key; nothing is returned.
    """
    for group in tqdm.tqdm(groups, desc="retrieving members"):
        group["_members"] = members = []
        for members_resp in get_paginated(
            directory_client.members().list, {"groupKey": group["id"]}
        ):
            # The Directory API omits the "members" key entirely for groups
            # without members -- default to an empty page instead of raising
            # KeyError.
            members.extend(members_resp.get("members", []))
def get_domain_groups(directory_client, domain) -> List[dict]:
    """Return all groups of *domain* as a list of group resource dicts."""
    groups = []
    for groups_resp in tqdm.tqdm(
        get_paginated(directory_client.groups().list, {"domain": domain}),
        desc="retrieving groups",
    ):
        # A domain without groups yields a response with no "groups" key;
        # treat that as an empty page instead of raising KeyError.
        groups.extend(groups_resp.get("groups", []))
    return groups
# Entry point when the module is executed as a script.
if __name__ == "__main__":
    main()
| [
"[email protected]"
]
| |
4aae7c04ed2f7d9d96f012507133797f93b1a217 | 1179e66f1790ae0fe350870ca4382a4fcee18779 | /poem/admin.py | e5bf641032564543522c860d3380579e122a84e6 | []
| no_license | shoark7/harusijak-api-server | dc47edf8de794d4c9da2750516e7183169323e64 | 73b01e9e1f6999a17893550ccaf801c3d2cccc5e | refs/heads/master | 2022-11-26T11:37:50.305787 | 2019-05-23T05:51:57 | 2019-05-23T05:51:57 | 182,030,477 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | from django.contrib import admin
from .models import Poem

# Expose the Poem model in the Django admin interface (default ModelAdmin).
admin.site.register(Poem)
| [
"[email protected]"
]
| |
a2c147f4468186f6fc2e6530df517fe80b15286b | 3e59c64c78aa3ffc4ca6ee358ee1a3ba61e2d4af | /scripts/fb15k/fb15k_v1.py | a899647b39d3c73ffe3916529dba454b4a2f075c | [
"MIT"
]
| permissive | pminervini/DeepKGC | de35f75fac9c64ca6e09e4ab244552792669678d | ed55d0a28d7607324def7c48ebde98786c11d5e1 | refs/heads/master | 2016-09-06T02:36:47.748324 | 2015-07-06T12:35:07 | 2015-07-06T12:35:07 | 38,617,255 | 5 | 5 | null | null | null | null | UTF-8 | Python | false | false | 1,911 | py | #!/usr/bin/python -uB
# -*- coding: utf-8 -*-
# Classes of methods
u_vers = ['Unstructured']
base_vers = ['TransE', 'ScalE']
scaltrans_vers = ['ScalTransE']
xi_vers = ['XiTransE', 'XiScalE']
semixi_vers = ['XiScalTransSE', 'XiTransScalSE']
xiscaltrans_vers = ['XiScalTransE']
# NOTE(review): scaltrans_vers, semixi_vers, xiscaltrans_vers and nhid_set
# are defined but never used below -- presumably kept for other grid
# variants; confirm before removing.
simple_method_set = base_vers + xi_vers
sim_set = ['L1', 'L2', 'dot']
u_sim_set = ['L2_sqr']

# Hyper-parameter grid swept below.
margin_set = [1, 2, 5, 10]
ndim_set = [20, 50, 100, 200, 300]
nhid_set = [20, 50, 100, 200, 300]

# Fixed training settings shared by every generated command.
epochs = 100
nbatches = 10
lr = 0.1
seed = 123

train_path = 'data/fb15k/FB15k-train.pkl'
valid_path = 'data/fb15k/FB15k-valid.pkl'
test_path = 'data/fb15k/FB15k-test.pkl'

# ADAGRAD
# def adagrad(param, rate, epsilon, gradient, updates, param_squared_gradients):
c, method = 0, 'ADAGRAD'
# def adagrad(param, rate, epsilon, gradient, updates, param_squared_gradients):
# Shell command template; the %-placeholders are filled per grid point, and
# stdout/stderr of each run are redirected into a per-configuration log file.
cmd_adagrad = ('./learn_parameters.py --seed=%d --strategy=%s --totepochs=%d --test_all=%d --lr=%f --name=fb15k/fb15k_%s_%d '
               ' --train=%s --valid=%s --test=%s --nbatches=%d --no_rescaling --filtered '
               ' --op=%s --sim=%s --ndim=%d --nhid=%d --margin=%d' # varying params
               ' > logs/fb15k/fb15k.%s_%s_%d_%d_%d_%d.log 2>&1')

# One command per (operator, similarity, dimension, margin) combination;
# `c` numbers the configurations consecutively across both loops.
for op in simple_method_set:
    for sim in sim_set:
        for ndim in ndim_set:
            nhid = ndim
            for margin in margin_set:
                print(cmd_adagrad % (seed, method, epochs, epochs, lr, op, c, train_path, valid_path, test_path, nbatches, op, sim, ndim, nhid, margin, op, sim, ndim, nhid, margin, c))
                c += 1

# Same sweep for the Unstructured baseline, which uses its own similarity set.
for op in u_vers:
    for sim in u_sim_set:
        for ndim in ndim_set:
            nhid = ndim
            for margin in margin_set:
                print(cmd_adagrad % (seed, method, epochs, epochs, lr, op, c, train_path, valid_path, test_path, nbatches, op, sim, ndim, nhid, margin, op, sim, ndim, nhid, margin, c))
                c += 1
| [
"[email protected]"
]
| |
6a9f8fdd88d5f755ba31aad1901de75f491abbc4 | 56ffce29f0d27f83206e11870d95982c38524aae | /apweb/database_test.py | 774897fdb8bf636f8846c81eeb4ba36319442e8b | []
| no_license | adamandpaul/apweb | cce365085e2ee58cfbc31544c5a7414e67ad56b4 | b1bb81fa7d7b39f19e187462aa3447ff482b46af | refs/heads/master | 2022-10-19T02:09:52.437906 | 2021-05-21T06:10:08 | 2021-05-21T06:10:08 | 201,398,036 | 0 | 3 | null | 2022-09-21T21:39:41 | 2019-08-09T05:41:06 | Python | UTF-8 | Python | false | false | 1,631 | py | # -*- coding: utf-8 -*-
from . import database
from unittest import TestCase
from unittest.mock import MagicMock
from unittest.mock import patch
import pyramid.events
class TestDatabaseConfiguration(TestCase):
    """Unit tests for the SQLAlchemy/Pyramid wiring in the ``database`` module."""

    @patch("zope.sqlalchemy.register")
    def test_db_session_from_request(self, zope_register):
        """The session from the registry's factory is registered with the
        request's transaction manager."""
        request = MagicMock()
        expected_db_session = request.registry["db_session_factory"].return_value
        database.db_session_from_request(request)
        zope_register.assert_called_with(
            expected_db_session, transaction_manager=request.tm
        )

    @patch("sqlalchemy.orm.configure_mappers")
    def test_run_orm_configure_mappers(self, configure_mappers):
        """run_orm_configure_mappers delegates straight to SQLAlchemy."""
        database.run_orm_configure_mappers(None)
        configure_mappers.assert_called_with()

    @patch("sqlalchemy.engine_from_config")
    @patch("sqlalchemy.orm.sessionmaker")
    def test_includeme(self, sessionmaker, engine_from_config):
        """includeme stores engine and session factory in the registry, binds
        the factory to the engine, and subscribes mapper configuration to
        ApplicationCreated."""
        # Note: @patch decorators inject mocks bottom-up, so `sessionmaker`
        # (innermost) is the first argument.
        config = MagicMock()
        config.registry = {}
        database.includeme(config)
        engine_from_config.assert_called_with(
            config.get_settings.return_value, "sqlalchemy."
        )
        db_engine = engine_from_config.return_value
        db_session_factory = sessionmaker.return_value
        db_session_factory.configure.assert_called_with(bind=db_engine)
        self.assertEqual(config.registry["db_engine"], db_engine)
        self.assertEqual(config.registry["db_session_factory"], db_session_factory)
        config.add_subscriber.assert_called_with(
            database.run_orm_configure_mappers, pyramid.events.ApplicationCreated
        )
| [
"[email protected]"
]
| |
f12dde725368f102ef5b15f678c55c17078d4208 | ba80ca143ba35fd481730786a27ebdb1f88ce835 | /algorithm/codility/3_permMissingElem.py | 063ff53aa8c43c233ca38ae1c10147b9ceb9eec0 | []
| no_license | uiandwe/TIL | c541020b65adc53578aeb1c3ba4c6770b3b2e8b3 | 186544469374dd0279099c6c6aa7555ee23e42fe | refs/heads/master | 2022-02-15T08:33:07.270573 | 2022-01-01T15:22:54 | 2022-01-01T15:22:54 | 63,420,931 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 371 | py | # you can write to stdout for debugging purposes, e.g.
# print("this is a debug message")
def solution(A):
    """Return the element missing from A, a permutation of 1..N+1 minus one.

    Uses the arithmetic-series sum: the missing value is the expected total
    of 1..N+1 minus the actual sum of A. Returns 0 for an empty array and 1
    for a single element that is not 1, matching the original edge handling.
    """
    n = len(A)
    if n <= 0:
        return 0
    if n == 1 and A[0] != 1:
        return 1
    expected_total = (n + 1) * (n + 2) // 2
    return expected_total - sum(A)
# Spot checks, including the empty- and single-element edge cases.
assert solution([1, 3]) == 2
assert solution([2, 3, 1, 5]) == 4
assert solution([]) == 0
assert solution([2]) == 1
| [
"[email protected]"
]
| |
f95bb584108c6caacc35789f70cb634acd8bfcee | b4d7fbbd5ba7d73e2a0ed183e76c55a1e8f68996 | /django/db/migrations/optimizer.py | 9c9613bb370a0f1b0c58bfb3a37e6e9093de4e20 | [
"BSD-3-Clause"
]
| permissive | ikebrown/django | a6ae06946c18c39800dfb20d182da2f8fad4df99 | 8be832b262432081be297d0274ef1ab964a9bcea | refs/heads/master | 2021-01-18T05:52:46.780219 | 2013-10-21T18:40:45 | 2013-10-21T18:40:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,205 | py | from django.db import migrations
class MigrationOptimizer(object):
    """
    Powers the optimization process, where you provide a list of Operations
    and you are returned a list of equal or shorter length - operations
    are merged into one if possible.
    For example, a CreateModel and an AddField can be optimised into a
    new CreateModel, and CreateModel and DeleteModel can be optimised into
    nothing.
    """

    def optimize(self, operations):
        """
        Main optimization entry point. Pass in a list of Operation instances,
        get out a new list of Operation instances.
        Unfortunately, due to the scope of the optimisation (two combinable
        operations might be separated by several hundred others), this can't be
        done as a peephole optimisation with checks/output implemented on
        the Operations themselves; instead, the optimizer looks at each
        individual operation and scans forwards in the list to see if there
        are any matches, stopping at boundaries - operations which can't
        be optimized over (RunSQL, operations on the same field/model, etc.)
        The inner loop is run until the starting list is the same as the result
        list, and then the result is returned. This means that operation
        optimization must be stable and always return an equal or shorter list.
        """
        # Internal tracking variable for test assertions about # of loops
        self._iterations = 0
        # Re-run the single pass until a fixed point is reached (a pass that
        # changes nothing).
        while True:
            result = self.optimize_inner(operations)
            self._iterations += 1
            if result == operations:
                return result
            operations = result

    def optimize_inner(self, operations):
        """
        Inner optimization loop.
        """
        new_operations = []
        for i, operation in enumerate(operations):
            # Compare it to each operation after it
            for j, other in enumerate(operations[i+1:]):
                result = self.reduce(operation, other)
                if result is not None:
                    # Optimize! Add result, then remaining others, then return
                    # `result` replaces the pair (operation, other); the
                    # operations strictly between them (i+1 .. i+j) and the
                    # ones after `other` (i+j+2 ..) are carried over as-is.
                    new_operations.extend(result)
                    new_operations.extend(operations[i+1:i+1+j])
                    new_operations.extend(operations[i+j+2:])
                    return new_operations
                if not self.can_optimize_through(operation, other):
                    new_operations.append(operation)
                    break
            else:
                # for/else: the scan ran off the end without a reduction or a
                # boundary, so the operation is kept unchanged.
                new_operations.append(operation)
        return new_operations

    #### REDUCTION ####

    def reduce(self, operation, other):
        """
        Either returns a list of zero, one or two operations,
        or None, meaning this pair cannot be optimized.
        """
        # (first operation type, second operation type, reducer) triples,
        # tried in order; the first matching pair wins.
        submethods = [
            (migrations.CreateModel, migrations.DeleteModel, self.reduce_model_create_delete),
            (migrations.AlterModelTable, migrations.DeleteModel, self.reduce_model_alter_delete),
            (migrations.AlterUniqueTogether, migrations.DeleteModel, self.reduce_model_alter_delete),
            (migrations.AlterIndexTogether, migrations.DeleteModel, self.reduce_model_alter_delete),
        ]
        for ia, ib, om in submethods:
            if isinstance(operation, ia) and isinstance(other, ib):
                return om(operation, other)
        return None

    def reduce_model_create_delete(self, operation, other):
        """
        Folds a CreateModel and a DeleteModel into nothing.
        """
        if operation.name == other.name:
            return []
        return None

    def reduce_model_alter_delete(self, operation, other):
        """
        Folds an AlterModelSomething and a DeleteModel into nothing.
        """
        if operation.name == other.name:
            return [other]
        return None

    #### THROUGH CHECKS ####

    def can_optimize_through(self, operation, other):
        """
        Returns True if it's possible to optimize 'operation' with something
        the other side of 'other'. This is possible if, for example, they
        affect different models.
        """
        return False
| [
"[email protected]"
]
| |
5768b4aa944cf0a267a5d51df9863b7719905cde | c57376701537dc6969939c3afb51d542d670db61 | /String/string_4.py | e6561f80c80b07d1636fe290b6c22921b0d65b70 | []
| no_license | dangnam739/Learn_Python_Begin | d3f5f24504b3c703de4e981abb432f3734558e5d | 81764172475f26374a3e21d150395a99e8a183e6 | refs/heads/master | 2021-07-15T19:13:58.156215 | 2020-08-21T06:38:04 | 2020-08-21T06:38:04 | 200,081,663 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,688 | py | ###Cac phuong thuc chuoi #String Method#
a = '12'
print(int(a))
print(type(a))
#capitalize: returns the string with the first character uppercased and
#the remaining letters lowercased
a = 'how kteam - free education'
b = a.capitalize()
print(b)
#upper: uppercases every character
b = a.upper()
print(b)
#lower: lowercases every character
b = a.lower()
print(b)
#swapcase: lowercase -> uppercase, uppercase -> lowercase
b = a.swapcase()
print(b)
#title: uppercases the first letter of each word, rest lowercase
b = a.title()
print(b)
#center(width, [fillchar]): centers the string within `width` columns,
#padding with `fillchar` (a string of length 1)
b = a.center(50, '*')
print(b)
#rjust(width, [fillchar]): right-justifies within `width` columns
b = a.rjust(50, '.')
print(b)
#ljust(width, [fillchar]): left-justifies within `width` columns
b = a.ljust(50, '.')
print(b)
##Transforming methods
#encode: encodes the string to bytes
b = a.encode(encoding='utf-8', errors='strict')
print(b)
b = a.encode()
print(b)
#join: concatenates the strings of a list, with `a` as the separator
b = a.join([' -1-', '-2- ', '-3- '])
print(b)
#replace: substitutes a substring with a new one
b = a.replace('o', 'K') #replaces every occurrence
print(b)
b = a.replace('o', 'K', 1) #replaces only the first occurrence
print(b)
#strip(): removes whitespace (spaces and escape sequences) from both ends
#strip('<chars>'): removes the given characters from both ends
a = ' how kteam \n'
print(a)
b = a.strip()
print(b)
a = 'how kteamh'
b = a.strip('h') #a set of characters also works, e.g. a.strip('ho')
print(b)
#lstrip(): strips the left end only (the original comment had left/right swapped)
#rstrip(): strips the right end only
| [
"[email protected]"
]
| |
919567ff88adb1a6e3f2a56cc4b28afd54153228 | 419572051aedc42fec6d1a8ec7b90b3cd0ba6637 | /pandaharvester/harvestercredmanager/no_voms_cred_manager.py | 6af30bb92a1925dc5294970415213a96cac0d2d8 | [
"Apache-2.0"
]
| permissive | wyang007/panda-harvester | e5c43688be5ebee24860b1a7c7b2c241ae2bc4ac | c24d75eabc57b14779e72b00a65162db341c99f9 | refs/heads/master | 2021-04-27T03:11:25.832083 | 2018-02-26T08:11:30 | 2018-02-26T08:11:30 | 122,710,771 | 0 | 0 | null | 2018-02-24T06:28:38 | 2018-02-24T06:28:38 | null | UTF-8 | Python | false | false | 2,244 | py | import subprocess
from pandaharvester.harvestercore.plugin_base import PluginBase
from pandaharvester.harvestercore import core_utils
# logger
_logger = core_utils.setup_logger('no_voms_cred_manager')
# credential manager with no-voms proxy
class NoVomsCredManager(PluginBase):
    """Credential manager for a no-VOMS proxy.

    Checks the remaining lifetime of the proxy file with ``voms-proxy-info``
    and renews it with ``voms-proxy-init`` when asked to.
    """

    # constructor
    def __init__(self, **kwarg):
        PluginBase.__init__(self, **kwarg)

    # check proxy
    def check_credential(self):
        """Return True if the proxy in self.outCertFile is valid for >= 72h."""
        # make logger
        mainLog = core_utils.make_logger(_logger, method_name='check_credential')
        comStr = "voms-proxy-info -exists -hours 72 -file {0}".format(self.outCertFile)
        mainLog.debug(comStr)
        try:
            p = subprocess.Popen(comStr.split(),
                                 shell=False,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            stdOut, stdErr = p.communicate()
            retCode = p.returncode
        except Exception:
            # was a bare "except:", which would also swallow SystemExit and
            # KeyboardInterrupt; catch only real errors
            core_utils.dump_error_message(mainLog)
            return False
        mainLog.debug('retCode={0} stdOut={1} stdErr={2}'.format(retCode, stdOut, stdErr))
        return retCode == 0

    # renew proxy
    def renew_credential(self):
        """Renew the proxy; return (succeeded, combined stdout/stderr text)."""
        # make logger
        mainLog = core_utils.make_logger(_logger, method_name='renew_credential')
        comStr = "voms-proxy-init -rfc -voms {0} -out {1} -valid 96:00 -cert={2}".format(self.voms,
                                                                                         self.outCertFile,
                                                                                         self.inCertFile)
        mainLog.debug(comStr)
        try:
            p = subprocess.Popen(comStr.split(),
                                 shell=False,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            stdOut, stdErr = p.communicate()
            retCode = p.returncode
            mainLog.debug('retCode={0} stdOut={1} stdErr={2}'.format(retCode, stdOut, stdErr))
        except Exception:
            # bare "except:" replaced for the same reason as above
            stdOut = ''
            stdErr = core_utils.dump_error_message(mainLog)
            retCode = -1
        return retCode == 0, "{0} {1}".format(stdOut, stdErr)
| [
"[email protected]"
]
| |
ac8b0c1856a22c93b5d665b62610ebd8af083ae9 | 4962f934b0e94505a95ae50903cab5a9327171fc | /jsonhash/__init__.py | 82a550b4de60873d457716166d251054eedbc86c | [
"Apache-2.0"
]
| permissive | FlorianLudwig/jsonhash | f0d86267c94f6ee1de74b22c680baabf28c78dac | aae53363c3eba867189ea5ac774bcef251e54dc2 | refs/heads/master | 2021-01-10T17:21:32.048467 | 2016-04-07T19:25:00 | 2016-04-07T19:25:00 | 50,778,882 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 752 | py | import json
import hashlib
def hash(object, algorithm=None):
    """Return a hash object for *object*.

    *object* may be anything JSON-serializable (None, bool, int, float,
    str, list or dict). The serialization is canonical (sorted keys, fixed
    separators), so structurally equal objects yield equal digests.

    *algorithm* is a hashlib constructor; defaults to ``hashlib.sha256``.
    """
    if algorithm is None:
        algorithm = hashlib.sha256
    # Python 3 fixes: json.dumps() no longer accepts an ``encoding`` keyword,
    # and hashlib constructors require bytes, so encode explicitly.
    data_string = json.dumps(object,
                             ensure_ascii=False,
                             separators=(',', ':'),
                             sort_keys=True)
    return algorithm(data_string.encode('utf-8'))
| [
"[email protected]"
]
| |
3ce5571b618328f51bd78d5786b8820045c53ba1 | 9620337c5ce9294ebc0e29aafa16aa2545fd3afa | /Square.py | 3b369a9eded8898aebc55ffcf26ea3dad96ec64c | []
| no_license | balajimanikandanm/balajib | f482840ac3491b26b9315fc9f1e16e6616317638 | d065305ca7c104f7a140b4a6f09b4f747f9ac90d | refs/heads/master | 2021-06-08T16:51:52.639877 | 2019-07-16T03:59:21 | 2019-07-16T03:59:21 | 95,753,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py | n=int(input())
def digit_square_sum(number):
    """Return the sum of the squares of the decimal digits of abs(number).

    Generalizes the original duplicated 2-digit and 3-digit branches to
    integers with any number of digits.
    """
    return sum(int(digit) ** 2 for digit in str(abs(number)))


if __name__ == "__main__":
    n = int(input())
    print(digit_square_sum(n))
| [
"[email protected]"
]
| |
c510a7d18152ae7d067c151c030d1a80cca96576 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03778/s222149305.py | ada8009e48476f017c9593ae7a5970d246338c8b | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219 | py | #!/usr/bin/env python3
def uncovered_gap(w, a, b):
    """Return the width of the gap between segments [a, a+w] and [b, b+w].

    0 when the segments touch or overlap. Collapses the original three
    duplicated branches (b > a, a > b, a == b) into one expression.
    """
    return max(0, abs(a - b) - w)


if __name__ == "__main__":
    w, a, b = map(int, input().split())
    print(uncovered_gap(w, a, b))
"[email protected]"
]
| |
ab518993e7504fd0345f8ff85893401197724473 | 369e260e100db9ab5cc8b1711e99ef5e49aec173 | /ml/m09_selectModel2.py | 8500764c686e4ff32f05ffd8638b5b574a0f8beb | []
| no_license | HWALIMLEE/study | 7aa4c22cb9d7f7838634d984df96eed75f7aefea | 8336adc8999126258fe328d6b985a48e32667852 | refs/heads/master | 2023-03-26T09:11:19.606085 | 2021-03-29T23:03:04 | 2021-03-29T23:03:04 | 259,555,730 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,299 | py | #보스턴 모델링 하시오
# Task: model the Boston housing data set with every available regressor.
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from sklearn.utils.testing import all_estimators #fetches ~26 models to run in one go
import warnings
from sklearn.metrics import r2_score
warnings.filterwarnings('ignore') #just skip over warnings
boston=pd.read_csv('./data/csv/boston_house_prices.csv',header=1)
x=boston.iloc[:,0:13] #feature columns 0..12
y=boston.iloc[:,13]
#with a numpy array plain slicing would work as well
print("x:",x)
print("y:",y)
warnings.filterwarnings('ignore') #just skip over warnings
x_train,x_test,y_train,y_test=train_test_split(x,y,test_size=0.2,random_state=44)
#3. model
warnings.filterwarnings('ignore') #just skip over warnings
allAlgorithms = all_estimators(type_filter='regressor') #grabs every regressor at once -- handy
for (name,algorithm) in allAlgorithms: #each entry is a (name, algorithm) pair
    model=algorithm()
    model.fit(x_train,y_train)
    y_pred = model.predict(x_test)
    score=model.score(x_test,y_test)
    print(name,"의 정답률",score)
import sklearn
print(sklearn.__version__)
#all_estimators works normally if you downgrade the sklearn version
#커밋수정 | [
"[email protected]"
]
| |
a6d54c808fed3224faeaa67a6f4784fc46e699fd | bc441bb06b8948288f110af63feda4e798f30225 | /flowable_sdk/model/ops_automation/job_tasks_pb2.pyi | dc12a5542a2e6c124251065644a1dc8ee7723caf | [
"Apache-2.0"
]
| permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,477 | pyi | # @generated by generate_proto_mypy_stubs.py. Do not edit!
import sys
from flowable_sdk.model.ops_automation.mail_info_pb2 import (
MailInfo as flowable_sdk___model___ops_automation___mail_info_pb2___MailInfo,
)
from google.protobuf.descriptor import (
Descriptor as google___protobuf___descriptor___Descriptor,
)
from google.protobuf.internal.containers import (
RepeatedScalarFieldContainer as google___protobuf___internal___containers___RepeatedScalarFieldContainer,
)
from google.protobuf.message import (
Message as google___protobuf___message___Message,
)
from typing import (
Iterable as typing___Iterable,
Optional as typing___Optional,
Text as typing___Text,
Union as typing___Union,
)
from typing_extensions import (
Literal as typing_extensions___Literal,
)
builtin___bool = bool
builtin___bytes = bytes
builtin___float = float
builtin___int = int
if sys.version_info < (3,):
builtin___buffer = buffer
builtin___unicode = unicode
class JobTasks(google___protobuf___message___Message):
DESCRIPTOR: google___protobuf___descriptor___Descriptor = ...
id = ... # type: typing___Text
jobId = ... # type: typing___Text
jobName = ... # type: typing___Text
menuName = ... # type: typing___Text
execId = ... # type: typing___Text
resourceType = ... # type: typing___Text
resourceId = ... # type: typing___Text
resourceVId = ... # type: typing___Text
resourceVName = ... # type: typing___Text
trigger = ... # type: typing___Text
execUser = ... # type: typing___Text
hosts = ... # type: google___protobuf___internal___containers___RepeatedScalarFieldContainer[typing___Text]
status = ... # type: typing___Text
successRate = ... # type: builtin___float
error = ... # type: typing___Text
createTime = ... # type: typing___Text
updateTime = ... # type: typing___Text
creator = ... # type: typing___Text
org = ... # type: builtin___int
@property
def mail(self) -> flowable_sdk___model___ops_automation___mail_info_pb2___MailInfo: ...
def __init__(self,
*,
id : typing___Optional[typing___Text] = None,
jobId : typing___Optional[typing___Text] = None,
jobName : typing___Optional[typing___Text] = None,
menuName : typing___Optional[typing___Text] = None,
execId : typing___Optional[typing___Text] = None,
resourceType : typing___Optional[typing___Text] = None,
resourceId : typing___Optional[typing___Text] = None,
resourceVId : typing___Optional[typing___Text] = None,
resourceVName : typing___Optional[typing___Text] = None,
trigger : typing___Optional[typing___Text] = None,
execUser : typing___Optional[typing___Text] = None,
hosts : typing___Optional[typing___Iterable[typing___Text]] = None,
status : typing___Optional[typing___Text] = None,
mail : typing___Optional[flowable_sdk___model___ops_automation___mail_info_pb2___MailInfo] = None,
successRate : typing___Optional[builtin___float] = None,
error : typing___Optional[typing___Text] = None,
createTime : typing___Optional[typing___Text] = None,
updateTime : typing___Optional[typing___Text] = None,
creator : typing___Optional[typing___Text] = None,
org : typing___Optional[builtin___int] = None,
) -> None: ...
if sys.version_info >= (3,):
@classmethod
def FromString(cls, s: builtin___bytes) -> JobTasks: ...
else:
@classmethod
def FromString(cls, s: typing___Union[builtin___bytes, builtin___buffer, builtin___unicode]) -> JobTasks: ...
def MergeFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def CopyFrom(self, other_msg: google___protobuf___message___Message) -> None: ...
def HasField(self, field_name: typing_extensions___Literal[u"mail",b"mail"]) -> builtin___bool: ...
def ClearField(self, field_name: typing_extensions___Literal[u"createTime",b"createTime",u"creator",b"creator",u"error",b"error",u"execId",b"execId",u"execUser",b"execUser",u"hosts",b"hosts",u"id",b"id",u"jobId",b"jobId",u"jobName",b"jobName",u"mail",b"mail",u"menuName",b"menuName",u"org",b"org",u"resourceId",b"resourceId",u"resourceType",b"resourceType",u"resourceVId",b"resourceVId",u"resourceVName",b"resourceVName",u"status",b"status",u"successRate",b"successRate",u"trigger",b"trigger",u"updateTime",b"updateTime"]) -> None: ...
| [
"[email protected]"
]
| |
d8580fd2b046a0ebea8248bc04e21a2e0f40e463 | 60a831fb3c92a9d2a2b52ff7f5a0f665d4692a24 | /IronPythonStubs/release/stubs.min/System/Windows/Controls/__init___parts/Button.py | 8fcee9dc5c75fc121b1f809e53bbc4b4fec8b4d4 | [
"MIT"
]
| permissive | shnlmn/Rhino-Grasshopper-Scripts | a9411098c5d1bbc55feb782def565d535b27b709 | 0e43c3c1d09fb12cdbd86a3c4e2ba49982e0f823 | refs/heads/master | 2020-04-10T18:59:43.518140 | 2020-04-08T02:49:07 | 2020-04-08T02:49:07 | 161,219,695 | 11 | 2 | null | null | null | null | UTF-8 | Python | false | false | 58,319 | py | class Button(ButtonBase,IResource,IAnimatable,IInputElement,IFrameworkInputElement,ISupportInitialize,IHaveResources,IQueryAmbient,IAddChild,ICommandSource):
"""
Represents a Windows button control,which reacts to the System.Windows.Controls.Primitives.ButtonBase.Click event.
Button()
"""
def AddChild(self,*args):
"""
AddChild(self: ContentControl,value: object)
Adds a specified object as the child of a System.Windows.Controls.ContentControl.
value: The object to add.
"""
pass
def AddLogicalChild(self,*args):
"""
AddLogicalChild(self: FrameworkElement,child: object)
Adds the provided object to the logical tree of this element.
child: Child element to be added.
"""
pass
def AddText(self,*args):
"""
AddText(self: ContentControl,text: str)
Adds a specified text string to a System.Windows.Controls.ContentControl.
text: The string to add.
"""
pass
def AddVisualChild(self,*args):
"""
AddVisualChild(self: Visual,child: Visual)
Defines the parent-child relationship between two visuals.
child: The child visual object to add to parent visual.
"""
pass
def ArrangeCore(self,*args):
"""
ArrangeCore(self: FrameworkElement,finalRect: Rect)
Implements System.Windows.UIElement.ArrangeCore(System.Windows.Rect) (defined as virtual in
System.Windows.UIElement) and seals the implementation.
finalRect: The final area within the parent that this element should use to arrange itself and its children.
"""
pass
def ArrangeOverride(self,*args):
"""
ArrangeOverride(self: Control,arrangeBounds: Size) -> Size
Called to arrange and size the content of a System.Windows.Controls.Control object.
arrangeBounds: The computed size that is used to arrange the content.
Returns: The size of the control.
"""
pass
def GetLayoutClip(self,*args):
"""
GetLayoutClip(self: FrameworkElement,layoutSlotSize: Size) -> Geometry
Returns a geometry for a clipping mask. The mask applies if the layout system attempts to
arrange an element that is larger than the available display space.
layoutSlotSize: The size of the part of the element that does visual presentation.
Returns: The clipping geometry.
"""
pass
def GetTemplateChild(self,*args):
"""
GetTemplateChild(self: FrameworkElement,childName: str) -> DependencyObject
Returns the named element in the visual tree of an instantiated
System.Windows.Controls.ControlTemplate.
childName: Name of the child to find.
Returns: The requested element. May be null if no element of the requested name exists.
"""
pass
def GetUIParentCore(self,*args):
"""
GetUIParentCore(self: FrameworkElement) -> DependencyObject
Returns an alternative logical parent for this element if there is no visual parent.
Returns: Returns something other than null whenever a WPF framework-level implementation of this method
has a non-visual parent connection.
"""
pass
def GetVisualChild(self,*args):
"""
GetVisualChild(self: FrameworkElement,index: int) -> Visual
Overrides System.Windows.Media.Visual.GetVisualChild(System.Int32),and returns a child at the
specified index from a collection of child elements.
index: The zero-based index of the requested child element in the collection.
Returns: The requested child element. This should not return null; if the provided index is out of range,
an exception is thrown.
"""
pass
def HitTestCore(self,*args):
"""
HitTestCore(self: UIElement,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
Implements
System.Windows.Media.Visual.HitTestCore(System.Windows.Media.GeometryHitTestParameters) to
supply base element hit testing behavior (returning System.Windows.Media.GeometryHitTestResult).
hitTestParameters: Describes the hit test to perform,including the initial hit point.
Returns: Results of the test,including the evaluated geometry.
HitTestCore(self: UIElement,hitTestParameters: PointHitTestParameters) -> HitTestResult
Implements System.Windows.Media.Visual.HitTestCore(System.Windows.Media.PointHitTestParameters)
to supply base element hit testing behavior (returning System.Windows.Media.HitTestResult).
hitTestParameters: Describes the hit test to perform,including the initial hit point.
Returns: Results of the test,including the evaluated point.
"""
pass
def MeasureCore(self,*args):
"""
MeasureCore(self: FrameworkElement,availableSize: Size) -> Size
Implements basic measure-pass layout system behavior for System.Windows.FrameworkElement.
availableSize: The available size that the parent element can give to the child elements.
Returns: The desired size of this element in layout.
"""
pass
def MeasureOverride(self,*args):
"""
MeasureOverride(self: Control,constraint: Size) -> Size
Called to remeasure a control.
constraint: The maximum size that the method can return.
Returns: The size of the control,up to the maximum specified by constraint.
"""
pass
def OnAccessKey(self,*args):
"""
OnAccessKey(self: ButtonBase,e: AccessKeyEventArgs)
Responds when the System.Windows.Controls.AccessText.AccessKey for this control is called.
e: The event data for the System.Windows.Input.AccessKeyManager.AccessKeyPressed event.
"""
pass
def OnChildDesiredSizeChanged(self,*args):
"""
OnChildDesiredSizeChanged(self: UIElement,child: UIElement)
Supports layout behavior when a child element is resized.
child: The child element that is being resized.
"""
pass
def OnClick(self,*args):
"""
OnClick(self: Button)
Called when a System.Windows.Controls.Button is clicked.
"""
pass
def OnContentChanged(self,*args):
"""
OnContentChanged(self: ContentControl,oldContent: object,newContent: object)
Called when the System.Windows.Controls.ContentControl.Content property changes.
oldContent: The old value of the System.Windows.Controls.ContentControl.Content property.
newContent: The new value of the System.Windows.Controls.ContentControl.Content property.
"""
pass
def OnContentStringFormatChanged(self,*args):
"""
OnContentStringFormatChanged(self: ContentControl,oldContentStringFormat: str,newContentStringFormat: str)
Occurs when the System.Windows.Controls.ContentControl.ContentStringFormat property changes.
oldContentStringFormat: The old value of System.Windows.Controls.ContentControl.ContentStringFormat.
newContentStringFormat: The new value of System.Windows.Controls.ContentControl.ContentStringFormat.
"""
pass
def OnContentTemplateChanged(self,*args):
"""
OnContentTemplateChanged(self: ContentControl,oldContentTemplate: DataTemplate,newContentTemplate: DataTemplate)
Called when the System.Windows.Controls.ContentControl.ContentTemplate property changes.
oldContentTemplate: The old value of the System.Windows.Controls.ContentControl.ContentTemplate property.
newContentTemplate: The new value of the System.Windows.Controls.ContentControl.ContentTemplate property.
"""
pass
def OnContentTemplateSelectorChanged(self,*args):
"""
OnContentTemplateSelectorChanged(self: ContentControl,oldContentTemplateSelector: DataTemplateSelector,newContentTemplateSelector: DataTemplateSelector)
Called when the System.Windows.Controls.ContentControl.ContentTemplateSelector property changes.
oldContentTemplateSelector: The old value of the System.Windows.Controls.ContentControl.ContentTemplateSelector property.
newContentTemplateSelector: The new value of the System.Windows.Controls.ContentControl.ContentTemplateSelector property.
"""
pass
def OnContextMenuClosing(self,*args):
"""
OnContextMenuClosing(self: FrameworkElement,e: ContextMenuEventArgs)
Invoked whenever an unhandled System.Windows.FrameworkElement.ContextMenuClosing routed event
reaches this class in its route. Implement this method to add class handling for this event.
e: Provides data about the event.
"""
pass
def OnContextMenuOpening(self,*args):
"""
OnContextMenuOpening(self: FrameworkElement,e: ContextMenuEventArgs)
Invoked whenever an unhandled System.Windows.FrameworkElement.ContextMenuOpening routed event
reaches this class in its route. Implement this method to add class handling for this event.
e: The System.Windows.RoutedEventArgs that contains the event data.
"""
pass
def OnCreateAutomationPeer(self,*args):
"""
OnCreateAutomationPeer(self: Button) -> AutomationPeer
Creates an appropriate System.Windows.Automation.Peers.ButtonAutomationPeer for this control as
part of the WPF infrastructure.
Returns: A System.Windows.Automation.Peers.ButtonAutomationPeer for this control.
"""
pass
def OnDpiChanged(self,*args):
""" OnDpiChanged(self: Visual,oldDpi: DpiScale,newDpi: DpiScale) """
pass
def OnDragEnter(self,*args):
"""
OnDragEnter(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.DragEnter�attached event reaches an element in
its route that is derived from this class. Implement this method to add class handling for this
event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnDragLeave(self,*args):
"""
OnDragLeave(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.DragLeave�attached event reaches an element in
its route that is derived from this class. Implement this method to add class handling for this
event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnDragOver(self,*args):
"""
OnDragOver(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.DragOver�attached event reaches an element in
its route that is derived from this class. Implement this method to add class handling for this
event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnDrop(self,*args):
"""
OnDrop(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.DragEnter�attached event reaches an element in
its route that is derived from this class. Implement this method to add class handling for this
event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnGiveFeedback(self,*args):
"""
OnGiveFeedback(self: UIElement,e: GiveFeedbackEventArgs)
Invoked when an unhandled System.Windows.DragDrop.GiveFeedback�attached event reaches an element
in its route that is derived from this class. Implement this method to add class handling for
this event.
e: The System.Windows.GiveFeedbackEventArgs that contains the event data.
"""
pass
def OnGotFocus(self,*args):
"""
OnGotFocus(self: FrameworkElement,e: RoutedEventArgs)
Invoked whenever an unhandled System.Windows.UIElement.GotFocus event reaches this element in
its route.
e: The System.Windows.RoutedEventArgs that contains the event data.
"""
pass
def OnGotKeyboardFocus(self,*args):
"""
OnGotKeyboardFocus(self: UIElement,e: KeyboardFocusChangedEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.GotKeyboardFocus�attached event reaches
an element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains the event data.
"""
pass
def OnGotMouseCapture(self,*args):
"""
OnGotMouseCapture(self: UIElement,e: MouseEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.GotMouseCapture�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseEventArgs that contains the event data.
"""
pass
def OnGotStylusCapture(self,*args):
"""
OnGotStylusCapture(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.GotStylusCapture�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnGotTouchCapture(self,*args):
"""
OnGotTouchCapture(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.GotTouchCapture routed event that
occurs when a touch is captured to this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnInitialized(self,*args):
"""
OnInitialized(self: FrameworkElement,e: EventArgs)
Raises the System.Windows.FrameworkElement.Initialized event. This method is invoked whenever
System.Windows.FrameworkElement.IsInitialized is set to true internally.
e: The System.Windows.RoutedEventArgs that contains the event data.
"""
pass
def OnIsKeyboardFocusedChanged(self,*args):
"""
OnIsKeyboardFocusedChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsKeyboardFocusedChanged event is raised on
this element. Implement this method to add class handling for this event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnIsKeyboardFocusWithinChanged(self,*args):
"""
OnIsKeyboardFocusWithinChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked just before the System.Windows.UIElement.IsKeyboardFocusWithinChanged event is raised by
this element. Implement this method to add class handling for this event.
e: A System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnIsMouseCapturedChanged(self,*args):
"""
OnIsMouseCapturedChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsMouseCapturedChanged event is raised on
this element. Implement this method to add class handling for this event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnIsMouseCaptureWithinChanged(self,*args):
"""
OnIsMouseCaptureWithinChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsMouseCaptureWithinChanged event is raised
on this element. Implement this method to add class handling for this event.
e: A System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnIsMouseDirectlyOverChanged(self,*args):
"""
OnIsMouseDirectlyOverChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsMouseDirectlyOverChanged event is raised on
this element. Implement this method to add class handling for this event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnIsPressedChanged(self,*args):
"""
OnIsPressedChanged(self: ButtonBase,e: DependencyPropertyChangedEventArgs)
Called when the System.Windows.Controls.Primitives.ButtonBase.IsPressed property changes.
e: The data for System.Windows.DependencyPropertyChangedEventArgs.
"""
pass
def OnIsStylusCapturedChanged(self,*args):
"""
OnIsStylusCapturedChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsStylusCapturedChanged event is raised on
this element. Implement this method to add class handling for this event.
e: A System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnIsStylusCaptureWithinChanged(self,*args):
"""
OnIsStylusCaptureWithinChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsStylusCaptureWithinChanged event is raised
on this element. Implement this method to add class handling for this event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnIsStylusDirectlyOverChanged(self,*args):
"""
OnIsStylusDirectlyOverChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsStylusDirectlyOverChanged event is raised
on this element. Implement this method to add class handling for this event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event data.
"""
pass
def OnKeyDown(self,*args):
"""
OnKeyDown(self: ButtonBase,e: KeyEventArgs)
Provides class handling for the System.Windows.UIElement.KeyDown routed event that occurs when
the user presses a key while this control has focus.
e: The event data.
"""
pass
def OnKeyUp(self,*args):
"""
OnKeyUp(self: ButtonBase,e: KeyEventArgs)
Provides class handling for the System.Windows.UIElement.KeyUp routed event that occurs when the
user releases a key while this control has focus.
e: The event data for the System.Windows.UIElement.KeyUp event.
"""
pass
def OnLostFocus(self,*args):
"""
OnLostFocus(self: UIElement,e: RoutedEventArgs)
Raises the System.Windows.UIElement.LostFocus�routed event by using the event data that is
provided.
e: A System.Windows.RoutedEventArgs that contains event data. This event data must contain the
identifier for the System.Windows.UIElement.LostFocus event.
"""
pass
def OnLostKeyboardFocus(self,*args):
"""
OnLostKeyboardFocus(self: ButtonBase,e: KeyboardFocusChangedEventArgs)
Called when an element loses keyboard focus.
e: The event data for the System.Windows.IInputElement.LostKeyboardFocus event.
"""
pass
def OnLostMouseCapture(self,*args):
"""
OnLostMouseCapture(self: ButtonBase,e: MouseEventArgs)
Provides class handling for the System.Windows.UIElement.LostMouseCapture routed event that
occurs when this control is no longer receiving mouse event messages.
e: The event data for the System.Windows.Input.Mouse.LostMouseCapture event.
"""
pass
def OnLostStylusCapture(self,*args):
"""
OnLostStylusCapture(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.LostStylusCapture�attached event reaches
an element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains event data.
"""
pass
def OnLostTouchCapture(self,*args):
"""
OnLostTouchCapture(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.LostTouchCapture routed event that
occurs when this element loses a touch capture.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnManipulationBoundaryFeedback(self,*args):
"""
OnManipulationBoundaryFeedback(self: UIElement,e: ManipulationBoundaryFeedbackEventArgs)
Called when the System.Windows.UIElement.ManipulationBoundaryFeedback event occurs.
e: The data for the event.
"""
pass
def OnManipulationCompleted(self,*args):
"""
OnManipulationCompleted(self: UIElement,e: ManipulationCompletedEventArgs)
Called when the System.Windows.UIElement.ManipulationCompleted event occurs.
e: The data for the event.
"""
pass
def OnManipulationDelta(self,*args):
"""
OnManipulationDelta(self: UIElement,e: ManipulationDeltaEventArgs)
Called when the System.Windows.UIElement.ManipulationDelta event occurs.
e: The data for the event.
"""
pass
def OnManipulationInertiaStarting(self,*args):
"""
OnManipulationInertiaStarting(self: UIElement,e: ManipulationInertiaStartingEventArgs)
Called when the System.Windows.UIElement.ManipulationInertiaStarting event occurs.
e: The data for the event.
"""
pass
def OnManipulationStarted(self,*args):
"""
OnManipulationStarted(self: UIElement,e: ManipulationStartedEventArgs)
Called when the System.Windows.UIElement.ManipulationStarted event occurs.
e: The data for the event.
"""
pass
def OnManipulationStarting(self,*args):
"""
OnManipulationStarting(self: UIElement,e: ManipulationStartingEventArgs)
Provides class handling for the System.Windows.UIElement.ManipulationStarting routed event that
occurs when the manipulation processor is first created.
e: A System.Windows.Input.ManipulationStartingEventArgs that contains the event data.
"""
pass
def OnMouseDoubleClick(self,*args):
"""
OnMouseDoubleClick(self: Control,e: MouseButtonEventArgs)
Raises the System.Windows.Controls.Control.MouseDoubleClick routed event.
e: The event data.
"""
pass
def OnMouseDown(self,*args):
"""
OnMouseDown(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.MouseDown�attached event reaches an element
in its route that is derived from this class. Implement this method to add class handling for
this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. This event data
reports details about the mouse button that was pressed and the handled state.
"""
pass
def OnMouseEnter(self,*args):
"""
OnMouseEnter(self: ButtonBase,e: MouseEventArgs)
Provides class handling for the System.Windows.Controls.Primitives.ButtonBase.ClickMode routed
event that occurs when the mouse enters this control.
e: The event data for the System.Windows.Input.Mouse.MouseEnter event.
"""
pass
def OnMouseLeave(self,*args):
"""
OnMouseLeave(self: ButtonBase,e: MouseEventArgs)
Provides class handling for the System.Windows.UIElement.MouseLeave routed event that occurs
when the mouse leaves an element.
e: The event data for the System.Windows.Input.Mouse.MouseLeave event.
"""
pass
def OnMouseLeftButtonDown(self,*args):
"""
OnMouseLeftButtonDown(self: ButtonBase,e: MouseButtonEventArgs)
Provides class handling for the System.Windows.UIElement.MouseLeftButtonDown routed event that
occurs when the left mouse button is pressed while the mouse pointer is over this control.
e: The event data.
"""
pass
def OnMouseLeftButtonUp(self,*args):
"""
OnMouseLeftButtonUp(self: ButtonBase,e: MouseButtonEventArgs)
Provides class handling for the System.Windows.UIElement.MouseLeftButtonUp routed event that
occurs when the left mouse button is released while the mouse pointer is over this control.
e: The event data.
"""
pass
def OnMouseMove(self,*args):
"""
OnMouseMove(self: ButtonBase,e: MouseEventArgs)
Provides class handling for the System.Windows.UIElement.MouseMove routed event that occurs when
the mouse pointer moves while over this element.
e: The event data.
"""
pass
def OnMouseRightButtonDown(self,*args):
"""
OnMouseRightButtonDown(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.MouseRightButtonDown�routed event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the right mouse button was pressed.
"""
pass
def OnMouseRightButtonUp(self,*args):
"""
OnMouseRightButtonUp(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.MouseRightButtonUp�routed event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the right mouse button was released.
"""
pass
def OnMouseUp(self,*args):
"""
OnMouseUp(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.MouseUp�routed event reaches an element in
its route that is derived from this class. Implement this method to add class handling for this
event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the mouse button was released.
"""
pass
def OnMouseWheel(self,*args):
"""
OnMouseWheel(self: UIElement,e: MouseWheelEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.MouseWheel�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseWheelEventArgs that contains the event data.
"""
pass
def OnPreviewDragEnter(self,*args):
"""
OnPreviewDragEnter(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewDragEnter�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnPreviewDragLeave(self,*args):
"""
OnPreviewDragLeave(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewDragLeave�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnPreviewDragOver(self,*args):
"""
OnPreviewDragOver(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewDragOver�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnPreviewDrop(self,*args):
"""
OnPreviewDrop(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewDrop�attached event reaches an element
in its route that is derived from this class. Implement this method to add class handling for
this event.
e: The System.Windows.DragEventArgs that contains the event data.
"""
pass
def OnPreviewGiveFeedback(self,*args):
"""
OnPreviewGiveFeedback(self: UIElement,e: GiveFeedbackEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewGiveFeedback�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.GiveFeedbackEventArgs that contains the event data.
"""
pass
def OnPreviewGotKeyboardFocus(self,*args):
"""
OnPreviewGotKeyboardFocus(self: UIElement,e: KeyboardFocusChangedEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.PreviewGotKeyboardFocus�attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains the event data.
"""
pass
def OnPreviewKeyDown(self,*args):
"""
OnPreviewKeyDown(self: UIElement,e: KeyEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.PreviewKeyDown�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.KeyEventArgs that contains the event data.
"""
pass
def OnPreviewKeyUp(self,*args):
"""
OnPreviewKeyUp(self: UIElement,e: KeyEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.PreviewKeyUp�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.KeyEventArgs that contains the event data.
"""
pass
def OnPreviewLostKeyboardFocus(self,*args):
"""
OnPreviewLostKeyboardFocus(self: UIElement,e: KeyboardFocusChangedEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.PreviewKeyDown�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains the event data.
"""
pass
def OnPreviewMouseDoubleClick(self,*args):
"""
OnPreviewMouseDoubleClick(self: Control,e: MouseButtonEventArgs)
Raises the System.Windows.Controls.Control.PreviewMouseDoubleClick routed event.
e: The event data.
"""
pass
def OnPreviewMouseDown(self,*args):
"""
OnPreviewMouseDown(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseDown attached�routed event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that one or more mouse buttons were pressed.
"""
pass
def OnPreviewMouseLeftButtonDown(self,*args):
"""
OnPreviewMouseLeftButtonDown(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.PreviewMouseLeftButtonDown�routed event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the left mouse button was pressed.
"""
pass
def OnPreviewMouseLeftButtonUp(self,*args):
"""
OnPreviewMouseLeftButtonUp(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.PreviewMouseLeftButtonUp�routed event reaches
an element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the left mouse button was released.
"""
pass
def OnPreviewMouseMove(self,*args):
"""
OnPreviewMouseMove(self: UIElement,e: MouseEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseMove�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseEventArgs that contains the event data.
"""
pass
def OnPreviewMouseRightButtonDown(self,*args):
"""
OnPreviewMouseRightButtonDown(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.PreviewMouseRightButtonDown�routed event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the right mouse button was pressed.
"""
pass
def OnPreviewMouseRightButtonUp(self,*args):
"""
OnPreviewMouseRightButtonUp(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.PreviewMouseRightButtonUp�routed event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that the right mouse button was released.
"""
pass
def OnPreviewMouseUp(self,*args):
"""
OnPreviewMouseUp(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseUp�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The event data
reports that one or more mouse buttons were released.
"""
pass
def OnPreviewMouseWheel(self,*args):
"""
OnPreviewMouseWheel(self: UIElement,e: MouseWheelEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseWheel�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.MouseWheelEventArgs that contains the event data.
"""
pass
def OnPreviewQueryContinueDrag(self,*args):
"""
OnPreviewQueryContinueDrag(self: UIElement,e: QueryContinueDragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewQueryContinueDrag�attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.QueryContinueDragEventArgs that contains the event data.
"""
pass
def OnPreviewStylusButtonDown(self,*args):
"""
OnPreviewStylusButtonDown(self: UIElement,e: StylusButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusButtonDown�attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
"""
pass
def OnPreviewStylusButtonUp(self,*args):
"""
OnPreviewStylusButtonUp(self: UIElement,e: StylusButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusButtonUp�attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
"""
pass
def OnPreviewStylusDown(self,*args):
"""
OnPreviewStylusDown(self: UIElement,e: StylusDownEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusDown�attached event reaches
an element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusDownEventArgs that contains the event data.
"""
pass
def OnPreviewStylusInAirMove(self,*args):
"""
OnPreviewStylusInAirMove(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusInAirMove�attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnPreviewStylusInRange(self,*args):
"""
OnPreviewStylusInRange(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusInRange�attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnPreviewStylusMove(self,*args):
"""
OnPreviewStylusMove(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusMove�attached event reaches
an element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnPreviewStylusOutOfRange(self,*args):
"""
OnPreviewStylusOutOfRange(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusOutOfRange�attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnPreviewStylusSystemGesture(self,*args):
"""
OnPreviewStylusSystemGesture(self: UIElement,e: StylusSystemGestureEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusSystemGesture�attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.StylusSystemGestureEventArgs that contains the event data.
"""
pass
def OnPreviewStylusUp(self,*args):
"""
OnPreviewStylusUp(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusUp�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnPreviewTextInput(self,*args):
"""
OnPreviewTextInput(self: UIElement,e: TextCompositionEventArgs)
Invoked when an unhandled System.Windows.Input.TextCompositionManager.PreviewTextInput�attached
event reaches an element in its route that is derived from this class. Implement this method to
add class handling for this event.
e: The System.Windows.Input.TextCompositionEventArgs that contains the event data.
"""
pass
def OnPreviewTouchDown(self,*args):
"""
OnPreviewTouchDown(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.PreviewTouchDown routed event that
occurs when a touch presses this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnPreviewTouchMove(self,*args):
"""
OnPreviewTouchMove(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.PreviewTouchMove routed event that
occurs when a touch moves while inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnPreviewTouchUp(self,*args):
"""
OnPreviewTouchUp(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.PreviewTouchUp routed event that occurs
when a touch is released inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnPropertyChanged(self,*args):
"""
OnPropertyChanged(self: FrameworkElement,e: DependencyPropertyChangedEventArgs)
Invoked whenever the effective value of any dependency property on this
System.Windows.FrameworkElement has been updated. The specific dependency property that changed
is reported in the arguments parameter. Overrides
System.Windows.DependencyObject.OnPropertyChanged(System.Windows.DependencyPropertyChangedEventAr
gs).
e: The event data that describes the property that changed,as well as old and new values.
"""
pass
def OnQueryContinueDrag(self,*args):
"""
OnQueryContinueDrag(self: UIElement,e: QueryContinueDragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.QueryContinueDrag�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.QueryContinueDragEventArgs that contains the event data.
"""
pass
def OnQueryCursor(self,*args):
"""
OnQueryCursor(self: UIElement,e: QueryCursorEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.QueryCursor�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.QueryCursorEventArgs that contains the event data.
"""
pass
def OnRender(self,*args):
"""
OnRender(self: UIElement,drawingContext: DrawingContext)
When overridden in a derived class,participates in rendering operations that are directed by
the layout system. The rendering instructions for this element are not used directly when this
method is invoked,and are instead preserved for later asynchronous use by layout and drawing.
drawingContext: The drawing instructions for a specific element. This context is provided to the layout system.
"""
pass
def OnRenderSizeChanged(self,*args):
"""
OnRenderSizeChanged(self: ButtonBase,sizeInfo: SizeChangedInfo)
Called when the rendered size of a control changes.
sizeInfo: Specifies the size changes.
"""
pass
def OnStyleChanged(self,*args):
"""
OnStyleChanged(self: FrameworkElement,oldStyle: Style,newStyle: Style)
Invoked when the style in use on this element changes,which will invalidate the layout.
oldStyle: The old style.
newStyle: The new style.
"""
pass
def OnStylusButtonDown(self,*args):
"""
OnStylusButtonDown(self: UIElement,e: StylusButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusButtonDown�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
"""
pass
def OnStylusButtonUp(self,*args):
"""
OnStylusButtonUp(self: UIElement,e: StylusButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusButtonUp�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
"""
pass
def OnStylusDown(self,*args):
"""
OnStylusDown(self: UIElement,e: StylusDownEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusDown�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusDownEventArgs that contains the event data.
"""
pass
def OnStylusEnter(self,*args):
"""
OnStylusEnter(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusEnter�attached event is raised by
this element. Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnStylusInAirMove(self,*args):
"""
OnStylusInAirMove(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusInAirMove�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnStylusInRange(self,*args):
"""
OnStylusInRange(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusInRange�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnStylusLeave(self,*args):
"""
OnStylusLeave(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusLeave�attached event is raised by
this element. Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnStylusMove(self,*args):
"""
OnStylusMove(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusMove�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnStylusOutOfRange(self,*args):
"""
OnStylusOutOfRange(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusOutOfRange�attached event reaches an
element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnStylusSystemGesture(self,*args):
"""
OnStylusSystemGesture(self: UIElement,e: StylusSystemGestureEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusSystemGesture�attached event reaches
an element in its route that is derived from this class. Implement this method to add class
handling for this event.
e: The System.Windows.Input.StylusSystemGestureEventArgs that contains the event data.
"""
pass
def OnStylusUp(self,*args):
"""
OnStylusUp(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusUp�attached event reaches an element
in its route that is derived from this class. Implement this method to add class handling for
this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
"""
pass
def OnTemplateChanged(self,*args):
"""
OnTemplateChanged(self: Control,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)
Called whenever the control's template changes.
oldTemplate: The old template.
newTemplate: The new template.
"""
pass
def OnTextInput(self,*args):
"""
OnTextInput(self: UIElement,e: TextCompositionEventArgs)
Invoked when an unhandled System.Windows.Input.TextCompositionManager.TextInput�attached event
reaches an element in its route that is derived from this class. Implement this method to add
class handling for this event.
e: The System.Windows.Input.TextCompositionEventArgs that contains the event data.
"""
pass
def OnToolTipClosing(self,*args):
"""
OnToolTipClosing(self: FrameworkElement,e: ToolTipEventArgs)
Invoked whenever an unhandled System.Windows.FrameworkElement.ToolTipClosing routed event
reaches this class in its route. Implement this method to add class handling for this event.
e: Provides data about the event.
"""
pass
def OnToolTipOpening(self,*args):
"""
OnToolTipOpening(self: FrameworkElement,e: ToolTipEventArgs)
Invoked whenever the System.Windows.FrameworkElement.ToolTipOpening routed event reaches this
class in its route. Implement this method to add class handling for this event.
e: Provides data about the event.
"""
pass
def OnTouchDown(self,*args):
"""
OnTouchDown(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchDown routed event that occurs when
a touch presses inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnTouchEnter(self,*args):
"""
OnTouchEnter(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchEnter routed event that occurs
when a touch moves from outside to inside the bounds of this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnTouchLeave(self,*args):
"""
OnTouchLeave(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchLeave routed event that occurs
when a touch moves from inside to outside the bounds of this System.Windows.UIElement.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnTouchMove(self,*args):
"""
OnTouchMove(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchMove routed event that occurs when
a touch moves while inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnTouchUp(self,*args):
"""
OnTouchUp(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchUp routed event that occurs when a
touch is released inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
"""
pass
def OnVisualChildrenChanged(self,*args):
"""
OnVisualChildrenChanged(self: Visual,visualAdded: DependencyObject,visualRemoved: DependencyObject)
Called when the System.Windows.Media.VisualCollection of the visual object is modified.
visualAdded: The System.Windows.Media.Visual that was added to the collection
visualRemoved: The System.Windows.Media.Visual that was removed from the collection
"""
pass
def OnVisualParentChanged(self,*args):
"""
OnVisualParentChanged(self: FrameworkElement,oldParent: DependencyObject)
Invoked when the parent of this element in the visual tree is changed. Overrides
System.Windows.UIElement.OnVisualParentChanged(System.Windows.DependencyObject).
oldParent: The old parent element. May be null to indicate that the element did not have a visual parent
previously.
"""
pass
def ParentLayoutInvalidated(self,*args):
"""
ParentLayoutInvalidated(self: FrameworkElement,child: UIElement)
Supports incremental layout implementations in specialized subclasses of
System.Windows.FrameworkElement.
System.Windows.FrameworkElement.ParentLayoutInvalidated(System.Windows.UIElement) is invoked
when a child element has invalidated a property that is marked in metadata as affecting the
parent's measure or arrange passes during layout.
child: The child element reporting the change.
"""
pass
def RemoveLogicalChild(self,*args):
"""
RemoveLogicalChild(self: FrameworkElement,child: object)
Removes the provided object from this element's logical tree. System.Windows.FrameworkElement
updates the affected logical tree parent pointers to keep in sync with this deletion.
child: The element to remove.
"""
pass
def RemoveVisualChild(self,*args):
"""
RemoveVisualChild(self: Visual,child: Visual)
Removes the parent-child relationship between two visuals.
child: The child visual object to remove from the parent visual.
"""
pass
def ShouldSerializeProperty(self,*args):
"""
ShouldSerializeProperty(self: DependencyObject,dp: DependencyProperty) -> bool
Returns a value that indicates whether serialization processes should serialize the value for
the provided dependency property.
dp: The identifier for the dependency property that should be serialized.
Returns: true if the dependency property that is supplied should be value-serialized; otherwise,false.
"""
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __str__(self,*args):
pass
DefaultStyleKey=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the key to use to reference the style for this control,when theme styles are used or defined.
"""
HandlesScrolling=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value that indicates whether a control supports scrolling.
"""
HasEffectiveKeyboardFocus=property(lambda self: object(),lambda self,v: None,lambda self: None)
InheritanceBehavior=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the scope limits for property value inheritance,resource key lookup,and RelativeSource FindAncestor lookup.
"""
IsCancel=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a System.Windows.Controls.Button is a Cancel button. A user can activate the Cancel button by pressing the ESC key.
Get: IsCancel(self: Button) -> bool
Set: IsCancel(self: Button)=value
"""
IsDefault=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a System.Windows.Controls.Button is the default button. A user invokes the default button by pressing the ENTER key.
Get: IsDefault(self: Button) -> bool
Set: IsDefault(self: Button)=value
"""
IsDefaulted=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value that indicates whether a System.Windows.Controls.Button is the button that is activated when a user presses ENTER.
Get: IsDefaulted(self: Button) -> bool
"""
IsEnabledCore=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the value of the System.Windows.ContentElement.IsEnabled property.
"""
LogicalChildren=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets an enumerator to the content control's logical child elements.
"""
StylusPlugIns=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a collection of all stylus plug-in (customization) objects associated with this element.
"""
VisualBitmapEffect=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Effects.BitmapEffect value for the System.Windows.Media.Visual.
"""
VisualBitmapEffectInput=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Effects.BitmapEffectInput value for the System.Windows.Media.Visual.
"""
VisualBitmapScalingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.BitmapScalingMode for the System.Windows.Media.Visual.
"""
VisualCacheMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a cached representation of the System.Windows.Media.Visual.
"""
VisualChildrenCount=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of visual child elements within this element.
"""
VisualClearTypeHint=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.ClearTypeHint that determines how ClearType is rendered in the System.Windows.Media.Visual.
"""
VisualClip=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the clip region of the System.Windows.Media.Visual as a System.Windows.Media.Geometry value.
"""
VisualEdgeMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the edge mode of the System.Windows.Media.Visual as an System.Windows.Media.EdgeMode value.
"""
VisualEffect=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the bitmap effect to apply to the System.Windows.Media.Visual.
"""
VisualOffset=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the offset value of the visual object.
"""
VisualOpacity=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the opacity of the System.Windows.Media.Visual.
"""
VisualOpacityMask=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Brush value that represents the opacity mask of the System.Windows.Media.Visual.
"""
VisualParent=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the visual tree parent of the visual object.
"""
VisualScrollableAreaClip=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a clipped scrollable area for the System.Windows.Media.Visual.
"""
VisualTextHintingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.TextHintingMode of the System.Windows.Media.Visual.
"""
VisualTextRenderingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.TextRenderingMode of the System.Windows.Media.Visual.
"""
VisualTransform=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Transform value for the System.Windows.Media.Visual.
"""
VisualXSnappingGuidelines=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the x-coordinate (vertical) guideline collection.
"""
VisualYSnappingGuidelines=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the y-coordinate (horizontal) guideline collection.
"""
IsCancelProperty=None
IsDefaultedProperty=None
IsDefaultProperty=None
| [
"[email protected]"
]
| |
8000e75d46ed9abd3bbb9b77c384719a41849c7a | e54993bb28e72a147e513038a6ad938fcaecc5c6 | /2018/6-2.py | a15fe749802ad0ba3658f1173718aea63542470a | []
| no_license | desecho/adventofcode | 84dc4d90bf9aefa82211b1222588897413efed38 | 239527cb57d12eb2ed02a396d74adfc824df5188 | refs/heads/master | 2021-06-15T05:13:24.558169 | 2021-03-18T23:36:37 | 2021-03-18T23:36:37 | 160,407,557 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,058 | py | from collections import defaultdict
def calculate_distance(coord1, coord2):
    """Manhattan (taxicab) distance between two (x, y) points."""
    dx = coord1[0] - coord2[0]
    dy = coord1[1] - coord2[1]
    return abs(dx) + abs(dy)
def get_coords(path='6.txt'):
    """Parse the puzzle input into a list of (x, y) integer tuples.

    Args:
        path: Input file with one "x, y" pair per line. Defaults to the
            original hard-coded '6.txt' so existing callers are unchanged.

    Returns:
        List of (int, int) coordinate tuples, in file order.
    """
    # 'with' guarantees the handle is closed; the original opened the file
    # and never closed it.
    with open(path) as f:
        lines = f.read().splitlines()
    coords = []
    for line in lines:
        x, y = line.split(', ')
        coords.append((int(x), int(y)))
    return coords
max_coord_value = 0
for coord in coords:
if coord[0] > max_coord_value:
max_coord_value = coord[0]
if coord[1] > max_coord_value:
max_coord_value = coord[1]
return max_coord_value
coords = get_coords()
max_coord_value = get_max_coord_value()
def calculate_value(coord):
n = 0
for c in coords:
n += calculate_distance(c, coord)
return n
max_value = 10000
# Part 2: count grid cells whose summed Manhattan distance to every input
# point is below the threshold. (The original's unused `canvas = {}` dict
# was removed.)
# NOTE(review): the scan stops at max_coord_value - 1, so cells on the
# max_coord_value row/column are never tested -- confirm this bound is
# intended for the puzzle input.
area = 0
for x in range(max_coord_value):
    for y in range(max_coord_value):
        if calculate_value((x, y)) < max_value:
            area += 1
print(area)
"[email protected]"
]
| |
901724595d4e863dcdff9795aeb24d6f8e20e023 | 989b3499948137f57f14be8b2c77d0610d5975e6 | /python-package/daily_study/1주차(07.09~07.13)/1주차_파이썬 과제/ii_get_firsts_age.py | 6f8acc79193e6540ffb53bc5bba8fd4a4015d551 | []
| no_license | namkiseung/python_BasicProject | 76b4c070934ad4cb9d16ce844efa05f64fb09ac0 | 460d05248b2d1431624aba960e28bece888643e4 | refs/heads/master | 2022-12-13T21:12:06.865241 | 2020-04-23T01:30:08 | 2020-04-23T01:30:08 | 142,980,920 | 1 | 1 | null | 2022-12-08T02:27:40 | 2018-07-31T07:49:17 | Python | UTF-8 | Python | false | false | 1,910 | py | # -*- coding: utf-8 -*-
# Sample data for the exercise: one dict per person with keys 'age' (int),
# 'fav' (list of favourite things) and 'name' (str).
name_book = [{'age': 31, 'fav': ['3g', 'chi', 'piz'], 'name': 'ttamna'},
             {'age': 32, 'fav': ['cof', 'greentea'], 'name': 'hope'},
             {'age': 22, 'fav': ['sprite', 'pepsi'], 'name': 'mirae'},
             {'age': 21, 'fav': ['choco', 'freetime'], 'name': 'gunin'},
             {'age': 2, 'fav': ['can', 'godunguh'], 'name': 'mango'}]
def get_firsts_age(book=None):
    """Return the 'age' value of the first entry in *book*.

    The original (Korean) comments said: the first element of name_book is a
    dict; look up its 'age' key and return that value.

    Args:
        book: A non-empty list of dicts that each have an 'age' key.
            Defaults to the module-level ``name_book`` so the original
            zero-argument call keeps working.

    Returns:
        The 'age' value of the first entry.
    """
    if book is None:
        book = name_book
    return book[0]['age']
if __name__ == "__main__":
    # Demo entry point: prints the first entry's age ("what is the 'age'
    # key's value?"). The trailing `pass` is redundant but harmless.
    print('age의 키값은? ',get_firsts_age())
    pass
'''
[] 는 array를 쓰는 대표 타입 (배열 초기화나 선언시 사용)
arr = [] # 빈 배열을 만들 때 []사용
arr = [1,2,3,4] #원소가 있는 배열을 만들 때 []사용
arr[3] #배열의 3번째 원소에 접근할 때 []사용
() 는 tuple을 선언 초기화시 사용(원소 접근할때)
mytuple = () #빈 튜플 생성할 때 ()사용
mytuple = (1,2,3,4) # 원소가 있는 튜플을 만들 때 ()사용
mytuple[3] # 튜플의 원소에 접근할 때 []사용
{} 는 dictionary의 대표 타입(딕셔너리 선언 및 초기화시 사용. (키에 대응하는 값 할당하거나 접근))
mydictionary = {} #빈 딕셔너리 생성 시 {}사용
mydictionary = {"mouse":3, "penguin":5}
mydictionary["mouse"] # key("mouse")에 대응하는 value(3)에 접근할 때 사용
mydictionary["cat"] = 1 # key("cat")에 대한 value(1) 생성
'''
'''
[쓰레기가 된 코드]
c_name_book=dict()
#print(type(name_book))
#result = name_book["age"]
#for seq in len(name_book):
#c_name_book += name_book[seq]
'''
| [
"[email protected]"
]
| |
e68623770b137be6b14de90280ff4515b723457a | 38c10c01007624cd2056884f25e0d6ab85442194 | /third_party/lzma_sdk/lzma_sdk.gyp | a1167d5f71c2b8b7d363c211a41bd59ce400bfd2 | [
"BSD-3-Clause",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"LicenseRef-scancode-public-domain"
]
| permissive | zenoalbisser/chromium | 6ecf37b6c030c84f1b26282bc4ef95769c62a9b2 | e71f21b9b4b9b839f5093301974a45545dad2691 | refs/heads/master | 2022-12-25T14:23:18.568575 | 2016-07-14T21:49:52 | 2016-07-23T08:02:51 | 63,980,627 | 0 | 2 | BSD-3-Clause | 2022-12-12T12:43:41 | 2016-07-22T20:14:04 | null | UTF-8 | Python | false | false | 2,150 | gyp | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'lzma_sdk_sources': [
'7z.h',
'7zAlloc.c',
'7zAlloc.h',
'7zBuf.c',
'7zBuf.h',
'7zCrc.c',
'7zCrc.h',
'7zCrcOpt.c',
'7zDec.c',
'7zFile.c',
'7zFile.h',
'7zIn.c',
'7zStream.c',
'Alloc.c',
'Alloc.h',
'Bcj2.c',
'Bcj2.h',
'Bra.c',
'Bra.h',
'Bra86.c',
'CpuArch.c',
'CpuArch.h',
'LzFind.c',
'LzFind.h',
'LzHash.h',
'Lzma2Dec.c',
'Lzma2Dec.h',
'LzmaEnc.c',
'LzmaEnc.h',
'LzmaDec.c',
'LzmaDec.h',
'LzmaLib.c',
'LzmaLib.h',
'Types.h',
],
},
'targets': [
{
'target_name': 'lzma_sdk',
'type': 'static_library',
'defines': [
'_7ZIP_ST',
'_LZMA_PROB32',
],
'variables': {
# Upstream uses self-assignment to avoid warnings.
'clang_warning_flags': [ '-Wno-self-assign' ]
},
'sources': [
'<@(lzma_sdk_sources)',
],
'include_dirs': [
'.',
],
'direct_dependent_settings': {
'include_dirs': [
'.',
],
},
},
],
'conditions': [
['OS=="win"', {
'targets': [
{
'target_name': 'lzma_sdk64',
'type': 'static_library',
'defines': [
'_7ZIP_ST',
'_LZMA_PROB32',
],
'variables': {
# Upstream uses self-assignment to avoid warnings.
'clang_warning_flags': [ '-Wno-self-assign' ]
},
'include_dirs': [
'.',
],
'sources': [
'<@(lzma_sdk_sources)',
],
'configurations': {
'Common_Base': {
'msvs_target_platform': 'x64',
},
},
'direct_dependent_settings': {
'include_dirs': [
'.',
],
},
},
],
}],
],
}
| [
"[email protected]"
]
| |
fd122e785dcd67f5dfb38d8fb252da42a10b7b2e | 46357db3b1c1af699384d9cba1ffbc3c732117ad | /python_basics/exercises/26_find_hcf_or_gcd.py | 517945158aebeb2882ab90db0eb4fd6fa7b56cc1 | []
| permissive | khanhdodang/automation-training-python | 28fbd70ca4bc84e47cf17d1e4702513863e38c44 | b16143961cee869c7555b449e2a05abeae2dc3b5 | refs/heads/master | 2023-07-11T05:21:34.495851 | 2021-08-18T01:29:37 | 2021-08-18T01:29:37 | 285,208,030 | 0 | 8 | MIT | 2020-09-29T07:01:15 | 2020-08-05T07:01:46 | Python | UTF-8 | Python | false | false | 559 | py | '''
The highest common factor (H.C.F) or greatest common divisor (G.C.D) of two numbers is the largest positive integer that perfectly divides the two given numbers.
For example, the H.C.F of 12 and 14 is 2.
'''
# Python program to find H.C.F of two numbers
# define a function
def compute_hcf(x, y):
    """Return the highest common factor (greatest common divisor) of x and y.

    Uses the Euclidean algorithm, which takes O(log(min(x, y))) steps
    instead of trial-dividing every candidate up to min(x, y).  Unlike the
    naive loop, it is also well defined when one argument is 0 (the H.C.F.
    of n and 0 is n, whereas the old loop left `hcf` unbound and raised
    NameError).
    """
    x, y = abs(x), abs(y)
    while y:
        x, y = y, x % y
    return x
num1 = 54
num2 = 24
print("The H.C.F. is", compute_hcf(num1, num2))
| [
"[email protected]"
]
| |
138e01dd749c37a17258cce7de9bd8b7d0744481 | 0a21d5e72b4afbabcbf4ec0d65ea84cd8d6159c7 | /Contest/weekly-contest-169/D.py | 7b093659d1af3a7b5b3878292f1449b79aa09d2b | []
| no_license | LuoJiaji/LeetCode-Demo | 193f27ba36c93f9030435874a145c63a81d3c0f8 | 78e6e87c01848a1dc71b7dc0716029ece5f35863 | refs/heads/master | 2020-06-24T03:03:33.366537 | 2020-04-05T02:09:41 | 2020-04-05T02:09:41 | 198,830,590 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,126 | py | # class Solution(object):
# def isSolvable(self, words, result):
# """
# :type words: List[str]
# :type result: str
# :rtype: bool
# """
# # used = []
# # flag = False
# def check(numlist, alpha, words, result):
# # pass
# flag = False
# nums = []
# for w in words:
# tmp = ''
# for a in w:
# ind = alpha.index(a)
# tmp += str(numlist[ind])
# nums.append(tmp)
# res = ''
# for i in result:
# ind = alpha.index(i)
# res += str(numlist[ind])
# s = 0
# for num in nums:
# s += int(num)
# if s == int(res):
# flag = True
# print(nums, res)
# return flag
# # print(nums, res)
# def dfs(alpha, i, used, numlist, cantzero):
# # numlist = copy.deepcopy(numlist)
# # used = copy.deepcopy(used)
# # print(numlist, used)
# if i == len(alpha):
# # print(i, len(alpha), numlist)
# res = check(numlist, alpha, words, result)
# return res
# for n in range(10):
# # print(n, used)
# if n == 0 and alpha[i] in cantzero:
# continue
# if n not in used:
# # new_used = used + [n]
# # new_numlist = numlist + [n]
# flag = dfs(alpha, i+1, used + [n], numlist + [n], cantzero)
# if flag:
# return True
# return False
# alpha = result
# cantzero = [result[0]]
# for w in words:
# alpha += w
# cantzero += [w[0]]
# alpha = list(set(alpha))
# cantzero = list(set(cantzero))
# print(alpha)
# print(cantzero)
# res = dfs(alpha, 0, [], [], cantzero)
# return res
# class Solution(object):
# def isSolvable(self, words, result):
# """
# :type words: List[str]
# :type result: str
# :rtype: bool
# """
# def reverse(s):
# ret = ""
# for i in range(len(s) - 1, -1, -1):
# ret += s[i]
# return ret
# data = [reverse(w) for w in words]
# data.append(reverse(result))
# print(data)
# n = len(data)
# m = len(result)
# s = {}
# a = set()
# def work(k, n, m, y):
# if k >= n * m:
# return y == 0
# if (k + 1) % n == 0:
# if data[k % n][k // n] in s:
# if y % 10 == s[data[k % n][k // n]]:
# return work(k + 1, n, m, y // 10)
# else:
# return False
# else:
# if y % 10 not in a:
# a.add(y % 10)
# s[data[k % n][k // n]] = y % 10
# if work(k + 1, n, m, y // 10):
# return True
# a.remove(y % 10)
# del s[data[k % n][k // n]]
# else:
# return False
# elif k // n >= len(data[k % n]):
# return work(k + 1, n, m, y)
# elif data[k % n][k // n] in s:
# if k // n == len(data[k % n]) - 1 and s[data[k % n][k // n]] == 0:
# return False
# return work(k + 1, n, m, y + s[data[k % n][k // n]])
# else:
# for i in range(10):
# if k // n == len(data[k % n]) - 1 and i == 0:
# continue
# if i not in a:
# s[data[k % n][k // n]] = i
# a.add(i)
# if work(k + 1, n, m, y + i):
# return True
# a.remove(i)
# del s[data[k % n][k // n]]
# return False
# return work(0, n, m, 0)
class Solution:
    def isSolvable(self, words, result):
        """Return True if digits 0-9 can be assigned to the letters (one
        digit per letter, all distinct, and no multi-digit number with a
        leading zero) so that sum(words) == result.

        Each letter gets a signed positional coefficient (+10^k for word
        positions, -10^k for result positions); an assignment solves the
        puzzle iff the weighted digit sum is 0.  Letters are tried in
        decreasing |coefficient| order so the suffix-sum bound prunes early.

        Fixes vs. the previous version: enforces the no-leading-zero rule,
        short-circuits when there are more than 10 distinct letters, and
        drops the debug prints.
        """
        # Signed coefficient of each letter in sum(words) - result == 0.
        coeffs = {}
        for word in words:
            scale = 1
            for ch in reversed(word):
                coeffs[ch] = coeffs.get(ch, 0) + scale
                scale *= 10
        scale = 1
        for ch in reversed(result):
            coeffs[ch] = coeffs.get(ch, 0) - scale
            scale *= 10
        if len(coeffs) > 10:  # more letters than digits: unsolvable
            return False
        # First letter of any multi-digit number must not map to 0.
        leading = {w[0] for w in words + [result] if len(w) > 1}
        items = sorted(coeffs.items(), key=lambda kv: abs(kv[1]), reverse=True)
        n = len(items)
        # tail[i] = sum of |coefficient| over letters i..n-1, for pruning.
        tail = [0] * (n + 1)
        for i in range(n - 1, -1, -1):
            tail[i] = tail[i + 1] + abs(items[i][1])
        used = [False] * 10

        def dfs(i, total):
            if i == n:
                return total == 0
            # Even assigning 9 to every remaining letter cannot cancel total.
            if 9 * tail[i] < abs(total):
                return False
            letter, coeff = items[i]
            for digit in range(10):
                if used[digit] or (digit == 0 and letter in leading):
                    continue
                used[digit] = True
                if dfs(i + 1, total + coeff * digit):
                    return True
                used[digit] = False
            return False

        return dfs(0, 0)
# Ad-hoc smoke checks (LeetCode 1307 "Verbal Arithmetic Puzzle" examples).
words = ["SEND", "MORE"]
result = "MONEY"
res = Solution().isSolvable(words, result)
print(res)  # expected per the problem statement: True
words = ["SIX", "SEVEN", "SEVEN"]
result = "TWENTY"
res = Solution().isSolvable(words, result)
print(res)  # expected per the problem statement: True
words = ["THIS", "IS", "TOO"]
result = "FUNNY"
res = Solution().isSolvable(words, result)
print(res)  # expected per the problem statement: True
words = ["LEET", "CODE"]
result = "POINT"
res = Solution().isSolvable(words, result)
print(res)  # expected per the problem statement: False
| [
"[email protected]"
]
| |
42f4bf6c1f20d34b1dff13ff86e5188f43f078a1 | aa2c3743c265c3db8a246a04f26df8428d23dd06 | /tacker/api/v1/router.py | 3f53618e26a630c4111a70c43a13a10fd9edda17 | [
"Apache-2.0"
]
| permissive | SripriyaSeetharam/tacker | fb43740de8e791b7bfa121dd16c295dd380f03f0 | 0c5c2eb06fb6112b03b49c05c5cbffb0ba00587f | refs/heads/master | 2021-01-22T01:55:17.327221 | 2015-07-08T21:07:02 | 2015-07-08T21:07:02 | 38,065,799 | 1 | 0 | null | 2015-06-25T18:13:25 | 2015-06-25T18:13:24 | Python | UTF-8 | Python | false | false | 2,312 | py | # Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import routes as routes_mapper
import six.moves.urllib.parse as urlparse
import webob
import webob.dec
import webob.exc
from tacker.api import extensions
from tacker.api.v1 import attributes
from tacker.openstack.common import log as logging
from tacker import wsgi
LOG = logging.getLogger(__name__)
class Index(wsgi.Application):
    """WSGI application that renders the API index: one entry per
    registered resource, each carrying a self link resolved against the
    request URL."""
    def __init__(self, resources):
        # Mapping of resource name -> collection path segment.
        self.resources = resources
    @webob.dec.wsgify(RequestClass=wsgi.Request)
    def __call__(self, req):
        # XML serialization hints: which dict keys become tag attributes.
        metadata = {'application/xml': {'attributes': {
            'resource': ['name', 'collection'],
            'link': ['href', 'rel']}}}
        layout = []
        # NOTE(review): iteritems() is Python 2 only.
        for name, collection in self.resources.iteritems():
            href = urlparse.urljoin(req.path_url, collection)
            resource = {'name': name,
                        'collection': collection,
                        'links': [{'rel': 'self',
                                   'href': href}]}
            layout.append(resource)
        response = dict(resources=layout)
        # Honor the Accept header (JSON or XML).
        content_type = req.best_match_content_type()
        body = wsgi.Serializer(metadata=metadata).serialize(response,
                                                            content_type)
        return webob.Response(body=body, content_type=content_type)
class APIRouter(wsgi.Router):
    """Top-level router for the v1 API; loads extension resources before
    building the route mapper."""
    @classmethod
    def factory(cls, global_config, **local_config):
        # PasteDeploy app_factory entry point.
        return cls(**local_config)
    def __init__(self, **local_config):
        mapper = routes_mapper.Mapper()
        ext_mgr = extensions.ExtensionManager.get_instance()
        # Populate RESOURCE_ATTRIBUTE_MAP with extension resources so they
        # are known before routing starts.
        ext_mgr.extend_resources("1.0", attributes.RESOURCE_ATTRIBUTE_MAP)
        super(APIRouter, self).__init__(mapper)
| [
"[email protected]"
]
| |
b40a2a276b7057d283cc97a08b18fff243b0a820 | c16ea32a4cddb6b63ad3bacce3c6db0259d2bacd | /google/ads/googleads/v4/googleads-py/google/ads/googleads/v4/services/types/ad_group_extension_setting_service.py | ef41a0ce26e223a0dc2319488043f0f0ca9dd4e9 | [
"Apache-2.0"
]
| permissive | dizcology/googleapis-gen | 74a72b655fba2565233e5a289cfaea6dc7b91e1a | 478f36572d7bcf1dc66038d0e76b9b3fa2abae63 | refs/heads/master | 2023-06-04T15:51:18.380826 | 2021-06-16T20:42:38 | 2021-06-16T20:42:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,946 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v4.resources.types import ad_group_extension_setting
from google.protobuf import field_mask_pb2 # type: ignore
from google.rpc import status_pb2 # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v4.services',
marshal='google.ads.googleads.v4',
manifest={
'GetAdGroupExtensionSettingRequest',
'MutateAdGroupExtensionSettingsRequest',
'AdGroupExtensionSettingOperation',
'MutateAdGroupExtensionSettingsResponse',
'MutateAdGroupExtensionSettingResult',
},
)
class GetAdGroupExtensionSettingRequest(proto.Message):
r"""Request message for
[AdGroupExtensionSettingService.GetAdGroupExtensionSetting][google.ads.googleads.v4.services.AdGroupExtensionSettingService.GetAdGroupExtensionSetting].
Attributes:
resource_name (str):
Required. The resource name of the ad group
extension setting to fetch.
"""
resource_name = proto.Field(
proto.STRING,
number=1,
)
class MutateAdGroupExtensionSettingsRequest(proto.Message):
r"""Request message for
[AdGroupExtensionSettingService.MutateAdGroupExtensionSettings][google.ads.googleads.v4.services.AdGroupExtensionSettingService.MutateAdGroupExtensionSettings].
Attributes:
customer_id (str):
Required. The ID of the customer whose ad
group extension settings are being modified.
operations (Sequence[google.ads.googleads.v4.services.types.AdGroupExtensionSettingOperation]):
Required. The list of operations to perform
on individual ad group extension settings.
partial_failure (bool):
If true, successful operations will be
carried out and invalid operations will return
errors. If false, all operations will be carried
out in one transaction if and only if they are
all valid. Default is false.
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
"""
customer_id = proto.Field(
proto.STRING,
number=1,
)
operations = proto.RepeatedField(
proto.MESSAGE,
number=2,
message='AdGroupExtensionSettingOperation',
)
partial_failure = proto.Field(
proto.BOOL,
number=3,
)
validate_only = proto.Field(
proto.BOOL,
number=4,
)
class AdGroupExtensionSettingOperation(proto.Message):
r"""A single operation (create, update, remove) on an ad group
extension setting.
Attributes:
update_mask (google.protobuf.field_mask_pb2.FieldMask):
FieldMask that determines which resource
fields are modified in an update.
create (google.ads.googleads.v4.resources.types.AdGroupExtensionSetting):
Create operation: No resource name is
expected for the new ad group extension setting.
update (google.ads.googleads.v4.resources.types.AdGroupExtensionSetting):
Update operation: The ad group extension
setting is expected to have a valid resource
name.
remove (str):
Remove operation: A resource name for the removed ad group
extension setting is expected, in this format:
``customers/{customer_id}/adGroupExtensionSettings/{ad_group_id}~{extension_type}``
"""
update_mask = proto.Field(
proto.MESSAGE,
number=4,
message=field_mask_pb2.FieldMask,
)
create = proto.Field(
proto.MESSAGE,
number=1,
oneof='operation',
message=ad_group_extension_setting.AdGroupExtensionSetting,
)
update = proto.Field(
proto.MESSAGE,
number=2,
oneof='operation',
message=ad_group_extension_setting.AdGroupExtensionSetting,
)
remove = proto.Field(
proto.STRING,
number=3,
oneof='operation',
)
class MutateAdGroupExtensionSettingsResponse(proto.Message):
r"""Response message for an ad group extension setting mutate.
Attributes:
partial_failure_error (google.rpc.status_pb2.Status):
Errors that pertain to operation failures in the partial
failure mode. Returned only when partial_failure = true and
all errors occur inside the operations. If any errors occur
outside the operations (e.g. auth errors), we return an RPC
level error.
results (Sequence[google.ads.googleads.v4.services.types.MutateAdGroupExtensionSettingResult]):
All results for the mutate.
"""
partial_failure_error = proto.Field(
proto.MESSAGE,
number=3,
message=status_pb2.Status,
)
results = proto.RepeatedField(
proto.MESSAGE,
number=2,
message='MutateAdGroupExtensionSettingResult',
)
class MutateAdGroupExtensionSettingResult(proto.Message):
r"""The result for the ad group extension setting mutate.
Attributes:
resource_name (str):
Returned for successful operations.
"""
resource_name = proto.Field(
proto.STRING,
number=1,
)
__all__ = tuple(sorted(__protobuf__.manifest))
| [
"bazel-bot-development[bot]@users.noreply.github.com"
]
| bazel-bot-development[bot]@users.noreply.github.com |
554ab2c2d9d21b5149c32d9c6caf9f65d0dbe509 | dd08a146a41a8114365a7b11b534935cb96ec0b5 | /configs/activations/x101_4x4_1x.py | 978ebdd35999768d449ea74d69c8814e2a657020 | [
"Apache-2.0"
]
| permissive | kostas1515/GOL | 2132020f97739278afd12fc1e7af2560c50781cb | 70026b73cacf7ee9c8b209907a83cff7d7e34d65 | refs/heads/master | 2023-05-23T10:13:04.671246 | 2022-11-24T10:51:22 | 2022-11-24T10:51:22 | 513,964,766 | 18 | 2 | null | null | null | null | UTF-8 | Python | false | false | 650 | py | _base_ = [
'../lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py'
]
# data = dict(train=dict(oversample_thr=0.0))
data = dict(train=dict(oversample_thr=0.0),samples_per_gpu=4)
model = dict(roi_head=dict(bbox_head=dict(loss_cls=dict(type="CrossEntropyLoss", use_sigmoid=True),
init_cfg = dict(type='Constant',val=0.001, bias=-6.5, override=dict(name='fc_cls')))))
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=1000,
warmup_ratio=0.001,
step=[8, 11])
work_dir='./experiments/x101_4x4_1x/'
# work_dir='./experiments/test/'
fp16 = dict(loss_scale=512.)
| [
"[email protected]"
]
| |
54653ff8ae9f1d734186dff0d2cc2189116fa554 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/5/lEm.py | 06476509f118b2e4163b067da84e4c3f5d7fee82 | []
| no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    # Emit the tokens found between a pair of standalone double-quote tokens.
    # NOTE(review): the comparison is against the exact token '"', so the
    # opening/closing quotes must be whitespace-separated in the input line.
    # Python 2 print statements: this module is Python 2 only.
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            #data to print
            lineRemaining = lineRemaining[1:-1]
            print ' '.join(lineRemaining)
        else:
            print
def main(fileName):
    # Interpret fileName line by line: every line must start with the
    # keyword 'lEM'; any other first token prints 'ERROR' and aborts.
    with open(fileName) as f:
        for line in f:
            # NOTE(review): a blank line makes data == [] and data[0]
            # raise IndexError.
            data = line.split()
            if data[0] == 'lEM':
                printFunction(data[1:])
            else:
                print 'ERROR'
                return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
]
| |
d1c83f7397067a5db4849163931e37a5e5a349c0 | d7016f69993570a1c55974582cda899ff70907ec | /sdk/dataprotection/azure-mgmt-dataprotection/generated_samples/get_operation_result_patch.py | 448ca7b2befea5dda6ba7aebacbd274a18d99b1d | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
]
| permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 1,756 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.dataprotection import DataProtectionClient
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-dataprotection
# USAGE
python get_operation_result_patch.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
client = DataProtectionClient(
credential=DefaultAzureCredential(),
subscription_id="04cf684a-d41f-4550-9f70-7708a3a2283b",
)
response = client.backup_vault_operation_results.get(
resource_group_name="SampleResourceGroup",
vault_name="swaggerExample",
operation_id="YWUzNDFkMzQtZmM5OS00MmUyLWEzNDMtZGJkMDIxZjlmZjgzOzdmYzBiMzhmLTc2NmItNDM5NS05OWQ1LTVmOGEzNzg4MWQzNA==",
)
print(response)
# x-ms-original-file: specification/dataprotection/resource-manager/Microsoft.DataProtection/preview/2022-11-01-preview/examples/VaultCRUD/GetOperationResultPatch.json
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
fcf7d0c7373084d9fb10b07ff25dc125d92190fe | 36d924baf115884f48cf4e8de3415340b82abda5 | /atomman/dump/poscar/dump.py | 7d6ed24a88f0fa17c6717384f61be774816a7250 | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-public-domain"
]
| permissive | plin1112/atomman | 499a430649a89bc9aa5cb716598d05b2783537d6 | ced3a4e5a99a95ef582cf3dbb074eb75cad4a0a6 | refs/heads/master | 2020-03-17T19:28:04.032877 | 2019-05-24T03:17:46 | 2019-05-24T03:17:46 | 133,863,046 | 0 | 0 | NOASSERTION | 2019-05-24T03:17:47 | 2018-05-17T20:06:21 | Python | UTF-8 | Python | false | false | 3,554 | py | # Standard Python libraries
from __future__ import (absolute_import, print_function,
division, unicode_literals)
# http://www.numpy.org/
import numpy as np
# atomman imports
from ...compatibility import range
def dump(system, f=None, header='', symbols=None, style='direct',
         box_scale=1.0, float_format='%.13e'):
    """
    Generates a poscar-style coordination file for the system.

    Parameters
    ----------
    system : atomman.System
        The system whose coordinates you are saving
    f : str or file-like object, optional
        File path or file-like object to write the content to. If not given,
        then the content is returned as a str.
    header : str, optional
        The comment line to place at the top of the file. Default value is ''.
    symbols : tuple, optional
        List of the element symbols that correspond to the atom types. If not
        given, will use system.symbols if set, otherwise no element content
        will be included.
    style : str, optional
        The poscar coordinate style. Default value is 'direct'.
    box_scale : float, optional
        A universal scaling constant applied to the box vectors. Default value
        is 1.0.
    float_format : str, optional
        c-style format for printing the floating point numbers. Default value
        is '%.13e'.

    Returns
    -------
    poscar_str : str
        String of the poscar object (only returned if f is not given).
    """
    assert '\n' not in header, 'header can only be one line'
    assert '\n' not in style, 'style can only be one line'
    threexf = float_format + ' ' + float_format + ' ' + float_format

    # Scale box vectors and write out the values
    vects = system.box.vects / box_scale
    poscar_string = '\n'.join([header,
                               float_format % box_scale,
                               threexf % tuple(vects[0]),
                               threexf % tuple(vects[1]),
                               threexf % tuple(vects[2])])

    # Use system.symbols if set
    if symbols is None:
        if None not in system.symbols:
            symbols = system.symbols

    # Write symbols tags if they are given
    if symbols is not None:
        if not isinstance(symbols, (list, tuple)):
            symbols = [symbols]
        assert len(symbols) == system.natypes, 'length of symbols differs from number of atom types'
        poscar_string += '\n' + ' '.join(symbols)

    # Count how many atoms of each type
    atype = system.atoms.atype
    poscar_string += '\n'
    uatype, counts = np.unique(atype, return_counts=True)
    for i in range(1, int(uatype.max()+1)):
        # Boolean selection yields a (possibly empty) ndarray.  The old
        # ``count == []`` test relied on deprecated empty-array truthiness,
        # which warns/raises on modern NumPy; check .size explicitly.
        matches = counts[uatype == i]
        count = int(matches[0]) if matches.size > 0 else 0
        poscar_string += '%i ' % count

    # Check which coordinate style to use (Cartesian vs. direct/fractional)
    poscar_string += '\n' + style
    if style[0] in 'cCkK':
        scale = False
    else:
        scale = True

    # Write out positions, grouped by atom type
    pos = system.atoms_prop(key='pos', scale=scale)
    for a in range(1, system.natypes+1):
        for p in pos[atype==a]:
            poscar_string += '\n'+ threexf % tuple(p)

    # Save to the file-like object
    if hasattr(f, 'write'):
        f.write(poscar_string)

    # Save to the file name
    elif f is not None:
        with open(f, 'w') as fp:
            fp.write(poscar_string)

    # Return as a string
    else:
        return poscar_string
"[email protected]"
]
| |
d7be36e6e09482164160575426a42d6f073350ce | e17cd40f0a9e2452d685a754458f152a1cfb2c69 | /open/core/betterself/models/supplement_stack.py | a29fbadc0050919745c7366ea59577be8717613f | [
"MIT"
]
| permissive | Rowlando13/open | 6faec6f4f048284c2a69b64d7fb3767569dbcf3a | 6c14c7bf8b915cea94f89b8af209be14489726e8 | refs/heads/master | 2022-12-02T00:34:56.987855 | 2020-08-20T22:01:25 | 2020-08-20T22:01:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 758 | py | from open.utilities.fields import DEFAULT_MODELS_CHAR_FIELD
from open.utilities.models import BaseModelWithUserGeneratedContent
class SupplementStack(BaseModelWithUserGeneratedContent):
    """A named, per-user stack (collection) of supplement compositions."""
    name = DEFAULT_MODELS_CHAR_FIELD
    class Meta:
        unique_together = ("user", "name")
        ordering = ["user", "name"]
        verbose_name = "Supplements Stack"
        verbose_name_plural = "Supplements Stacks"
    def __str__(self):
        return "{} Stack".format(self.name)
    @property
    def description(self):
        """Comma-separated descriptions of the related compositions, or ''
        when the stack has none.

        NOTE(review): assumes ``compositions`` is the reverse relation
        (related_name) from a composition model -- confirm on that model.
        """
        compositions = self.compositions.all()
        descriptions = [composition.description for composition in compositions]
        if descriptions:
            return ", ".join(descriptions)
        else:
            return ""
| [
"[email protected]"
]
| |
69bd648e2f22328b05bc72638b69a9862c70e36b | 7090a13d5a33d2c1e500af239fe2a3f4a9996596 | /trtools/tools/pload.py | ff5d82655582d01ae821a3b001351359dadb89e9 | [
"MIT"
]
| permissive | andrewmhammond/trtools | 97d382e560e9e303e34431d7a4248ac633b62b09 | 211cdb21377e36086c3d78fe9d711a9c12245359 | refs/heads/master | 2020-12-26T01:48:59.728488 | 2014-04-06T23:25:05 | 2014-04-06T23:25:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 931 | py | import trtools.tools.datapanel as datapanel
import multiprocessing
_store = None
def _load(key):
    # Reads from the module-level _store, which pload() sets before worker
    # processes are spawned so the store is not pickled per task.
    return _store[key]
def pload(store, N=None, num_consumers=None):
    """
    Parallelize the reading of a mapping class.

    This is useful for any IO abstraction where you want to read
    many files at once

    Parameters:
    store : mapping object
    N : int
        number of items to process, mostly for debugging
    num_consumers : int, optional
        number of worker processes handed to DataPanel.process

    Returns:
    dict mapping each processed key to store[key]

    Note: This was built specifically for something like FileCache
    """
    # set global so consumers processes have access
    global _store
    _store = store
    keys = store.keys()
    if N is None:
        N = len(keys)
    results = {}
    # store on process so we aren't pickling it constantly
    pvars = {'store':store}
    loader = datapanel.DataPanel(keys, store=results)
    loader.process(_load, num=N, num_consumers=num_consumers, process_vars=pvars)
    return results
| [
"[email protected]"
]
| |
37b3647bb50ae28ed528e2bd7ffccfe2adbf6942 | ac47074bcf749273941ab01213bb6d1f59c40c99 | /project/fund/fund_factor/alpha_factor/morningstar_risk_adjusted_return.py | ac09c255fb010636db5553366eb00d9f13b56a5e | []
| no_license | xuzhihua95/quant | c5561e2b08370610f58662f2871f1f1490681be2 | c7e312c70d5f400b7e777d2ff4c9f6f223eabfee | refs/heads/master | 2020-05-19T17:04:08.796981 | 2019-04-24T02:50:29 | 2019-04-24T02:50:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,434 | py | from datetime import datetime
import os
import pandas as pd
import numpy as np
from quant.fund.fund_pool import FundPool
from quant.fund.fund import Fund
from quant.stock.date import Date
from quant.param.param import Parameter
from quant.stock.macro import Macro
import calendar
class MorningStarRiskAdjustedReturn(object):
    """Compute the Morningstar risk-adjusted return (MRAR) factor for funds
    and dump a funds-by-report-date factor matrix to CSV.

    NOTE(review): the official MRAR definition subtracts 1 from
    [mean((1 + ER_t)^-gamma)]^(-12/gamma); cal_factor_mrar omits the
    "- 1" -- confirm whether that is intentional downstream.
    """
    def __init__(self):
        # Output directory for the generated factor CSV files.
        self.path = r"E:\3_Data\4_fund_data\2_fund_factor\alpha_factor"
    def cal_factor_mrar(self, fund, T, r, end_date, fund_data, macro_data):
        """MRAR for one fund over the trailing T months with risk aversion r.

        fund       -- fund code, a column of fund_data
        T          -- number of trailing months required
        r          -- gamma (risk-aversion) exponent
        end_date   -- evaluation date (shifted to last month-end)
        fund_data  -- daily return (%) DataFrame indexed by 'YYYYMMDD' strings
        macro_data -- monthly risk-free return (%)
        Returns the MRAR value, or NaN when fewer than T months are available.
        """
        # T = 12
        # r = 2
        def fun_date(x):
            # 'YYYYMM' -> 'YYYYMMDD' using the last calendar day of the month.
            year = int(x[0:4])
            month = int(x[4:6])
            day = calendar.monthrange(year, month)[1]
            date = datetime(year, month, day).strftime("%Y%m%d")
            return date
        end_date = Date().get_normal_date_last_month_end_day(end_date)
        fund_data = pd.DataFrame(fund_data.loc[:end_date, fund])
        fund_data = fund_data.dropna()
        # Aggregate daily percentage returns into calendar months.
        fund_data["Month"] = fund_data.index.map(lambda x: x[0:6])
        fund_month = fund_data.groupby(by=["Month"]).sum()
        fund_month.index = fund_month.index.map(fun_date)
        concat_data = pd.concat([fund_month, macro_data], axis=1)
        concat_data.columns = ["FundReturn", "FreeRiskReturn"]
        concat_data = concat_data.dropna()
        # "ExcessMonthRerurn" (sic) is a persisted column key; do not rename.
        concat_data["ExcessMonthRerurn"] = concat_data["FundReturn"] - concat_data["FreeRiskReturn"]
        excess_return = pd.DataFrame(concat_data.loc[concat_data.index[-T:], "ExcessMonthRerurn"])
        excess_return /= 100.0
        if len(excess_return) == T:
            # Morningstar utility transform and annualisation.
            excess_return["R"] = excess_return["ExcessMonthRerurn"].map(lambda x: (1+x)**(-r))
            res = excess_return["R"].mean() ** (-12/r)
        else:
            res = np.nan
        return res
    def cal_factor_mrar_all(self, T, r, beg_date, end_date):
        """Compute MRAR(T, r) for every fund in the candidate pools at each
        quarterly report date in [beg_date, end_date] and write the
        transposed matrix (funds x dates) to
        <path>/MorningStar_MRAR_<r>_<T>.csv.
        """
        date_series = Date().get_normal_date_series(beg_date, end_date, "Q")
        result = pd.DataFrame([], index=date_series)
        def fun_date(x):
            # 'YYYYMM' -> 'YYYYMMDD' using the last calendar day of the month.
            year = int(x[0:4])
            month = int(x[4:6])
            day = calendar.monthrange(year, month)[1]
            date = datetime(year, month, day).strftime("%Y%m%d")
            return date
        # 1-year ChinaBond treasury yield used as the risk-free rate proxy.
        macro_code = "S0059744"
        macro_name = "中债国债到期收益率-1年"
        macro_data = Macro().get_macro_data(macro_code, None, None)
        macro_data.columns = [macro_name]
        macro_data['YearMonth'] = macro_data.index.map(lambda x: x[0:6])
        macro_data = macro_data.groupby(by=['YearMonth']).mean()[macro_name]
        macro_data.index = macro_data.index.map(fun_date)
        macro_data = pd.DataFrame(macro_data)
        macro_data.columns = [macro_name]
        # Annual yield (%) -> monthly rate (%).
        macro_data /= 12.0
        fund_data = Fund().get_fund_factor("Repair_Nav_Pct", None, None)
        for i in range(len(date_series)):
            # Report date for this quarter.
            report_date = date_series[i]
            # Fund pool membership (pool names are data keys; keep as-is).
            fund_code_list = FundPool().get_fund_pool_code(date=report_date, name="基金持仓基准基金池")
            fund_code_list3 = FundPool().get_fund_pool_code(date=report_date, name="量化基金")
            fund_code_list2 = FundPool().get_fund_pool_code(date="20180630", name="东方红基金")
            fund_code_list.extend(fund_code_list2)
            fund_code_list.extend(fund_code_list3)
            fund_code_list = list(set(fund_code_list))
            fund_code_list.sort()
            for i_fund in range(len(fund_code_list)):
                fund = fund_code_list[i_fund]
                print(report_date, fund)
                try:
                    res = self.cal_factor_mrar(fund, T, r, end_date, fund_data, macro_data)
                    result.loc[report_date, fund] = res
                except Exception as e:
                    # Best effort: any failure for one fund records NaN.
                    result.loc[report_date, fund] = np.nan
        result = result.T
        file = os.path.join(self.path, "MorningStar_MRAR_" + str(r) + "_" + str(T) + '.csv')
        result.to_csv(file)
if __name__ == "__main__":
beg_date = "20040331"
end_date = "20180909"
fund = "000001.OF"
T = 12
r = 2
MorningStarRiskAdjustedReturn().cal_factor_mrar_all(T, r, beg_date, end_date) | [
"[email protected]"
]
| |
31ee51f9fb901a1a711c8792108497d4f80f3159 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/41/usersdata/67/24524/submittedfiles/gravitacional.py | e231a12429dd729284c1d39d911cd4e681053dbb | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 802 | py | # -*- coding: utf-8 -*-
from __future__ import division
import numpy as np
import funcoes
#ENTRADA
dimensao = input('Digite a dimensao das matrizes: ')
matrizA = input('Digite a Matriz A como uma única linha entre aspas: ')
matrizD = input('Digite a Matriz D como uma única linha entre aspas: ')
alfa = input('Digite o valor de alfa: ')
#PREPARANDO A ENTRADA
T = np.zeros((dimensao,dimensao))
A = np.fromstring(matrizA, sep=' ').reshape(dimensao, dimensao)
d = np.fromstring(matrizD, sep=' ').reshape(dimensao, dimensao)
#comece aqui...
#INÍCIO
somal=[]
ss=0
for i in range (0,a.shape[1],1):
for j in range(0,a.shape[0],1):
ss=a[i,j]+ss
somal.append=ss
#SAÍDA
somatorio = sum(sum(T))
print('%.4f' % somatorio)
| [
"[email protected]"
]
| |
0bf0bdcbca713d816b6bf06d52c9ea4f4fc29158 | 2bb90b620f86d0d49f19f01593e1a4cc3c2e7ba8 | /pardus/tags/2009.1/programming/language/python/pysqlite/actions.py | d4b2fc36f873b9ceae3f7fec989ac524da571a25 | []
| no_license | aligulle1/kuller | bda0d59ce8400aa3c7ba9c7e19589f27313492f7 | 7f98de19be27d7a517fe19a37c814748f7e18ba6 | refs/heads/master | 2021-01-20T02:22:09.451356 | 2013-07-23T17:57:58 | 2013-07-23T17:57:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 456 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (C) TUBITAK/UEKAE
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
from pisi.actionsapi import pythonmodules
from pisi.actionsapi import pisitools
from pisi.actionsapi import get
def install():
    """Install the Python module, then drop redundant packaged docs."""
    pythonmodules.install()
    # install-source.txt duplicates build-time instructions; not shipped.
    pisitools.remove("usr/pysqlite2-doc/install-source.txt")
# needs sphinx package for documentation
| [
"[email protected]"
]
| |
4d2b5f48e997fed08bc8c45f8f7918ba868a9710 | e71b6d14fbdbc57c7234ca45a47329d7d02fc6f7 | /flask_api/venv/lib/python3.7/site-packages/vsts/task_agent/v4_1/models/service_endpoint_request.py | 84e317a726ed5a2c330d46f34c75ccca9feae9e4 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | u-blavins/secret_sasquatch_society | c36993c738ab29a6a4879bfbeb78a5803f4f2a57 | 0214eadcdfa9b40254e331a6617c50b422212f4c | refs/heads/master | 2020-08-14T00:39:52.948272 | 2020-01-22T13:54:58 | 2020-01-22T13:54:58 | 215,058,646 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,877 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class ServiceEndpointRequest(Model):
"""ServiceEndpointRequest.
:param data_source_details:
:type data_source_details: :class:`DataSourceDetails <task-agent.v4_1.models.DataSourceDetails>`
:param result_transformation_details:
:type result_transformation_details: :class:`ResultTransformationDetails <task-agent.v4_1.models.ResultTransformationDetails>`
:param service_endpoint_details:
:type service_endpoint_details: :class:`ServiceEndpointDetails <task-agent.v4_1.models.ServiceEndpointDetails>`
"""
_attribute_map = {
'data_source_details': {'key': 'dataSourceDetails', 'type': 'DataSourceDetails'},
'result_transformation_details': {'key': 'resultTransformationDetails', 'type': 'ResultTransformationDetails'},
'service_endpoint_details': {'key': 'serviceEndpointDetails', 'type': 'ServiceEndpointDetails'}
}
def __init__(self, data_source_details=None, result_transformation_details=None, service_endpoint_details=None):
super(ServiceEndpointRequest, self).__init__()
self.data_source_details = data_source_details
self.result_transformation_details = result_transformation_details
self.service_endpoint_details = service_endpoint_details
| [
"[email protected]"
]
| |
b981da78f6dd70eac3da54b275275e77340715aa | f537427134bac52e0274ecd73e664d597718a6a8 | /tests/hwsim/test_ap_hs20.py | d2d1af2b36bb40fc09bce469704b1f204da663f4 | [
"BSD-3-Clause"
]
| permissive | liulk/hostap | a19d411d961ec6a0165db38b8b72f8ced6875594 | f3ff948753ebe5643b5c2d16546a4d16e2c9d20a | refs/heads/master | 2016-08-06T02:19:18.487269 | 2014-02-27T00:05:21 | 2014-04-11T16:22:00 | 14,415,674 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 74,104 | py | # Hotspot 2.0 tests
# Copyright (c) 2013-2014, Jouni Malinen <[email protected]>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import time
import subprocess
import logging
logger = logging.getLogger()
import os
import os.path
import subprocess
import hostapd
from wlantest import Wlantest
from wpasupplicant import WpaSupplicant
def hs20_ap_params(ssid="test-hs20"):
    """Return a hostapd configuration dict for a Hotspot 2.0 test AP.

    Starts from the WPA2-Enterprise base configuration and layers the
    Interworking/HS 2.0 elements (venue info, roaming consortium, NAI
    realms, WAN metrics, connection capability, ...) on top.  Individual
    tests tweak or delete entries from the returned dict as needed.
    """
    params = hostapd.wpa2_params(ssid=ssid)
    params.update({
        'wpa_key_mgmt': "WPA-EAP",
        'ieee80211w': "1",
        'ieee8021x': "1",
        'auth_server_addr': "127.0.0.1",
        'auth_server_port': "1812",
        'auth_server_shared_secret': "radius",
        'interworking': "1",
        'access_network_type': "14",
        'internet': "1",
        'asra': "0",
        'esr': "0",
        'uesa': "0",
        'venue_group': "7",
        'venue_type': "1",
        'venue_name': [ "eng:Example venue", "fin:Esimerkkipaikka" ],
        'roaming_consortium': [ "112233", "1020304050", "010203040506",
                                "fedcba" ],
        'domain_name': "example.com,another.example.com",
        'nai_realm': [ "0,example.com,13[5:6],21[2:4][5:7]",
                       "0,another.example.com" ],
        'hs20': "1",
        'hs20_wan_metrics': "01:8000:1000:80:240:3000",
        'hs20_conn_capab': [ "1:0:2", "6:22:1", "17:5060:0" ],
        'hs20_operating_class': "5173",
        'anqp_3gpp_cell_net': "244,91",
    })
    return params
def check_auto_select(dev, bssid):
    """Run automatic Interworking selection and verify connection to bssid.

    Issues INTERWORKING_SELECT auto, waits for the connected event, checks
    the expected BSSID appears in it, and removes all networks afterwards.
    """
    dev.request("INTERWORKING_SELECT auto freq=2412")
    event = dev.wait_event(["CTRL-EVENT-CONNECTED"], timeout=15)
    if event is None:
        raise Exception("Connection timed out")
    if bssid not in event:
        raise Exception("Connected to incorrect network")
    dev.request("REMOVE_NETWORK all")
def interworking_select(dev, bssid, type=None, no_match=False, freq=None):
    """Run Interworking network selection and validate the outcome.

    bssid: expected BSSID in the match event (None to skip the check)
    type: expected network type ("home"/"roaming") reported in the event
    no_match: when True, expect INTERWORKING-NO-MATCH instead of a match
    freq: optional frequency constraint passed to INTERWORKING_SELECT
    """
    dev.dump_monitor()
    cmd = "INTERWORKING_SELECT"
    if freq:
        cmd = cmd + " freq=" + freq
    dev.request(cmd)
    event = dev.wait_event(["INTERWORKING-AP", "INTERWORKING-NO-MATCH"],
                           timeout=15)
    if event is None:
        raise Exception("Network selection timed out")
    if no_match:
        if "INTERWORKING-NO-MATCH" not in event:
            raise Exception("Unexpected network match")
        return
    if "INTERWORKING-NO-MATCH" in event:
        raise Exception("Matching network not found")
    if bssid and bssid not in event:
        raise Exception("Unexpected BSSID in match")
    if type and "type=" + type not in event:
        raise Exception("Network type not recognized correctly")
def check_sp_type(dev, sp_type):
    """Verify the connection status reports the expected sp_type.

    sp_type is the expected service provider type string ("home",
    "roaming" or "unknown").  Raises an Exception if the status field is
    missing or does not match.
    """
    type = dev.get_status_field("sp_type")
    if type is None:
        raise Exception("sp_type not available")
    if type != sp_type:
        # Fix: the old message always said "did not indicate home network"
        # regardless of the expected type; report expected vs. actual.
        raise Exception("sp_type did not indicate {} network (got {})".format(
            sp_type, type))
def hlr_auc_gw_available():
    """Return True when the hlr_auc_gw helper can be used.

    Requires both its control socket and the built binary to exist;
    otherwise logs and returns False so SIM/AKA tests can be skipped.
    """
    for path in ("/tmp/hlr_auc_gw.sock", "../../hostapd/hlr_auc_gw"):
        if not os.path.exists(path):
            logger.info("No hlr_auc_gw available")
            return False
    return True
def interworking_ext_sim_connect(dev, bssid, method):
    # Connect to bssid via Interworking and complete the EAP exchange with
    # external SIM processing (GSM-AUTH handled by hlr_auc_gw).
    dev.request("INTERWORKING_CONNECT " + bssid)
    interworking_ext_sim_auth(dev, method)
def interworking_ext_sim_auth(dev, method):
    """Complete EAP authentication using external SIM processing.

    Waits for the expected EAP method to be selected, answers the
    CTRL-REQ-SIM GSM-AUTH request by running hlr_auc_gw externally, and
    then waits for the connection to complete.
    """
    ev = dev.wait_event(["CTRL-EVENT-EAP-METHOD"], timeout=15)
    if ev is None:
        raise Exception("Network connected timed out")
    if "(" + method + ")" not in ev:
        raise Exception("Unexpected EAP method selection")
    ev = dev.wait_event(["CTRL-REQ-SIM"], timeout=15)
    if ev is None:
        raise Exception("Wait for external SIM processing request timed out")
    # Request format: CTRL-REQ-SIM-<id>:GSM-AUTH:<rand1> <rand2> ...
    p = ev.split(':', 2)
    if p[1] != "GSM-AUTH":
        raise Exception("Unexpected CTRL-REQ-SIM type")
    id = p[0].split('-')[3]
    rand = p[2].split(' ')[0]
    # Run the authentication triplet computation outside wpa_supplicant,
    # emulating an external SIM/modem.
    res = subprocess.check_output(["../../hostapd/hlr_auc_gw",
                                   "-m",
                                   "auth_serv/hlr_auc_gw.milenage_db",
                                   "GSM-AUTH-REQ 232010000000000 " + rand])
    if "GSM-AUTH-RESP" not in res:
        raise Exception("Unexpected hlr_auc_gw response")
    resp = res.split(' ')[2].rstrip()
    dev.request("CTRL-RSP-SIM-" + id + ":GSM-AUTH:" + resp)
    ev = dev.wait_event(["CTRL-EVENT-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
def interworking_connect(dev, bssid, method):
    # Connect to bssid via Interworking and verify the expected EAP method
    # is used for the authentication.
    dev.request("INTERWORKING_CONNECT " + bssid)
    interworking_auth(dev, method)
def interworking_auth(dev, method):
    """Wait for EAP method selection and connection completion.

    Verifies that the automatically selected EAP method matches the
    expected one before waiting for the connected event.
    """
    event = dev.wait_event(["CTRL-EVENT-EAP-METHOD"], timeout=15)
    if event is None:
        raise Exception("Network connected timed out")
    if "(%s)" % method not in event:
        raise Exception("Unexpected EAP method selection")
    event = dev.wait_event(["CTRL-EVENT-CONNECTED"], timeout=15)
    if event is None:
        raise Exception("Connection timed out")
def check_probe_resp(wt, bssid_unexpected, bssid_expected):
    """Check wlantest Probe Response counters for two APs.

    Raises if the AP identified by bssid_unexpected sent any Probe
    Response frames, or if the AP identified by bssid_expected sent none.
    Either argument may be None to skip that check.
    """
    if bssid_unexpected:
        if wt.get_bss_counter("probe_response", bssid_unexpected) > 0:
            raise Exception("Unexpected Probe Response frame from AP")
    if bssid_expected:
        if wt.get_bss_counter("probe_response", bssid_expected) == 0:
            raise Exception("No Probe Response frame from AP")
def test_ap_anqp_sharing(dev, apdev):
    """ANQP sharing within ESS and explicit unshare"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0]['ifname'], params)
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    # Same HESSID as the first AP (same homogeneous ESS), but a shorter
    # NAI Realm list so shared vs. per-BSS results can be told apart.
    params['hessid'] = bssid
    params['nai_realm'] = [ "0,example.com,13[5:6],21[2:4][5:7]" ]
    hostapd.add_ap(apdev[1]['ifname'], params)
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({ 'realm': "example.com", 'username': "test",
                                  'password': "secret",
                                  'domain': "example.com" })
    logger.info("Normal network selection with shared ANQP results")
    interworking_select(dev[0], None, "home", freq="2412")
    dev[0].dump_monitor()
    res1 = dev[0].get_bss(bssid)
    res2 = dev[0].get_bss(bssid2)
    if res1['anqp_nai_realm'] != res2['anqp_nai_realm']:
        raise Exception("ANQP results were not shared between BSSes")
    logger.info("Explicit ANQP request to unshare ANQP results")
    # ANQP Info ID 263 = NAI Realm list; direct per-BSS queries must
    # replace the ESS-shared data with BSS-specific results.
    dev[0].request("ANQP_GET " + bssid + " 263")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
    if ev is None:
        raise Exception("ANQP operation timed out")
    dev[0].request("ANQP_GET " + bssid2 + " 263")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
    if ev is None:
        raise Exception("ANQP operation timed out")
    res1 = dev[0].get_bss(bssid)
    res2 = dev[0].get_bss(bssid2)
    if res1['anqp_nai_realm'] == res2['anqp_nai_realm']:
        raise Exception("ANQP results were not unshared")
def test_ap_nai_home_realm_query(dev, apdev):
    """NAI Home Realm Query"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = [ "0,example.com,13[5:6],21[2:4][5:7]",
                            "0,another.example.org" ]
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].scan(freq="2412")
    # Targeted home-realm query: should return only the matching realm.
    dev[0].request("HS20_GET_NAI_HOME_REALM_LIST " + bssid + " realm=example.com")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
    if ev is None:
        raise Exception("ANQP operation timed out")
    nai1 = dev[0].get_bss(bssid)['anqp_nai_realm']
    dev[0].dump_monitor()
    # Wildcard NAI Realm query (ANQP Info ID 263): should return all realms.
    dev[0].request("ANQP_GET " + bssid + " 263")
    ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
    if ev is None:
        raise Exception("ANQP operation timed out")
    nai2 = dev[0].get_bss(bssid)['anqp_nai_realm']
    # The filtered response must be strictly shorter than the full list.
    if len(nai1) >= len(nai2):
        raise Exception("Unexpected NAI Realm list response lengths")
    # anqp_nai_realm is a hex dump, hence the 'hex' encoding of the realms.
    if "example.com".encode('hex') not in nai1:
        raise Exception("Home realm not reported")
    if "example.org".encode('hex') in nai1:
        raise Exception("Non-home realm reported")
    if "example.com".encode('hex') not in nai2:
        raise Exception("Home realm not reported in wildcard query")
    if "example.org".encode('hex') not in nai2:
        raise Exception("Non-home realm not reported in wildcard query ")
def test_ap_interworking_scan_filtering(dev, apdev):
    """Interworking scan filtering with HESSID and access network type"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    ssid = "test-hs20-ap1"
    params['ssid'] = ssid
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0]['ifname'], params)
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    ssid2 = "test-hs20-ap2"
    params['ssid'] = ssid2
    params['hessid'] = bssid2
    # Second AP uses access network type 1 (vs. 14 on the first) so the
    # two APs can be distinguished by ANT-based probe filtering.
    params['access_network_type'] = "1"
    del params['venue_group']
    del params['venue_type']
    hostapd.add_ap(apdev[1]['ifname'], params)
    dev[0].hs20_enable()
    wt = Wlantest()
    wt.flush()
    logger.info("Check probe request filtering based on HESSID")
    dev[0].request("SET hessid " + bssid2)
    dev[0].scan(freq="2412")
    # Short delay to let wlantest capture the Probe Response frames.
    time.sleep(0.03)
    check_probe_resp(wt, bssid, bssid2)
    logger.info("Check probe request filtering based on access network type")
    wt.clear_bss_counters(bssid)
    wt.clear_bss_counters(bssid2)
    dev[0].request("SET hessid 00:00:00:00:00:00")
    dev[0].request("SET access_network_type 14")
    dev[0].scan(freq="2412")
    time.sleep(0.03)
    check_probe_resp(wt, bssid2, bssid)
    wt.clear_bss_counters(bssid)
    wt.clear_bss_counters(bssid2)
    dev[0].request("SET hessid 00:00:00:00:00:00")
    dev[0].request("SET access_network_type 1")
    dev[0].scan(freq="2412")
    time.sleep(0.03)
    check_probe_resp(wt, bssid, bssid2)
    logger.info("Check probe request filtering based on HESSID and ANT")
    wt.clear_bss_counters(bssid)
    wt.clear_bss_counters(bssid2)
    dev[0].request("SET hessid " + bssid)
    dev[0].request("SET access_network_type 14")
    dev[0].scan(freq="2412")
    time.sleep(0.03)
    check_probe_resp(wt, bssid2, bssid)
    wt.clear_bss_counters(bssid)
    wt.clear_bss_counters(bssid2)
    # Mismatched HESSID/ANT combinations: neither AP should respond.
    dev[0].request("SET hessid " + bssid2)
    dev[0].request("SET access_network_type 14")
    dev[0].scan(freq="2412")
    time.sleep(0.03)
    check_probe_resp(wt, bssid, None)
    check_probe_resp(wt, bssid2, None)
    wt.clear_bss_counters(bssid)
    wt.clear_bss_counters(bssid2)
    dev[0].request("SET hessid " + bssid)
    dev[0].request("SET access_network_type 1")
    dev[0].scan(freq="2412")
    time.sleep(0.03)
    check_probe_resp(wt, bssid, None)
    check_probe_resp(wt, bssid2, None)
def test_ap_hs20_select(dev, apdev):
    """Hotspot 2.0 network selection"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    # Matching realm and domain -> home network.
    id = dev[0].add_cred_values({ 'realm': "example.com", 'username': "test",
                                  'password': "secret",
                                  'domain': "example.com" })
    interworking_select(dev[0], bssid, "home")
    dev[0].remove_cred(id)
    # Matching realm but different home domain -> roaming network.
    id = dev[0].add_cred_values({ 'realm': "example.com", 'username': "test",
                                  'password': "secret",
                                  'domain': "no.match.example.com" })
    interworking_select(dev[0], bssid, "roaming", freq="2412")
    # No realm match at all -> no network found.
    dev[0].set_cred_quoted(id, "realm", "no.match.example.com");
    interworking_select(dev[0], bssid, no_match=True, freq="2412")
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = [ "0,example.org,21" ]
    params['hessid'] = bssid2
    params['domain_name'] = "example.org"
    hostapd.add_ap(apdev[1]['ifname'], params)
    dev[0].remove_cred(id)
    id = dev[0].add_cred_values({ 'realm': "example.org", 'username': "test",
                                  'password': "secret",
                                  'domain': "example.org" })
    interworking_select(dev[0], bssid2, "home", freq="2412")
def hs20_simulated_sim(dev, ap, method):
    """Connect using a simulated (U)SIM credential and the given EAP method.

    Sets up an HS 2.0 AP advertising 3GPP network 555/444, adds a Milenage
    credential matching that PLMN, and verifies a home-network connection.
    """
    bssid = ap['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['anqp_3gpp_cell_net'] = "555,444"
    params['domain_name'] = "wlan.mnc444.mcc555.3gppnetwork.org"
    hostapd.add_ap(ap['ifname'], params)
    dev.hs20_enable()
    dev.add_cred_values({ 'imsi': "555444-333222111", 'eap': method,
                          'milenage': "5122250214c33e723a5dd523fc145fc0:981d464c7c52eb6e5036234984ad0bcf:000000000123"})
    # Fix: "home" was being passed as the bssid positional argument of
    # interworking_select(); it is the expected network type.  The old
    # call only worked because the substring "home" happened to appear in
    # the "type=home" event text.
    interworking_select(dev, bssid, "home", freq="2412")
    interworking_connect(dev, bssid, method)
    check_sp_type(dev, "home")
def test_ap_hs20_sim(dev, apdev):
    """Hotspot 2.0 with simulated SIM and EAP-SIM"""
    if not hlr_auc_gw_available():
        return "skip"
    hs20_simulated_sim(dev[0], apdev[0], "SIM")
    # A second selection attempt while connected must report that the
    # device is already associated with the matching network.
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-ALREADY-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Timeout on already-connected event")
def test_ap_hs20_aka(dev, apdev):
    """Hotspot 2.0 with simulated USIM and EAP-AKA"""
    # Requires the hlr_auc_gw helper for Milenage computations.
    if not hlr_auc_gw_available():
        return "skip"
    hs20_simulated_sim(dev[0], apdev[0], "AKA")
def test_ap_hs20_aka_prime(dev, apdev):
    """Hotspot 2.0 with simulated USIM and EAP-AKA'"""
    # Requires the hlr_auc_gw helper for Milenage computations.
    if not hlr_auc_gw_available():
        return "skip"
    hs20_simulated_sim(dev[0], apdev[0], "AKA'")
def test_ap_hs20_ext_sim(dev, apdev):
    """Hotspot 2.0 with external SIM processing"""
    if not hlr_auc_gw_available():
        return "skip"
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['anqp_3gpp_cell_net'] = "232,01"
    params['domain_name'] = "wlan.mnc001.mcc232.3gppnetwork.org"
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    dev[0].request("SET external_sim 1")
    dev[0].add_cred_values({ 'imsi': "23201-0000000000", 'eap': "SIM" })
    # Fix: pass "home" as the expected network type instead of as the
    # bssid argument (previously it matched only because "home" is a
    # substring of the "type=home" event text).
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_ext_sim_connect(dev[0], bssid, "SIM")
    check_sp_type(dev[0], "home")
def test_ap_hs20_ext_sim_roaming(dev, apdev):
    """Hotspot 2.0 with external SIM processing in roaming network"""
    if not hlr_auc_gw_available():
        return "skip"
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    # Credential PLMN 232/01 is in the advertised list but does not match
    # the AP domain -> this is a roaming network for the credential.
    params['anqp_3gpp_cell_net'] = "244,91;310,026;232,01;234,56"
    params['domain_name'] = "wlan.mnc091.mcc244.3gppnetwork.org"
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    dev[0].request("SET external_sim 1")
    dev[0].add_cred_values({ 'imsi': "23201-0000000000", 'eap': "SIM" })
    # Fix: pass "roaming" as the expected network type instead of as the
    # bssid argument (the old call relied on "roaming" appearing as a
    # substring of the "type=roaming" event text).
    interworking_select(dev[0], bssid, "roaming", freq="2412")
    interworking_ext_sim_connect(dev[0], bssid, "SIM")
    check_sp_type(dev[0], "roaming")
def test_ap_hs20_username(dev, apdev):
    """Hotspot 2.0 connection in username/password credential"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    params['disable_dgaf'] = '1'
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({ 'realm': "example.com",
                                  'username': "hs20-test",
                                  'password': "password",
                                  'ca_cert': "auth_serv/ca.pem",
                                  'domain': "example.com",
                                  'update_identifier': "1234" })
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    check_sp_type(dev[0], "home")
    status = dev[0].get_status()
    if status['pairwise_cipher'] != "CCMP":
        raise Exception("Unexpected pairwise cipher")
    # hs20=2 indicates HS 2.0 Release 2 support in the association.
    if status['hs20'] != "2":
        raise Exception("Unexpected HS 2.0 support indication")
    # Second station connects with manual configuration (no Interworking)
    # to confirm normal EAP-TTLS access to the same AP still works.
    dev[1].connect("test-hs20", key_mgmt="WPA-EAP", eap="TTLS",
                   identity="hs20-test", password="password",
                   ca_cert="auth_serv/ca.pem", phase2="auth=MSCHAPV2",
                   scan_freq="2412")
def eap_test(dev, ap, eap_params, method, user):
    """Common helper for NAI Realm EAP method selection tests.

    eap_params is the EAP portion of the nai_realm string (e.g.
    "25[3:26]"), method the EAP method name expected to be selected, and
    user the username for the credential.
    """
    bssid = ap['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = [ "0,example.com," + eap_params ]
    hostapd.add_ap(ap['ifname'], params)
    dev.hs20_enable()
    dev.add_cred_values({ 'realm': "example.com",
                          'username': user,
                          'password': "password" })
    interworking_select(dev, bssid, freq="2412")
    interworking_connect(dev, bssid, method)
def test_ap_hs20_eap_unknown(dev, apdev):
    """Hotspot 2.0 connection with unknown EAP method"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # EAP method 99 is not a known method -> no credential can match.
    params['nai_realm'] = "0,example.com,99"
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values(default_cred())
    interworking_select(dev[0], None, no_match=True, freq="2412")
def test_ap_hs20_eap_peap_mschapv2(dev, apdev):
    """Hotspot 2.0 connection with PEAP/MSCHAPV2"""
    # NAI Realm EAP method 25 (PEAP) with auth param 3:26 (inner EAP-MSCHAPV2).
    eap_test(dev[0], apdev[0], "25[3:26]", "PEAP", "user")
def test_ap_hs20_eap_peap_default(dev, apdev):
    """Hotspot 2.0 connection with PEAP/MSCHAPV2 (as default)"""
    # EAP method 25 (PEAP) with no inner-method parameter advertised.
    eap_test(dev[0], apdev[0], "25", "PEAP", "user")
def test_ap_hs20_eap_peap_gtc(dev, apdev):
    """Hotspot 2.0 connection with PEAP/GTC"""
    # EAP method 25 (PEAP) with auth param 3:6 (inner EAP-GTC).
    eap_test(dev[0], apdev[0], "25[3:6]", "PEAP", "user")
def test_ap_hs20_eap_peap_unknown(dev, apdev):
    """Hotspot 2.0 connection with PEAP/unknown"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # PEAP with an unknown inner method (99) -> no credential match.
    params['nai_realm'] = "0,example.com,25[3:99]"
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values(default_cred())
    interworking_select(dev[0], None, no_match=True, freq="2412")
def test_ap_hs20_eap_ttls_chap(dev, apdev):
    """Hotspot 2.0 connection with TTLS/CHAP"""
    # EAP method 21 (TTLS) with non-EAP inner auth param 2:2 (CHAP).
    eap_test(dev[0], apdev[0], "21[2:2]", "TTLS", "chap user")
def test_ap_hs20_eap_ttls_mschap(dev, apdev):
    """Hotspot 2.0 connection with TTLS/MSCHAP"""
    # EAP method 21 (TTLS) with non-EAP inner auth param 2:3 (MSCHAP).
    eap_test(dev[0], apdev[0], "21[2:3]", "TTLS", "mschap user")
def test_ap_hs20_eap_ttls_eap_mschapv2(dev, apdev):
    """Hotspot 2.0 connection with TTLS/EAP-MSCHAPv2"""
    # TTLS with inner EAP-MSCHAPV2 (3:26) plus extra auth params,
    # including an unknown one (99:99) that must be ignored.
    eap_test(dev[0], apdev[0], "21[3:26][6:7][99:99]", "TTLS", "user")
def test_ap_hs20_eap_ttls_eap_unknown(dev, apdev):
    """Hotspot 2.0 connection with TTLS/EAP-unknown"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # TTLS with an unknown inner EAP method (99) -> no credential match.
    params['nai_realm'] = "0,example.com,21[3:99]"
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values(default_cred())
    interworking_select(dev[0], None, no_match=True, freq="2412")
def test_ap_hs20_eap_ttls_eap_unsupported(dev, apdev):
    """Hotspot 2.0 connection with TTLS/EAP-OTP(unsupported)"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # TTLS with inner EAP-OTP (5), which wpa_supplicant does not support
    # for this use -> no credential match.
    params['nai_realm'] = "0,example.com,21[3:5]"
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values(default_cred())
    interworking_select(dev[0], None, no_match=True, freq="2412")
def test_ap_hs20_eap_ttls_unknown(dev, apdev):
    """Hotspot 2.0 connection with TTLS/unknown"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # TTLS with an unknown non-EAP inner auth value (2:5) -> no match.
    params['nai_realm'] = "0,example.com,21[2:5]"
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values(default_cred())
    interworking_select(dev[0], None, no_match=True, freq="2412")
def test_ap_hs20_eap_fast_mschapv2(dev, apdev):
    """Hotspot 2.0 connection with FAST/EAP-MSCHAPV2"""
    # EAP method 43 (FAST) with inner EAP-MSCHAPV2 (3:26).
    eap_test(dev[0], apdev[0], "43[3:26]", "FAST", "user")
def test_ap_hs20_eap_fast_gtc(dev, apdev):
    """Hotspot 2.0 connection with FAST/EAP-GTC"""
    # EAP method 43 (FAST) with inner EAP-GTC (3:6).
    eap_test(dev[0], apdev[0], "43[3:6]", "FAST", "user")
def test_ap_hs20_eap_tls(dev, apdev):
    """Hotspot 2.0 connection with EAP-TLS"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # EAP method 13 (TLS) with credential type 5:6 (certificate).
    params['nai_realm'] = [ "0,example.com,13[5:6]" ]
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values({ 'realm': "example.com",
                             'username': "certificate-user",
                             'ca_cert': "auth_serv/ca.pem",
                             'client_cert': "auth_serv/user.pem",
                             'private_key': "auth_serv/user.key"})
    interworking_select(dev[0], bssid, freq="2412")
    interworking_connect(dev[0], bssid, "TLS")
def test_ap_hs20_eap_cert_unknown(dev, apdev):
    """Hotspot 2.0 connection with certificate, but unknown EAP method"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # Certificate credential type (5:6) but unknown EAP method 99.
    params['nai_realm'] = [ "0,example.com,99[5:6]" ]
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values({ 'realm': "example.com",
                             'username': "certificate-user",
                             'ca_cert': "auth_serv/ca.pem",
                             'client_cert': "auth_serv/user.pem",
                             'private_key': "auth_serv/user.key"})
    interworking_select(dev[0], None, no_match=True, freq="2412")
def test_ap_hs20_eap_cert_unsupported(dev, apdev):
    """Hotspot 2.0 connection with certificate, but unsupported TTLS"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    # TTLS (21) with certificate credential type (5:6) is not a supported
    # combination for this credential -> no match expected.
    params['nai_realm'] = [ "0,example.com,21[5:6]" ]
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    dev[0].add_cred_values({ 'realm': "example.com",
                             'username': "certificate-user",
                             'ca_cert': "auth_serv/ca.pem",
                             'client_cert': "auth_serv/user.pem",
                             'private_key': "auth_serv/user.key"})
    interworking_select(dev[0], None, no_match=True, freq="2412")
def test_ap_hs20_eap_invalid_cred(dev, apdev):
    """Hotspot 2.0 connection with invalid cred configuration"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    # Client certificate without a private key is not a usable credential.
    dev[0].add_cred_values({ 'realm': "example.com",
                             'username': "certificate-user",
                             'client_cert': "auth_serv/user.pem" })
    interworking_select(dev[0], None, no_match=True, freq="2412")
def test_ap_hs20_nai_realms(dev, apdev):
    """Hotspot 2.0 connection and multiple NAI realms and TTLS/PAP"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    # Several semicolon-separated realms in one entry; only example.com
    # (in the middle) matches the credential.
    params['nai_realm'] = [ "0,no.match.here;example.com;no.match.here.either,21[2:1][5:7]" ]
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({ 'realm': "example.com",
                                  'username': "pap user",
                                  'password': "password",
                                  'domain': "example.com" })
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    check_sp_type(dev[0], "home")
def test_ap_hs20_roaming_consortium(dev, apdev):
    """Hotspot 2.0 connection based on roaming consortium match"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    # Try each OI the AP advertises (various lengths) in turn.
    for consortium in [ "112233", "1020304050", "010203040506", "fedcba" ]:
        id = dev[0].add_cred_values({ 'username': "user",
                                      'password': "password",
                                      'domain': "example.com",
                                      'roaming_consortium': consortium,
                                      'eap': "PEAP" })
        interworking_select(dev[0], bssid, "home", freq="2412")
        interworking_connect(dev[0], bssid, "PEAP")
        check_sp_type(dev[0], "home")
        # While connected, re-selection must report already-connected.
        dev[0].request("INTERWORKING_SELECT auto freq=2412")
        ev = dev[0].wait_event(["INTERWORKING-ALREADY-CONNECTED"], timeout=15)
        if ev is None:
            raise Exception("Timeout on already-connected event")
        dev[0].remove_cred(id)
def test_ap_hs20_username_roaming(dev, apdev):
    """Hotspot 2.0 connection in username/password credential (roaming)"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = [ "0,example.com,13[5:6],21[2:4][5:7]",
                            "0,roaming.example.com,21[2:4][5:7]",
                            "0,another.example.com" ]
    # AP domain does not match the credential's home domain -> roaming.
    params['domain_name'] = "another.example.com"
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({ 'realm': "roaming.example.com",
                                  'username': "hs20-test",
                                  'password': "password",
                                  'domain': "example.com" })
    interworking_select(dev[0], bssid, "roaming", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    check_sp_type(dev[0], "roaming")
def test_ap_hs20_username_unknown(dev, apdev):
    """Hotspot 2.0 connection in username/password credential (no domain in cred)"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    # No 'domain' in the credential -> home/roaming cannot be determined.
    id = dev[0].add_cred_values({ 'realm': "example.com",
                                  'username': "hs20-test",
                                  'password': "password" })
    interworking_select(dev[0], bssid, "unknown", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    check_sp_type(dev[0], "unknown")
def test_ap_hs20_username_unknown2(dev, apdev):
    """Hotspot 2.0 connection in username/password credential (no domain advertized)"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    # AP advertises no Domain Name ANQP element -> type stays "unknown"
    # even though the credential has a home domain configured.
    del params['domain_name']
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({ 'realm': "example.com",
                                  'username': "hs20-test",
                                  'password': "password",
                                  'domain': "example.com" })
    interworking_select(dev[0], bssid, "unknown", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    check_sp_type(dev[0], "unknown")
def test_ap_hs20_gas_while_associated(dev, apdev):
    """Hotspot 2.0 connection with GAS query while associated"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({ 'realm': "example.com",
                                  'username': "hs20-test",
                                  'password': "password",
                                  'domain': "example.com" })
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    logger.info("Verifying GAS query while associated")
    dev[0].request("FETCH_ANQP")
    # FETCH_ANQP retrieves multiple ANQP elements; expect six RX-ANQP
    # events for the single AP.
    for i in range(0, 6):
        ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
        if ev is None:
            raise Exception("Operation timed out")
def test_ap_hs20_gas_while_associated_with_pmf(dev, apdev):
    """Hotspot 2.0 connection with GAS query while associated and using PMF"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0]['ifname'], params)
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid2
    params['nai_realm'] = [ "0,no-match.example.org,13[5:6],21[2:4][5:7]" ]
    hostapd.add_ap(apdev[1]['ifname'], params)
    dev[0].hs20_enable()
    # pmf 2 = management frame protection required.
    dev[0].request("SET pmf 2")
    id = dev[0].add_cred_values({ 'realm': "example.com",
                                  'username': "hs20-test",
                                  'password': "password",
                                  'domain': "example.com" })
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    logger.info("Verifying GAS query while associated")
    dev[0].request("FETCH_ANQP")
    # Two APs are visible, so expect two sets of six ANQP responses.
    for i in range(0, 2 * 6):
        ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
        if ev is None:
            raise Exception("Operation timed out")
def test_ap_hs20_gas_frag_while_associated(dev, apdev):
    """Hotspot 2.0 connection with fragmented GAS query while associated"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0]['ifname'], params)
    hapd = hostapd.Hostapd(apdev[0]['ifname'])
    # Force a small fragmentation threshold so GAS responses are split
    # into multiple comeback fragments.
    hapd.set("gas_frag_limit", "50")
    dev[0].hs20_enable()
    id = dev[0].add_cred_values({ 'realm': "example.com",
                                  'username': "hs20-test",
                                  'password': "password",
                                  'domain': "example.com" })
    interworking_select(dev[0], bssid, "home", freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    logger.info("Verifying GAS query while associated")
    dev[0].request("FETCH_ANQP")
    for i in range(0, 6):
        ev = dev[0].wait_event(["RX-ANQP"], timeout=5)
        if ev is None:
            raise Exception("Operation timed out")
def test_ap_hs20_multiple_connects(dev, apdev):
    """Hotspot 2.0 connection through multiple network selections"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    values = { 'realm': "example.com",
               'username': "hs20-test",
               'password': "password",
               'domain': "example.com" }
    id = dev[0].add_cred_values(values)
    # Three rounds: connect, disconnect+reconnect, and a final round that
    # must report already-connected instead of creating a new connection.
    for i in range(0, 3):
        logger.info("Starting Interworking network selection")
        dev[0].request("INTERWORKING_SELECT auto freq=2412")
        while True:
            ev = dev[0].wait_event(["INTERWORKING-NO-MATCH",
                                    "INTERWORKING-ALREADY-CONNECTED",
                                    "CTRL-EVENT-CONNECTED"], timeout=15)
            if ev is None:
                raise Exception("Connection timed out")
            if "INTERWORKING-NO-MATCH" in ev:
                raise Exception("Matching AP not found")
            if "CTRL-EVENT-CONNECTED" in ev:
                break
            if i == 2 and "INTERWORKING-ALREADY-CONNECTED" in ev:
                break
        if i == 0:
            dev[0].request("DISCONNECT")
        dev[0].dump_monitor()
    networks = dev[0].list_networks()
    # Re-selection must reuse the existing network block, not add copies.
    if len(networks) > 1:
        raise Exception("Duplicated network block detected")
def test_ap_hs20_disallow_aps(dev, apdev):
    """Hotspot 2.0 connection and disallow_aps"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].hs20_enable()
    values = { 'realm': "example.com",
               'username': "hs20-test",
               'password': "password",
               'domain': "example.com" }
    id = dev[0].add_cred_values(values)
    logger.info("Verify disallow_aps bssid")
    # disallow_aps expects the BSSID without colon separators.
    dev[0].request("SET disallow_aps bssid " + bssid.translate(None, ':'))
    dev[0].request("INTERWORKING_SELECT auto")
    ev = dev[0].wait_event(["INTERWORKING-NO-MATCH"], timeout=15)
    if ev is None:
        raise Exception("Network selection timed out")
    dev[0].dump_monitor()
    logger.info("Verify disallow_aps ssid")
    # 746573742d68733230 is "test-hs20" in hex.
    dev[0].request("SET disallow_aps ssid 746573742d68733230")
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-NO-MATCH"], timeout=15)
    if ev is None:
        raise Exception("Network selection timed out")
    dev[0].dump_monitor()
    logger.info("Verify disallow_aps clear")
    dev[0].request("SET disallow_aps ")
    interworking_select(dev[0], bssid, "home", freq="2412")
    # Explicit connect attempt to a disallowed BSS must be rejected.
    dev[0].request("SET disallow_aps bssid " + bssid.translate(None, ':'))
    ret = dev[0].request("INTERWORKING_CONNECT " + bssid)
    if "FAIL" not in ret:
        raise Exception("INTERWORKING_CONNECT to disallowed BSS not rejected")
def policy_test(dev, ap, values, only_one=True):
    """Run automatic network selection with a credential and verify policy.

    values: credential parameters to add for the test
    ap: expected AP to be selected/connected (None to skip BSSID checks)
    only_one: when True, no other AP may be reported as acceptable
    Returns the list of selection events seen, for further inspection.
    """
    dev.dump_monitor()
    if ap:
        logger.info("Verify network selection to AP " + ap['ifname'])
        bssid = ap['bssid']
    else:
        logger.info("Verify network selection")
        bssid = None
    dev.hs20_enable()
    id = dev.add_cred_values(values)
    dev.request("INTERWORKING_SELECT auto freq=2412")
    events = []
    # Collect per-AP events until the final selection decision is seen.
    while True:
        ev = dev.wait_event(["INTERWORKING-AP", "INTERWORKING-NO-MATCH",
                             "INTERWORKING-BLACKLISTED",
                             "INTERWORKING-SELECTED"], timeout=15)
        if ev is None:
            raise Exception("Network selection timed out")
        events.append(ev)
        if "INTERWORKING-NO-MATCH" in ev:
            raise Exception("Matching AP not found")
        if bssid and only_one and "INTERWORKING-AP" in ev and bssid not in ev:
            raise Exception("Unexpected AP claimed acceptable")
        if "INTERWORKING-SELECTED" in ev:
            if bssid and bssid not in ev:
                raise Exception("Selected incorrect BSS")
            break
    ev = dev.wait_event(["CTRL-EVENT-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if bssid and bssid not in ev:
        raise Exception("Connected to incorrect BSS")
    # Double check against the status output, not just the event text.
    conn_bssid = dev.get_status_field("bssid")
    if bssid and conn_bssid != bssid:
        raise Exception("bssid information points to incorrect BSS")
    dev.remove_cred(id)
    dev.dump_monitor()
    return events
def default_cred(domain=None):
    """Return the default username/password credential used by these tests.

    When domain is given (and non-empty), it is included as the home
    domain of the credential.
    """
    cred = {
        'realm': "example.com",
        'username': "hs20-test",
        'password': "password",
    }
    if domain:
        cred['domain'] = domain
    return cred
def test_ap_hs20_prefer_home(dev, apdev):
    """Hotspot 2.0 and preference for home operator over roaming"""
    # NOTE(review): the original docstring said "required roaming
    # consortium" -- apparently copy-pasted from the next test.  This
    # test actually verifies that the AP whose domain matches the
    # credential's home domain is preferred over a roaming AP.
    params = hs20_ap_params()
    params['domain_name'] = "example.org"
    hostapd.add_ap(apdev[0]['ifname'], params)
    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    params['domain_name'] = "example.com"
    hostapd.add_ap(apdev[1]['ifname'], params)
    values = default_cred()
    values['domain'] = "example.com"
    policy_test(dev[0], apdev[1], values, only_one=False)
    values['domain'] = "example.org"
    policy_test(dev[0], apdev[0], values, only_one=False)
def test_ap_hs20_req_roaming_consortium(dev, apdev):
    """Hotspot 2.0 required roaming consortium"""
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0]['ifname'], params)
    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    params['roaming_consortium'] = [ "223344" ]
    hostapd.add_ap(apdev[1]['ifname'], params)
    values = default_cred()
    # Only the AP advertising the required OI may be selected.
    values['required_roaming_consortium'] = "223344"
    policy_test(dev[0], apdev[1], values)
    values['required_roaming_consortium'] = "112233"
    policy_test(dev[0], apdev[0], values)
    # Validate input parsing: OI must be 3..15 octets of full hex bytes.
    id = dev[0].add_cred()
    dev[0].set_cred(id, "required_roaming_consortium", "112233")
    dev[0].set_cred(id, "required_roaming_consortium", "112233445566778899aabbccddeeff")
    for val in [ "", "1", "11", "1122", "1122334", "112233445566778899aabbccddeeff00" ]:
        if "FAIL" not in dev[0].request('SET_CRED {} required_roaming_consortium {}'.format(id, val)):
            raise Exception("Invalid roaming consortium value accepted: " + val)
def test_ap_hs20_excluded_ssid(dev, apdev):
    """Hotspot 2.0 exclusion based on SSID"""
    params = hs20_ap_params()
    params['roaming_consortium'] = [ "223344" ]
    params['anqp_3gpp_cell_net'] = "555,444"
    hostapd.add_ap(apdev[0]['ifname'], params)
    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    params['roaming_consortium'] = [ "223344" ]
    params['anqp_3gpp_cell_net'] = "555,444"
    hostapd.add_ap(apdev[1]['ifname'], params)
    # excluded_ssid with a realm-based credential.
    values = default_cred()
    values['excluded_ssid'] = "test-hs20"
    events = policy_test(dev[0], apdev[1], values)
    ev = [e for e in events if "INTERWORKING-BLACKLISTED " + apdev[0]['bssid'] in e]
    if len(ev) != 1:
        raise Exception("Excluded network not reported")
    values['excluded_ssid'] = "test-hs20-other"
    events = policy_test(dev[0], apdev[0], values)
    ev = [e for e in events if "INTERWORKING-BLACKLISTED " + apdev[1]['bssid'] in e]
    if len(ev) != 1:
        raise Exception("Excluded network not reported")
    # excluded_ssid with a roaming-consortium-based credential.
    values = default_cred()
    values['roaming_consortium'] = "223344"
    values['eap'] = "TTLS"
    values['phase2'] = "auth=MSCHAPV2"
    values['excluded_ssid'] = "test-hs20"
    events = policy_test(dev[0], apdev[1], values)
    ev = [e for e in events if "INTERWORKING-BLACKLISTED " + apdev[0]['bssid'] in e]
    if len(ev) != 1:
        raise Exception("Excluded network not reported")
    # excluded_ssid with a SIM credential.
    values = { 'imsi': "555444-333222111", 'eap': "SIM",
               'milenage': "5122250214c33e723a5dd523fc145fc0:981d464c7c52eb6e5036234984ad0bcf:000000000123",
               'excluded_ssid': "test-hs20" }
    events = policy_test(dev[0], apdev[1], values)
    ev = [e for e in events if "INTERWORKING-BLACKLISTED " + apdev[0]['bssid'] in e]
    if len(ev) != 1:
        raise Exception("Excluded network not reported")
def test_ap_hs20_roam_to_higher_prio(dev, apdev):
    """Hotspot 2.0 and roaming from current to higher priority network"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params(ssid="test-hs20-visited")
    params['domain_name'] = "visited.example.org"
    hostapd.add_ap(apdev[0]['ifname'], params)

    dev[0].hs20_enable()
    id = dev[0].add_cred_values({ 'realm': "example.com",
                                  'username': "hs20-test",
                                  'password': "password",
                                  'domain': "example.com" })
    logger.info("Connect to the only network option")
    interworking_select(dev[0], bssid, "roaming", freq="2412")
    dev[0].dump_monitor()
    interworking_connect(dev[0], bssid, "TTLS")

    logger.info("Start another AP (home operator) and reconnect")
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20-home")
    params['domain_name'] = "example.com"
    hostapd.add_ap(apdev[1]['ifname'], params)

    # Automatic re-selection must roam from the roaming partner to the home
    # network instead of claiming it is already on the best option.
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["INTERWORKING-NO-MATCH",
                            "INTERWORKING-ALREADY-CONNECTED",
                            "CTRL-EVENT-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if "INTERWORKING-NO-MATCH" in ev:
        raise Exception("Matching AP not found")
    if "INTERWORKING-ALREADY-CONNECTED" in ev:
        raise Exception("Unexpected AP selected")
    if bssid2 not in ev:
        raise Exception("Unexpected BSSID after reconnection")
def test_ap_hs20_domain_suffix_match(dev, apdev):
    """Hotspot 2.0 and domain_suffix_match"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0]['ifname'], params)

    dev[0].hs20_enable()
    id = dev[0].add_cred_values({ 'realm': "example.com",
                                  'username': "hs20-test",
                                  'password': "password",
                                  'domain': "example.com",
                                  'domain_suffix_match': "w1.fi" })
    # Matching server certificate suffix: connection succeeds.
    interworking_select(dev[0], bssid, "home", freq="2412")
    dev[0].dump_monitor()
    interworking_connect(dev[0], bssid, "TTLS")
    dev[0].request("REMOVE_NETWORK all")
    dev[0].dump_monitor()

    # Non-matching suffix: TLS server certificate validation must fail with
    # an explicit domain suffix mismatch error.
    dev[0].set_cred_quoted(id, "domain_suffix_match", "no-match.example.com")
    interworking_select(dev[0], bssid, "home", freq="2412")
    dev[0].dump_monitor()
    dev[0].request("INTERWORKING_CONNECT " + bssid)
    ev = dev[0].wait_event(["CTRL-EVENT-EAP-TLS-CERT-ERROR"])
    if ev is None:
        raise Exception("TLS certificate error not reported")
    if "Domain suffix mismatch" not in ev:
        raise Exception("Domain suffix mismatch not reported")
def test_ap_hs20_roaming_partner_preference(dev, apdev):
    """Hotspot 2.0 and roaming partner preference"""
    params = hs20_ap_params()
    params['domain_name'] = "roaming.example.org"
    hostapd.add_ap(apdev[0]['ifname'], params)

    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    params['domain_name'] = "roaming.example.net"
    hostapd.add_ap(apdev[1]['ifname'], params)

    logger.info("Verify default vs. specified preference")
    # roaming_partner format: FQDN,exact-match,priority,country.
    # Priority 127 prefers the listed partner over the unlisted AP; 129
    # de-prefers it (presumably relative to a default of 128 - TODO confirm).
    values = default_cred()
    values['roaming_partner'] = "roaming.example.net,1,127,*"
    policy_test(dev[0], apdev[1], values, only_one=False)
    values['roaming_partner'] = "roaming.example.net,1,129,*"
    policy_test(dev[0], apdev[0], values, only_one=False)

    logger.info("Verify partial FQDN match")
    values['roaming_partner'] = "example.net,0,0,*"
    policy_test(dev[0], apdev[1], values, only_one=False)
    values['roaming_partner'] = "example.net,0,255,*"
    policy_test(dev[0], apdev[0], values, only_one=False)
def test_ap_hs20_max_bss_load(dev, apdev):
    """Hotspot 2.0 and maximum BSS load"""
    # bss_load_test format: station-count:channel-utilization:available-capacity.
    params = hs20_ap_params()
    params['bss_load_test'] = "12:200:20000"
    hostapd.add_ap(apdev[0]['ifname'], params)

    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    params['bss_load_test'] = "5:20:10000"
    hostapd.add_ap(apdev[1]['ifname'], params)

    logger.info("Verify maximum BSS load constraint")
    values = default_cred()
    values['domain'] = "example.com"
    values['max_bss_load'] = "100"
    events = policy_test(dev[0], apdev[1], values, only_one=False)

    # apdev[0] (utilization 200) exceeds max_bss_load=100; apdev[1] does not.
    ev = [e for e in events if "INTERWORKING-AP " + apdev[0]['bssid'] in e]
    if len(ev) != 1 or "over_max_bss_load=1" not in ev[0]:
        raise Exception("Maximum BSS Load case not noticed")
    ev = [e for e in events if "INTERWORKING-AP " + apdev[1]['bssid'] in e]
    if len(ev) != 1 or "over_max_bss_load=1" in ev[0]:
        raise Exception("Maximum BSS Load case reported incorrectly")

    logger.info("Verify maximum BSS load does not prevent connection")
    # With both APs over the limit, a connection must still be possible;
    # both BSSes are flagged but one is used anyway.
    values['max_bss_load'] = "1"
    events = policy_test(dev[0], None, values)
    ev = [e for e in events if "INTERWORKING-AP " + apdev[0]['bssid'] in e]
    if len(ev) != 1 or "over_max_bss_load=1" not in ev[0]:
        raise Exception("Maximum BSS Load case not noticed")
    ev = [e for e in events if "INTERWORKING-AP " + apdev[1]['bssid'] in e]
    if len(ev) != 1 or "over_max_bss_load=1" not in ev[0]:
        raise Exception("Maximum BSS Load case not noticed")
def test_ap_hs20_max_bss_load2(dev, apdev):
    """Hotspot 2.0 and maximum BSS load with one AP not advertising"""
    params = hs20_ap_params()
    params['bss_load_test'] = "12:200:20000"
    hostapd.add_ap(apdev[0]['ifname'], params)

    # Second AP does not include a BSS Load element at all; the max_bss_load
    # constraint must then not flag it.
    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    hostapd.add_ap(apdev[1]['ifname'], params)

    logger.info("Verify maximum BSS load constraint with AP advertisement")
    values = default_cred()
    values['domain'] = "example.com"
    values['max_bss_load'] = "100"
    events = policy_test(dev[0], apdev[1], values, only_one=False)
    ev = [e for e in events if "INTERWORKING-AP " + apdev[0]['bssid'] in e]
    if len(ev) != 1 or "over_max_bss_load=1" not in ev[0]:
        raise Exception("Maximum BSS Load case not noticed")
    ev = [e for e in events if "INTERWORKING-AP " + apdev[1]['bssid'] in e]
    if len(ev) != 1 or "over_max_bss_load=1" in ev[0]:
        raise Exception("Maximum BSS Load case reported incorrectly")
def test_ap_hs20_multi_cred_sp_prio(dev, apdev):
    """Hotspot 2.0 multi-cred sp_priority"""
    # Requires the external HLR/AuC gateway for SIM authentication.
    if not hlr_auc_gw_available():
        return "skip"
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    del params['domain_name']
    params['anqp_3gpp_cell_net'] = "232,01"
    hostapd.add_ap(apdev[0]['ifname'], params)

    dev[0].hs20_enable()
    dev[0].request("SET external_sim 1")
    # Two credentials from the same provisioning SP matching the same AP;
    # sp_priority decides which one gets used (SIM first here).
    id1 = dev[0].add_cred_values({ 'imsi': "23201-0000000000", 'eap': "SIM",
                                   'provisioning_sp': "example.com",
                                   'sp_priority': "1" })
    id2 = dev[0].add_cred_values({ 'realm': "example.com",
                                   'username': "hs20-test",
                                   'password': "password",
                                   'domain': "example.com",
                                   'provisioning_sp': "example.com",
                                   'sp_priority': "2" })
    dev[0].dump_monitor()
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    interworking_ext_sim_auth(dev[0], "SIM")
    check_sp_type(dev[0], "unknown")
    dev[0].request("REMOVE_NETWORK all")

    # Swap the priorities; the TTLS credential must now be chosen.
    dev[0].set_cred(id1, "sp_priority", "2")
    dev[0].set_cred(id2, "sp_priority", "1")
    dev[0].dump_monitor()
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    interworking_auth(dev[0], "TTLS")
    check_sp_type(dev[0], "unknown")
def test_ap_hs20_multi_cred_sp_prio2(dev, apdev):
    """Hotspot 2.0 multi-cred sp_priority with two BSSes"""
    # Requires the external HLR/AuC gateway for SIM authentication.
    if not hlr_auc_gw_available():
        return "skip"
    # First AP: 3GPP cellular info only (no NAI realm) - matches only the
    # SIM credential; second AP: NAI realm only - matches only TTLS.
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hessid'] = bssid
    del params['nai_realm']
    del params['domain_name']
    params['anqp_3gpp_cell_net'] = "232,01"
    hostapd.add_ap(apdev[0]['ifname'], params)

    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params()
    params['ssid'] = "test-hs20-other"
    params['hessid'] = bssid2
    del params['domain_name']
    del params['anqp_3gpp_cell_net']
    hostapd.add_ap(apdev[1]['ifname'], params)

    dev[0].hs20_enable()
    dev[0].request("SET external_sim 1")
    id1 = dev[0].add_cred_values({ 'imsi': "23201-0000000000", 'eap': "SIM",
                                   'provisioning_sp': "example.com",
                                   'sp_priority': "1" })
    id2 = dev[0].add_cred_values({ 'realm': "example.com",
                                   'username': "hs20-test",
                                   'password': "password",
                                   'domain': "example.com",
                                   'provisioning_sp': "example.com",
                                   'sp_priority': "2" })
    dev[0].dump_monitor()
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    interworking_ext_sim_auth(dev[0], "SIM")
    check_sp_type(dev[0], "unknown")
    conn_bssid = dev[0].get_status_field("bssid")
    if conn_bssid != bssid:
        raise Exception("Connected to incorrect BSS")
    dev[0].request("REMOVE_NETWORK all")

    # Swapped priorities must move the connection to the other BSS.
    dev[0].set_cred(id1, "sp_priority", "2")
    dev[0].set_cred(id2, "sp_priority", "1")
    dev[0].dump_monitor()
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    interworking_auth(dev[0], "TTLS")
    check_sp_type(dev[0], "unknown")
    conn_bssid = dev[0].get_status_field("bssid")
    if conn_bssid != bssid2:
        raise Exception("Connected to incorrect BSS")
def check_conn_capab_selection(dev, type, missing):
    """Run INTERWORKING_SELECT and verify connection capability result.

    dev: station under test (control interface wrapper)
    type: expected network type (e.g. "home"/"roaming") in the
        INTERWORKING-AP event
    missing: whether conn_capab_missing=1 is expected to be reported

    Raises Exception on timeout or on a mismatching event.
    """
    dev.request("INTERWORKING_SELECT freq=2412")
    ev = dev.wait_event(["INTERWORKING-AP"])
    if ev is None:
        raise Exception("Network selection timed out")
    if "type=" + type not in ev:
        raise Exception("Unexpected network type")
    if missing and "conn_capab_missing=1" not in ev:
        raise Exception("conn_capab_missing not reported")
    if not missing and "conn_capab_missing=1" in ev:
        raise Exception("conn_capab_missing reported unexpectedly")
def conn_capab_cred(domain=None, req_conn_capab=None):
    """Return default credential values, optionally with req_conn_capab set."""
    values = default_cred(domain=domain)
    if not req_conn_capab:
        return values
    values['req_conn_capab'] = req_conn_capab
    return values
def test_ap_hs20_req_conn_capab(dev, apdev):
    """Hotspot 2.0 network selection with req_conn_capab"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0]['ifname'], params)

    dev[0].hs20_enable()
    # req_conn_capab format: proto[:port]; only evaluated for roaming
    # networks, so the home-network case must not set conn_capab_missing.
    logger.info("Not used in home network")
    values = conn_capab_cred(domain="example.com", req_conn_capab="6:1234")
    id = dev[0].add_cred_values(values)
    check_conn_capab_selection(dev[0], "home", False)

    logger.info("Used in roaming network")
    dev[0].remove_cred(id)
    values = conn_capab_cred(domain="example.org", req_conn_capab="6:1234")
    id = dev[0].add_cred_values(values)
    check_conn_capab_selection(dev[0], "roaming", True)

    logger.info("Verify that req_conn_capab does not prevent connection if no other network is available")
    check_auto_select(dev[0], bssid)

    logger.info("Additional req_conn_capab checks")
    dev[0].remove_cred(id)
    values = conn_capab_cred(domain="example.org", req_conn_capab="1:0")
    id = dev[0].add_cred_values(values)
    check_conn_capab_selection(dev[0], "roaming", True)

    dev[0].remove_cred(id)
    values = conn_capab_cred(domain="example.org", req_conn_capab="17:5060")
    id = dev[0].add_cred_values(values)
    check_conn_capab_selection(dev[0], "roaming", True)

    # Second AP advertises matching connection capabilities, so only the
    # first AP should be flagged with conn_capab_missing.
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20b")
    params['hs20_conn_capab'] = [ "1:0:2", "6:22:1", "17:5060:0", "50:0:1" ]
    hostapd.add_ap(apdev[1]['ifname'], params)

    dev[0].remove_cred(id)
    values = conn_capab_cred(domain="example.org", req_conn_capab="50")
    id = dev[0].add_cred_values(values)
    dev[0].set_cred(id, "req_conn_capab", "6:22")
    dev[0].request("INTERWORKING_SELECT freq=2412")
    # One INTERWORKING-AP event per BSS.
    for i in range(0, 2):
        ev = dev[0].wait_event(["INTERWORKING-AP"])
        if ev is None:
            raise Exception("Network selection timed out");
        if bssid in ev and "conn_capab_missing=1" not in ev:
            raise Exception("Missing protocol connection capability not reported")
        if bssid2 in ev and "conn_capab_missing=1" in ev:
            raise Exception("Protocol connection capability not reported correctly")
def test_ap_hs20_req_conn_capab_and_roaming_partner_preference(dev, apdev):
    """Hotspot 2.0 and req_conn_capab with roaming partner preference"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['domain_name'] = "roaming.example.org"
    params['hs20_conn_capab'] = [ "1:0:2", "6:22:1", "17:5060:0", "50:0:1" ]
    hostapd.add_ap(apdev[0]['ifname'], params)

    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20-b")
    params['domain_name'] = "roaming.example.net"
    hostapd.add_ap(apdev[1]['ifname'], params)

    # Preferred roaming partner wins by default ...
    values = default_cred()
    values['roaming_partner'] = "roaming.example.net,1,127,*"
    id = dev[0].add_cred_values(values)
    check_auto_select(dev[0], bssid2)

    # ... but a connection capability (proto 50) that only apdev[0]
    # advertises overrides the partner preference.
    dev[0].set_cred(id, "req_conn_capab", "50")
    check_auto_select(dev[0], bssid)

    # A requirement neither AP satisfies falls back to the preference.
    dev[0].remove_cred(id)
    id = dev[0].add_cred_values(values)
    dev[0].set_cred(id, "req_conn_capab", "51")
    check_auto_select(dev[0], bssid2)
def check_bandwidth_selection(dev, type, below):
    """Run INTERWORKING_SELECT and verify backhaul bandwidth result.

    dev: station under test (control interface wrapper)
    type: expected network type (e.g. "home"/"roaming") in the
        INTERWORKING-AP event
    below: whether below_min_backhaul=1 is expected to be reported

    Raises Exception on timeout or on a mismatching event.
    """
    dev.request("INTERWORKING_SELECT freq=2412")
    ev = dev.wait_event(["INTERWORKING-AP"])
    if ev is None:
        raise Exception("Network selection timed out")
    if "type=" + type not in ev:
        raise Exception("Unexpected network type")
    if below and "below_min_backhaul=1" not in ev:
        raise Exception("below_min_backhaul not reported")
    if not below and "below_min_backhaul=1" in ev:
        raise Exception("below_min_backhaul reported unexpectedly")
def bw_cred(domain=None, dl_home=None, ul_home=None, dl_roaming=None, ul_roaming=None):
    """Build a credential dict with optional minimum bandwidth constraints.

    Bandwidth values are in kbps; None leaves the corresponding credential
    field unset.
    """
    cred = default_cred(domain=domain)
    # Compare against None explicitly so that an explicit 0 kbps limit is
    # still written into the credential (a plain truthiness check would
    # silently drop it).
    if dl_home is not None:
        cred['min_dl_bandwidth_home'] = str(dl_home)
    if ul_home is not None:
        cred['min_ul_bandwidth_home'] = str(ul_home)
    if dl_roaming is not None:
        cred['min_dl_bandwidth_roaming'] = str(dl_roaming)
    if ul_roaming is not None:
        cred['min_ul_bandwidth_roaming'] = str(ul_roaming)
    return cred
def test_ap_hs20_min_bandwidth_home(dev, apdev):
    """Hotspot 2.0 network selection with min bandwidth (home)"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0]['ifname'], params)

    dev[0].hs20_enable()
    # Thresholds straddle the AP's advertised WAN Metrics (presumably
    # DL 5490 / UL 58 kbps in the default parameters - TODO confirm);
    # exceeding either limit must set below_min_backhaul.
    values = bw_cred(domain="example.com", dl_home=5490, ul_home=58)
    id = dev[0].add_cred_values(values)
    check_bandwidth_selection(dev[0], "home", False)
    dev[0].remove_cred(id)

    values = bw_cred(domain="example.com", dl_home=5491, ul_home=58)
    id = dev[0].add_cred_values(values)
    check_bandwidth_selection(dev[0], "home", True)
    dev[0].remove_cred(id)

    values = bw_cred(domain="example.com", dl_home=5490, ul_home=59)
    id = dev[0].add_cred_values(values)
    check_bandwidth_selection(dev[0], "home", True)
    dev[0].remove_cred(id)

    values = bw_cred(domain="example.com", dl_home=5491, ul_home=59)
    id = dev[0].add_cred_values(values)
    check_bandwidth_selection(dev[0], "home", True)
    # Insufficient bandwidth must not prevent connecting when the AP is the
    # only option.
    check_auto_select(dev[0], bssid)

    # An AP advertising sufficient bandwidth must win automatic selection.
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20-b")
    params['hs20_wan_metrics'] = "01:8000:1000:1:1:3000"
    hostapd.add_ap(apdev[1]['ifname'], params)
    check_auto_select(dev[0], bssid2)
def test_ap_hs20_min_bandwidth_roaming(dev, apdev):
    """Hotspot 2.0 network selection with min bandwidth (roaming)"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0]['ifname'], params)

    dev[0].hs20_enable()
    # Same threshold pattern as the home-network variant, but using the
    # roaming bandwidth limits with a non-matching (roaming) domain.
    values = bw_cred(domain="example.org", dl_roaming=5490, ul_roaming=58)
    id = dev[0].add_cred_values(values)
    check_bandwidth_selection(dev[0], "roaming", False)
    dev[0].remove_cred(id)

    values = bw_cred(domain="example.org", dl_roaming=5491, ul_roaming=58)
    id = dev[0].add_cred_values(values)
    check_bandwidth_selection(dev[0], "roaming", True)
    dev[0].remove_cred(id)

    values = bw_cred(domain="example.org", dl_roaming=5490, ul_roaming=59)
    id = dev[0].add_cred_values(values)
    check_bandwidth_selection(dev[0], "roaming", True)
    dev[0].remove_cred(id)

    values = bw_cred(domain="example.org", dl_roaming=5491, ul_roaming=59)
    id = dev[0].add_cred_values(values)
    check_bandwidth_selection(dev[0], "roaming", True)
    # Insufficient bandwidth must not prevent connecting when the AP is the
    # only option.
    check_auto_select(dev[0], bssid)

    # An AP advertising sufficient bandwidth must win automatic selection.
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20-b")
    params['hs20_wan_metrics'] = "01:8000:1000:1:1:3000"
    hostapd.add_ap(apdev[1]['ifname'], params)
    check_auto_select(dev[0], bssid2)
def test_ap_hs20_min_bandwidth_and_roaming_partner_preference(dev, apdev):
    """Hotspot 2.0 and minimum bandwidth with roaming partner preference"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['domain_name'] = "roaming.example.org"
    params['hs20_wan_metrics'] = "01:8000:1000:1:1:3000"
    hostapd.add_ap(apdev[0]['ifname'], params)

    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20-b")
    params['domain_name'] = "roaming.example.net"
    hostapd.add_ap(apdev[1]['ifname'], params)

    values = default_cred()
    values['roaming_partner'] = "roaming.example.net,1,127,*"
    id = dev[0].add_cred_values(values)
    # Preferred roaming partner selected by default.
    check_auto_select(dev[0], bssid2)
    # Bandwidth requirement that only apdev[0] (DL 8000) satisfies overrides
    # the partner preference.
    dev[0].set_cred(id, "min_dl_bandwidth_roaming", "6000")
    check_auto_select(dev[0], bssid)
    # A requirement neither AP satisfies falls back to the preference.
    dev[0].set_cred(id, "min_dl_bandwidth_roaming", "10000")
    check_auto_select(dev[0], bssid2)
def test_ap_hs20_min_bandwidth_no_wan_metrics(dev, apdev):
    """Hotspot 2.0 network selection with min bandwidth but no WAN Metrics"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    del params['hs20_wan_metrics']
    hostapd.add_ap(apdev[0]['ifname'], params)

    dev[0].hs20_enable()
    # Without WAN Metrics from the AP, the bandwidth limits cannot be
    # evaluated and the BSS must not be flagged as below minimum backhaul.
    values = bw_cred(domain="example.com", dl_home=10000, ul_home=10000,
                     dl_roaming=10000, ul_roaming=10000)
    dev[0].add_cred_values(values)
    check_bandwidth_selection(dev[0], "home", False)
def test_ap_hs20_deauth_req_ess(dev, apdev):
    """Hotspot 2.0 connection and deauthentication request for ESS"""
    # PMF required (pmf 2) so the WNM deauth imminent notice is protected.
    dev[0].request("SET pmf 2")
    eap_test(dev[0], apdev[0], "21[3:26]", "TTLS", "user")
    dev[0].dump_monitor()
    addr = dev[0].p2p_interface_addr()
    hapd = hostapd.Hostapd(apdev[0]['ifname'])
    # HS20_DEAUTH_REQ <addr> <code> <reauth-delay> <url>; code 1 = ESS.
    hapd.request("HS20_DEAUTH_REQ " + addr + " 1 120 http://example.com/")
    ev = dev[0].wait_event(["HS20-DEAUTH-IMMINENT-NOTICE"])
    if ev is None:
        raise Exception("Timeout on deauth imminent notice")
    if "1 120 http://example.com/" not in ev:
        raise Exception("Unexpected deauth imminent notice: " + ev)
    hapd.request("DEAUTHENTICATE " + addr)
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"])
    if ev is None:
        raise Exception("Timeout on disconnection")
    # ESS level: the network must be temporarily disabled and no
    # reconnection attempted within the delay.
    if "[TEMP-DISABLED]" not in dev[0].list_networks()[0]['flags']:
        raise Exception("Network not marked temporarily disabled")
    ev = dev[0].wait_event(["SME: Trying to authenticate",
                            "Trying to associate",
                            "CTRL-EVENT-CONNECTED"], timeout=5)
    if ev is not None:
        raise Exception("Unexpected connection attempt")
def test_ap_hs20_deauth_req_bss(dev, apdev):
    """Hotspot 2.0 connection and deauthentication request for BSS"""
    # PMF required (pmf 2) so the WNM deauth imminent notice is protected.
    dev[0].request("SET pmf 2")
    eap_test(dev[0], apdev[0], "21[3:26]", "TTLS", "user")
    dev[0].dump_monitor()
    addr = dev[0].p2p_interface_addr()
    hapd = hostapd.Hostapd(apdev[0]['ifname'])
    # HS20_DEAUTH_REQ <addr> <code> <reauth-delay> <url>; code 0 = BSS.
    hapd.request("HS20_DEAUTH_REQ " + addr + " 0 120 http://example.com/")
    ev = dev[0].wait_event(["HS20-DEAUTH-IMMINENT-NOTICE"])
    if ev is None:
        raise Exception("Timeout on deauth imminent notice")
    if "0 120 http://example.com/" not in ev:
        raise Exception("Unexpected deauth imminent notice: " + ev)
    hapd.request("DEAUTHENTICATE " + addr + " reason=4")
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"])
    if ev is None:
        raise Exception("Timeout on disconnection")
    if "reason=4" not in ev:
        raise Exception("Unexpected disconnection reason")
    # The network must be temporarily disabled and no reconnection attempted.
    if "[TEMP-DISABLED]" not in dev[0].list_networks()[0]['flags']:
        raise Exception("Network not marked temporarily disabled")
    ev = dev[0].wait_event(["SME: Trying to authenticate",
                            "Trying to associate",
                            "CTRL-EVENT-CONNECTED"], timeout=5)
    if ev is not None:
        raise Exception("Unexpected connection attempt")
def test_ap_hs20_deauth_req_from_radius(dev, apdev):
    """Hotspot 2.0 connection and deauthentication request from RADIUS"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = [ "0,example.com,21[2:4]" ]
    params['hs20_deauth_req_timeout'] = "2"
    hostapd.add_ap(apdev[0]['ifname'], params)

    dev[0].request("SET pmf 2")
    dev[0].hs20_enable()
    # Special user name that presumably makes the authentication server
    # trigger a deauthentication request - TODO confirm against the
    # auth server configuration.
    dev[0].add_cred_values({ 'realm': "example.com",
                             'username': "hs20-deauth-test",
                             'password': "password" })
    interworking_select(dev[0], bssid, freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    ev = dev[0].wait_event(["HS20-DEAUTH-IMMINENT-NOTICE"], timeout=5)
    if ev is None:
        raise Exception("Timeout on deauth imminent notice")
    if " 1 100" not in ev:
        raise Exception("Unexpected deauth imminent contents")
    ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=3)
    if ev is None:
        raise Exception("Timeout on disconnection")
def test_ap_hs20_remediation_required(dev, apdev):
    """Hotspot 2.0 connection and remediation required from RADIUS"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = [ "0,example.com,21[2:4]" ]
    hostapd.add_ap(apdev[0]['ifname'], params)

    dev[0].request("SET pmf 1")
    dev[0].hs20_enable()
    # Special user name that presumably makes the authentication server
    # indicate subscription remediation - TODO confirm against the
    # auth server configuration.
    dev[0].add_cred_values({ 'realm': "example.com",
                             'username': "hs20-subrem-test",
                             'password': "password" })
    interworking_select(dev[0], bssid, freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    ev = dev[0].wait_event(["HS20-SUBSCRIPTION-REMEDIATION"], timeout=5)
    if ev is None:
        raise Exception("Timeout on subscription remediation notice")
    if " 1 https://example.com/" not in ev:
        raise Exception("Unexpected subscription remediation event contents")
def test_ap_hs20_remediation_required_ctrl(dev, apdev):
    """Hotspot 2.0 connection and subrem from ctrl_iface"""
    bssid = apdev[0]['bssid']
    addr = dev[0].p2p_dev_addr()
    params = hs20_ap_params()
    params['nai_realm'] = [ "0,example.com,21[2:4]" ]
    hapd = hostapd.add_ap(apdev[0]['ifname'], params)

    dev[0].request("SET pmf 1")
    dev[0].hs20_enable()
    dev[0].add_cred_values(default_cred())
    interworking_select(dev[0], bssid, freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")

    # WNM notification with a URL.
    hapd.request("HS20_WNM_NOTIF " + addr + " https://example.com/")
    ev = dev[0].wait_event(["HS20-SUBSCRIPTION-REMEDIATION"], timeout=5)
    if ev is None:
        raise Exception("Timeout on subscription remediation notice")
    if " 1 https://example.com/" not in ev:
        raise Exception("Unexpected subscription remediation event contents")

    # WNM notification without a URL: event ends with the bare prefix.
    hapd.request("HS20_WNM_NOTIF " + addr)
    ev = dev[0].wait_event(["HS20-SUBSCRIPTION-REMEDIATION"], timeout=5)
    if ev is None:
        raise Exception("Timeout on subscription remediation notice")
    if not ev.endswith("HS20-SUBSCRIPTION-REMEDIATION "):
        raise Exception("Unexpected subscription remediation event contents: " + ev)

    # Invalid command uses must be rejected (missing address, malformed
    # address, over-long URL).
    if "FAIL" not in hapd.request("HS20_WNM_NOTIF "):
        raise Exception("Unexpected HS20_WNM_NOTIF success")
    if "FAIL" not in hapd.request("HS20_WNM_NOTIF foo"):
        raise Exception("Unexpected HS20_WNM_NOTIF success")
    if "FAIL" not in hapd.request("HS20_WNM_NOTIF " + addr + " https://12345678923456789842345678456783456712345678923456789842345678456783456712345678923456789842345678456783456712345678923456789842345678456783456712345678923456789842345678456783456712345678923456789842345678456783456712345678923456789842345678456783456712345678927.very.long.example.com/"):
        raise Exception("Unexpected HS20_WNM_NOTIF success")
def test_ap_hs20_session_info(dev, apdev):
    """Hotspot 2.0 connection and session information from RADIUS"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['nai_realm'] = [ "0,example.com,21[2:4]" ]
    hostapd.add_ap(apdev[0]['ifname'], params)

    dev[0].request("SET pmf 1")
    dev[0].hs20_enable()
    # Special user name that presumably makes the authentication server
    # return session information resulting in an ESS disassociation
    # imminent notice - TODO confirm against the auth server configuration.
    dev[0].add_cred_values({ 'realm': "example.com",
                             'username': "hs20-session-info-test",
                             'password': "password" })
    interworking_select(dev[0], bssid, freq="2412")
    interworking_connect(dev[0], bssid, "TTLS")
    ev = dev[0].wait_event(["ESS-DISASSOC-IMMINENT"], timeout=10)
    if ev is None:
        raise Exception("Timeout on ESS disassociation imminent notice")
    if " 1 59904 https://example.com/" not in ev:
        raise Exception("Unexpected ESS disassociation imminent event contents")
    # The station should start scanning for an alternative network before
    # the session ends.
    ev = dev[0].wait_event(["CTRL-EVENT-SCAN-STARTED"])
    if ev is None:
        raise Exception("Scan not started")
    ev = dev[0].wait_event(["CTRL-EVENT-SCAN-RESULTS"])
    if ev is None:
        raise Exception("Scan not completed")
def test_ap_hs20_osen(dev, apdev):
    """Hotspot 2.0 OSEN connection"""
    params = { 'ssid': "osen",
               'osen': "1",
               'auth_server_addr': "127.0.0.1",
               'auth_server_port': "1812",
               'auth_server_shared_secret': "radius" }
    hostapd.add_ap(apdev[0]['ifname'], params)

    # Stations not using OSEN (open and WEP) attempt to connect but are not
    # expected to complete the connection.
    dev[1].connect("osen", key_mgmt="NONE", scan_freq="2412",
                   wait_connect=False)
    dev[2].connect("osen", key_mgmt="NONE", wep_key0='"hello"',
                   scan_freq="2412", wait_connect=False)
    # OSEN connection with unauthenticated server-only TLS.
    dev[0].connect("osen", proto="OSEN", key_mgmt="OSEN", pairwise="CCMP",
                   group="GTK_NOT_USED",
                   eap="WFA-UNAUTH-TLS", identity="[email protected]",
                   ca_cert="auth_serv/ca.pem",
                   scan_freq="2412")

    # Repeat on a separate interface using the driver connect command path.
    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
    wpas.connect("osen", proto="OSEN", key_mgmt="OSEN", pairwise="CCMP",
                 group="GTK_NOT_USED",
                 eap="WFA-UNAUTH-TLS", identity="[email protected]",
                 ca_cert="auth_serv/ca.pem",
                 scan_freq="2412")
    wpas.request("DISCONNECT")
def test_ap_hs20_network_preference(dev, apdev):
    """Hotspot 2.0 network selection with preferred home network"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0]['ifname'], params)

    dev[0].hs20_enable()
    values = { 'realm': "example.com",
               'username': "hs20-test",
               'password': "password",
               'domain': "example.com" }
    dev[0].add_cred_values(values)

    # Plain (non-Interworking) network block with higher priority than the
    # credential-based network.
    id = dev[0].add_network()
    dev[0].set_network_quoted(id, "ssid", "home")
    dev[0].set_network_quoted(id, "psk", "12345678")
    dev[0].set_network(id, "priority", "1")
    dev[0].request("ENABLE_NETWORK %s no-connect" % id)

    # Only the Hotspot 2.0 AP is available initially.
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if bssid not in ev:
        raise Exception("Unexpected network selected")

    # Once the preferred (higher priority) home network appears, automatic
    # selection must roam to it instead of staying connected.
    bssid2 = apdev[1]['bssid']
    params = hostapd.wpa2_params(ssid="home", passphrase="12345678")
    hostapd.add_ap(apdev[1]['ifname'], params)
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED",
                            "INTERWORKING-ALREADY-CONNECTED" ], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if "INTERWORKING-ALREADY-CONNECTED" in ev:
        raise Exception("No roam to higher priority network")
    if bssid2 not in ev:
        raise Exception("Unexpected network selected")
def test_ap_hs20_network_preference2(dev, apdev):
    """Hotspot 2.0 network selection with preferred credential"""
    bssid2 = apdev[1]['bssid']
    params = hostapd.wpa2_params(ssid="home", passphrase="12345678")
    hostapd.add_ap(apdev[1]['ifname'], params)

    dev[0].hs20_enable()
    # This time the credential carries the higher priority; the plain
    # network block uses the default priority.
    values = { 'realm': "example.com",
               'username': "hs20-test",
               'password': "password",
               'domain': "example.com",
               'priority': "1" }
    dev[0].add_cred_values(values)
    id = dev[0].add_network()
    dev[0].set_network_quoted(id, "ssid", "home")
    dev[0].set_network_quoted(id, "psk", "12345678")
    dev[0].request("ENABLE_NETWORK %s no-connect" % id)

    # Only the plain home network is available initially.
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if bssid2 not in ev:
        raise Exception("Unexpected network selected")

    # When the Hotspot 2.0 AP matching the preferred credential appears,
    # automatic selection must roam to it.
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0]['ifname'], params)
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED",
                            "INTERWORKING-ALREADY-CONNECTED" ], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if "INTERWORKING-ALREADY-CONNECTED" in ev:
        raise Exception("No roam to higher priority network")
    if bssid not in ev:
        raise Exception("Unexpected network selected")
def test_ap_hs20_network_preference3(dev, apdev):
    """Hotspot 2.0 network selection with two credential (one preferred)"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0]['ifname'], params)

    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20b")
    params['nai_realm'] = "0,example.org,13[5:6],21[2:4][5:7]"
    hostapd.add_ap(apdev[1]['ifname'], params)

    dev[0].hs20_enable()
    # The example.com credential is preferred initially (priority 1).
    values = { 'realm': "example.com",
               'username': "hs20-test",
               'password': "password",
               'priority': "1" }
    dev[0].add_cred_values(values)
    values = { 'realm': "example.org",
               'username': "hs20-test",
               'password': "password" }
    id = dev[0].add_cred_values(values)
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if bssid not in ev:
        raise Exception("Unexpected network selected")

    # Raising the example.org credential priority above the other one must
    # trigger a roam to the AP matching it.
    dev[0].set_cred(id, "priority", "2")
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED",
                            "INTERWORKING-ALREADY-CONNECTED" ], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if "INTERWORKING-ALREADY-CONNECTED" in ev:
        raise Exception("No roam to higher priority network")
    if bssid2 not in ev:
        raise Exception("Unexpected network selected")
def test_ap_hs20_network_preference4(dev, apdev):
    """Hotspot 2.0 network selection with username vs. SIM credential"""
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    hostapd.add_ap(apdev[0]['ifname'], params)

    # Second AP advertises 3GPP cellular network info matching the SIM
    # credential below.
    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20b")
    params['hessid'] = bssid2
    params['anqp_3gpp_cell_net'] = "555,444"
    params['domain_name'] = "wlan.mnc444.mcc555.3gppnetwork.org"
    hostapd.add_ap(apdev[1]['ifname'], params)

    dev[0].hs20_enable()
    values = { 'realm': "example.com",
               'username': "hs20-test",
               'password': "password",
               'priority': "1" }
    dev[0].add_cred_values(values)
    values = { 'imsi': "555444-333222111",
               'eap': "SIM",
               'milenage': "5122250214c33e723a5dd523fc145fc0:981d464c7c52eb6e5036234984ad0bcf:000000000123" }
    id = dev[0].add_cred_values(values)
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if bssid not in ev:
        raise Exception("Unexpected network selected")

    # Raising the SIM credential priority must make the 3GPP-matching AP
    # win and trigger a roam.
    dev[0].set_cred(id, "priority", "2")
    dev[0].request("INTERWORKING_SELECT auto freq=2412")
    ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED",
                            "INTERWORKING-ALREADY-CONNECTED" ], timeout=15)
    if ev is None:
        raise Exception("Connection timed out")
    if "INTERWORKING-ALREADY-CONNECTED" in ev:
        raise Exception("No roam to higher priority network")
    if bssid2 not in ev:
        raise Exception("Unexpected network selected")
def test_ap_hs20_fetch_osu(dev, apdev):
    """Hotspot 2.0 OSU provider and icon fetch"""
    # Two APs, each advertising one OSU provider with the same icon file.
    bssid = apdev[0]['bssid']
    params = hs20_ap_params()
    params['hs20_icon'] = "128:80:zxx:image/png:w1fi_logo:w1fi_logo.png"
    params['osu_ssid'] = '"HS 2.0 OSU open"'
    params['osu_method_list'] = "1"
    params['osu_friendly_name'] = [ "eng:Test OSU", "fin:Testi-OSU" ]
    params['osu_icon'] = "w1fi_logo"
    params['osu_service_desc'] = [ "eng:Example services", "fin:Esimerkkipalveluja" ]
    params['osu_server_uri'] = "https://example.com/osu/"
    hostapd.add_ap(apdev[0]['ifname'], params)

    bssid2 = apdev[1]['bssid']
    params = hs20_ap_params(ssid="test-hs20b")
    params['hessid'] = bssid2
    params['hs20_icon'] = "128:80:zxx:image/png:w1fi_logo:w1fi_logo.png"
    params['osu_ssid'] = '"HS 2.0 OSU OSEN"'
    params['osu_method_list'] = "0"
    params['osu_nai'] = "[email protected]"
    params['osu_friendly_name'] = [ "eng:Test2 OSU", "fin:Testi2-OSU" ]
    params['osu_icon'] = "w1fi_logo"
    params['osu_service_desc'] = [ "eng:Example services2", "fin:Esimerkkipalveluja2" ]
    params['osu_server_uri'] = "https://example.org/osu/"
    hostapd.add_ap(apdev[1]['ifname'], params)

    # Reference copy of the icon for comparing the fetched files.
    # NOTE(review): PNG data is binary; mode "r" only works with Python 2
    # style byte strings - "rb" would be needed under Python 3.
    with open("w1fi_logo.png", "r") as f:
        orig_logo = f.read()
    dev[0].hs20_enable()
    dir = "/tmp/osu-fetch"
    # Start from an empty osu- result directory.
    if os.path.isdir(dir):
        files = [ f for f in os.listdir(dir) if f.startswith("osu-") ]
        for f in files:
            os.remove(dir + "/" + f)
    else:
        try:
            os.makedirs(dir)
        except:
            pass
    try:
        dev[0].request("SET osu_dir " + dir)
        dev[0].request("FETCH_OSU")
        # Count icons fetched whose contents match the reference file;
        # loop until the overall fetch-completed event arrives.
        icons = 0
        while True:
            ev = dev[0].wait_event(["OSU provider fetch completed",
                                    "RX-HS20-ANQP-ICON"], timeout=15)
            if ev is None:
                raise Exception("Timeout on OSU fetch")
            if "OSU provider fetch completed" in ev:
                break
            if "RX-HS20-ANQP-ICON" in ev:
                with open(ev.split(' ')[1], "r") as f:
                    logo = f.read()
                if logo == orig_logo:
                    icons += 1

        # Both providers must show up in the summary file.
        with open(dir + "/osu-providers.txt", "r") as f:
            prov = f.read()
        if "OSU-PROVIDER " + bssid not in prov:
            raise Exception("Missing OSU_PROVIDER")
        if "OSU-PROVIDER " + bssid2 not in prov:
            raise Exception("Missing OSU_PROVIDER")
    finally:
        # Always clean up the fetch directory.
        files = [ f for f in os.listdir(dir) if f.startswith("osu-") ]
        for f in files:
            os.remove(dir + "/" + f)
        os.rmdir(dir)

    if icons != 2:
        raise Exception("Unexpected number of icons fetched")
| [
"[email protected]"
]
| |
ab64c7ade9c0e6f20ab29492761f9cef7461b56b | 155b6c640dc427590737750fe39542a31eda2aa4 | /api-test/hmpt/test/test_009_web_Template.py | 90b6e1ae1a7cb06aa0b98c4140eba1a39a8ff34d | []
| no_license | RomySaber/api-test | d4b3add00e7e5ed70a5c72bb38dc010f67bbd981 | 028c9f7fe0d321db2af7f1cb936c403194db850c | refs/heads/master | 2022-10-09T18:42:43.352325 | 2020-06-11T07:00:04 | 2020-06-11T07:00:04 | 271,468,744 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,731 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Time :2019-06-05 上午 11:25
@Author : 罗林
@File : test_009_web_Template.py
@desc : 进件配置流程自动化测试用例
"""
import json
from faker import Faker
from common.myCommon import Assertion
from common.myCommon.TestBaseCase import TestBaseCase
from hmpt.query import xqkj_query
from hmpt.testAction import WebAction
from hmpt.testAction import loginAction
# Shared cross-test key/value store provided by the login helper module.
global_dict = loginAction.global_dict
# Chinese-locale fake-data generator used to build unique template names.
fake = Faker("zh_CN")
# Random template names, prefixed with the run's sign so parallel runs don't collide.
template_name = loginAction.sign + fake.name_male()
del_template_name = loginAction.sign + fake.name_male()
class test_009_web_Template(TestBaseCase):
    """End-to-end web API tests for the intake (incoming-parts) template flow:
    create, edit, list, inspect, and delete templates.

    NOTE(review): the tests are order-dependent -- later tests read module-level
    globals (``sysdata``, ``productTemplateUuid``, ``productTemplateUuid1``,
    ``del_template_id``) assigned by earlier ones, so they must run in
    numeric order.
    """
    def test_001_api_78dk_platform_tm_incoming_addOrEditTemplate_none_name(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Add or edit intake template (new): empty name is rejected
        """
        global sysdata
        sysdata = xqkj_query.get_sysdata()
        rs = WebAction.test_api_78dk_platform_tm_incoming_addOrEditTemplate(
            remark='', sysdata=sysdata, templatename='', templatetype='template_type_incoming_parts',
            producttemplateuuid='')
        Assertion.verity(json.loads(rs)['code'], "20000")
        Assertion.verity(json.loads(rs)['msg'], "TemplateName 不能为空!")
    def test_002_api_78dk_platform_tm_incoming_addOrEditTemplate_256_name(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Add or edit intake template (new): over-long (256) name is rejected
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_addOrEditTemplate(
            remark='', sysdata=sysdata, templatename=''.join(fake.words(nb=128)),
            templatetype='template_type_incoming_parts', producttemplateuuid='')
        Assertion.verity(json.loads(rs)['code'], "20000")
        Assertion.verity(json.loads(rs)['msg'], "添加或编辑进件模板发生错误!")
    def test_003_api_78dk_platform_tm_incoming_addOrEditTemplate_none_sysdata(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Add or edit intake template (new): empty sysdata triggers a server error
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_addOrEditTemplate(
            remark='', sysdata='', templatename=template_name, templatetype='template_type_incoming_parts',
            producttemplateuuid='')
        Assertion.verity(json.loads(rs)['code'], "20000")
        Assertion.verity(json.loads(rs)['msg'], "系统发生内部异常,请稍候再试")
    def test_004_api_78dk_platform_tm_incoming_addOrEditTemplate(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Add or edit intake template (new): happy path; stores the new uuid
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_addOrEditTemplate(
            remark='', sysdata=sysdata, templatename=template_name, templatetype='template_type_incoming_parts',
            producttemplateuuid='')
        Assertion.verity(json.loads(rs)['code'], "10000")
        Assertion.verity(json.loads(rs)['msg'], "成功")
        Assertion.verityNotNone(json.loads(rs)['data']['productTemplateUuid'])
        global productTemplateUuid
        productTemplateUuid = json.loads(rs)['data']['productTemplateUuid']
        loginAction.global_dict.set(productTemplateUuid=productTemplateUuid)
    def test_005_api_78dk_platform_tm_incoming_templateList_name_none(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Query the template list: empty name filter returns a page
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_templateList(name='', pagecurrent=1, pagesize=10)
        Assertion.verity(json.loads(rs)['code'], "10000")
        Assertion.verity(json.loads(rs)['msg'], "成功")
        Assertion.verityNotNone(json.loads(rs)['data'])
        Assertion.verityContain(json.loads(rs)['data'], 'pageCurrent')
        Assertion.verityContain(json.loads(rs)['data'], 'dataList')
        Assertion.verityContain(json.loads(rs)['data'], 'pageSize')
    def test_006_api_78dk_platform_tm_incoming_templateList_name_not_exits(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Query the template list: non-existent name still succeeds
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_templateList(
            name=''.join(fake.words(nb=128)), pagecurrent=1, pagesize=10)
        Assertion.verity(json.loads(rs)['code'], "10000")
        Assertion.verity(json.loads(rs)['msg'], "成功")
        Assertion.verityNotNone(json.loads(rs)['data'])
        Assertion.verityContain(json.loads(rs)['data'], 'dataList')
    def test_007_api_78dk_platform_tm_incoming_templateList(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Query the template list: finds the template created in test_004
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_templateList(
            name=template_name, pagecurrent=1, pagesize=10)
        Assertion.verity(json.loads(rs)['code'], "10000")
        Assertion.verity(json.loads(rs)['msg'], "成功")
        Assertion.verityNotNone(json.loads(rs)['data'])
        Assertion.verityContain(json.loads(rs)['data'], 'pageCurrent')
        Assertion.verityContain(json.loads(rs)['data'], 'dataList')
        Assertion.verityContain(json.loads(rs)['data'], 'pageSize')
        Assertion.verityNotNone(json.loads(rs)['data']['dataList'])
        Assertion.verityContain(json.loads(rs)['data']['dataList'], 'created')
        Assertion.verity(json.loads(rs)['data']['dataList'][0]['templateName'], template_name)
        Assertion.verity(json.loads(rs)['data']['dataList'][0]['templateType'], 'template_type_incoming_parts')
        Assertion.verity(json.loads(rs)['data']['dataList'][0]['productTemplateUuid'], productTemplateUuid)
    def test_008_api_78dk_platform_tm_incoming_addOrEditTemplate_edit_none_name(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Edit intake template (new): empty name is rejected
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_addOrEditTemplate(
            remark='', sysdata=sysdata, templatename='', templatetype='template_type_incoming_parts',
            producttemplateuuid=productTemplateUuid)
        Assertion.verity(json.loads(rs)['code'], "20000")
        Assertion.verity(json.loads(rs)['msg'], "TemplateName 不能为空!")
    def test_009_api_78dk_platform_tm_incoming_addOrEditTemplate_edit_256_name(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Edit intake template (new): over-long (256) name is rejected
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_addOrEditTemplate(
            remark='', sysdata=sysdata, templatename=''.join(fake.words(nb=128)),
            templatetype='template_type_incoming_parts', producttemplateuuid=productTemplateUuid)
        Assertion.verity(json.loads(rs)['code'], "20000")
        Assertion.verity(json.loads(rs)['msg'], "添加或编辑进件模板发生错误!")
    def test_010_api_78dk_platform_tm_incoming_addOrEditTemplate_edit_none_sysdata(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Edit intake template (new): empty sysdata triggers a server error
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_addOrEditTemplate(
            remark='', sysdata='', templatename=template_name, templatetype='template_type_incoming_parts',
            producttemplateuuid=productTemplateUuid)
        Assertion.verity(json.loads(rs)['code'], "20000")
        Assertion.verity(json.loads(rs)['msg'], "系统发生内部异常,请稍候再试")
    def test_011_api_78dk_platform_tm_incoming_addOrEditTemplate_edit(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Edit intake template (new): happy path; stores the returned uuid
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_addOrEditTemplate(
            remark='', sysdata=sysdata, templatename=template_name, templatetype='template_type_incoming_parts',
            producttemplateuuid=productTemplateUuid)
        Assertion.verity(json.loads(rs)['code'], "10000")
        Assertion.verity(json.loads(rs)['msg'], "成功")
        Assertion.verityNotNone(json.loads(rs)['data'])
        global productTemplateUuid1
        productTemplateUuid1 = json.loads(rs)['data']['productTemplateUuid']
    def test_012_api_78dk_platform_tm_incoming_templateDetails_none(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Query template details: empty uuid is rejected
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_templateDetails('')
        Assertion.verity(json.loads(rs)['code'], "20000")
        Assertion.verity(json.loads(rs)['msg'], "ProductTemplateUuid 不能为空!")
    def test_013_api_78dk_platform_tm_incoming_templateDetails_not_exits(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Query template details: unknown uuid is rejected
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_templateDetails(fake.ean8())
        Assertion.verity(json.loads(rs)['code'], "20000")
        Assertion.verity(json.loads(rs)['msg'], "ProductTemplateUuid 不合法!")
    def test_014_api_78dk_platform_tm_incoming_templateDetails(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Query template details: happy path for the template edited in test_011
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_templateDetails(productTemplateUuid1)
        Assertion.verity(json.loads(rs)['code'], "10000")
        Assertion.verity(json.loads(rs)['msg'], "成功")
        Assertion.verityNotNone(json.loads(rs)['data'])
        Assertion.verityContain(json.loads(rs)['data'], 'created')
        Assertion.verityNotNone(json.loads(rs)['data']['sysData'])
        Assertion.verityContain(json.loads(rs)['data']['sysData'], 'id')
        Assertion.verityContain(json.loads(rs)['data']['sysData'], 'parentId')
        Assertion.verityContain(json.loads(rs)['data']['sysData'], 'typeName')
        Assertion.verity(json.loads(rs)['data']['productTemplateUuid'], productTemplateUuid1)
        Assertion.verity(json.loads(rs)['data']['remark'], '')
        Assertion.verity(json.loads(rs)['data']['templateName'], template_name)
        Assertion.verity(json.loads(rs)['data']['templateType'], 'template_type_incoming_parts')
    def test_015_api_78dk_platform_tm_incoming_findTemplateDictionaries(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Query all template configuration dictionaries
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_findTemplateDictionaries()
        Assertion.verity(json.loads(rs)['code'], "10000")
        Assertion.verity(json.loads(rs)['msg'], "成功")
        Assertion.verityNotNone(json.loads(rs)['data'])
        Assertion.verityContain(json.loads(rs)['data'], 'id')
        Assertion.verityContain(json.loads(rs)['data'], 'parentId')
        Assertion.verityContain(json.loads(rs)['data'], 'typeName')
    def test_016_api_78dk_platform_tm_incoming_findProductByTemplate_none(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Query products linked to a template: empty uuid is rejected
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_findProductByTemplate(
            pagecurrent=1, pagesize=10, producttemplateuuid='')
        Assertion.verity(json.loads(rs)['code'], "20000")
        Assertion.verity(json.loads(rs)['msg'], "ProductTemplateUuid 不能为空!")
    def test_017_api_78dk_platform_tm_incoming_findProductByTemplate_not_exits(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Query products linked to a template: unknown uuid is rejected
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_findProductByTemplate(
            pagecurrent=1, pagesize=10, producttemplateuuid=fake.ean8())
        Assertion.verity(json.loads(rs)['code'], "20000")
        Assertion.verity(json.loads(rs)['msg'], "ProductTemplateUuid 不合法!")
    def test_018_api_78dk_platform_tm_incoming_findProductByTemplate(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Query products linked to a template: happy path
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_findProductByTemplate(
            pagecurrent=1, pagesize=10, producttemplateuuid=productTemplateUuid1)
        Assertion.verity(json.loads(rs)['code'], "10000")
        Assertion.verity(json.loads(rs)['msg'], "成功")
        Assertion.verityContain(json.loads(rs)['data'], 'dataList')
    def test_019_api_78dk_platform_tm_incoming_delTemplate_none(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Delete intake template: empty uuid is rejected
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_delTemplate(producttemplateuuid='')
        Assertion.verity(json.loads(rs)['code'], "20000")
        Assertion.verity(json.loads(rs)['msg'], "productTemplateUuid 不能为空!")
    def test_020_api_78dk_platform_tm_incoming_delTemplate_not_exits(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Delete intake template: unknown uuid is rejected
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_delTemplate(producttemplateuuid=fake.ean8())
        Assertion.verity(json.loads(rs)['code'], "20000")
        Assertion.verity(json.loads(rs)['msg'], "productTemplateUuid 不合法!")
    def test_021_api_78dk_platform_tm_incoming_addOrEditTemplate_two(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Add a second intake template (new) to be deleted in test_025
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_addOrEditTemplate(
            remark='', sysdata=sysdata, templatename=del_template_name, templatetype='template_type_incoming_parts',
            producttemplateuuid='')
        Assertion.verity(json.loads(rs)['code'], "10000")
        Assertion.verity(json.loads(rs)['msg'], "成功")
        Assertion.verityNotNone(json.loads(rs)['data']['productTemplateUuid'])
        global del_template_id
        del_template_id = json.loads(rs)['data']['productTemplateUuid']
    def test_022_api_78dk_platform_tm_incoming_templateList_two(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Query the template list: finds the template created in test_021
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_templateList(
            name=del_template_name, pagecurrent=1, pagesize=10)
        Assertion.verityNotNone(json.loads(rs)['data'])
        Assertion.verityContain(json.loads(rs)['data'], 'pageCurrent')
        Assertion.verityContain(json.loads(rs)['data'], 'dataList')
        Assertion.verityContain(json.loads(rs)['data'], 'pageSize')
        Assertion.verityNotNone(json.loads(rs)['data']['dataList'])
        Assertion.verityContain(json.loads(rs)['data']['dataList'], 'created')
        Assertion.verityContain(json.loads(rs)['data']['dataList'], 'productTemplateUuid')
        Assertion.verity(json.loads(rs)['data']['dataList'][0]['templateName'], del_template_name)
        Assertion.verity(json.loads(rs)['data']['dataList'][0]['templateType'], 'template_type_incoming_parts')
        Assertion.verity(json.loads(rs)['data']['dataList'][0]['productTemplateUuid'], del_template_id)
    def test_023_api_78dk_platform_tm_incoming_delTemplate_none(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Delete intake template: empty uuid is rejected (repeat check)
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_delTemplate(producttemplateuuid='')
        Assertion.verity(json.loads(rs)['code'], "20000")
        Assertion.verity(json.loads(rs)['msg'], "productTemplateUuid 不能为空!")
    def test_024_api_78dk_platform_tm_incoming_delTemplate_not_exits(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Delete intake template: unknown uuid is rejected (repeat check)
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_delTemplate(producttemplateuuid=fake.ean8())
        Assertion.verity(json.loads(rs)['code'], "20000")
        Assertion.verity(json.loads(rs)['msg'], "productTemplateUuid 不合法!")
    def test_025_api_78dk_platform_tm_incoming_delTemplate(self):
        """
        Time :2019-06-05
        author : Luo Lin
        desc : Delete intake template: happy path for the template from test_021
        """
        rs = WebAction.test_api_78dk_platform_tm_incoming_delTemplate(producttemplateuuid=del_template_id)
        Assertion.verity(json.loads(rs)['code'], "10000")
        Assertion.verity(json.loads(rs)['msg'], "成功")
| [
"[email protected]"
]
| |
71f9fd478ec198cda1f30d14323c68bd03250659 | ba916d93dfb8074241b0ea1f39997cb028509240 | /kickstart/2020/RoundD/record_breaker.py | 270cc231f01a90a63cf18de06cdfa30e3bce0042 | []
| no_license | satojkovic/algorithms | ecc1589898c61d2eef562093d3d2a9a2d127faa8 | f666b215bc9bbdab2d2257c83ff1ee2c31c6ff8e | refs/heads/master | 2023-09-06T08:17:08.712555 | 2023-08-31T14:19:01 | 2023-08-31T14:19:01 | 169,414,662 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 472 | py | T = int(input())
# Google Kick Start 2020 Round D "Record Breaker": a day is record breaking
# if its visitor count beats every previous day AND the following day.
# ``T`` (number of test cases) is read on the script's first line.
for t in range(1, T + 1):
    N = int(input())
    visitors = list(map(int, input().split(' ')))
    prev_record = 0  # running maximum over all previous days
    res = 0          # count of record-breaking days
    for i in range(N):
        # Day 0 trivially beats the empty prefix; otherwise must exceed
        # the maximum of every earlier day.
        greater_than_prev = i == 0 or visitors[i] > prev_record
        # The last day trivially beats the (nonexistent) next day.
        greater_than_next = i == N - 1 or visitors[i] > visitors[i + 1]
        res = res + 1 if greater_than_prev and greater_than_next else res
        prev_record = max(prev_record, visitors[i])
    print('Case #{}: {}'.format(t, res))
| [
"[email protected]"
]
| |
a2e53cd61b75aa7768b090cc1df2d8557dfba982 | 45eb1b25bf72d7c88a57fec5bb4bc5336c04f5ba | /reckon/loc.py | 7f151997499dd7b720cb8586a16c4597bc41d887 | [
"MIT"
]
| permissive | seandstewart/reckon | b10faece45e4c1ede5fa1c7e416179e7d1e68142 | ddddb2b0d881e1226075d9eefdcef580826da750 | refs/heads/master | 2021-06-21T06:38:33.649537 | 2021-03-06T21:28:32 | 2021-03-06T21:28:32 | 193,951,846 | 2 | 0 | MIT | 2021-03-06T21:28:33 | 2019-06-26T17:44:43 | Python | UTF-8 | Python | false | false | 1,632 | py | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
import collections
import threading
from typing import Callable
try:
from reprlib import repr
except ImportError:
pass
from . import protos
class LocalCache(protos.ProtoCache):
    """A Localized cache.
    Can be implemented as a globalized cache by initializing at the top-level of a module.
    """
    def __init__(
        self,
        *,
        target_usage: float = None,
        strategy: protos.CacheStrategy = protos.CacheStrategy.DYN
    ):
        """Initialize an empty cache.

        Args:
            target_usage: desired memory-usage ratio; ``None`` keeps the
                class-level ``TARGET_RATIO`` inherited from ``ProtoCache``.
            strategy: cache-shrinking strategy (defaults to ``DYN``).
        """
        self._lock = threading.RLock()
        with self._lock:
            self.TARGET_RATIO = (
                target_usage if target_usage is not None else self.TARGET_RATIO
            )
            self._cache = dict()  # the actual key -> value store
            self._locks = collections.defaultdict(threading.RLock)  # per-key locks
            self._hits = 0    # lookup hit counter for info()/stats
            self._misses = 0  # lookup miss counter
            self.strategy = strategy
    # The mapping/cache protocol is implemented by shared functions from
    # ``protos`` bound here as methods.
    __getitem__ = protos.cache_getitem
    get = protos.cache_get
    keys = protos.cache_keys
    values = protos.cache_values
    items = protos.cache_items
    info = protos.cache_info
    clear = protos.clear_cache
    size = protos.cache_size
    usage = protos.memory_usage_ratio
    memoize = protos.memoize
    set_target_usage = protos.set_target_memory_use_ratio
    # Assigned on init.
    shrink = protos.shrink
def memoize(
    _func: Callable = None,
    *,
    target_usage: float = LocalCache.TARGET_RATIO,
    strategy: protos.CacheStrategy = protos.CacheStrategy.DYN
) -> Callable:
    """Memoize a function with a dedicated :class:`LocalCache` behind it.

    Works both bare (``@memoize``) and parameterized
    (``@memoize(target_usage=0.8)``).
    """
    backing = LocalCache(target_usage=target_usage, strategy=strategy)
    decorator = backing.memoize
    if not _func:
        # Parameterized usage: hand back the decorator itself.
        return decorator
    # Bare usage: decorate immediately.
    return decorator(_func)
| [
"[email protected]"
]
| |
774ff8553c11fe3a6fd04dbb2d0f46c015f512b3 | 97be97cfc56fb2170b60b91063dbfe5f1449e3c0 | /python/ABC179/B.py | 53752a55a98e2fa669ae9c197b305f57a1d95a7f | []
| no_license | iWonder118/atcoder | 73d965a0a9ade189733808e47634f2b7776aad4b | 3ab7271e838a2903ff0e07f94015ef13c59577e1 | refs/heads/master | 2022-01-25T10:10:55.007340 | 2021-12-31T14:04:54 | 2021-12-31T14:04:54 | 245,155,997 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 272 | py | n = int(input())
# AtCoder ABC179 B: the player wins if doubles (both dice showing the same
# number) occur at least three times in a row.
# Assumes ``n`` (the number of rolls) was read on the script's first line.
results = [list(map(int, input().split())) for _ in range(n)]
ans = 0  # length of the current run of doubles
for i in range(n):
    if ans >= 3:
        break  # three consecutive doubles already found
    if results[i][0] == results[i][1]:
        ans += 1
    else:
        ans = 0  # run broken, restart the count
if ans >= 3:
    print("Yes")
else:
    print("No")
| [
"[email protected]"
]
| |
b4b369b2625b316d54996745d9eab2a7ccae7b52 | 73145f3548feb0812dde986242773f7d446e487f | /tests/tests.py | 9da95dc88c2c111c8459f6c5975e43edaa44c135 | [
"BSD-3-Clause"
]
| permissive | lookup/django-redis-sessions | 9d4c31d71d1fb5d552b702e961066848e6443b9f | c9a1d3712d59d0fc972c9463e7718f7202cab41b | refs/heads/master | 2021-01-17T23:28:53.249603 | 2013-03-11T22:40:33 | 2013-03-11T22:40:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,878 | py | import time
from nose.tools import eq_
from django.utils.importlib import import_module
from django.conf import settings
# One session-store instance under test, shared (and mutated) by every test
# below; which backend is used comes from Django's SESSION_ENGINE setting.
redis_session = import_module(settings.SESSION_ENGINE).SessionStore()
def test_modify_and_keys():
    """Setting a key flips ``modified`` and stores the value."""
    eq_(redis_session.modified, False)
    redis_session['test'] = 'test_me'
    eq_(redis_session.modified, True)
    eq_(redis_session['test'], 'test_me')
def test_save_and_delete():
    """A saved session exists in the backend until it is deleted."""
    redis_session['key'] = 'value'
    redis_session.save()
    eq_(redis_session.exists(redis_session.session_key), True)
    redis_session.delete(redis_session.session_key)
    eq_(redis_session.exists(redis_session.session_key), False)
def test_flush():
    """``flush()`` discards the stored session under its old key."""
    redis_session['key'] = 'another_value'
    redis_session.save()
    key = redis_session.session_key  # remember the key before it rotates
    redis_session.flush()
    eq_(redis_session.exists(key), False)
def test_items():
    """``items()`` returns the stored key/value pairs."""
    redis_session['item1'], redis_session['item2'] = 1, 2
    redis_session.save()
    # Sort for a deterministic comparison across Python versions.
    eq_(sorted(list(redis_session.items())), [('item1', 1), ('item2', 2)])
def test_expiry():
    """A one-second expiry is honoured: the session vanishes once it elapses."""
    redis_session.set_expiry(1)
    # Test if the expiry age is set correctly
    eq_(redis_session.get_expiry_age(), 1)
    redis_session['key'] = 'expiring_value'
    redis_session.save()
    key = redis_session.session_key
    eq_(redis_session.exists(key), True)
    time.sleep(2)  # wait past the 1s TTL
    eq_(redis_session.exists(key), False)
def test_save_and_load():
    """A saved value round-trips through ``load()``."""
    redis_session.set_expiry(60)
    redis_session.setdefault('item_test', 8)
    redis_session.save()
    session_data = redis_session.load()
    eq_(session_data.get('item_test'), 8)
# def test_load():
# redis_session.set_expiry(60)
# redis_session['item1'], redis_session['item2'] = 1,2
# redis_session.save()
# session_data = redis_session.server.get(redis_session.session_key)
# expiry, data = int(session_data[:15]), session_data[15:]
| [
"[email protected]"
]
| |
b13efcc972876375f804eb0816b1849ab2f0fd26 | 00ef8e1eb57b73427508b20aadf0266da6b1f900 | /examples/gdev/any.py | 7e9c6d05c4ea3e8b94d091aebc5b144a1fdb9e4d | []
| no_license | amy12xx/rl-toolkit | f4643935cc8afd960356bfeae74c233d2596dea9 | 8254df8346752ea0226ae2064cc1eabc839567b0 | refs/heads/master | 2023-08-14T00:56:52.270642 | 2021-09-28T15:59:32 | 2021-09-28T15:59:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,167 | py | import sys
sys.path.insert(0, './')
import os
import os.path as osp
import torch
import numpy as np
import argparse
import string
import random
import datetime
from garage import wrap_experiment
from garage.experiment.deterministic import set_seed
from garage.torch.algos import PPO
from garage.torch.policies import GaussianMLPPolicy
from garage.torch.value_functions import GaussianMLPValueFunction
from garage.trainer import Trainer
import pybullet_envs # noqa: F401 # pylint: disable=unused-import
import pybulletgym
from garage.torch import set_gpu_mode
from garage.sampler import LocalSampler
from garage.sampler import VecWorker
from garage.sampler import DefaultWorker
from garage.sampler import MultiprocessingSampler
from garage.envs import GymEnv, normalize
from garage.torch.algos import SAC
from garage.torch.policies import TanhGaussianMLPPolicy
from garage.torch.q_functions import ContinuousMLPQFunction
from garage.replay_buffer import PathBuffer
from garage.torch.optimizers import OptimizerWrapper
from torch import nn
from torch.nn import functional as F
from rlf.garage.auto_arg import convert_to_args, convert_kwargs
from rlf.args import str2bool
from rlf.exp_mgr import config_mgr
from dowel import logger
from rlf.garage.wb_logger import WbOutput
from rlf.garage.std_logger import StdLogger
def setup_def_parser():
    """Build the command-line parser with the options shared by all algorithms."""
    p = argparse.ArgumentParser()
    p.add_argument('--seed', type=int, default=1)
    p.add_argument('--n-epochs', type=int, default=1000)
    p.add_argument('--batch-size', type=int, default=1000)
    p.add_argument('--hidden-dim', type=int, default=256)
    p.add_argument('--log-interval', type=int, default=10)
    p.add_argument('--depth', type=int, default=2)
    p.add_argument('--env-name', type=str, required=True)
    p.add_argument('--prefix', type=str, default='debug')
    p.add_argument('--env-norm', type=str2bool, default=False)
    p.add_argument('--cuda', type=str2bool, default=False)
    p.add_argument('--no-wb', action='store_true', default=False)
    p.add_argument('--alg', type=str, required=True)
    return p
def ppo_args(parser):
    """Register PPO-specific CLI flags, plus flags auto-derived from PPO's signature."""
    convert_to_args(PPO, parser)
    parser.add_argument('--policy-lr', type=float, default=3e-4)
    parser.add_argument('--vf-lr', type=float, default=3e-4)
    # NOTE(review): the minibatch options are declared as floats -- presumably
    # tolerated downstream, but int would be the natural type; confirm.
    parser.add_argument('--n-minibatches', type=float, default=10)
    parser.add_argument('--minibatch-size', type=float, default=None)
def ppo_setup(env, trainer, args):
    """Build a garage PPO algorithm (tanh-MLP Gaussian policy + Gaussian MLP
    value function) and register it on *trainer* with a vectorized sampler.

    Returns the constructed PPO instance.
    """
    policy = GaussianMLPPolicy(env.spec,
                               hidden_sizes=[args.hidden_dim]*args.depth,
                               hidden_nonlinearity=torch.tanh,
                               output_nonlinearity=None)
    value_function = GaussianMLPValueFunction(env_spec=env.spec,
                                              hidden_sizes=[args.hidden_dim]*args.depth,
                                              hidden_nonlinearity=torch.tanh,
                                              output_nonlinearity=None)
    # Remaining PPO keyword arguments come straight from the CLI flags that
    # were auto-registered from PPO's signature (see ppo_args).
    algo = PPO(env_spec=env.spec,
               policy=policy,
               value_function=value_function,
               policy_optimizer=OptimizerWrapper(
                   (torch.optim.Adam, dict(lr=args.policy_lr)),
                   policy,
                   max_optimization_epochs=args.n_minibatches,
                   minibatch_size=args.minibatch_size),
               vf_optimizer=OptimizerWrapper(
                   (torch.optim.Adam, dict(lr=args.vf_lr)),
                   value_function,
                   max_optimization_epochs=args.n_minibatches,
                   minibatch_size=args.minibatch_size),
               **convert_kwargs(args, PPO))
    # Vectorized on-policy sampling over 8 parallel envs.
    trainer.setup(algo, env, sampler_cls=LocalSampler, worker_class=VecWorker,
                  worker_args={'n_envs': 8})
    return algo
def sac_args(parser):
    """Register SAC-specific CLI flags, plus flags auto-derived from SAC's signature."""
    convert_to_args(SAC, parser)
    parser.add_argument('--buffer-size', type=float, default=1e6)
    parser.add_argument('--gradient-steps-per-itr', type=int, default=1000)
def sac_setup(env, trainer, args):
    """Build a garage SAC algorithm (tanh-Gaussian policy + twin Q-functions)
    and register it on *trainer*.

    Returns the constructed SAC instance.
    """
    policy = TanhGaussianMLPPolicy(
        env_spec=env.spec,
        hidden_sizes=[args.hidden_dim]*args.depth,
        hidden_nonlinearity=nn.ReLU,
        output_nonlinearity=None,
        min_std=np.exp(-20.),
        max_std=np.exp(2.),
    )
    qf1 = ContinuousMLPQFunction(env_spec=env.spec,
                                 hidden_sizes=[args.hidden_dim]*args.depth,
                                 hidden_nonlinearity=F.relu)
    qf2 = ContinuousMLPQFunction(env_spec=env.spec,
                                 hidden_sizes=[args.hidden_dim]*args.depth,
                                 hidden_nonlinearity=F.relu)
    # NOTE(review): replay_buffer is constructed but never handed to SAC
    # below -- confirm convert_kwargs(args, SAC) supplies one; otherwise this
    # is dead code or a missing keyword argument.
    replay_buffer = PathBuffer(capacity_in_transitions=int(args.buffer_size))
    sac = SAC(env_spec=env.spec,
              policy=policy,
              qf1=qf1,
              qf2=qf2,
              **convert_kwargs(args, SAC))
    trainer.setup(algo=sac, env=env, sampler_cls=LocalSampler)
    return sac
# Maps each --alg value to its (argument-registration, algorithm-construction)
# hook pair; see setup_launcher for how the pair is consumed.
USE_FNS = {
    'ppo': (ppo_args, ppo_setup),
    'sac': (sac_args, sac_setup),
}
def get_env_id(args):
    """Short identifier for ``args.env_name``: its capital letters, or, when
    the name has none, the first letter of each dot-separated component."""
    caps = ''.join(c for c in args.env_name if c.isupper())
    if caps:
        return caps
    return ''.join(part[0] for part in args.env_name.split("."))
def create_prefix(args):
    """Derive the run name ``<MMDD>-<envid>-<seed>-<rand>-<prefix>``.

    The literal prefixes ``'debug'`` and ``'NONE'`` pass through unchanged.
    """
    assert args.prefix is not None and args.prefix != '', 'Must specify a prefix'
    today = datetime.datetime.today()
    date_id = '%i%i' % (today.month, today.day)
    env_id = get_env_id(args)
    # Two random alphanumeric characters to disambiguate same-day runs.
    rnd_id = ''.join(random.sample(
        string.ascii_uppercase + string.digits, k=2))
    if args.prefix == 'debug' or args.prefix == 'NONE':
        return args.prefix
    prefix = '%s-%s-%s-%s-%s' % (date_id, env_id, args.seed, rnd_id, args.prefix)
    print('Assigning full prefix %s' % prefix)
    return prefix
def setup_launcher():
    """Parse the base CLI args, pick the algorithm hooks, and return the
    garage ``wrap_experiment``-decorated training entry point."""
    # Experiment config lives next to this file.
    config_dir = osp.dirname(osp.realpath(__file__))
    config_path = osp.join(config_dir, 'config.yaml')
    config_mgr.init(config_path)
    parser = setup_def_parser()
    # First parse the regular args
    base_args, _ = parser.parse_known_args()
    get_args, get_algo = USE_FNS[base_args.alg]
    use_prefix = create_prefix(base_args)
    @wrap_experiment(archive_launch_repo=False, snapshot_mode='none', name=use_prefix)
    def alg_train(ctxt=None):
        # Register the algorithm-specific flags, then re-parse everything.
        get_args(parser)
        args = parser.parse_args()
        args.prefix = use_prefix
        set_seed(args.seed)
        env = GymEnv(args.env_name)
        if args.env_norm:
            env = normalize(env)
        trainer = Trainer(ctxt)
        # Replace garage's default outputs with our own (and W&B unless
        # disabled via --no-wb).
        logger.remove_all()
        logger.add_output(StdLogger(args.log_interval))
        if not args.no_wb:
            wb_logger = WbOutput(args.log_interval, base_args)
            logger.add_output(wb_logger)
        algo = get_algo(env, trainer, args)
        if args.cuda:
            set_gpu_mode(True)
            algo.to()  # move algorithm parameters onto the GPU
        else:
            set_gpu_mode(False)
        trainer.train(n_epochs=args.n_epochs, batch_size=args.batch_size)
    return alg_train
# Build the wrapped experiment entry point and run it immediately.
launcher = setup_launcher()
launcher()
| [
"[email protected]"
]
| |
19ffe8a397c4cba7402ef7600fb331973f268134 | 6b6e20004b46165595f35b5789e7426d5289ea48 | /release.py | a0439d9a8953d67b942543862779c0ee771d69e8 | [
"Apache-2.0"
]
| permissive | anwarchk/quay | 2a83d0ab65aff6a1120fbf3a45dd72f42211633b | 23c5120790c619174e7d36784ca5aab7f4eece5c | refs/heads/master | 2020-09-12T18:53:21.093606 | 2019-11-15T19:29:02 | 2019-11-15T19:29:02 | 222,517,145 | 0 | 0 | Apache-2.0 | 2019-11-18T18:32:35 | 2019-11-18T18:32:35 | null | UTF-8 | Python | false | false | 304 | py | import os
# Path of the optional GIT_HEAD file written at build time, next to this module.
_GIT_HEAD_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'GIT_HEAD')
SERVICE = 'quay'
GIT_HEAD = None  # git revision of this build, if known
REGION = os.environ.get('QUAY_REGION')
# Load git head if available
if os.path.isfile(_GIT_HEAD_PATH):
  with open(_GIT_HEAD_PATH) as f:
    GIT_HEAD = f.read().strip()
| [
"[email protected]"
]
| |
d75c9bd6b13a1685e997c9bff89e619dfbad9617 | 4b41a76c5c366ba2daa30843acea16609b8f5da7 | /2017/19/AoC17_19_2.py | 63d0bff264fc8eed2403e85ab2ba25522ce454cd | []
| no_license | grandfoosier/AdventOfCode | c4706cfefef61e80060cca89b0433636e42bf974 | a43fdd72fe4279196252f24a4894500a4e272a5d | refs/heads/master | 2020-06-11T12:36:48.699811 | 2019-01-14T23:44:44 | 2019-01-14T23:44:44 | 75,665,958 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,251 | py | import copy
class Packet(object):
    """Advent of Code 2017 day 19 (part 2): follow the routing diagram in
    AoC17_19_1.txt and count the total number of steps taken (Python 2)."""
    def __init__(self):
        fname = "AoC17_19_1.txt"
        # One string per diagram row, trailing newlines stripped.
        self.paths = [line.rstrip("\n") for line in open(fname)]
        self.dir = 'D'  # current heading: U/R/D/L; 'X' means the path ended
        # Start on the top row at the single '|' entry point.
        self.pos = (0, self.paths[0].find('|'))
        self.markers = ''  # letters collected along the way (part 1 answer)
        self.steps = 1; self.last = copy.copy(self.pos)
    def _turn(self):
        # Bank the straight-line distance walked since the previous turn,
        # then pick the one perpendicular neighbour that continues the path.
        # NOTE(review): the bare excepts swallow IndexError at the grid
        # edges; y-1 == -1 also wraps to the LAST row -- presumably the
        # diagram's blank border makes that harmless.  Verify.
        (y, x) = self.pos; (b, a) = self.last
        self.steps += abs(y - b) + abs(x - a)
        self.last = copy.copy(self.pos)
        try:
            if self.dir != 'D' and self.paths[y-1][x] != ' ': return 'U'
        except: pass
        try:
            if self.dir != 'L' and self.paths[y][x+1] != ' ': return 'R'
        except: pass
        try:
            if self.dir != 'U' and self.paths[y+1][x] != ' ': return 'D'
        except: pass
        try:
            if self.dir != 'R' and self.paths[y][x-1] != ' ': return 'L'
        except: pass
        return 'X'  # nowhere left to go: end of the diagram
    def _check(self):
        # Keep heading straight while the next cell is on the path; hand
        # off to _turn() when it is blank or off the grid.
        (y, x) = self.pos
        try:
            if self.dir == 'U' and self.paths[y-1][x] == ' ':
                return self._turn()
        except: return self._turn()
        try:
            if self.dir == 'R' and self.paths[y][x+1] == ' ':
                return self._turn()
        except: return self._turn()
        try:
            if self.dir == 'D' and self.paths[y+1][x] == ' ':
                return self._turn()
        except: return self._turn()
        try:
            if self.dir == 'L' and self.paths[y][x-1] == ' ':
                return self._turn()
        except: return self._turn()
        return self.dir
    def _move(self):
        # Step one cell in the current direction, recording any letter found
        # on the cell being left behind.
        (y, x) = self.pos
        if self.dir == 'U': self.pos = (y-1, x)
        elif self.dir == 'R': self.pos = (y, x+1)
        elif self.dir == 'D': self.pos = (y+1, x)
        elif self.dir == 'L': self.pos = (y, x-1)
        if self.paths[y][x] not in ['|','-','+']:
            self.markers += self.paths[y][x]
    def follow_path(self):
        """Walk the diagram until no continuation exists; return total steps."""
        while self.dir != 'X':
            self.dir = self._check()
            self._move()
        # Account for the final straight segment that never ended in a turn.
        (y, x) = self.pos; (b, a) = self.last
        self.steps += abs(y - b) + abs(x - a)
        self.last = copy.copy(self.pos)
        return self.steps
# Script entry (Python 2 print statements): trace the diagram and report steps.
P = Packet()
print ""
print P.follow_path()
print "\n"
| [
"[email protected]"
]
| |
097a4a080d568e5c73ed850c08f8373fdda5d37a | 2923b9f58e6a143a3e070169612165585c301def | /high_f/la_habra_large_100120/test_small_awp_uni/make_receiver.py | b335a3279a0c6760c860769fdea74c0c36897b26 | []
| no_license | hzfmer/summit_work_021421 | 16536dd716519bc9244da60007b9061ef5403429 | 6981b359fefb2af22e0bea6c47511de16cad22bd | refs/heads/master | 2023-03-11T15:34:36.418971 | 2021-02-05T23:22:10 | 2021-02-05T23:22:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 37 | py | ../test_small_awp_dm/make_receiver.py | [
"[email protected]"
]
| |
a388e444b8e3f5e81960dffe79afb5a395d44c5c | a25e2aa102ffe9c2d9b553252a1882fe5a9d7ec9 | /SprityBird/spritybird/python3.5/lib/python3.5/site-packages/openpyxl/formatting/formatting.py | ca562b09e2a12ddee21fe8ca8c472413569d9744 | [
"MIT"
]
| permissive | MobileAnalytics/iPython-Framework | f96ebc776e763e6b4e60fb6ec26bb71e02cf6409 | da0e598308c067cd5c5290a6364b3ffaf2d2418f | refs/heads/master | 2020-03-22T06:49:29.022949 | 2018-07-04T04:22:17 | 2018-07-04T04:22:17 | 139,660,631 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,501 | py | from __future__ import absolute_import
# Copyright (c) 2010-2016 openpyxl
from openpyxl.compat import iteritems, OrderedDict, deprecated
from openpyxl.styles.differential import DifferentialStyle
from .rule import Rule
def unpack_rules(cfRules):
    """Flatten the range -> rules mapping into
    ``(range_string, index_within_range, priority)`` triples."""
    for cell_range, rule_list in iteritems(cfRules):
        for position, rule in enumerate(rule_list):
            yield (cell_range, position, rule.priority)
class ConditionalFormatting(object):
    """Conditional formatting rules."""
    def __init__(self):
        # range string -> list of Rule objects, kept in insertion order
        self.cf_rules = OrderedDict()
        self.max_priority = 0  # highest priority handed out so far
    def add(self, range_string, cfRule):
        """Add a rule such as ColorScaleRule, FormulaRule or CellIsRule
        The priority will be added automatically.
        """
        if not isinstance(cfRule, Rule):
            raise ValueError("Only instances of openpyxl.formatting.rule.Rule may be added")
        rule = cfRule
        self.max_priority += 1
        rule.priority = self.max_priority
        self.cf_rules.setdefault(range_string, []).append(rule)
    def _fix_priorities(self):
        # Renumber every rule 1..n in ascending order of current priority so
        # priorities stay dense and unique across all ranges.
        rules = unpack_rules(self.cf_rules)
        rules = sorted(rules, key=lambda x: x[2])
        for idx, (key, rule_no, prio) in enumerate(rules, 1):
            self.cf_rules[key][rule_no].priority = idx
        self.max_priority = len(rules)
    @deprecated("Always use Rule objects")
    def update(self, cfRules):
        pass
    @deprecated("Conditionl Formats are saved automatically")
    def setDxfStyles(self, wb):
        pass
| [
"[email protected]"
]
| |
92258abe93cc42c815b9fcfd2422f11e6f2e3c37 | 62e58c051128baef9452e7e0eb0b5a83367add26 | /edifact/D09B/MOVINSD09BUN.py | 5baaa0f135f7639211744f4c0fe1978fec2ced42 | []
| no_license | dougvanhorn/bots-grammars | 2eb6c0a6b5231c14a6faf194b932aa614809076c | 09db18d9d9bd9d92cefbf00f1c0de1c590fe3d0d | refs/heads/master | 2021-05-16T12:55:58.022904 | 2019-05-17T15:22:23 | 2019-05-17T15:22:23 | 105,274,633 | 0 | 0 | null | 2017-09-29T13:21:21 | 2017-09-29T13:21:21 | null | UTF-8 | Python | false | false | 1,740 | py | #Generated by bots open source edi translator from UN-docs.
from bots.botsconfig import *
from edifact import syntax
from recordsD09BUN import recorddefs
# Segment structure table for the UN/EDIFACT D09B MOVINS (stowage
# instruction) message, consumed by the bots translator engine.  Each record
# is {ID, MIN, MAX[, LEVEL]}, where LEVEL nests the segment group.
structure = [
    {ID: 'UNH', MIN: 1, MAX: 1, LEVEL: [
        {ID: 'BGM', MIN: 1, MAX: 1},
        {ID: 'DTM', MIN: 1, MAX: 1},
        {ID: 'RFF', MIN: 0, MAX: 9, LEVEL: [
            {ID: 'DTM', MIN: 0, MAX: 9},
        ]},
        {ID: 'NAD', MIN: 0, MAX: 9, LEVEL: [
            {ID: 'CTA', MIN: 0, MAX: 9, LEVEL: [
                {ID: 'COM', MIN: 0, MAX: 9},
            ]},
        ]},
        {ID: 'TDT', MIN: 1, MAX: 3, LEVEL: [
            {ID: 'LOC', MIN: 1, MAX: 1},
            {ID: 'DTM', MIN: 1, MAX: 99},
            {ID: 'RFF', MIN: 0, MAX: 1},
            {ID: 'FTX', MIN: 0, MAX: 1},
        ]},
        {ID: 'HAN', MIN: 0, MAX: 99999, LEVEL: [
            {ID: 'LOC', MIN: 0, MAX: 99999, LEVEL: [
                {ID: 'RFF', MIN: 1, MAX: 99},
                {ID: 'FTX', MIN: 0, MAX: 9},
                {ID: 'MEA', MIN: 0, MAX: 9},
                {ID: 'DIM', MIN: 0, MAX: 9},
                {ID: 'LOC', MIN: 0, MAX: 9},
                {ID: 'NAD', MIN: 1, MAX: 99},
                {ID: 'TMP', MIN: 0, MAX: 1, LEVEL: [
                    {ID: 'RNG', MIN: 0, MAX: 1},
                ]},
                {ID: 'EQD', MIN: 0, MAX: 99, LEVEL: [
                    {ID: 'EQN', MIN: 0, MAX: 1},
                ]},
                {ID: 'EQA', MIN: 0, MAX: 99, LEVEL: [
                    {ID: 'EQN', MIN: 0, MAX: 1},
                ]},
                {ID: 'GID', MIN: 0, MAX: 9999, LEVEL: [
                    {ID: 'GDS', MIN: 0, MAX: 1},
                ]},
                {ID: 'RFF', MIN: 0, MAX: 999, LEVEL: [
                    {ID: 'DGS', MIN: 1, MAX: 99, LEVEL: [
                        {ID: 'FTX', MIN: 0, MAX: 1},
                    ]},
                ]},
            ]},
        ]},
        {ID: 'UNT', MIN: 1, MAX: 1},
    ]},
]
| [
"[email protected]"
]
| |
f19254d602637fe894fafb4102e6d9b28bd124df | 19d83ef36909a6d830e2e41af05102b19186ebbd | /memory.py | 15a7965920fc4d8558bf68344a4db9f3ed4f597c | []
| no_license | ChrisProgramming2018/BTD3_Implemenation | 93637b2262b86e7ad19d048127d83da57c9c7508 | 9409cd472ca406c118a45ab60414a070f1a5f709 | refs/heads/master | 2023-01-06T15:30:56.171425 | 2020-10-28T06:58:06 | 2020-10-28T06:58:06 | 295,357,224 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,049 | py | import numpy as np
import random
class ReplayBuffer(object):
    """Fixed-capacity store of RL transitions.

    Each transition is a (state, next_state, action, reward, done) tuple.
    Supports uniform random mini-batch sampling and retrieval of the k
    most recently added transitions, stacked as numpy arrays.
    """

    def __init__(self, max_size=1e6):
        self.storage = []    # transitions; recycled in place once full
        self.max_size = max_size
        self.ptr = 0         # overwrite cursor (advances only at capacity)
        self.k = 0           # number of transitions added since creation

    def add(self, transition):
        """Insert one transition, overwriting the oldest slot when full."""
        self.k += 1
        if len(self.storage) == self.max_size:
            self.storage[int(self.ptr)] = transition
            self.ptr = (self.ptr + 1) % self.max_size
        else:
            self.storage.append(transition)

    def _gather(self, indices):
        # Collect the addressed transitions and stack each component.
        rows = [self.storage[i] for i in indices]
        states = np.array([np.array(r[0], copy=False) for r in rows])
        next_states = np.array([np.array(r[1], copy=False) for r in rows])
        actions = [np.array(r[2], copy=False) for r in rows]
        rewards = np.array([np.array(r[3], copy=False) for r in rows]).reshape(-1, 1)
        dones = np.array([np.array(r[4], copy=False) for r in rows]).reshape(-1, 1)
        return states, next_states, actions, rewards, dones

    def sample(self, batch_size):
        """Return a uniformly sampled mini-batch (actions as a plain list)."""
        picks = np.random.randint(0, len(self.storage), size=batch_size)
        return self._gather(picks)

    def get_last_k_trajectories(self):
        """Return the k most recently added transitions, all stacked."""
        # Indices may go negative before the buffer wraps; Python's
        # negative list indexing makes that address the tail correctly.
        states, next_states, actions, rewards, dones = self._gather(
            range(self.ptr - self.k, self.ptr))
        return states, next_states, np.array(actions), rewards, dones
| [
"[email protected]"
]
| |
d0a55f6475bbbb29dcd260910386e57a71f8243f | 0316925e2bad29d60f0dcccdf91277fd8f03ef09 | /q034.py | 87da5609fd58d81457cd3687dc67218378f6df5f | []
| no_license | nomadlife/project-euler | 426614df7b2c9368a4db59954dc1df2902a44d6b | 9bc09843637a361fa93c7abb20ac990f973b08e5 | refs/heads/master | 2021-07-03T00:52:07.760948 | 2021-06-26T14:28:32 | 2021-06-26T14:28:32 | 100,214,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 520 | py | # Q034 Digit factorials, all,
import time
start = time.time()
print("expected calculation time : 2 min")
def factorial(num):
    """Return num! as a running product over 2..num (1 when num < 2)."""
    product = 1
    for factor in range(2, num + 1):
        product *= factor
    return product
# Brute-force search: report every number below 10**7 that equals the sum
# of the factorials of its own digits.
for i in range(1,10000000):
    total=0
    for j in str(i):
        total += factorial(int(j))
    if total == i:
        print("loop:{} factorial_sum:{} True".format(i,total))
# maximum range proof: an n-digit number of all 9s vs. n * 9! — illustrates
# where the digit-factorial sum can no longer keep up with the number itself,
# justifying the 10**7 search bound above.
for i in range(1,9):
    print("9"*i, factorial(9)*i)
print("Calculation time:",time.time()-start)
| [
"[email protected]"
]
| |
f2f503660cb416b4276951b9021d361ae00bc5f8 | a56a74b362b9263289aad96098bd0f7d798570a2 | /venv/bin/jupyter-serverextension | ee3c04b05b9c0eeb104333a5e6409bd21d07588f | [
"MIT"
]
| permissive | yoonkt200/ml-theory-python | 5812d06841d30e1068f6592b5730a40e87801313 | 7643136230fd4f291b6e3dbf9fa562c3737901a2 | refs/heads/master | 2022-12-21T14:53:21.624453 | 2021-02-02T09:33:07 | 2021-02-02T09:33:07 | 132,319,537 | 13 | 14 | MIT | 2022-12-19T17:23:57 | 2018-05-06T08:17:45 | Python | UTF-8 | Python | false | false | 283 | #!/Users/A202009066/Documents/private-github/ml-theory-python/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from notebook.serverextensions import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
]
| ||
99b67ae8ecb8a2e0cc8954c7eff9606a673895a9 | 2c4efe2ce49a900c68348f50e71802994c84900a | /braindecode/braindecode/venv1/Lib/site-packages/numba/cuda/tests/cudapy/test_record_dtype.py | 954560f9407b4775bb115ed272f6bfd9403570c9 | [
"BSD-3-Clause",
"BSD-2-Clause"
]
| permissive | sisi2/Masterthesis | b508632526e82b23c2efb34729141bfdae078fa0 | 7ce17644af47db4ad62764ed062840a10afe714d | refs/heads/master | 2022-11-19T15:21:28.272824 | 2018-08-13T15:02:20 | 2018-08-13T15:02:20 | 131,345,102 | 2 | 1 | null | 2022-11-15T14:08:07 | 2018-04-27T21:09:21 | Python | UTF-8 | Python | false | false | 8,240 | py | from __future__ import print_function, division, absolute_import
import sys
import numpy as np
from numba import cuda, numpy_support, types
from numba import unittest_support as unittest
from numba.cuda.testing import skip_on_cudasim, SerialMixin
def set_a(ary, i, v):
ary[i].a = v
def set_b(ary, i, v):
ary[i].b = v
def set_c(ary, i, v):
ary[i].c = v
def set_record(ary, i, j):
ary[i] = ary[j]
def record_set_a(r, v):
r.a = v
def record_set_b(r, v):
r.b = v
def record_set_c(r, v):
r.c = v
def record_read_a(r, arr):
arr[0] = r.a
def record_read_b(r, arr):
arr[0] = r.b
def record_read_c(r, arr):
arr[0] = r.c
def record_write_array(r):
r.g = 2
r.h[0] = 3.0
r.h[1] = 4.0
def record_write_2d_array(r):
r.i = 3
r.j[0, 0] = 5.0
r.j[0, 1] = 6.0
r.j[1, 0] = 7.0
r.j[1, 1] = 8.0
r.j[2, 0] = 9.0
r.j[2, 1] = 10.0
def record_read_array(r, a):
a[0] = r.h[0]
a[1] = r.h[1]
def record_read_2d_array(r, a):
a[0, 0] = r.j[0, 0]
a[0, 1] = r.j[0, 1]
a[1, 0] = r.j[1, 0]
a[1, 1] = r.j[1, 1]
a[2, 0] = r.j[2, 0]
a[2, 1] = r.j[2, 1]
recordtype = np.dtype(
[
('a', np.float64),
('b', np.int32),
('c', np.complex64),
('d', (np.str, 5))
],
align=True
)
recordwitharray = np.dtype(
[
('g', np.int32),
('h', np.float32, 2)
],
align=True
)
recordwith2darray = np.dtype([('i', np.int32),
('j', np.float32, (3, 2))])
class TestRecordDtype(SerialMixin, unittest.TestCase):
def _createSampleArrays(self):
self.sample1d = np.recarray(3, dtype=recordtype)
self.samplerec1darr = np.recarray(1, dtype=recordwitharray)[0]
self.samplerec2darr = np.recarray(1, dtype=recordwith2darray)[0]
def setUp(self):
self._createSampleArrays()
ary = self.sample1d
for i in range(ary.size):
x = i + 1
ary[i]['a'] = x / 2
ary[i]['b'] = x
ary[i]['c'] = x * 1j
ary[i]['d'] = "%d" % x
def get_cfunc(self, pyfunc, argspec):
return cuda.jit()(pyfunc)
def _test_set_equal(self, pyfunc, value, valuetype):
rec = numpy_support.from_dtype(recordtype)
cfunc = self.get_cfunc(pyfunc, (rec[:], types.intp, valuetype))
for i in range(self.sample1d.size):
got = self.sample1d.copy()
# Force the argument to the pure Python function to be
# a recarray, as attribute access isn't supported on
# structured arrays.
if numpy_support.version <= (1, 9):
expect = np.recarray(got.shape, got.dtype)
expect[:] = got
else:
expect = got.copy().view(np.recarray)
cfunc(got, i, value)
pyfunc(expect, i, value)
# Match the entire array to ensure no memory corruption
self.assertTrue(np.all(expect == got))
def test_set_a(self):
self._test_set_equal(set_a, 3.1415, types.float64)
# Test again to check if coercion works
self._test_set_equal(set_a, 3., types.float32)
def test_set_b(self):
self._test_set_equal(set_b, 123, types.int32)
# Test again to check if coercion works
self._test_set_equal(set_b, 123, types.float64)
def test_set_c(self):
self._test_set_equal(set_c, 43j, types.complex64)
# Test again to check if coercion works
self._test_set_equal(set_c, 43j, types.complex128)
def test_set_record(self):
pyfunc = set_record
rec = numpy_support.from_dtype(recordtype)
cfunc = self.get_cfunc(pyfunc, (rec[:], types.intp, types.intp))
test_indices = [(0, 1), (1, 2), (0, 2)]
for i, j in test_indices:
expect = self.sample1d.copy()
pyfunc(expect, i, j)
got = self.sample1d.copy()
cfunc(got, i, j)
# Match the entire array to ensure no memory corruption
self.assertEqual(expect[i], expect[j])
self.assertEqual(got[i], got[j])
self.assertTrue(np.all(expect == got))
def _test_rec_set(self, v, pyfunc, f):
rec = self.sample1d.copy()[0]
nbrecord = numpy_support.from_dtype(recordtype)
cfunc = self.get_cfunc(pyfunc, (nbrecord,))
cfunc(rec, v)
np.testing.assert_equal(rec[f], v)
def test_rec_set_a(self):
self._test_rec_set(np.float64(1.5), record_set_a, 'a')
def test_rec_set_b(self):
self._test_rec_set(np.int32(2), record_set_b, 'b')
def test_rec_set_c(self):
self._test_rec_set(np.complex64(4.0+5.0j), record_set_c, 'c')
def _test_rec_read(self, v, pyfunc, f):
rec = self.sample1d.copy()[0]
rec[f] = v
arr = np.zeros(1, v.dtype)
nbrecord = numpy_support.from_dtype(recordtype)
cfunc = self.get_cfunc(pyfunc, (nbrecord,))
cfunc(rec, arr)
np.testing.assert_equal(arr[0], v)
def test_rec_read_a(self):
self._test_rec_read(np.float64(1.5), record_read_a, 'a')
def test_rec_read_b(self):
self._test_rec_read(np.int32(2), record_read_b, 'b')
def test_rec_read_c(self):
self._test_rec_read(np.complex64(4.0+5.0j), record_read_c, 'c')
def test_record_write_1d_array(self):
'''
Test writing to a 1D array within a structured type
'''
rec = self.samplerec1darr.copy()
nbrecord = numpy_support.from_dtype(recordwitharray)
cfunc = self.get_cfunc(record_write_array, (nbrecord,))
cfunc(rec)
expected = self.samplerec1darr.copy()
expected['g'] = 2
expected['h'][0] = 3.0
expected['h'][1] = 4.0
np.testing.assert_equal(expected, rec)
def test_record_write_2d_array(self):
'''
Test writing to a 2D array within a structured type
'''
rec = self.samplerec2darr.copy()
nbrecord = numpy_support.from_dtype(recordwith2darray)
cfunc = self.get_cfunc(record_write_2d_array, (nbrecord,))
cfunc(rec)
expected = self.samplerec2darr.copy()
expected['i'] = 3
expected['j'][:] = np.asarray([5.0, 6.0, 7.0, 8.0, 9.0, 10.0],
np.float32).reshape(3, 2)
np.testing.assert_equal(expected, rec)
def test_record_read_1d_array(self):
'''
Test reading from a 1D array within a structured type
'''
rec = self.samplerec1darr.copy()
rec['h'][0] = 4.0
rec['h'][1] = 5.0
nbrecord = numpy_support.from_dtype(recordwitharray)
cfunc = self.get_cfunc(record_read_array, (nbrecord,))
arr = np.zeros(2, dtype=rec['h'].dtype)
cfunc(rec, arr)
np.testing.assert_equal(rec['h'], arr)
def test_record_read_2d_array(self):
'''
Test reading from a 2D array within a structured type
'''
rec = self.samplerec2darr.copy()
rec['j'][:] = np.asarray([5.0, 6.0, 7.0, 8.0, 9.0, 10.0],
np.float32).reshape(3, 2)
nbrecord = numpy_support.from_dtype(recordwith2darray)
cfunc = self.get_cfunc(record_read_2d_array, (nbrecord,))
arr = np.zeros((3,2), dtype=rec['j'].dtype)
cfunc(rec, arr)
np.testing.assert_equal(rec['j'], arr)
@skip_on_cudasim('Attribute access of structured arrays not supported in simulator')
class TestRecordDtypeWithStructArrays(TestRecordDtype):
'''
Same as TestRecordDtype, but using structured arrays instead of recarrays.
'''
def _createSampleArrays(self):
self.sample1d = np.zeros(3, dtype=recordtype)
self.samplerec1darr = np.zeros(1, dtype=recordwitharray)[0]
self.samplerec2darr = np.zeros(1, dtype=recordwith2darray)[0]
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
2c2e336ab747dbdb1f5feca82e9d6644c1bdbe5d | 90f52d0348aa0f82dc1f9013faeb7041c8f04cf8 | /wxPython3.0 Docs and Demos/demo/PenAndBrushStyles.py | 62c8f033f0917a085babb249479e55f1e22d50ef | []
| no_license | resource-jason-org/python-wxPythonTool | 93a25ad93c768ca8b69ba783543cddf7deaf396b | fab6ec3155e6c1ae08ea30a23310006a32d08c36 | refs/heads/master | 2021-06-15T10:58:35.924543 | 2017-04-14T03:39:27 | 2017-04-14T03:39:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,093 | py | import wx
import images
pen_styles = ["wx.SOLID", "wx.TRANSPARENT", "wx.DOT", "wx.LONG_DASH",
"wx.SHORT_DASH", "wx.DOT_DASH", "wx.BDIAGONAL_HATCH",
"wx.CROSSDIAG_HATCH", "wx.FDIAGONAL_HATCH", "wx.CROSS_HATCH",
"wx.HORIZONTAL_HATCH", "wx.VERTICAL_HATCH", "wx.USER_DASH"]
if 'wxMSW' in wx.PlatformInfo:
pen_styles.append("wx.STIPPLE")
brush_styles = ["wx.SOLID", "wx.TRANSPARENT", "wx.STIPPLE", "wx.BDIAGONAL_HATCH",
"wx.CROSSDIAG_HATCH", "wx.FDIAGONAL_HATCH", "wx.CROSS_HATCH",
"wx.HORIZONTAL_HATCH", "wx.VERTICAL_HATCH"]
class BasePanel(wx.Panel):
def __init__(self, parent):
wx.Panel.__init__(self, parent, style=wx.SUNKEN_BORDER|wx.WANTS_CHARS)
self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
self.Bind(wx.EVT_SIZE, self.OnSize)
self.Bind(wx.EVT_PAINT, self.OnPaint)
def OnSize(self, event):
event.Skip()
self.Refresh()
class PenPanel(BasePanel):
def __init__(self, parent, pen_name):
BasePanel.__init__(self, parent)
self.pen_name = pen_name
def OnPaint(self, event):
width, height = self.GetClientSize()
dc = wx.AutoBufferedPaintDC(self)
dc.SetBackground(wx.WHITE_BRUSH)
dc.Clear()
font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
font.MakeSmaller()
dc.SetFont(font)
w, labelHeight = dc.GetTextExtent('Wy')
name = self.pen_name
if "STIPPLE" in name:
bmp = images.Smiles.GetBitmap()
penWidth = 8 #bmp.GetHeight()
pen = wx.Pen(wx.BLUE, penWidth, eval(name))
pen.SetStipple(bmp)
else:
penWidth = 3
if 'HATCH' in name:
penWidth = 8
pen = wx.Pen(wx.BLUE, penWidth, eval(name))
if "USER" in name:
# dash values represent units on, off, on. off...
pen.SetDashes([2, 5, 2, 2])
name += " ([2, 5, 2, 2])"
dc.SetTextForeground(wx.BLACK)
dc.DrawText(name, 1, 1)
dc.SetPen(pen)
y = labelHeight + (height - labelHeight)/2
dc.DrawLine(5, y, width-5, y)
class BrushPanel(BasePanel):
def __init__(self, parent, brush_name):
BasePanel.__init__(self, parent)
self.brush_name = brush_name
def OnPaint(self, event):
width, height = self.GetClientSize()
dc = wx.AutoBufferedPaintDC(self)
dc.SetBackground(wx.WHITE_BRUSH)
dc.Clear()
font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
font.MakeSmaller()
dc.SetFont(font)
w, labelHeight = dc.GetTextExtent('Wy')
dc.SetPen(wx.TRANSPARENT_PEN)
name = self.brush_name
if "STIPPLE" in name:
bmp = images.Smiles.GetBitmap()
bmp.SetMask(None)
brush = wx.BrushFromBitmap(bmp)
else:
brush = wx.Brush(wx.BLUE, eval(name))
dc.SetTextForeground(wx.BLACK)
dc.DrawText(name, 1, 1)
dc.SetBrush(brush)
dc.DrawRectangle(5, labelHeight+2, width-10, height-labelHeight-5-2)
class TestPanel(wx.Panel):
def __init__(self, *args, **kw):
wx.Panel.__init__(self, *args, **kw)
font = wx.SystemSettings.GetFont(wx.SYS_DEFAULT_GUI_FONT)
font.SetWeight(wx.BOLD)
mainSizer = wx.BoxSizer(wx.VERTICAL)
label1 = wx.StaticText(self, -1, "Pen Styles:")
label1.SetFont(font)
mainSizer.Add(label1, 0, wx.EXPAND|wx.ALL, 10)
gs1 = wx.GridSizer(4, 4, 3, 3) # rows, cols, vgap, hgap
for pen_name in pen_styles:
small = PenPanel(self, pen_name)
gs1.Add(small, 0, wx.EXPAND)
mainSizer.Add(gs1, 1, wx.EXPAND|wx.LEFT|wx.RIGHT|wx.BOTTOM, 10)
label2 = wx.StaticText(self, -1, "Brush Styles:")
label2.SetFont(font)
mainSizer.Add(label2, 0, wx.EXPAND|wx.ALL, 10)
gs2 = wx.GridSizer(3, 3, 3, 3) # rows, cols, vgap, hgap
for brush_name in brush_styles:
small = BrushPanel(self, brush_name)
gs2.Add(small, 0, wx.EXPAND)
mainSizer.Add(gs2, 1, wx.EXPAND|wx.LEFT|wx.RIGHT|wx.BOTTOM, 10)
self.SetSizer(mainSizer)
#----------------------------------------------------------------------
def runTest(frame, nb, log):
win = TestPanel(nb)
return win
#----------------------------------------------------------------------
overview = """<html><body>
<h2><center>Pen and Brush Styles</center></h2>
This sample shows an e3xample of drawing with each of the available
wx.Pen and wx.Brush styles.
</body></html>
"""
if __name__ == '__main__':
import sys,os
import run
run.main(['', os.path.basename(sys.argv[0])] + sys.argv[1:])
| [
"[email protected]"
]
| |
1e17f23fd4fc3919a1165407f95e07c0435195be | abad82a1f487c5ff2fb6a84059a665aa178275cb | /Codewars/8kyu/freudian-translator/Python/test.py | ccece49a500303752883fdcb55bc12ed42a76600 | [
"MIT"
]
| permissive | RevansChen/online-judge | 8ae55f136739a54f9c9640a967ec931425379507 | ad1b07fee7bd3c49418becccda904e17505f3018 | refs/heads/master | 2021-01-19T23:02:58.273081 | 2019-07-05T09:42:40 | 2019-07-05T09:42:40 | 88,911,035 | 9 | 0 | null | null | null | null | UTF-8 | Python | false | false | 384 | py | # Python - 2.7.6
Test.describe('Basic tests')
Test.assert_equals(to_freud('test'), 'sex')
Test.assert_equals(to_freud('sexy sex'), 'sex sex')
Test.assert_equals(to_freud('This is a test'), 'sex sex sex sex')
Test.assert_equals(to_freud('This is a longer test'), 'sex sex sex sex sex')
Test.assert_equals(to_freud("You're becoming a true freudian expert"), 'sex sex sex sex sex sex')
| [
"[email protected]"
]
| |
774ac47a0f6c5172d745e86ec2e211a1d8970ad4 | c97d3c8848e4f03edb6c64b6abff530a6e74d616 | /apps/__init__.py | 9dc47c4de6093e3511f7fe4e63949ac44c74b162 | [
"Apache-2.0"
]
| permissive | simhaonline/Django_web | eeb80d8f32a460258fceb30ecececd7410949f72 | f7df1a7b101d41835a334b78cddf3570968799e4 | refs/heads/master | 2023-04-24T23:33:51.535515 | 2021-04-02T15:20:29 | 2021-04-02T15:20:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 231 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# __author__ : stray_camel
# __description__ :
# __REFERENCES__ :
# __date__: 2020/05/25 10
from .constants import system_name, description
__all__ = [
'description', 'system_name',
]
| [
"[email protected]"
]
| |
cfb20ae27607364005a23d811d3836639a73b19b | afbcee5187c88b52b416fa742baa825c14cd9d7c | /CarelinkUploadDownload/CheckForUSB.py | 05a26260d92f1453d9ac28d1a045f75d741e33a6 | []
| no_license | brendlin/BGSuggest | 7fae8665e8c2ea9d60980f3f3e5d57be406dd290 | c2ad6019e323d22358f5a1af35f6683a96a6366d | refs/heads/master | 2021-01-17T07:08:12.729631 | 2019-07-29T17:47:35 | 2019-07-29T17:47:35 | 15,495,317 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,734 | py | import sys
import usb.core
# Put the following lines in your bash_profile:
# export PYTHONPATH=$PYTHONPATH:$HOME/pyusb-1.0.0rc1
# export DYLD_LIBRARY_PATH=$DYLD_LIBRARY_PATH:$HOME/libusb/lib
def CheckForUSB() :
    """Scan connected USB devices for a Bayer HealthCare (Carelink) stick.

    Returns True if any attached device reports a manufacturer string
    containing 'Bayer HealthCare LLC', else False.  (Python 2 code.)
    """
    found = False
    var_usb_device = usb.core.find(find_all = True)
    for var_dev in var_usb_device:
        # Re-resolve the device by vendor/product id to get a queryable handle.
        var_usb = usb.core.find(idVendor=var_dev.idVendor, idProduct=var_dev.idProduct)
        # Skip devices exposing no language ids (string descriptors unreadable).
        # NOTE(review): getattr without a default raises AttributeError if the
        # attribute is absent entirely -- assumes pyusb always defines it.
        if not getattr(var_usb,'langids') :
            continue
        var_manu = usb.util.get_string(var_usb,var_usb.iManufacturer,langid=getattr(var_usb,'langids')[0])
        if 'Bayer HealthCare LLC' not in var_manu :
            continue
        found = True
        # The debug dump below is intentionally disabled: this unconditional
        # 'continue' makes everything after it unreachable.
        if True :
            continue
        var_product = usb.util.get_string(var_usb,var_dev.iProduct ,langid=getattr(var_usb,'langids')[0])
        var_serial = usb.util.get_string(var_usb,var_dev.iSerialNumber,langid=getattr(var_usb,'langids')[0])
        var_drv = var_usb.is_kernel_driver_active(0)
        var_cfg = var_usb.get_active_configuration()
        var_int = var_cfg[(0,0)].bInterfaceNumber
        print "iManufacturer: ", var_dev.iManufacturer, hex(var_dev.iManufacturer)
        print "IdVendor: ", var_dev.idVendor, hex(var_dev.idVendor)
        print "IdProduct: ", var_dev.idProduct, hex(var_dev.idProduct)
        print "Manufacturer: ", var_manu
        print "Product: ", var_product
        print "Serial: ", var_serial
        print "Interface #: ", var_int
        print "Kernel Driver: ", var_drv
        # Walk every configuration / interface / endpoint of the device.
        for var_config in var_usb:
            for var_i in var_config:
                for var_e in var_i:
                    print " - Endpoint Address: ", var_e.bEndpointAddress
    return found
| [
"[email protected]"
]
| |
e184433261654f1e09efb557b3037e57f2b7a13e | dca232d51f508edbb37e85f6744e22fb1c9a5a20 | /lifesaver/bot/exts/health.py | 4f04ccc66e0aa4c4da0b593ea6ec31633e0aaa48 | [
"MIT"
]
| permissive | Gorialis/lifesaver | 835d0fda04b7d8a436f37184b6419a7ab46fe885 | 3a91c5e5ec60dce0c076d83d984c3a270113e484 | refs/heads/master | 2021-08-31T10:13:25.219124 | 2017-12-21T01:53:22 | 2017-12-21T01:53:22 | 114,949,865 | 0 | 0 | null | 2017-12-21T01:52:07 | 2017-12-21T01:52:06 | null | UTF-8 | Python | false | false | 335 | py | from discord.ext.commands import command
from lifesaver.bot import Cog
# Cog exposing basic bot health-check commands.
class Health(Cog):
    @command()
    async def ping(self, ctx):
        """Pings the bot."""
        # Scale the bot's latency value by 1000 (to milliseconds) and
        # round to two decimals for display.
        ping_time = round(ctx.bot.latency * 1000, 2)
        await ctx.send(f'Pong! Heartbeat latency: `{ping_time}ms`')
def setup(bot):
    """Extension entry point: register the Health cog on *bot*."""
    bot.add_cog(Health(bot))
| [
"[email protected]"
]
| |
b2ab0e8ccf3f30ede7971f81298d6da053b380ab | f058cd1ec57b2e24430605883387b1c34391a2e3 | /blender_tests/main.py | 221a14485d090e140a70b215f4b2a1cc33c25b3a | []
| no_license | Danny-Dasilva/Blender_Mediapipe | 9a2966f38e3e6a9aea503eed1bdcc0e4e2ebc502 | 80cbd45e721bc12759d26c317f3a57b6176e1af5 | refs/heads/main | 2023-04-21T09:49:47.200918 | 2021-05-15T01:03:40 | 2021-05-15T01:03:40 | 365,960,178 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 240,191 | py |
import json
import os
import bpy
from bpy import context
from bpy_extras.io_utils import ImportHelper
from bpy.types import Operator
from bpy.props import StringProperty, BoolProperty, EnumProperty
import math
import bpy
import math
import bpy
class Test_PT_Panel(bpy.types.Panel):
    """N-panel tab holding the real-time MediaPipe mocap import controls."""

    bl_idname = "MOCAP_IMPORT_PT_Panel_ALPHA"
    bl_label = "MOCAP PE Import Data ALPHA"
    bl_category = "MOCAP_ALPHA"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"

    def draw(self, context):
        """Lay out recording toggles, skeleton prep/reload and socket options."""
        layout = self.layout
        props = context.scene.sk_value_prop

        # Real-time recording controls.
        layout.row().prop(props, "sk_record_bool", text="Record Realtime")
        layout.row().prop(props, "sk_record_frame_start", text="Frame to start Recording")
        layout.row().operator('mocap.mediapipe_prepare_sk_rt', text="Prepare MP Skeleton for RT")

        # Skeleton scale and reload.
        layout.row().separator()
        layout.prop(props, "sk_value", text="SK Mult")
        layout.operator('mocap.import_mediapipe_reload', text="Reload MP Skeleton for RT")

        # Launch the live capture operator.
        layout.row().separator()
        layout.row().operator('mocap.mediapipe_pose_rt', text="MediaPipe Real Time")

        # Socket tuning.
        layout.row().separator()
        layout.row().prop(props, "sk_socket_buffer", text="Buffer")
        layout.row().prop(props, "sk_refresh_rate", text="Refresh Rate")
class Modify_PT_Panel(bpy.types.Panel):
    """N-panel tab reserved for data-modification tools (mostly disabled)."""

    bl_idname = "MODIFY_PT_Panel_ALPHA"
    bl_label = "Modify Data ALPHA"
    bl_category = "MOCAP_ALPHA"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"

    def draw(self, context):
        """Draw the (currently placeholder) axis-conversion section."""
        layout = self.layout
        # Fetched for parity with the other panels; not used while the
        # conversion widgets are disabled upstream.
        props = context.scene.sk_value_prop
        layout.label(text=" Convert axis")
        # Empty row kept where the From/To axis selectors used to sit.
        layout.row()
class Debug_PT_Panel(bpy.types.Panel):
    """N-panel tab exposing per-bone size multipliers for skeleton debugging."""

    bl_idname = "Debug_PT_Panel_ALPHA"
    bl_label = "Debug Panel"
    bl_category = "MOCAP_ALPHA"
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"

    def draw(self, context):
        """Lay out multiplier sliders grouped by body section."""
        layout = self.layout
        props = context.scene.sk_value_prop

        layout.row().label(text='Debug skeleton size')

        # Torso / head.
        layout.row().label(text='Main Structure')
        pair = layout.row()
        pair.prop(props, "sk_spine_mulitplier", text="Spine: ")
        pair.prop(props, "sk_neck_mulitplier", text="Neck")
        layout.row().prop(props, "sk_head_mulitplier", text="Head")
        layout.row().separator()

        # Arms.
        layout.row().label(text='Arms')
        pair = layout.row()
        pair.prop(props, "sk_forearm_mulitplier", text="Forearm: ")
        pair.prop(props, "sk_arm_mulitplier", text="Arm: ")
        layout.row().separator()

        # Legs.
        layout.row().label(text='Legs')
        pair = layout.row()
        pair.prop(props, "sk_tigh_mulitplier", text="Tigh: ")
        pair.prop(props, "sk_leg_mulitplier", text="Leg: ")
        layout.row().prop(props, "sk_foot_mulitplier", text="Foot: ")
from bpy.props import (#StringProperty,
# BoolProperty,
IntProperty,
FloatProperty,
# FloatVectorProperty,
# EnumProperty,
PointerProperty,
)
from bpy.props import (StringProperty,
BoolProperty,
IntProperty,
FloatProperty,
# FloatVectorProperty,
EnumProperty,
# PointerProperty,
)
from bpy.types import (Panel,
Operator,
AddonPreferences,
PropertyGroup,
)
class MySettings(PropertyGroup):
    # Scene-level property bag read by the MOCAP panels/operators via
    # ``context.scene.sk_value_prop``.
    # NOTE(review): several identifiers carry misspellings ("mulitplier",
    # "tigh", "Roation"); they are referenced by these exact names from the
    # UI panels, so do not rename without updating every ``prop(...)`` call.

    # Global bone-proportion multiplier, shown as "SK Mult" in the UI.
    sk_value: FloatProperty(name="multiplier", description="Multiplier for base proportion of the bones", default=0.9)

    # Manual rotation-compensation offsets, one per axis (degrees — the
    # consumer converts them via math.radians).
    sk_rot_compens_x: IntProperty(name="Rotation_compensate_x", description="Value to compensate Roation X", default=0)
    sk_rot_compens_y: IntProperty(name="Rotation_compensate_y", description="Value to compensate Roation Y", default=0)
    sk_rot_compens_z: IntProperty(name="Rotation_compensate_z", description="Value to compensate Roation Z", default=0)

    # Original vs. current root-bone rotation, kept for display/debugging.
    sk_rot_original: StringProperty(name="rotation", description="rotation")
    sk_root_rot_x: FloatProperty(name="original rotation x", description="original rotation of root bone x")
    sk_root_rot_y: FloatProperty(name="original rotation y", description="original rotation of root bone y")
    sk_root_rot_z: FloatProperty(name="original rotation z", description="original rotation of root bone z")
    sk_root_actual_rot_x: FloatProperty(name="Actual rotation x", description="Actual rotation of root bone x")
    sk_root_actual_rot_y: FloatProperty(name="Actual rotation y", description="Actual rotation of root bone y")
    sk_root_actual_rot_z: FloatProperty(name="Actual rotation z", description="Actual rotation of root bone z")

    # Toggle for importing raw data without post-processing.
    sk_raw_bool: BoolProperty(name='raw_bool', default=False)

    # Axis-conversion selectors for the (currently disabled) Modify panel.
    sk_from_axis: EnumProperty(
        name= "From Axis",
        description="From specific axis of animation",
        items= [('from_x', "x","Choose origin x axis"),
                ('from_y', "y","Choose origin y axis"),
                ('from_z', "z","Choose origin z axis")
               ],
        default = 'from_y'
        )
    sk_to_axis: EnumProperty(
        name= "To Axis",
        description="To specific axis of animation",
        items= [('to_x', "x","Choose destination x axis"),
                ('to_y', "y","Choose destination y axis"),
                ('to_z', "z","Choose destination z axis")
               ],
        default = 'to_z'
        )

    # Per-bone size multipliers used by the Debug panel sliders.
    sk_spine_mulitplier: FloatProperty(name="Spine size multiplier", description="Ajust the Spine size", default=1)
    sk_neck_mulitplier: FloatProperty(name="Neck size multiplier", description="Ajust the Neck size", default=1)
    sk_head_mulitplier: FloatProperty(name="Head size multiplier", description="Ajust the Head size", default=1)
    sk_forearm_mulitplier: FloatProperty(name="Forearm size multiplier", description="Ajust the Forearm size", default=1)
    sk_arm_mulitplier: FloatProperty(name="Arm size multiplier", description="Ajust the Arm size", default=1)
    sk_tigh_mulitplier: FloatProperty(name="Thigh size multiplier", description="Ajust the Thigh size", default=1)
    sk_leg_mulitplier: FloatProperty(name="Leg size multiplier", description="Ajust the Leg size", default=1)
    sk_foot_mulitplier: FloatProperty(name="Foot size multiplier", description="Ajust the Foot size", default=1)

    # Realtime socket streaming settings.
    sk_socket_buffer: IntProperty(name="Socket buffer", description="Socket buffer value", default=1024)
    sk_refresh_rate: FloatProperty(name="Refresh_rate", description="Value of refresh rate", default=0.1)

    # Realtime recording toggle and start frame.
    sk_record_bool: BoolProperty(name='record_bool', default=False)
    sk_record_frame_start: IntProperty(name='Frame to start recording',description="Frame to start recording", default=1)
class helper_functions(object):
    def anim_to_origin():
        """Move the root-bone animation back to the world origin.

        NOTE: defined without ``self`` and used through the class, i.e. an
        (undecorated) static helper on ``helper_functions``.

        Steps:
          1. select the last armature in the scene and make it active,
          2. subtract the first frame's root location/rotation from every
             frame so the clip starts at the origin with no initial tilt,
          3. if the initial X tilt is near +/-90 or +/-270 degrees, swap the
             Y and Z components of both location and rotation on every frame,
          4. lift the clip so the left foot's tail sits at z = 0.

        Returns the original root rotation (x, y, z) in degrees.
        """
        f_start = bpy.context.scene.frame_start
        f_end = bpy.context.scene.frame_end
        bpy.context.scene.frame_current=f_start
        #==========================================
        # Select the armature and make it the active object
        #==========================================
        bpy.ops.object.mode_set(mode='OBJECT')
        bpy.ops.object.select_all(action='DESELECT')
        obs = []
        for ob in bpy.context.scene.objects:
            if ob.type == 'ARMATURE':
                obs.append(ob)
        # Use the last armature found (presumably the most recently imported).
        armature = obs[len(obs)-1].name
        obs[len(obs)-1].select_set(True)
        view_layer = bpy.context.view_layer
        Armature_obj = obs[len(obs)-1]
        view_layer.objects.active = Armature_obj
        #############################################################################
        ## To move the animation to the center, subtract the first frame's
        ## location and rotation from every frame.
        #########
        x_dif = bpy.context.object.pose.bones["Root"].rotation_euler[0] * -1
        y_dif = bpy.context.object.pose.bones["Root"].rotation_euler[1] * -1
        z_dif = bpy.context.object.pose.bones["Root"].rotation_euler[2] * -1
        x_loc_dif = bpy.context.object.pose.bones["Root"].location[0] * -1
        y_loc_dif = bpy.context.object.pose.bones["Root"].location[1] * -1
        z_loc_dif = bpy.context.object.pose.bones["Root"].location[2] * -1
        bpy.ops.object.mode_set(mode='EDIT')
        # Rest-pose height of the left foot tail; used below to drop the
        # feet onto z = 0.
        z_high_to_add = bpy.context.object.data.edit_bones["Foot_L"].tail.z
        bpy.ops.object.mode_set(mode='POSE')
        range(f_start,f_end+1)  # no-op expression (leftover, has no effect)
        for f in range(f_start,f_end+1):
            print('frame: ',f)
            bpy.context.scene.frame_current = f
            bpy.context.view_layer.update()
            # Cancel the initial rotation on every frame and re-key it.
            bpy.context.object.pose.bones["Root"].rotation_euler[0] = bpy.context.object.pose.bones["Root"].rotation_euler[0] + x_dif
            bpy.context.object.pose.bones["Root"].rotation_euler[1] = bpy.context.object.pose.bones["Root"].rotation_euler[1] + y_dif
            bpy.context.object.pose.bones["Root"].rotation_euler[2] = bpy.context.object.pose.bones["Root"].rotation_euler[2] + z_dif
            bpy.context.object.pose.bones["Root"].keyframe_insert(data_path='rotation_euler',frame=f)
            #################
            ## Cancel the initial location likewise.
            ##
            bpy.context.object.pose.bones["Root"].location[0] = bpy.context.object.pose.bones["Root"].location[0] + x_loc_dif
            bpy.context.object.pose.bones["Root"].location[1] = bpy.context.object.pose.bones["Root"].location[1] + y_loc_dif
            bpy.context.object.pose.bones["Root"].location[2] = bpy.context.object.pose.bones["Root"].location[2] + z_loc_dif
            bpy.context.object.pose.bones["Root"].keyframe_insert(data_path='location',frame=f)
        # Check whether the axes need transposing: initial X tilt within 45
        # degrees of 90 or 270 (either sign).
        if abs(abs(math.degrees(x_dif))-90) < 45 or abs(abs(math.degrees(x_dif))-270) < 45:
            #############################
            # Re-orient: swap Y and Z on every frame.
            for f in range(f_start,f_end+1):
                print('frame: ',f)
                bpy.context.scene.frame_current = f
                bpy.context.view_layer.update()
                # Swap location Y <-> Z.
                bone_root_loc_x = bpy.context.object.pose.bones["Root"].location[0]
                bone_root_loc_y = bpy.context.object.pose.bones["Root"].location[1]
                bone_root_loc_z = bpy.context.object.pose.bones["Root"].location[2]
                bpy.context.object.pose.bones["Root"].location[2] = bone_root_loc_y
                bpy.context.object.pose.bones["Root"].location[1] = bone_root_loc_z
                bpy.context.object.pose.bones["Root"].keyframe_insert(data_path='location',frame=f)
                #######################
                ## Swap rotation Y <-> Z.
                ##
                bone_root_rot_x = bpy.context.object.pose.bones["Root"].rotation_euler[0]
                bone_root_rot_y = bpy.context.object.pose.bones["Root"].rotation_euler[1]
                bone_root_rot_z = bpy.context.object.pose.bones["Root"].rotation_euler[2]
                bpy.context.object.pose.bones["Root"].rotation_euler[2] = bone_root_rot_y
                bpy.context.object.pose.bones["Root"].rotation_euler[1] = bone_root_rot_z
                bpy.context.object.pose.bones["Root"].keyframe_insert(data_path='rotation_euler',frame=f)
        ###############################
        ## Lift the clip so the foot lands on z = 0.  NOTE(review): the
        ## offset is applied to location[1] — looks intentional after the
        ## Y/Z swap above, but only holds when that branch ran; confirm.
        for f in range(f_start,f_end+1):
            bpy.context.scene.frame_current = f
            bpy.context.view_layer.update()
            bpy.context.object.pose.bones["Root"].location[1] = bpy.context.object.pose.bones["Root"].location[1] + abs(z_high_to_add)
            bpy.context.object.pose.bones["Root"].keyframe_insert(data_path='location',frame=f)
        # Tuple of labels and values, printed for debugging.
        rot_original = 'x: ', math.degrees(x_dif), ' y: ', math.degrees(y_dif), ' z: ', math.degrees(z_dif)
        print(rot_original)
        bpy.ops.object.mode_set(mode='OBJECT')
        return (math.degrees(x_dif),math.degrees(y_dif),math.degrees(z_dif))
def compensate_rot(x, y, z):
    """Add a fixed rotation offset to the Root bone on every frame.

    x, y, z: compensation angles in degrees, added to the Root pose bone's
    euler rotation channels and keyframed over the scene frame range.
    Returns True when done.
    """
    first_frame = bpy.context.scene.frame_start
    last_frame = bpy.context.scene.frame_end
    # convert the per-channel degree offsets to radians once, up front
    offsets = (math.radians(x), math.radians(y), math.radians(z))
    root = bpy.context.object.pose.bones["Root"]
    for frame in range(first_frame, last_frame + 1):
        print('frame: ', frame)
        bpy.context.scene.frame_current = frame
        bpy.context.view_layer.update()
        print('rot orig x: ', root.rotation_euler[0])
        print('rot x: ', root.rotation_euler[0] + offsets[0])
        for channel in range(3):
            root.rotation_euler[channel] = root.rotation_euler[channel] + offsets[channel]
        root.keyframe_insert(data_path='rotation_euler', frame=frame)
    return True
def rotate_orientation(from_axis, to_axis):
    """Swap the Root bone's animation between two world axes.

    from_axis, to_axis: axis names 'x', 'y' or 'z'. For every frame in the
    scene range, the Root bone's location and euler-rotation components on
    the two axes are exchanged, and a -90 degree correction is keyframed on
    the remaining rotation axis. A no-op when the axes are equal or not
    recognized (same observable behavior as before, where no rotation axis
    could be derived).

    Bug fixed: the original guard compared the axis *name* (str) against the
    axis *index* (int) — `from_axis != rot_ax` — which was always True; the
    intent was to compare indices. Since the rotation axis is always the
    third, distinct axis, the fixed comparison preserves behavior.

    Returns True.
    """
    f_start = bpy.context.scene.frame_start
    f_end = bpy.context.scene.frame_end
    axis_index = {'x': 0, 'y': 1, 'z': 2}
    if from_axis == to_axis or from_axis not in axis_index or to_axis not in axis_index:
        # nothing to convert: same axis, or an unknown axis name
        return True
    from_ax = axis_index[from_axis]
    to_ax = axis_index[to_axis]
    # the axis receiving the -90 degree correction is the one NOT being swapped
    rot_ax = 3 - from_ax - to_ax
    if from_ax != rot_ax:  # always true by construction; kept from the original intent
        for f in range(f_start, f_end + 1):
            print('frame: ', f)
            bpy.context.scene.frame_current = f
            bpy.context.view_layer.update()
            root = bpy.context.object.pose.bones["Root"]
            ##################
            # swap the location components of the two axes
            loc = [root.location[0], root.location[1], root.location[2]]
            root.location[from_ax] = loc[to_ax]
            root.location[to_ax] = loc[from_ax]
            root.keyframe_insert(data_path='location', frame=f)
            #######################
            # swap the rotation components, then apply the -90 degree correction
            rot = [root.rotation_euler[0], root.rotation_euler[1], root.rotation_euler[2]]
            root.rotation_euler[from_ax] = rot[to_ax]
            root.rotation_euler[to_ax] = rot[from_ax]
            root.rotation_euler[rot_ax] = root.rotation_euler[rot_ax] + math.radians(-90)
            root.keyframe_insert(data_path='rotation_euler', frame=f)
    return True
def reset_loc():
    """Shift the Root bone's keyframed locations so the animation starts where the bones currently are.

    The Root bone's present offset is negated and added to every frame's
    location, then re-keyframed. Returns True.
    """
    root = bpy.context.object.pose.bones["Root"]
    # negating the current offset means adding it cancels the starting displacement
    shift = [-root.location[0], -root.location[1], -root.location[2]]
    for frame in range(bpy.context.scene.frame_start, bpy.context.scene.frame_end + 1):
        print('frame: ', frame)
        bpy.context.scene.frame_current = frame
        bpy.context.view_layer.update()
        #################
        ## move every frame's location toward the origin by the same shift
        ##
        for axis in range(3):
            root.location[axis] = root.location[axis] + shift[axis]
        root.keyframe_insert(data_path='location', frame=frame)
    return True
def reset_rot():
    """Shift the Root bone's keyframed euler rotation so the current pose offset becomes zero.

    The Root bone's present rotation is negated and added to every frame's
    euler channels, then re-keyframed. Returns True.
    """
    root = bpy.context.object.pose.bones["Root"]
    # negate the current rotation so adding it cancels the starting offset
    delta = [-root.rotation_euler[0], -root.rotation_euler[1], -root.rotation_euler[2]]
    for frame in range(bpy.context.scene.frame_start, bpy.context.scene.frame_end + 1):
        print('frame: ', frame)
        bpy.context.scene.frame_current = frame
        bpy.context.view_layer.update()
        for axis in range(3):
            root.rotation_euler[axis] = root.rotation_euler[axis] + delta[axis]
        root.keyframe_insert(data_path='rotation_euler', frame=frame)
    return True
def foot_high():
    """Raise the Root bone on every frame by the left foot's tail height.

    Reads Foot_L's tail z in edit mode, then in pose mode adds its absolute
    value to the Root bone's Y location on each frame and keyframes it.
    Returns True.
    """
    first_frame = bpy.context.scene.frame_start
    last_frame = bpy.context.scene.frame_end
    # the foot height is only reachable through the edit bones
    bpy.ops.object.mode_set(mode='EDIT')
    lift = abs(bpy.context.object.data.edit_bones["Foot_L"].tail.z)
    bpy.ops.object.mode_set(mode='POSE')
    for frame in range(first_frame, last_frame + 1):
        bpy.context.scene.frame_current = frame
        bpy.context.view_layer.update()
        root = bpy.context.object.pose.bones["Root"]
        root.location[1] = root.location[1] + lift
        root.keyframe_insert(data_path='location', frame=frame)
    bpy.ops.object.mode_set(mode='OBJECT')
    return True
def compensate_rot(x,y,z):
    """Add a fixed rotation offset (degrees) to the Root bone on every frame and keyframe it.

    NOTE(review): this is a byte-identical duplicate of `compensate_rot`
    defined earlier in this file; being the later definition, it is the one
    actually bound to the name at import time. Consider removing one copy.
    Returns True.
    """
    f_start = bpy.context.scene.frame_start
    f_end = bpy.context.scene.frame_end
    # keep the degree inputs under separate names; converted per-use below
    x_grad_compensate = x
    y_grad_compensate = y
    z_grad_compensate = z
    for f in range(f_start,f_end+1):
        print('frame: ',f)
        bpy.context.scene.frame_current = f
        bpy.context.view_layer.update()
        print('rot orig x: ',bpy.context.object.pose.bones["Root"].rotation_euler[0])
        print('rot x: ',bpy.context.object.pose.bones["Root"].rotation_euler[0] +math.radians(x_grad_compensate))
        bpy.context.object.pose.bones["Root"].rotation_euler[0] = bpy.context.object.pose.bones["Root"].rotation_euler[0] +math.radians(x_grad_compensate)
        bpy.context.object.pose.bones["Root"].rotation_euler[1] = bpy.context.object.pose.bones["Root"].rotation_euler[1] +math.radians(y_grad_compensate)
        bpy.context.object.pose.bones["Root"].rotation_euler[2] = bpy.context.object.pose.bones["Root"].rotation_euler[2] +math.radians(z_grad_compensate)
        bpy.context.object.pose.bones["Root"].keyframe_insert(data_path='rotation_euler',frame=f)
    return True
def get_rotations():
    """Return the Root bone's euler rotation (x, y, z) in radians at frame 1."""
    bpy.context.scene.frame_current = 1
    bpy.context.view_layer.update()
    root = bpy.context.object.pose.bones["Root"]
    return (root.rotation_euler[0], root.rotation_euler[1], root.rotation_euler[2])
# types = {'VIEW_3D', 'TIMELINE', 'GRAPH_EDITOR', 'DOPESHEET_EDITOR', 'NLA_EDITOR', 'IMAGE_EDITOR', 'SEQUENCE_EDITOR', 'CLIP_EDITOR', 'TEXT_EDITOR', 'NODE_EDITOR', 'LOGIC_EDITOR', 'PROPERTIES', 'OUTLINER', 'USER_PREFERENCES', 'INFO', 'FILE_BROWSER', 'CONSOLE'}
def smooth_curves(o):
    """Run the Graph Editor's Smooth operator on armature `o`'s first bone.

    Selects only the first bone, temporarily retypes the current area to
    GRAPH_EDITOR so the operator has a valid context, smooths, restores the
    area type and deselects everything. Returns True.
    """
    previous_area = bpy.context.area.type
    view_layer = bpy.context.view_layer
    # select only the first (relevant) bone so smoothing targets its curves
    for bone in o.data.bones:
        bone.select = False
    o.data.bones[0].select = True
    view_layer.update()
    # bpy.ops.graph.smooth only runs inside a Graph Editor area
    bpy.context.area.type = "GRAPH_EDITOR"
    view_layer.update()
    # smooth the curves of the selected bone
    bpy.ops.graph.smooth()
    # put the area back the way we found it
    bpy.context.area.type = previous_area
    # leave with nothing selected
    for bone in o.data.bones:
        bone.select = False
    view_layer.update()
    return True
class skeleton_import(object):
    """Helpers that build the mocap armature: marker "dot" objects, bones, sizing and constraints.

    NOTE(review): none of these methods take `self`/`cls`; they are written as
    plain functions namespaced inside the class — confirm they are invoked via
    the class object, not on an instance. `distance` and `size_bone` are each
    defined twice; the later (identical) definitions shadow the earlier ones.
    """
    def middle_point(p1,p2,p_middle):
        """Place object `p_middle` at the world-space midpoint of objects `p1` and `p2`."""
        bpy.ops.object.select_all(action='DESELECT')
        bpy.data.objects[p1].select_set(True)
        bpy.data.objects[p2].select_set(True)
        bpy.context.view_layer.objects.active = bpy.data.objects[p2]
        obs = bpy.context.selected_objects
        n = len(obs)
        # print('n: ',n)
        assert(n)
        #scene.cursor.location = sum([o.matrix_world.translation for o in obs], Vector()) / n
        bpy.data.objects[p_middle].location = sum([o.matrix_world.translation for o in obs], Vector()) / n
    def create_dots(name, amount):
        """(Re)create `amount` single-vertex "dot" objects inside a collection called `name`."""
        # remove the collection if it already exists
        if bpy.data.collections.find(name) >= 0:
            collection = bpy.data.collections.get(name)
            #
            for obj in collection.objects:
                bpy.data.objects.remove(obj, do_unlink=True)
            bpy.data.collections.remove(collection)
        # create the points in a collection named after `name`
        #=====================================================
        collection = bpy.data.collections.new(name)
        bpy.context.scene.collection.children.link(collection)
        #
        layer_collection = bpy.context.view_layer.layer_collection.children[collection.name]
        bpy.context.view_layer.active_layer_collection = layer_collection
        #
        for point in range(amount):
            # a plane merged to its center becomes a single-vertex object
            bpy.ops.mesh.primitive_plane_add(enter_editmode=True, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1))
            bpy.ops.mesh.merge(type='CENTER')
            bpy.ops.object.editmode_toggle()
            # names come out as "<name>.000", "<name>.001", ...
            bpy.context.active_object.name = name+'.'+str(1000+point)[1:]
        #=====================================================
    def remove_dots(name):
        """Delete every object in collection `name`, then the collection itself."""
        # delete the previously created points collection
        collection = bpy.data.collections.get(name)
        #
        for obj in collection.objects:
            bpy.data.objects.remove(obj, do_unlink=True)
        bpy.data.collections.remove(collection)
    def distance(point1, point2) -> float:
        """Euclidean distance between the `.location` of two Blender objects."""
        # return math.sqrt((point2[0] - point1[0]) ** 2 + (point2[1] - point1[1]) ** 2 + (point2[2] - point1[2]) ** 2)
        return math.sqrt((point2.location[0] - point1.location[0]) ** 2 + (point2.location[1] - point1.location[1]) ** 2 + (point2.location[2] - point1.location[2]) ** 2)
    def size_bone(point_name1, point_name2, bone):
        """Set edit-bone `bone`'s length to the distance between the two named objects."""
        # NOTE(review): the bare-name `distance(...)` call below cannot resolve to
        # the sibling class attribute from inside a method; it presumably relies
        # on a module-level `distance` elsewhere in this file — confirm.
        p1 = bpy.data.objects[point_name1]
        p2 = bpy.data.objects[point_name2]
        # edit bones are only reachable in EDIT mode; toggle in and out if needed
        if bpy.context.active_object.mode == 'EDIT':
            bpy.context.object.data.edit_bones[bone].length= distance(p1,p2)
        else:
            bpy.ops.object.editmode_toggle()
            bpy.context.object.data.edit_bones[bone].length= distance(p1,p2)
            bpy.ops.object.editmode_toggle()
    def create_bones(bones_list):
        """Build the skeleton's bones, rename them from `bones_list` and parent them.

        `bones_list` is a sequence of (default_name, new_name) pairs matching
        the bones created by the operators below.
        """
        #===================================
        #creating bones
        #====================================
        bpy.ops.object.armature_add(enter_editmode=True, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) # creates the armature and its first bone
        #bpy.ops.object.editmode_toggle()
        #bpy.data.armatures['Armature'].edit_bones.active = bpy.context.object.data.edit_bones['Bone']
        obs = []
        for ob in bpy.context.scene.objects:
            if ob.type == 'ARMATURE':
                obs.append(ob)
        #obs
        bpy.ops.armature.select_all(action='DESELECT')
        obs[len(obs)-1].data.edit_bones['Bone'].select_tail=True
        bpy.ops.armature.bone_primitive_add()#Spine
        #Neck
        bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1)})
        #Face
        bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1)})
        bpy.ops.armature.bone_primitive_add()#Arm_L
        #Forearm_L
        bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1)})
        bpy.ops.armature.bone_primitive_add()#Arm_R
        #Forearm_R
        bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1)})
        bpy.ops.armature.bone_primitive_add()#Thigh_L
        #Leg_L
        #Foot_L
        bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1)})
        bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1)})
        bpy.ops.armature.bone_primitive_add()#Thigh_R
        #Leg_R
        #Foot_R
        bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1)})
        bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1)})
        for i in range(len(bones_list)):
            obs[len(obs)-1].data.edit_bones[bones_list[i][0]].name = bones_list[i][1]
        #Hierarchy
        bpy.context.object.data.edit_bones["Spine"].parent = bpy.context.object.data.edit_bones["Root"]
        bpy.context.object.data.edit_bones["Arm_L"].parent = bpy.context.object.data.edit_bones["Spine"]
        bpy.context.object.data.edit_bones["Arm_R"].parent = bpy.context.object.data.edit_bones["Spine"]
        bpy.context.object.data.edit_bones["Thigh_L"].parent = bpy.context.object.data.edit_bones["Root"]
        bpy.context.object.data.edit_bones["Thigh_R"].parent = bpy.context.object.data.edit_bones["Root"]
        bpy.ops.object.editmode_toggle()
    def distance(point1, point2) -> float:
        """Euclidean distance between the `.location` of two Blender objects.

        NOTE(review): duplicate of `distance` above (identical body); this
        later definition is the one bound on the class.
        """
        #return math.sqrt((point2[0] - point1[0]) ** 2 + (point2[1] - point1[1]) ** 2 + (point2[2] - point1[2]) ** 2)
        return math.sqrt((point2.location[0] - point1.location[0]) ** 2 + (point2.location[1] - point1.location[1]) ** 2 + (point2.location[2] - point1.location[2]) ** 2)
    def size_bone(point_name1, point_name2, bone):
        """Set edit-bone `bone`'s length to the distance between the two named objects.

        NOTE(review): duplicate of `size_bone` above (identical body); this
        later definition is the one bound on the class.
        """
        p1 = bpy.data.objects[point_name1]
        p2 = bpy.data.objects[point_name2]
        # edit bones are only reachable in EDIT mode; toggle in and out if needed
        if bpy.context.active_object.mode == 'EDIT':
            bpy.context.object.data.edit_bones[bone].length= distance(p1,p2)
        else:
            bpy.ops.object.editmode_toggle()
            bpy.context.object.data.edit_bones[bone].length= distance(p1,p2)
            bpy.ops.object.editmode_toggle()
    def size_ref_bone(p1,p2,p_final):
        """Return a unit length: a third of the largest world-axis separation between objects `p1` and `p2`.

        `p_final` is currently unused.
        """
        from mathutils import Vector
        import bpy
        ## size of the reference bone (spine)
        bpy.ops.object.select_all(action='DESELECT')
        bpy.data.objects[p1].select_set(True)
        bpy.data.objects[p2].select_set(True)
        # bpy.context.view_layer.objects.active = bpy.data.objects['Point.034']
        bpy.context.view_layer.objects.active = bpy.data.objects[p2]
        obs = bpy.context.selected_objects
        n = len(obs)
        # print('n: ',n)
        assert(n)
        #scene.cursor.location = sum([o.matrix_world.translation for o in obs], Vector()) / n
        #bpy.data.objects[p_middle].location = sum([o.matrix_world.translation for o in obs], Vector()) / n
        x_subtract = abs(obs[0].matrix_world.translation.x - obs[1].matrix_world.translation.x)
        y_subtract = abs(obs[0].matrix_world.translation.y - obs[1].matrix_world.translation.y)
        z_subtract = abs(obs[0].matrix_world.translation.z - obs[1].matrix_world.translation.z)
        max(x_subtract, y_subtract, z_subtract) # largest of the measurements (NOTE(review): result unused)
        unit_def = max(x_subtract, y_subtract, z_subtract)/3
        #end of size of reference bone Spine
        return unit_def
    def size_of_bones(unit, root_size, spine_size, neck_size, face_size, thigh_size, leg_size, foot_size, arm_size, forearm_size):
        """Position and size every edit bone of the newest armature from the given segment lengths.

        Also makes that armature active and switches all its pose bones to
        XYZ euler rotation before editing the bones.
        """
        #==========================================
        #selecting and making the armature Active
        # selecting armature
        #==========================================
        bpy.ops.object.select_all(action='DESELECT')
        #bpy.ops.armature.select_all(action='DESELECT')
        obs = []
        for ob in bpy.context.scene.objects:
            if ob.type == 'ARMATURE':
                obs.append(ob)
        #obs
        armature = obs[len(obs)-1].name
        #bpy.data.objects[armature].select_set(True)
        obs[len(obs)-1].select_set(True)
        view_layer = bpy.context.view_layer
        #Armature_obj = bpy.context.scene.objects[armature]
        Armature_obj = obs[len(obs)-1]
        view_layer.objects.active = Armature_obj
        #converting to euler rotation
        order = 'XYZ'
        context = bpy.context
        rig_object = context.active_object
        for pb in rig_object.pose.bones:
            pb.rotation_mode = order
        bpy.ops.object.editmode_toggle()
        #changing location
        # resetting: zero the x/y of every head (and the listed tails) first
        bpy.context.object.data.edit_bones["Spine"].head.xy=0
        bpy.context.object.data.edit_bones["Neck"].head.xy=0
        bpy.context.object.data.edit_bones["Face"].head.xy=0
        bpy.context.object.data.edit_bones["Arm_L"].head.xy=0
        bpy.context.object.data.edit_bones["Forearm_L"].head.xy=0
        bpy.context.object.data.edit_bones["Arm_R"].head.xy=0
        bpy.context.object.data.edit_bones["Forearm_R"].head.xy=0
        bpy.context.object.data.edit_bones["Thigh_L"].head.xy=0
        bpy.context.object.data.edit_bones["Leg_L"].head.xy=0
        bpy.context.object.data.edit_bones["Foot_L"].head.xy=0
        bpy.context.object.data.edit_bones["Thigh_R"].head.xy=0
        bpy.context.object.data.edit_bones["Leg_R"].head.xy=0
        bpy.context.object.data.edit_bones["Foot_R"].head.xy=0
        #tail
        bpy.context.object.data.edit_bones["Face"].tail.xy=0
        bpy.context.object.data.edit_bones["Neck"].tail.xy=0
        bpy.context.object.data.edit_bones["Forearm_L"].tail.xy=0
        bpy.context.object.data.edit_bones["Forearm_R"].tail.xy=0
        bpy.context.object.data.edit_bones["Foot_L"].tail.xy=0
        bpy.context.object.data.edit_bones["Foot_R"].tail.xy=0
        # then lay the skeleton out from the supplied segment sizes
        bpy.context.object.data.edit_bones["Root"].length = root_size
        bpy.context.object.data.edit_bones["Spine"].head.z = unit/2
        bpy.context.object.data.edit_bones["Spine"].tail.z = spine_size
        bpy.context.object.data.edit_bones["Neck"].tail.z = spine_size + neck_size
        bpy.context.object.data.edit_bones["Neck"].tail.y = neck_size/3
        bpy.context.object.data.edit_bones["Face"].tail.z = spine_size + neck_size
        bpy.context.object.data.edit_bones["Face"].tail.y = face_size*-1
        bpy.context.object.data.edit_bones["Arm_L"].head.z= spine_size
        bpy.context.object.data.edit_bones["Arm_L"].head.x= unit*3/4
        bpy.context.object.data.edit_bones["Forearm_L"].head.z= spine_size
        bpy.context.object.data.edit_bones["Forearm_L"].head.x= unit + arm_size
        bpy.context.object.data.edit_bones["Forearm_L"].tail.z= spine_size
        bpy.context.object.data.edit_bones["Forearm_L"].tail.x= unit + arm_size + forearm_size
        bpy.context.object.data.edit_bones["Arm_R"].head.z= spine_size
        bpy.context.object.data.edit_bones["Arm_R"].head.x= (unit*3/4)*-1
        bpy.context.object.data.edit_bones["Forearm_R"].head.z= spine_size
        bpy.context.object.data.edit_bones["Forearm_R"].head.x= (unit + arm_size) *-1
        bpy.context.object.data.edit_bones["Forearm_R"].tail.z= spine_size
        bpy.context.object.data.edit_bones["Forearm_R"].tail.x= (unit + arm_size + forearm_size) *-1
        bpy.context.object.data.edit_bones["Thigh_L"].head.x= unit*3/4
        bpy.context.object.data.edit_bones["Thigh_L"].head.z= (unit/5)*-1
        bpy.context.object.data.edit_bones["Leg_L"].head.x= unit*3/4
        bpy.context.object.data.edit_bones["Leg_L"].head.z= (unit/5 + thigh_size)*-1
        bpy.context.object.data.edit_bones["Foot_L"].head.x= unit*3/4
        bpy.context.object.data.edit_bones["Foot_L"].head.z= (unit/5 + thigh_size + leg_size)*-1
        bpy.context.object.data.edit_bones["Foot_L"].tail.x= unit*3/4
        bpy.context.object.data.edit_bones["Foot_L"].tail.z= (unit/5 + thigh_size + leg_size + foot_size/2)*-1
        bpy.context.object.data.edit_bones["Foot_L"].tail.y= foot_size/2*-1
        bpy.context.object.data.edit_bones["Thigh_R"].head.x= unit*3/4*-1
        bpy.context.object.data.edit_bones["Thigh_R"].head.z= (unit/5)*-1
        bpy.context.object.data.edit_bones["Leg_R"].head.x= unit*3/4*-1
        bpy.context.object.data.edit_bones["Leg_R"].head.z= (unit/5 + thigh_size)*-1
        bpy.context.object.data.edit_bones["Foot_R"].head.x= unit*3/4*-1
        bpy.context.object.data.edit_bones["Foot_R"].head.z= (unit/5 + thigh_size + leg_size)*-1
        bpy.context.object.data.edit_bones["Foot_R"].tail.x= unit*3/4*-1
        bpy.context.object.data.edit_bones["Foot_R"].tail.z= (unit/5 + thigh_size + leg_size + foot_size/2)*-1
        bpy.context.object.data.edit_bones["Foot_R"].tail.y= foot_size/2*-1
        bpy.ops.object.editmode_toggle()
    def add_constraints(constraints, limit_rotation):
        """Apply constraint specs to pose bones of the newest armature.

        Each item of `constraints` starts with (bone_name, constraint_type, ...).
        COPY_LOCATION/DAMPED_TRACK entries carry a target object name (and, for
        DAMPED_TRACK with 4 items, a track axis). LIMIT_ROTATION entries carry
        owner-space / per-axis limit data and are applied only when
        `limit_rotation` is True.
        """
        obs = []
        for ob in bpy.context.scene.objects:
            if ob.type == 'ARMATURE':
                obs.append(ob)
        #obs
        bpy.ops.object.mode_set(mode='POSE')
        for i in range(len(constraints)):
            print('processar: ',constraints[i])
            if constraints[i][1] == 'COPY_LOCATION' or constraints[i][1] == 'DAMPED_TRACK':
                # print('in 1 j: ',j,' - name: ',constraints[i][0],' constraint: ',constraints[i][1])
                obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[constraints[i][0]].bone
                obs[len(obs)-1].pose.bones[constraints[i][0]].bone.select = True
                # the new constraint is appended last, hence the qtd_constraint-1 index
                bpy.ops.pose.constraint_add(type=constraints[i][1])
                qtd_constraint = len(bpy.context.object.pose.bones[constraints[i][0]].constraints)
                bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].target = bpy.data.objects[constraints[i][2]]
                if constraints[i][1] == 'DAMPED_TRACK' and len(constraints[i])==4:
                    bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].track_axis = constraints[i][3]
            #
            if constraints[i][1] == 'LIMIT_ROTATION' and limit_rotation == True :
                qtd_constraint = len(bpy.context.object.pose.bones[constraints[i][0]].constraints)
                if constraints[i][2] == 'LOCAL':
                    bpy.ops.pose.constraint_add(type=constraints[i][1])
                    qtd_constraint = len(bpy.context.object.pose.bones[constraints[i][0]].constraints)
                    bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].owner_space = constraints[i][2]
                if constraints[i][2] == 'X':
                    if constraints[i][3] == True:
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].use_limit_x = constraints[i][3]
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].min_x = constraints[i][4]
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].max_x = constraints[i][5]
                if constraints[i][2] == 'Y':
                    if constraints[i][3] == True:
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].use_limit_y = constraints[i][3]
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].min_y = constraints[i][4]
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].max_y = constraints[i][5]
                if constraints[i][2] == 'Z':
                    if constraints[i][3] == True:
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].use_limit_z = constraints[i][3]
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].min_z = constraints[i][4]
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].max_z = constraints[i][5]
    def add_constraints_track_X(constraints,limit_rotation):
        """Variant of `add_constraints` whose DAMPED_TRACK entries (>=5 items) also carry an influence value at index 4."""
        obs = []
        for ob in bpy.context.scene.objects:
            if ob.type == 'ARMATURE':
                obs.append(ob)
        #obs
        bpy.ops.object.mode_set(mode='POSE')
        for i in range(len(constraints)):
            print('processar: ',constraints[i])
            if constraints[i][1] == 'COPY_LOCATION' or constraints[i][1] == 'DAMPED_TRACK':
                # print('in 1 j: ',j,' - name: ',constraints[i][0],' constraint: ',constraints[i][1])
                obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[constraints[i][0]].bone
                obs[len(obs)-1].pose.bones[constraints[i][0]].bone.select = True
                # the new constraint is appended last, hence the qtd_constraint-1 index
                bpy.ops.pose.constraint_add(type=constraints[i][1])
                qtd_constraint = len(bpy.context.object.pose.bones[constraints[i][0]].constraints)
                bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].target = bpy.data.objects[constraints[i][2]]
                if constraints[i][1] == 'DAMPED_TRACK' and len(constraints[i])>=4:
                    bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].track_axis = constraints[i][3]
                    bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].influence = constraints[i][4]
            #
            if constraints[i][1] == 'LIMIT_ROTATION' and limit_rotation == True:
                qtd_constraint = len(bpy.context.object.pose.bones[constraints[i][0]].constraints)
                if constraints[i][2] == 'LOCAL':
                    bpy.ops.pose.constraint_add(type=constraints[i][1])
                    qtd_constraint = len(bpy.context.object.pose.bones[constraints[i][0]].constraints)
                    bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].owner_space = constraints[i][2]
                if constraints[i][2] == 'X':
                    if constraints[i][3] == True:
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].use_limit_x = constraints[i][3]
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].min_x = constraints[i][4]
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].max_x = constraints[i][5]
                if constraints[i][2] == 'Y':
                    if constraints[i][3] == True:
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].use_limit_y = constraints[i][3]
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].min_y = constraints[i][4]
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].max_y = constraints[i][5]
                if constraints[i][2] == 'Z':
                    if constraints[i][3] == True:
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].use_limit_z = constraints[i][3]
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].min_z = constraints[i][4]
                        bpy.context.object.pose.bones[constraints[i][0]].constraints[qtd_constraint-1].max_z = constraints[i][5]
# Debug operator: opens the file browser and prints details of the chosen path.
# (Deliberately no class docstring — Blender would surface it as the tooltip.)
class OT_TestOpenFilebrowser(Operator, ImportHelper):
    bl_idname = "test.open_filebrowser"
    bl_label = "Open the file browser (yay)"
    def execute(self, context):
        """Print the selected file's directory, full path, stem and extension."""
        stem, ext = os.path.splitext(self.filepath)
        print('real path', os.path.dirname(self.filepath))
        print('Selected file:', self.filepath)
        print('File name:', stem)
        print('File extension:', ext)
        # print('Some Boolean:', self.some_boolean)
        return {'FINISHED'}
class Import_Data_easymocap(Operator, ImportHelper):
bl_idname = "mocap.import_easymocap"
bl_label = "Import data"
bl_description = "Import EasyMOCAP"
filename_ext = ".json"
filter_glob: StringProperty(
default="*.json",
options={'HIDDEN'},
maxlen=255, # Max internal buffer length, longer would be clamped.
)
def execute(self,context):
#========================
#EASYMOCAP
#=====================
import os
import json
import bpy
from bpy import context
import math
# bpy.ops.test.open_filebrowser('INVOKE_DEFAULT')#abrir janela de navegador
object = []
for ob in bpy.context.scene.objects:
object.append(ob)
if len(object) >0 :
if bpy.context.mode != 'OBJECT':
bpy.ops.object.editmode_toggle()
#path = r'D:\MOCAP\EasyMocap-master\Teste_20210321_1_out\keypoints3d'
path = os.path.dirname(self.filepath)
list_dir = os.listdir(path)
s_list = sorted(list_dir)
data = []
for i in s_list:
with open(path+ os.sep +i,'r') as f:
data.append(json.load(f))
#json.load(f)
len(data)
#-----------------
x=0
y=1
z=2
#armature = 'Armature'
#=====================
#trecho usado para rotacionar ao redor do cursor
def get_override(area_type, region_type):
for area in bpy.context.screen.areas:
if area.type == area_type:
for region in area.regions:
if region.type == region_type:
override = {'area': area, 'region': region}
return override
#error message if the area or region wasn't found
raise RuntimeError("Wasn't able to find", region_type," in area ", area_type,
"\n Make sure it's open while executing script.")
#===================================
#creating bones
#====================================
# obs = []
# for ob in bpy.context.scene.objects:
# # if ob.type == 'ARMATURE':
# obs.append(ob)
# if len(obs)>0:
# if obs[len(obs)-1].mode != 'OBJECT':
# bpy.ops.object.editmode_toggle() #try to change to object mode
# if obs[len(obs)-1].mode != 'OBJECT':
# bpy.ops.object.editmode_toggle() #try again to change to object mode
bpy.ops.object.armature_add(enter_editmode=True, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) #cria armature e primeiro bone
#bpy.ops.object.editmode_toggle()
#bpy.data.armatures['Armature'].edit_bones.active = bpy.context.object.data.edit_bones['Bone']
obs = []
for ob in bpy.context.scene.objects:
if ob.type == 'ARMATURE':
obs.append(ob)
#obs
bpy.ops.armature.select_all(action='DESELECT')
#bpy.context.object.data.edit_bones['Bone'].select_tail=True
obs[len(obs)-1].data.edit_bones['Bone'].select_tail=True
#bpy.ops.armature.extrude_move()#Neck
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.1, 0.1, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
#bpy.ops.armature.extrude_move()#Head_L
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.1, 0.1, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.select_all(action='DESELECT')
bpy.context.object.data.edit_bones['Bone.001'].select_tail=True
#bpy.ops.armature.extrude_move()#Head_R
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.1, 0.1, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Forearm_L
#bpy.ops.armature.extrude_move()#Arm_L
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.1, 0.1, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Forearm_R
#bpy.ops.armature.extrude_move()#Arm_R
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.1, 0.1, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Thigh_L
#bpy.ops.armature.extrude_move()#Leg_L
#bpy.ops.armature.extrude_move()#Foot_L
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.1, 0.1, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.1, 0.1, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Thigh_R
#bpy.ops.armature.extrude_move()#Leg_R
#bpy.ops.armature.extrude_move()#Foot_R
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.1, 0.1, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.1, 0.1, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
#bpy.ops.object.editmode_toggle()
#bpy.data.objects['Armature'].data.edit_bones['Arm_L'].name = 'Teste'
#bpy.context.object.data.edit_bones["Bone"].name = 'Spline'
#bpy.context.object.data.edit_bones["Bone.001"].name = 'Neck'
#bpy.context.object.data.edit_bones["Bone.002"].name = 'Head_L'
#bpy.context.object.data.edit_bones["Bone.003"].name = 'Head_R'
#bpy.context.object.data.edit_bones["Bone.004"].name = 'Forearm_L'
#bpy.context.object.data.edit_bones["Bone.005"].name = 'Arm_L'
#bpy.context.object.data.edit_bones["Bone.006"].name = 'Forearm_R'
#bpy.context.object.data.edit_bones["Bone.007"].name = 'Arm_R'
#bpy.context.object.data.edit_bones["Bone.008"].name = 'Thigh_L'
#bpy.context.object.data.edit_bones["Bone.009"].name = 'Leg_L'
#bpy.context.object.data.edit_bones["Bone.010"].name = 'Foot_L'
#bpy.context.object.data.edit_bones["Bone.011"].name = 'Thigh_R'
#bpy.context.object.data.edit_bones["Bone.012"].name = 'Leg_R'
#bpy.context.object.data.edit_bones["Bone.013"].name = 'Foot_R'
obs[len(obs)-1].data.edit_bones["Bone"].name = 'Spline'
obs[len(obs)-1].data.edit_bones["Bone.001"].name = 'Neck'
obs[len(obs)-1].data.edit_bones["Bone.002"].name = 'Head_L'
obs[len(obs)-1].data.edit_bones["Bone.003"].name = 'Head_R'
obs[len(obs)-1].data.edit_bones["Bone.004"].name = 'Forearm_L'
obs[len(obs)-1].data.edit_bones["Bone.005"].name = 'Arm_L'
obs[len(obs)-1].data.edit_bones["Bone.006"].name = 'Forearm_R'
obs[len(obs)-1].data.edit_bones["Bone.007"].name = 'Arm_R'
obs[len(obs)-1].data.edit_bones["Bone.008"].name = 'Thigh_L'
obs[len(obs)-1].data.edit_bones["Bone.009"].name = 'Leg_L'
obs[len(obs)-1].data.edit_bones["Bone.010"].name = 'Foot_L'
obs[len(obs)-1].data.edit_bones["Bone.011"].name = 'Thigh_R'
obs[len(obs)-1].data.edit_bones["Bone.012"].name = 'Leg_R'
obs[len(obs)-1].data.edit_bones["Bone.013"].name = 'Foot_R'
bpy.ops.object.editmode_toggle()
#remove Collection
if bpy.data.collections.find("Points") >= 0:
collection = bpy.data.collections.get('Points')
#
for obj in collection.objects:
bpy.data.objects.remove(obj, do_unlink=True)
bpy.data.collections.remove(collection)
#create the points in a collection named "Points"
#=====================================================
collection = bpy.data.collections.new("Points")
bpy.context.scene.collection.children.link(collection)
layer_collection = bpy.context.view_layer.layer_collection.children[collection.name]
bpy.context.view_layer.active_layer_collection = layer_collection
for point in range(25):
bpy.ops.mesh.primitive_plane_add(enter_editmode=True, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1))
bpy.ops.mesh.merge(type='CENTER')
bpy.ops.object.editmode_toggle()
context.active_object.name = 'Point.'+str(1000+point)[1:]
#=====================================================
#colocar cursor no tempo
#bpy.context.scene.cursor.location = (0.0, 0.0, 0.0)
#bpy.context.scene.tool_settings.transform_pivot_point = 'CURSOR'
## Deselect all objects
#bpy.ops.object.select_all(action='DESELECT')
#for o in bpy.data.objects:
# # Check for given object names
# if o.name in ("Point.000","Point.001","Point.002","Point.003","Point.004","Point.005","Point.006","Point.007","Point.008","Point.009" ,"Point.010" ,"Point.011","Point.012","Point.013","Point.014","Point.015","Point.016","Point.017","Point.018","Point.019","Point.020" ,"Point.021","Point.022","Point.023","Point.024"):
# o.select_set(True)
for item in range(len(data)):
print("frame: ",item)
for limb in range(len(data[item][0]['keypoints3d'])):
# print("limb: ",limb)
bpy.data.objects["Point."+str(1000+limb)[1:]].location[x]=data[item][0]['keypoints3d'][limb][x]
bpy.data.objects["Point."+str(1000+limb)[1:]].location[y]=data[item][0]['keypoints3d'][limb][y]
bpy.data.objects["Point."+str(1000+limb)[1:]].location[z]=data[item][0]['keypoints3d'][limb][z]
#
# #we need to override the context of our operator
# override = get_override( 'VIEW_3D', 'WINDOW' )
# #rotate about the X-axis by 45 degrees
# bpy.ops.transform.rotate(override, value=6.283/2, orient_axis="Y")
#
#Save a location keyframe for this frame
bpy.data.objects["Point."+str(1000+limb)[1:]].keyframe_insert(data_path="location", frame=item)
#==========================================================================================================
def distance(point1, point2) -> float:
    """Return the Euclidean distance between two objects' ``location`` points.

    Both arguments must expose a 3-component ``location`` attribute
    (e.g. Blender ``Object`` instances, or anything duck-typed alike).
    """
    dx = point2.location[0] - point1.location[0]
    dy = point2.location[1] - point1.location[1]
    dz = point2.location[2] - point1.location[2]
    return math.sqrt(dx * dx + dy * dy + dz * dz)
def size_bone(point_name1, point_name2, bone):
    """Set edit bone ``bone``'s length to the distance between two point objects.

    Parameters
    ----------
    point_name1, point_name2 : str
        Names of objects in ``bpy.data.objects`` whose locations define the span.
    bone : str
        Name of the edit bone on the active armature to resize.

    Requires an active armature object. Edit bones are only writable in Edit
    Mode, so the function toggles into Edit Mode when necessary and toggles
    back out afterwards, restoring the caller's mode.
    """
    p1 = bpy.data.objects[point_name1]
    p2 = bpy.data.objects[point_name2]
    # Toggle in (and back out) of Edit Mode only when needed, instead of
    # duplicating the length assignment in both branches.
    was_edit = bpy.context.active_object.mode == 'EDIT'
    if not was_edit:
        bpy.ops.object.editmode_toggle()
    bpy.context.object.data.edit_bones[bone].length = distance(p1, p2)
    if not was_edit:
        bpy.ops.object.editmode_toggle()
#selecting and making the armature Active
#(pick the most recently added armature in the scene)
obs = []
for ob in bpy.context.scene.objects:
if ob.type == 'ARMATURE':
obs.append(ob)
#obs
armature = obs[len(obs)-1].name
#bpy.data.objects[armature].select_set(True)
obs[len(obs)-1].select_set(True)
view_layer = bpy.context.view_layer
#Armature_obj = bpy.context.scene.objects[armature]
Armature_obj = obs[len(obs)-1]
view_layer.objects.active = Armature_obj
size_bone("Point.008", "Point.001", "Spline")
size_bone("Point.001", "Point.000", "Neck")
size_bone("Point.000", "Point.016", "Head_L")
size_bone("Point.000", "Point.015", "Head_R")
size_bone("Point.005", "Point.006", "Forearm_L")
size_bone("Point.006", "Point.007", "Arm_L")
size_bone("Point.002", "Point.003", "Forearm_R")
size_bone("Point.003", "Point.004", "Arm_R")
size_bone("Point.012", "Point.013", "Thigh_L")
size_bone("Point.013", "Point.014", "Leg_L")
size_bone("Point.014", "Point.019", "Foot_L")
size_bone("Point.009", "Point.010", "Thigh_R")
size_bone("Point.010", "Point.011", "Leg_R")
size_bone("Point.011", "Point.022", "Foot_R")
#start configuring the bones to follow the point movements
#switch to Pose Mode
bpy.ops.object.mode_set(mode='POSE')
#bpy.data.objects[armature].pose.bones["Spine"]
#bpy.data.objects[armature].pose.bones["Spine"].bone
actual_bone = 'Spline'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='COPY_LOCATION')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.008"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[1].target = bpy.data.objects["Point.001"]
#=====
actual_bone = 'Neck'
bpy.context.object.data.bones.active = bpy.data.objects[armature].pose.bones[actual_bone].bone
bpy.context.object.pose.bones[actual_bone].bone.select = True
#bpy.ops.pose.constraint_add(type='COPY_LOCATION')
#bpy.context.object.pose.bones[actual_bone].constraints["Copy Location"].target = bpy.data.objects["Point.001"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.000"]
#=====
actual_bone = 'Head_L'
bpy.context.object.data.bones.active = bpy.data.objects[armature].pose.bones[actual_bone].bone
bpy.context.object.pose.bones[actual_bone].bone.select = True
#bpy.ops.pose.constraint_add(type='COPY_LOCATION')
#bpy.context.object.pose.bones[actual_bone].constraints["Copy Location"].target = bpy.data.objects["Point.000"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.016"]
#=====
actual_bone = 'Head_R'
bpy.context.object.data.bones.active = bpy.data.objects[armature].pose.bones[actual_bone].bone
bpy.context.object.pose.bones[actual_bone].bone.select = True
#bpy.ops.pose.constraint_add(type='COPY_LOCATION')
#bpy.context.object.pose.bones[actual_bone].constraints["Copy Location"].target = bpy.data.objects["Point.000"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.015"]
#=====
actual_bone = 'Forearm_L'
bpy.context.object.data.bones.active = bpy.data.objects[armature].pose.bones[actual_bone].bone
bpy.context.object.pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='COPY_LOCATION')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.005"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[1].target = bpy.data.objects["Point.006"]
#=====
actual_bone = 'Arm_L'
bpy.context.object.data.bones.active = bpy.data.objects[armature].pose.bones[actual_bone].bone
bpy.context.object.pose.bones[actual_bone].bone.select = True
#bpy.ops.pose.constraint_add(type='COPY_LOCATION')
#bpy.context.object.pose.bones[actual_bone].constraints["Copy Location"].target = bpy.data.objects["Point.006"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.007"]
#=====
actual_bone = 'Forearm_R'
bpy.context.object.data.bones.active = bpy.data.objects[armature].pose.bones[actual_bone].bone
bpy.context.object.pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='COPY_LOCATION')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.002"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[1].target = bpy.data.objects["Point.003"]
#=====
actual_bone = 'Arm_R'
bpy.context.object.data.bones.active = bpy.data.objects[armature].pose.bones[actual_bone].bone
bpy.context.object.pose.bones[actual_bone].bone.select = True
#bpy.ops.pose.constraint_add(type='COPY_LOCATION')
#bpy.context.object.pose.bones[actual_bone].constraints["Copy Location"].target = bpy.data.objects["Point.003"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.004"]
#=====
actual_bone = 'Thigh_L'
bpy.context.object.data.bones.active = bpy.data.objects[armature].pose.bones[actual_bone].bone
bpy.context.object.pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='COPY_LOCATION')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.012"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[1].target = bpy.data.objects["Point.013"]
#=====
actual_bone = 'Leg_L'
bpy.context.object.data.bones.active = bpy.data.objects[armature].pose.bones[actual_bone].bone
bpy.context.object.pose.bones[actual_bone].bone.select = True
#bpy.ops.pose.constraint_add(type='COPY_LOCATION')
#bpy.context.object.pose.bones[actual_bone].constraints["Copy Location"].target = bpy.data.objects["Point.013"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.014"]
#=====
actual_bone = 'Foot_L'
bpy.context.object.data.bones.active = bpy.data.objects[armature].pose.bones[actual_bone].bone
bpy.context.object.pose.bones[actual_bone].bone.select = True
#bpy.ops.pose.constraint_add(type='COPY_LOCATION')
#bpy.context.object.pose.bones[actual_bone].constraints["Copy Location"].target = bpy.data.objects["Point.014"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.019"]
#=====
actual_bone = 'Thigh_R'
bpy.context.object.data.bones.active = bpy.data.objects[armature].pose.bones[actual_bone].bone
bpy.context.object.pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='COPY_LOCATION')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.009"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[1].target = bpy.data.objects["Point.010"]
#=====
actual_bone = 'Leg_R'
bpy.context.object.data.bones.active = bpy.data.objects[armature].pose.bones[actual_bone].bone
bpy.context.object.pose.bones[actual_bone].bone.select = True
#bpy.ops.pose.constraint_add(type='COPY_LOCATION')
#bpy.context.object.pose.bones[actual_bone].constraints["Copy Location"].target = bpy.data.objects["Point.010"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.011"]
#=====
actual_bone = 'Foot_R'
bpy.context.object.data.bones.active = bpy.data.objects[armature].pose.bones[actual_bone].bone
bpy.context.object.pose.bones[actual_bone].bone.select = True
#bpy.ops.pose.constraint_add(type='COPY_LOCATION')
#bpy.context.object.pose.bones[actual_bone].constraints["Copy Location"].target = bpy.data.objects["Point.011"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.022"]
#=====
#bpy.data.objects['Armature'].pose.bones.items()
#[('Bone', bpy.data.objects['Armature'].pose.bones["Bone"]), ('Thigh_L', bpy.data.objects['Armature'].pose.bones["Thigh_L"]), ('Leg_L', bpy.data.objects['Armature'].pose.bones["Leg_L"]), ('Foot_L', bpy.data.objects['Armature'].pose.bones["Foot_L"]), ('Spine', bpy.data.objects['Armature'].pose.bones["Spine"]), ('Neck', bpy.data.objects['Armature'].pose.bones["Neck"]), ('Head_L', bpy.data.objects['Armature'].pose.bones["Head_L"]), ('Head_R', bpy.data.objects['Armature'].pose.bones["Head_R"]), ('Forearm_L', bpy.data.objects['Armature'].pose.bones["Forearm_L"]), ('Arm_L', bpy.data.objects['Armature'].pose.bones["Arm_L"]), ('Thigh_R', bpy.data.objects['Armature'].pose.bones["Thigh_R"]), ('Leg_R', bpy.data.objects['Armature'].pose.bones["Leg_R"]), ('Foot_R', bpy.data.objects['Armature'].pose.bones["Foot_R"]), ('Forearm_R', bpy.data.objects['Armature'].pose.bones["Forearm_R"]), ('Arm_R', bpy.data.objects['Armature'].pose.bones["Arm_R"])]
print(len(data))
bpy.context.scene.frame_end = len(data)
bpy.ops.nla.bake(frame_start=1, frame_end=len(data), visual_keying=True, clear_constraints=True, clear_parents=True, bake_types={'POSE'})
bpy.ops.object.mode_set(mode='OBJECT')
#delete the Points collection created earlier
collection = bpy.data.collections.get('Points')
#
for obj in collection.objects:
bpy.data.objects.remove(obj, do_unlink=True)
bpy.data.collections.remove(collection)
return{'FINISHED'}
class Import_Data_frankmocap(Operator, ImportHelper):
bl_idname = "mocap.import_frankmocap"
bl_label = "Import data from Frankmocap"
bl_description = "Import FrankMocap"
filename_ext = ".pkl"
filter_glob: StringProperty(
default="*.pkl",
options={'HIDDEN'},
maxlen=255, # Max internal buffer length, longer would be clamped.
)
def execute(self,context):
#"""
#Frnakmocap
#==========================
import math
import bpy
import os
import pickle
import numpy as np
from bpy import context
import joblib
multiplier = context.scene.sk_value_prop.sk_value
raw_bool = context.scene.sk_value_prop.sk_raw_bool
def middle_point(p1, p2, p_middle):
    """Move object ``p_middle`` to the midpoint of objects ``p1`` and ``p2``.

    All three arguments are names of objects in ``bpy.data.objects``.
    Deselects everything, selects the two endpoint objects (making ``p2``
    active), then averages their world-space translations.
    """
    bpy.ops.object.select_all(action='DESELECT')
    for name in (p1, p2):
        bpy.data.objects[name].select_set(True)
    bpy.context.view_layer.objects.active = bpy.data.objects[p2]
    selected = bpy.context.selected_objects
    count = len(selected)
    assert(count)
    # Average of the selected objects' world translations = midpoint.
    centroid = sum([o.matrix_world.translation for o in selected], Vector()) / count
    bpy.data.objects[p_middle].location = centroid
def create_dots(name, amount):
    """(Re)create ``amount`` single-vertex mesh objects inside a collection.

    Any pre-existing collection called ``name`` (together with the objects
    it holds) is deleted first. A fresh collection is then created, linked
    under the scene collection and made the active layer collection so the
    new objects land inside it. Each object is a plane merged down to a
    single vertex at the origin, named ``name`` + '.' + zero-padded index
    (e.g. ``Point.000``).
    """
    old = bpy.data.collections.get(name)
    if old is not None:
        # Snapshot the object list first: removing members while iterating
        # the live bpy collection can skip entries.
        for obj in list(old.objects):
            bpy.data.objects.remove(obj, do_unlink=True)
        bpy.data.collections.remove(old)
    # Create the replacement collection and make it active so the
    # primitive_plane_add calls below create objects inside it.
    collection = bpy.data.collections.new(name)
    bpy.context.scene.collection.children.link(collection)
    layer_collection = bpy.context.view_layer.layer_collection.children[collection.name]
    bpy.context.view_layer.active_layer_collection = layer_collection
    for point in range(amount):
        bpy.ops.mesh.primitive_plane_add(enter_editmode=True, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1))
        bpy.ops.mesh.merge(type='CENTER')
        bpy.ops.object.editmode_toggle()
        bpy.context.active_object.name = name+'.'+str(1000+point)[1:]
#=====================================================
#==============================
#codes to size the bones
#==============================
def distance(point1, point2) -> float:
    """Euclidean distance between the ``location`` vectors of two objects."""
    squared = sum(
        (point2.location[axis] - point1.location[axis]) ** 2 for axis in range(3)
    )
    return math.sqrt(squared)
def size_bone(point_name1, point_name2, bone):
    """Resize edit bone ``bone`` to span the gap between two named point objects."""
    first = bpy.data.objects[point_name1]
    second = bpy.data.objects[point_name2]
    # edit_bones can only be written from Edit Mode; hop in and out if needed.
    if bpy.context.active_object.mode != 'EDIT':
        bpy.ops.object.editmode_toggle()
        bpy.context.object.data.edit_bones[bone].length = distance(first, second)
        bpy.ops.object.editmode_toggle()
    else:
        bpy.context.object.data.edit_bones[bone].length = distance(first, second)
create_dots('Point',49)
# pkl_path=r'C:\MOCAP\frankmocap\mocap_output\mocap\temp'
pkl_path = os.path.dirname(self.filepath)
list_dir = os.listdir(pkl_path)
s_list = sorted(list_dir)
len(s_list)
x=0
y=1
z=2
multi=100
#armature = 'Armature'
#exemplo
file = open(pkl_path+ os.sep +s_list[0],'rb')
pic = pickle.load(file)
file.close()
nppic = np.load(pkl_path+ os.sep +s_list[0], allow_pickle=True)
for item in range(len(s_list)-1):
nppic = np.load(pkl_path+ os.sep +s_list[item], allow_pickle=True)
# nppic['pred_output_list'][0]['pred_body_joints_img'] #todos os limbs
print("frame: ",item)
for limb in range(len(nppic['pred_output_list'][0]['pred_body_joints_img'])):
# print("limb: ",limb)
bpy.data.objects["Point."+str(1000+limb)[1:]].location[z]=nppic['pred_output_list'][0]['pred_body_joints_img'][limb][x]/multi
bpy.data.objects["Point."+str(1000+limb)[1:]].location[y]=nppic['pred_output_list'][0]['pred_body_joints_img'][limb][y]/multi
bpy.data.objects["Point."+str(1000+limb)[1:]].location[x]=nppic['pred_output_list'][0]['pred_body_joints_img'][limb][z]/multi
bpy.data.objects["Point."+str(1000+limb)[1:]].keyframe_insert(data_path="location", frame=item)
len(nppic['pred_output_list'][0]['pred_body_joints_img'])
import bpy
#===========
# selectign Scene Collection
scene_collection = bpy.context.view_layer.layer_collection
bpy.context.view_layer.active_layer_collection = scene_collection
#===================================
#creating bones
#====================================
bpy.ops.object.armature_add(enter_editmode=True, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) #cria armature e primeiro bone
#bpy.ops.object.editmode_toggle()
#bpy.data.armatures['Armature'].edit_bones.active = bpy.context.object.data.edit_bones['Bone']
obs = []
for ob in bpy.context.scene.objects:
if ob.type == 'ARMATURE':
obs.append(ob)
#obs
bpy.ops.armature.select_all(action='DESELECT')
#bpy.context.object.data.edit_bones['Bone'].select_tail=True
obs[len(obs)-1].data.edit_bones['Bone'].select_tail=True
bpy.ops.armature.bone_primitive_add()#Spine
#bpy.ops.armature.extrude_move()#Neck
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
##bpy.ops.armature.extrude_move()#Face
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Arm_L
#bpy.ops.armature.extrude_move()#Forearm_L
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Arm_R
#bpy.ops.armature.extrude_move()#Forearm_R
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Thigh_L
#bpy.ops.armature.extrude_move()#Leg_L
#bpy.ops.armature.extrude_move()#Foot_L
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Thigh_R
#bpy.ops.armature.extrude_move()#Leg_R
#bpy.ops.armature.extrude_move()#Foot_R
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
obs[len(obs)-1].data.edit_bones["Bone"].name = 'Root'
obs[len(obs)-1].data.edit_bones["Bone.001"].name = 'Spine'
obs[len(obs)-1].data.edit_bones["Bone.002"].name = 'Neck'
obs[len(obs)-1].data.edit_bones["Bone.003"].name = 'Face'
obs[len(obs)-1].data.edit_bones["Bone.004"].name = 'Arm_L'
obs[len(obs)-1].data.edit_bones["Bone.005"].name = 'Forearm_L'
obs[len(obs)-1].data.edit_bones["Bone.006"].name = 'Arm_R'
obs[len(obs)-1].data.edit_bones["Bone.007"].name = 'Forearm_R'
obs[len(obs)-1].data.edit_bones["Bone.008"].name = 'Thigh_L'
obs[len(obs)-1].data.edit_bones["Bone.009"].name = 'Leg_L'
obs[len(obs)-1].data.edit_bones["Bone.010"].name = 'Foot_L'
obs[len(obs)-1].data.edit_bones["Bone.011"].name = 'Thigh_R'
obs[len(obs)-1].data.edit_bones["Bone.012"].name = 'Leg_R'
obs[len(obs)-1].data.edit_bones["Bone.013"].name = 'Foot_R'
#Hierarchy: parent spine and limb chains to build the bone tree
bpy.context.object.data.edit_bones["Spine"].parent = bpy.context.object.data.edit_bones["Root"]
bpy.context.object.data.edit_bones["Arm_L"].parent = bpy.context.object.data.edit_bones["Spine"]
bpy.context.object.data.edit_bones["Arm_R"].parent = bpy.context.object.data.edit_bones["Spine"]
bpy.context.object.data.edit_bones["Thigh_L"].parent = bpy.context.object.data.edit_bones["Root"]
bpy.context.object.data.edit_bones["Thigh_R"].parent = bpy.context.object.data.edit_bones["Root"]
bpy.ops.object.editmode_toggle()
from mathutils import Vector
import bpy
bpy.ops.object.select_all(action='DESELECT')
bpy.data.objects['Point.001'].select_set(True)
bpy.data.objects['Point.008'].select_set(True)
bpy.context.view_layer.objects.active = bpy.data.objects['Point.034']
obs = bpy.context.selected_objects
n = len(obs)
# print('n: ',n)
assert(n)
#scene.cursor.location = sum([o.matrix_world.translation for o in obs], Vector()) / n
#bpy.data.objects[p_middle].location = sum([o.matrix_world.translation for o in obs], Vector()) / n
x_subtract = abs(obs[0].matrix_world.translation.x - obs[1].matrix_world.translation.x)
y_subtract = abs(obs[0].matrix_world.translation.y - obs[1].matrix_world.translation.y)
z_subtract = abs(obs[0].matrix_world.translation.z - obs[1].matrix_world.translation.z)
max(x_subtract, y_subtract, z_subtract) #maior das medidas
unit = max(x_subtract, y_subtract, z_subtract)/3
unit = unit*multiplier
root_sz =unit/10
spine_sz =unit*3.5
neck_sz =unit
face_sz =unit
thigh_sz =unit*3
leg_sz =unit*2.5
foot_sz =unit #inclinado 45 graud pra frente
arm_sz =unit*1.5
forearm_sz =unit*1.5
#if bpy.context.active_object.mode != 'EDIT':
# bpy.ops.object.editmode_toggle()
#==========================================
#selecting and making the armature Active
#selecting the armature
#==========================================
bpy.ops.object.select_all(action='DESELECT')
#bpy.ops.armature.select_all(action='DESELECT')
obs = []
for ob in bpy.context.scene.objects:
if ob.type == 'ARMATURE':
obs.append(ob)
#obs
armature = obs[len(obs)-1].name
#bpy.data.objects[armature].select_set(True)
obs[len(obs)-1].select_set(True)
view_layer = bpy.context.view_layer
#Armature_obj = bpy.context.scene.objects[armature]
Armature_obj = obs[len(obs)-1]
view_layer.objects.active = Armature_obj
#converting to euler rotation
order = 'XYZ'
context = bpy.context
rig_object = context.active_object
for pb in rig_object.pose.bones:
pb.rotation_mode = order
bpy.ops.object.editmode_toggle()
#changing location
#resetting
bpy.context.object.data.edit_bones["Spine"].head.xy=0
bpy.context.object.data.edit_bones["Neck"].head.xy=0
bpy.context.object.data.edit_bones["Face"].head.xy=0
bpy.context.object.data.edit_bones["Arm_L"].head.xy=0
bpy.context.object.data.edit_bones["Forearm_L"].head.xy=0
bpy.context.object.data.edit_bones["Arm_R"].head.xy=0
bpy.context.object.data.edit_bones["Forearm_R"].head.xy=0
bpy.context.object.data.edit_bones["Thigh_L"].head.xy=0
bpy.context.object.data.edit_bones["Leg_L"].head.xy=0
bpy.context.object.data.edit_bones["Foot_L"].head.xy=0
bpy.context.object.data.edit_bones["Thigh_R"].head.xy=0
bpy.context.object.data.edit_bones["Leg_R"].head.xy=0
bpy.context.object.data.edit_bones["Foot_R"].head.xy=0
#tail
bpy.context.object.data.edit_bones["Face"].tail.xy=0
bpy.context.object.data.edit_bones["Neck"].tail.xy=0
bpy.context.object.data.edit_bones["Forearm_L"].tail.xy=0
bpy.context.object.data.edit_bones["Forearm_R"].tail.xy=0
bpy.context.object.data.edit_bones["Foot_L"].tail.xy=0
bpy.context.object.data.edit_bones["Foot_R"].tail.xy=0
bpy.context.object.data.edit_bones["Root"].length = root_sz
bpy.context.object.data.edit_bones["Spine"].head.z = unit/2
bpy.context.object.data.edit_bones["Spine"].tail.z = spine_sz
bpy.context.object.data.edit_bones["Neck"].tail.z = spine_sz + neck_sz
bpy.context.object.data.edit_bones["Neck"].tail.y = neck_sz/3
bpy.context.object.data.edit_bones["Face"].tail.z = spine_sz + neck_sz
bpy.context.object.data.edit_bones["Face"].tail.y = face_sz*-1
bpy.context.object.data.edit_bones["Arm_L"].head.z= spine_sz
bpy.context.object.data.edit_bones["Arm_L"].head.x= unit*3/4
bpy.context.object.data.edit_bones["Forearm_L"].head.z= spine_sz
bpy.context.object.data.edit_bones["Forearm_L"].head.x= unit + arm_sz
bpy.context.object.data.edit_bones["Forearm_L"].tail.z= spine_sz
bpy.context.object.data.edit_bones["Forearm_L"].tail.x= unit + arm_sz + forearm_sz
bpy.context.object.data.edit_bones["Arm_R"].head.z= spine_sz
bpy.context.object.data.edit_bones["Arm_R"].head.x= (unit*3/4)*-1
bpy.context.object.data.edit_bones["Forearm_R"].head.z= spine_sz
bpy.context.object.data.edit_bones["Forearm_R"].head.x= (unit + arm_sz) *-1
bpy.context.object.data.edit_bones["Forearm_R"].tail.z= spine_sz
bpy.context.object.data.edit_bones["Forearm_R"].tail.x= (unit + arm_sz + forearm_sz) *-1
bpy.context.object.data.edit_bones["Thigh_L"].head.x= unit*3/4
bpy.context.object.data.edit_bones["Thigh_L"].head.z= (unit/5)*-1
bpy.context.object.data.edit_bones["Leg_L"].head.x= unit*3/4
bpy.context.object.data.edit_bones["Leg_L"].head.z= (unit/5 + thigh_sz)*-1
bpy.context.object.data.edit_bones["Foot_L"].head.x= unit*3/4
bpy.context.object.data.edit_bones["Foot_L"].head.z= (unit/5 + thigh_sz + leg_sz)*-1
bpy.context.object.data.edit_bones["Foot_L"].tail.x= unit*3/4
bpy.context.object.data.edit_bones["Foot_L"].tail.z= (unit/5 + thigh_sz + leg_sz + foot_sz/2)*-1
bpy.context.object.data.edit_bones["Foot_L"].tail.y= foot_sz/2*-1
bpy.context.object.data.edit_bones["Thigh_R"].head.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Thigh_R"].head.z= (unit/5)*-1
bpy.context.object.data.edit_bones["Leg_R"].head.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Leg_R"].head.z= (unit/5 + thigh_sz)*-1
bpy.context.object.data.edit_bones["Foot_R"].head.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Foot_R"].head.z= (unit/5 + thigh_sz + leg_sz)*-1
bpy.context.object.data.edit_bones["Foot_R"].tail.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Foot_R"].tail.z= (unit/5 + thigh_sz + leg_sz + foot_sz/2)*-1
bpy.context.object.data.edit_bones["Foot_R"].tail.y= foot_sz/2*-1
bpy.ops.object.editmode_toggle()
import bpy
#start of setup: make the bones follow the tracked point movements
#switch to pose mode
obs = []
for ob in bpy.context.scene.objects:
if ob.type == 'ARMATURE':
obs.append(ob)
#obs
bpy.ops.object.mode_set(mode='POSE')
actual_bone = 'Root'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='COPY_LOCATION')
#bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.008"]
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.039"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[1].target = bpy.data.objects["Point.001"]
bpy.context.object.pose.bones[actual_bone].constraints[1].target = bpy.data.objects["Point.037"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[1].target = bpy.data.objects["Point.001"]
bpy.context.object.pose.bones[actual_bone].constraints[2].target = bpy.data.objects["Point.027"]
bpy.context.object.pose.bones[actual_bone].constraints[2].track_axis = 'TRACK_X'
#====
actual_bone = 'Spine'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.001"]
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.037"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.349066
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0.349066
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = -0.698132
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0.698132
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.174533
#=====
actual_bone = 'Neck'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.000"]
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.042"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 1.0472
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0.523599
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.349066
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.349066
#=====
actual_bone = 'Face'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.044"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0.872665
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.523599
#=====
actual_bone = 'Arm_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.006"]
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.032"]
#=====
actual_bone = 'Forearm_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.007"]
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.031"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -2.53073
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = -0.191986
#=====
actual_bone = 'Arm_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.003"]
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.035"]
#=====
actual_bone = 'Forearm_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.004"]
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.036"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = False
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = False
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0.191986
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 2.53073
#=====
actual_bone = 'Thigh_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.013"]
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.026"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -1.76278
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 1.3439
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.785398
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.174533
#=====
actual_bone = 'Leg_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.014"]
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.025"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = 0.0698132
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 2.0944
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
#=====
actual_bone = 'Foot_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.019"]
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.022"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0.523599
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
#=====
actual_bone = 'Thigh_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.010"]
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.029"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -1.76278
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 1.3439
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.785398
actual_bone = 'Leg_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.011"]
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.030"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = 0.0698132
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 2.0944
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
actual_bone = 'Foot_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.022"]
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.019"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0.523599
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
print(len(s_list))
bpy.context.scene.frame_end = len(s_list)
bpy.ops.nla.bake(frame_start=1, frame_end=len(s_list), visual_keying=True, clear_constraints=True, clear_parents=True, bake_types={'POSE'})
bpy.ops.object.mode_set(mode='OBJECT')
#delete the "Point" collection created earlier
collection = bpy.data.collections.get('Point')
#
for obj in collection.objects:
bpy.data.objects.remove(obj, do_unlink=True)
bpy.data.collections.remove(collection)
sk_value_prop = context.scene.sk_value_prop
if raw_bool == True:
print('raw_bool True - ',raw_bool)
x_original, y_original, z_original = helper_functions.get_rotations()
sk_value_prop.sk_root_rot_x = math.degrees(x_original)
sk_value_prop.sk_root_rot_y = math.degrees(y_original)
sk_value_prop.sk_root_rot_z = math.degrees(z_original)
#in this case both original and actual is the same, because there was no alteration on the angle
x_actual_deg = math.degrees(x_original)
y_actual_deg = math.degrees(y_original)
z_actual_deg = math.degrees(z_original)
sk_value_prop.sk_root_actual_rot_x = x_actual_deg
sk_value_prop.sk_root_actual_rot_y = y_actual_deg
sk_value_prop.sk_root_actual_rot_z = z_actual_deg
else:
print('raw_bool False - ',raw_bool)
x_deg, y_deg, z_deg = helper_functions.anim_to_origin()
#take the information of the rotation to the panel
print('result x: ',x_deg)
print('result y: ',y_deg)
print('result z: ',z_deg)
sk_value_prop.sk_root_rot_x = x_deg
sk_value_prop.sk_root_rot_y = y_deg
sk_value_prop.sk_root_rot_z = z_deg
#"""
return{'FINISHED'}
#"""
class Import_Data_vibe(Operator, ImportHelper):
bl_idname = "mocap.import_vibe"
bl_label = "Import data from Vibe (needs joblib install)"
bl_description = "Import Vibe"
filename_ext = ".pkl"
filter_glob: StringProperty(
default="*.pkl",
options={'HIDDEN'},
maxlen=255, # Max internal buffer length, longer would be clamped.
)
def execute(self,context):
#==========================
#======VIBE
#install joblib
#D:\Blender\blender-2.92.0-windows64\2.92\python\bin\python.exe D:\Blender\blender-2.92.0-windows64\2.92\python\lib\site-packages\pip install joblib
import math
import bpy
import os
import pickle
import numpy as np
from bpy import context
import joblib
multiplier = context.scene.sk_value_prop.sk_value
raw_bool = context.scene.sk_value_prop.sk_raw_bool
def middle_point(p1, p2, p_middle):
    """Move the object named *p_middle* to the midpoint of the objects named *p1* and *p2*.

    Selects the two source objects, makes *p2* active, then places *p_middle*
    at the average of the selected objects' world-space translations.
    """
    # Vector is not imported in this function's enclosing scope (the only
    # visible `from mathutils import Vector` lives in a different operator's
    # execute), so import it locally to avoid a NameError when called.
    from mathutils import Vector
    bpy.ops.object.select_all(action='DESELECT')
    bpy.data.objects[p1].select_set(True)
    bpy.data.objects[p2].select_set(True)
    bpy.context.view_layer.objects.active = bpy.data.objects[p2]
    obs = bpy.context.selected_objects
    n = len(obs)
    # Guard against an empty selection (would divide by zero below).
    assert(n)
    # Average of world-space translations of the selected objects.
    bpy.data.objects[p_middle].location = sum([o.matrix_world.translation for o in obs], Vector()) / n
def create_dots(name, amount):
    """Create *amount* single-vertex mesh objects named ``<name>.NNN`` in a fresh collection.

    Any pre-existing collection called *name* is removed first, together with
    the objects it contains. The new collection is made active so the created
    point objects are linked into it.
    """
    # Remove a stale collection of the same name, including its objects.
    if bpy.data.collections.find(name) >= 0:
        collection = bpy.data.collections.get(name)
        for obj in collection.objects:
            bpy.data.objects.remove(obj, do_unlink=True)
        bpy.data.collections.remove(collection)
    # Create the target collection and make it the active layer collection so
    # newly added objects land inside it.
    collection = bpy.data.collections.new(name)
    bpy.context.scene.collection.children.link(collection)
    layer_collection = bpy.context.view_layer.layer_collection.children[collection.name]
    bpy.context.view_layer.active_layer_collection = layer_collection
    # One plane collapsed to a single vertex per point. The zero-padded
    # 3-digit f-string suffix matches the old `str(1000+point)[1:]` trick for
    # point < 1000 but fixes its name collision for point >= 1000
    # ("2000"[1:] == "000").
    for point in range(amount):
        bpy.ops.mesh.primitive_plane_add(enter_editmode=True, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1))
        bpy.ops.mesh.merge(type='CENTER')
        bpy.ops.object.editmode_toggle()
        bpy.context.active_object.name = f"{name}.{point:03d}"
#=====================================================
#==============================
#codes to size the bones
#==============================
def distance(point1, point2) -> float:
    """Return the Euclidean distance between the ``location`` vectors of two objects."""
    deltas = (b - a for a, b in zip(point1.location, point2.location))
    return math.sqrt(sum(d * d for d in deltas))
def size_bone(point_name1, point_name2, bone):
    """Set edit-bone *bone*'s length to the distance between the two named point objects."""
    first = bpy.data.objects[point_name1]
    second = bpy.data.objects[point_name2]
    # edit_bones is only reachable in Edit Mode; toggle in (and back out)
    # only when we are not already there.
    needs_toggle = bpy.context.active_object.mode != 'EDIT'
    if needs_toggle:
        bpy.ops.object.editmode_toggle()
    bpy.context.object.data.edit_bones[bone].length = distance(first, second)
    if needs_toggle:
        bpy.ops.object.editmode_toggle()
#path = r'D:\MOCAP\EasyMocap-master\demo_test\videos\1.mp4'
#path = r'D:\Video_editing\running e brack dance para mocap.mp4'
create_dots('Point',49)
# pkl_path=r'D:\MOCAP\VIBE\output\sample_video\vibe_output.pkl'
pkl_path=self.filepath
pic = joblib.load(pkl_path)
x=0
y=1
z=2
person_id=1
for item in range(len(pic[person_id]['pose'])):
print("frame: ",item)
final_limbs = int(len(pic[person_id]['pose'][item])/3)
for limb in range(final_limbs):
# print("limb: ",limb)
bpy.data.objects["Point."+str(1000+limb)[1:]].location[x]=pic[person_id]['joints3d'][item][limb][x]
bpy.data.objects["Point."+str(1000+limb)[1:]].location[y]=pic[person_id]['joints3d'][item][limb][y]
bpy.data.objects["Point."+str(1000+limb)[1:]].location[z]=pic[person_id]['joints3d'][item][limb][z]
bpy.data.objects["Point."+str(1000+limb)[1:]].keyframe_insert(data_path="location", frame=item)
import bpy
#===========
# selectign Scene Collection
scene_collection = bpy.context.view_layer.layer_collection
bpy.context.view_layer.active_layer_collection = scene_collection
#===================================
#creating bones
#====================================
bpy.ops.object.armature_add(enter_editmode=True, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) #cria armature e primeiro bone
#bpy.ops.object.editmode_toggle()
#bpy.data.armatures['Armature'].edit_bones.active = bpy.context.object.data.edit_bones['Bone']
obs = []
for ob in bpy.context.scene.objects:
if ob.type == 'ARMATURE':
obs.append(ob)
#obs
bpy.ops.armature.select_all(action='DESELECT')
#bpy.context.object.data.edit_bones['Bone'].select_tail=True
obs[len(obs)-1].data.edit_bones['Bone'].select_tail=True
bpy.ops.armature.bone_primitive_add()#Spine
#bpy.ops.armature.extrude_move()#Neck
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
##bpy.ops.armature.extrude_move()#Face
#bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Arm_L
#bpy.ops.armature.extrude_move()#Forearm_L
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Arm_R
#bpy.ops.armature.extrude_move()#Forearm_R
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Thigh_L
#bpy.ops.armature.extrude_move()#Leg_L
#bpy.ops.armature.extrude_move()#Foot_L
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Thigh_R
#bpy.ops.armature.extrude_move()#Leg_R
#bpy.ops.armature.extrude_move()#Foot_R
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
obs[len(obs)-1].data.edit_bones["Bone"].name = 'Root'
obs[len(obs)-1].data.edit_bones["Bone.001"].name = 'Spine'
obs[len(obs)-1].data.edit_bones["Bone.002"].name = 'Neck'
#obs[len(obs)-1].data.edit_bones["Bone.003"].name = 'Face'
obs[len(obs)-1].data.edit_bones["Bone.003"].name = 'Arm_L'
obs[len(obs)-1].data.edit_bones["Bone.004"].name = 'Forearm_L'
obs[len(obs)-1].data.edit_bones["Bone.005"].name = 'Arm_R'
obs[len(obs)-1].data.edit_bones["Bone.006"].name = 'Forearm_R'
obs[len(obs)-1].data.edit_bones["Bone.007"].name = 'Thigh_L'
obs[len(obs)-1].data.edit_bones["Bone.008"].name = 'Leg_L'
obs[len(obs)-1].data.edit_bones["Bone.009"].name = 'Foot_L'
obs[len(obs)-1].data.edit_bones["Bone.010"].name = 'Thigh_R'
obs[len(obs)-1].data.edit_bones["Bone.011"].name = 'Leg_R'
obs[len(obs)-1].data.edit_bones["Bone.012"].name = 'Foot_R'
        #Hierarchy (bone parenting)
bpy.context.object.data.edit_bones["Spine"].parent = bpy.context.object.data.edit_bones["Root"]
bpy.context.object.data.edit_bones["Arm_L"].parent = bpy.context.object.data.edit_bones["Spine"]
bpy.context.object.data.edit_bones["Arm_R"].parent = bpy.context.object.data.edit_bones["Spine"]
bpy.context.object.data.edit_bones["Thigh_L"].parent = bpy.context.object.data.edit_bones["Root"]
bpy.context.object.data.edit_bones["Thigh_R"].parent = bpy.context.object.data.edit_bones["Root"]
bpy.ops.object.editmode_toggle()
from mathutils import Vector
import bpy
bpy.ops.object.select_all(action='DESELECT')
bpy.data.objects['Point.001'].select_set(True)
bpy.data.objects['Point.008'].select_set(True)
bpy.context.view_layer.objects.active = bpy.data.objects['Point.034']
obs = bpy.context.selected_objects
n = len(obs)
# print('n: ',n)
assert(n)
#scene.cursor.location = sum([o.matrix_world.translation for o in obs], Vector()) / n
#bpy.data.objects[p_middle].location = sum([o.matrix_world.translation for o in obs], Vector()) / n
x_subtract = abs(obs[0].matrix_world.translation.x - obs[1].matrix_world.translation.x)
y_subtract = abs(obs[0].matrix_world.translation.y - obs[1].matrix_world.translation.y)
z_subtract = abs(obs[0].matrix_world.translation.z - obs[1].matrix_world.translation.z)
max(x_subtract, y_subtract, z_subtract) #maior das medidas
unit = max(x_subtract, y_subtract, z_subtract)/3
unit = unit*multiplier
root_sz =unit/10
spine_sz =unit*3.5
neck_sz =unit
face_sz =unit
thigh_sz =unit*3
leg_sz =unit*2.5
foot_sz =unit #inclinado 45 graud pra frente
arm_sz =unit*1.5
forearm_sz =unit*1.5
#if bpy.context.active_object.mode != 'EDIT':
# bpy.ops.object.editmode_toggle()
#==========================================
#selecting and making the armature Active
        #selecting the armature
#==========================================
bpy.ops.object.select_all(action='DESELECT')
#bpy.ops.armature.select_all(action='DESELECT')
obs = []
for ob in bpy.context.scene.objects:
if ob.type == 'ARMATURE':
obs.append(ob)
#obs
armature = obs[len(obs)-1].name
#bpy.data.objects[armature].select_set(True)
obs[len(obs)-1].select_set(True)
view_layer = bpy.context.view_layer
#Armature_obj = bpy.context.scene.objects[armature]
Armature_obj = obs[len(obs)-1]
view_layer.objects.active = Armature_obj
#converting to euler rotation
order = 'XYZ'
context = bpy.context
rig_object = context.active_object
for pb in rig_object.pose.bones:
pb.rotation_mode = order
bpy.ops.object.editmode_toggle()
#changing location
#resetting
bpy.context.object.data.edit_bones["Spine"].head.xy=0
bpy.context.object.data.edit_bones["Neck"].head.xy=0
#bpy.context.object.data.edit_bones["Face"].head.xy=0
bpy.context.object.data.edit_bones["Arm_L"].head.xy=0
bpy.context.object.data.edit_bones["Forearm_L"].head.xy=0
bpy.context.object.data.edit_bones["Arm_R"].head.xy=0
bpy.context.object.data.edit_bones["Forearm_R"].head.xy=0
bpy.context.object.data.edit_bones["Thigh_L"].head.xy=0
bpy.context.object.data.edit_bones["Leg_L"].head.xy=0
bpy.context.object.data.edit_bones["Foot_L"].head.xy=0
bpy.context.object.data.edit_bones["Thigh_R"].head.xy=0
bpy.context.object.data.edit_bones["Leg_R"].head.xy=0
bpy.context.object.data.edit_bones["Foot_R"].head.xy=0
#tail
#bpy.context.object.data.edit_bones["Face"].tail.xy=0
bpy.context.object.data.edit_bones["Neck"].tail.xy=0
bpy.context.object.data.edit_bones["Forearm_L"].tail.xy=0
bpy.context.object.data.edit_bones["Forearm_R"].tail.xy=0
bpy.context.object.data.edit_bones["Foot_L"].tail.xy=0
bpy.context.object.data.edit_bones["Foot_R"].tail.xy=0
bpy.context.object.data.edit_bones["Root"].length = root_sz
bpy.context.object.data.edit_bones["Spine"].head.z = unit/2
bpy.context.object.data.edit_bones["Spine"].tail.z = spine_sz
bpy.context.object.data.edit_bones["Neck"].tail.z = spine_sz + neck_sz
bpy.context.object.data.edit_bones["Neck"].tail.y = neck_sz/3
#bpy.context.object.data.edit_bones["Face"].tail.z = spine_sz + neck_sz
#bpy.context.object.data.edit_bones["Face"].tail.y = face_sz*-1
bpy.context.object.data.edit_bones["Arm_L"].head.z= spine_sz
bpy.context.object.data.edit_bones["Arm_L"].head.x= unit*3/4
bpy.context.object.data.edit_bones["Forearm_L"].head.z= spine_sz
bpy.context.object.data.edit_bones["Forearm_L"].head.x= unit + arm_sz
bpy.context.object.data.edit_bones["Forearm_L"].tail.z= spine_sz
bpy.context.object.data.edit_bones["Forearm_L"].tail.x= unit + arm_sz + forearm_sz
bpy.context.object.data.edit_bones["Arm_R"].head.z= spine_sz
bpy.context.object.data.edit_bones["Arm_R"].head.x= (unit*3/4)*-1
bpy.context.object.data.edit_bones["Forearm_R"].head.z= spine_sz
bpy.context.object.data.edit_bones["Forearm_R"].head.x= (unit + arm_sz) *-1
bpy.context.object.data.edit_bones["Forearm_R"].tail.z= spine_sz
bpy.context.object.data.edit_bones["Forearm_R"].tail.x= (unit + arm_sz + forearm_sz) *-1
bpy.context.object.data.edit_bones["Thigh_L"].head.x= unit*3/4
bpy.context.object.data.edit_bones["Thigh_L"].head.z= (unit/5)*-1
bpy.context.object.data.edit_bones["Leg_L"].head.x= unit*3/4
bpy.context.object.data.edit_bones["Leg_L"].head.z= (unit/5 + thigh_sz)*-1
bpy.context.object.data.edit_bones["Foot_L"].head.x= unit*3/4
bpy.context.object.data.edit_bones["Foot_L"].head.z= (unit/5 + thigh_sz + leg_sz)*-1
bpy.context.object.data.edit_bones["Foot_L"].tail.x= unit*3/4
bpy.context.object.data.edit_bones["Foot_L"].tail.z= (unit/5 + thigh_sz + leg_sz + foot_sz/2)*-1
bpy.context.object.data.edit_bones["Foot_L"].tail.y= foot_sz/2*-1
bpy.context.object.data.edit_bones["Thigh_R"].head.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Thigh_R"].head.z= (unit/5)*-1
bpy.context.object.data.edit_bones["Leg_R"].head.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Leg_R"].head.z= (unit/5 + thigh_sz)*-1
bpy.context.object.data.edit_bones["Foot_R"].head.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Foot_R"].head.z= (unit/5 + thigh_sz + leg_sz)*-1
bpy.context.object.data.edit_bones["Foot_R"].tail.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Foot_R"].tail.z= (unit/5 + thigh_sz + leg_sz + foot_sz/2)*-1
bpy.context.object.data.edit_bones["Foot_R"].tail.y= foot_sz/2*-1
bpy.ops.object.editmode_toggle()
import bpy
        #starting the configuration that makes bones follow the tracked point movements
#colocando em pose mode
obs = []
for ob in bpy.context.scene.objects:
if ob.type == 'ARMATURE':
obs.append(ob)
#obs
bpy.ops.object.mode_set(mode='POSE')
actual_bone = 'Root'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='COPY_LOCATION')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.008"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[1].target = bpy.data.objects["Point.001"]
#====
actual_bone = 'Spine'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.001"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.349066
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0.349066
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = -0.698132
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0.698132
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.174533
#=====
actual_bone = 'Neck'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.000"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 1.0472
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0.523599
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.349066
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.349066
#=====
#actual_bone = 'Face'
#obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
#obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
#bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
#bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.000"]
#bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
#bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
##x
#bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
#bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.174533
#bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0.872665
##y
#bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
#bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
#bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
##z
#bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
#bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.523599
#bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.523599
#=====
actual_bone = 'Arm_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.006"]
#=====
actual_bone = 'Forearm_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.007"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -2.53073
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = -0.191986
#=====
actual_bone = 'Arm_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.003"]
#=====
actual_bone = 'Forearm_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.004"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = False
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = False
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0.191986
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 2.53073
#=====
actual_bone = 'Thigh_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.013"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -1.76278
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 1.3439
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.785398
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.174533
#=====
actual_bone = 'Leg_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.014"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = 0.0698132
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 2.0944
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
#=====
actual_bone = 'Foot_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.019"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0.523599
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
#=====
actual_bone = 'Thigh_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.010"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -1.76278
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 1.3439
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.785398
actual_bone = 'Leg_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.011"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = 0.0698132
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 2.0944
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
actual_bone = 'Foot_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.022"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0.523599
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
bpy.context.scene.frame_end = len(pic[person_id]['pose'])
bpy.ops.nla.bake(frame_start=1, frame_end=len(pic[person_id]['pose']), visual_keying=True, clear_constraints=True, clear_parents=True, bake_types={'POSE'})
bpy.ops.object.mode_set(mode='OBJECT')
        #delete the previously created "Point" collection
collection = bpy.data.collections.get('Point')
#
for obj in collection.objects:
bpy.data.objects.remove(obj, do_unlink=True)
bpy.data.collections.remove(collection)
sk_value_prop = context.scene.sk_value_prop
if raw_bool == True:
print('raw_bool True - ',raw_bool)
x_original, y_original, z_original = helper_functions.get_rotations()
sk_value_prop.sk_root_rot_x = math.degrees(x_original)
sk_value_prop.sk_root_rot_y = math.degrees(y_original)
sk_value_prop.sk_root_rot_z = math.degrees(z_original)
#in this case both original and actual is the same, because there was no alteration on the angle
x_actual_deg = math.degrees(x_original)
y_actual_deg = math.degrees(y_original)
z_actual_deg = math.degrees(z_original)
sk_value_prop.sk_root_actual_rot_x = x_actual_deg
sk_value_prop.sk_root_actual_rot_y = y_actual_deg
sk_value_prop.sk_root_actual_rot_z = z_actual_deg
else:
print('raw_bool False - ',raw_bool)
x_deg, y_deg, z_deg = helper_functions.anim_to_origin()
#take the information of the rotation to the panel
print('result x: ',x_deg)
print('result y: ',y_deg)
print('result z: ',z_deg)
sk_value_prop.sk_root_rot_x = x_deg
sk_value_prop.sk_root_rot_y = y_deg
sk_value_prop.sk_root_rot_z = z_deg
return{'FINISHED'}
class Mediapipe_Pose_estimation(Operator, ImportHelper):
bl_idname = "mocap.mediapipe_pose"
bl_label = "Generate Pose using MediaPipe"
bl_description = "Generate Mocap data with MediaPipe"
filename_ext = ".mp4"
filter_glob: StringProperty(
default="*.mp4",
options={'HIDDEN'},
maxlen=255, # Max internal buffer length, longer would be clamped.
)
def execute(self,context):
import cv2
import mediapipe as mp
import bpy
import sys
from mathutils import Vector
import math
multiplier = context.scene.sk_value_prop.sk_value
raw_bool = context.scene.sk_value_prop.sk_raw_bool
def middle_point(p1,p2,p_middle):
bpy.ops.object.select_all(action='DESELECT')
bpy.data.objects[p1].select_set(True)
bpy.data.objects[p2].select_set(True)
bpy.context.view_layer.objects.active = bpy.data.objects[p2]
obs = bpy.context.selected_objects
n = len(obs)
# print('n: ',n)
assert(n)
#scene.cursor.location = sum([o.matrix_world.translation for o in obs], Vector()) / n
bpy.data.objects[p_middle].location = sum([o.matrix_world.translation for o in obs], Vector()) / n
def get_landmarks(vid_name, frame_list):
mp_drawing = mp.solutions.drawing_utils
mp_holistic = mp.solutions.holistic
#
# For static images:
holistic = mp_holistic.Holistic(static_image_mode=True)
for idx, image in enumerate(frame_list):
# image_height, image_width, _ = image.shape
# Convert the BGR image to RGB before processing.
results = holistic.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB))
#
#
x=0
y=1
z=2
i=0
print('frame: ',idx)
try:
len(results.pose_landmarks.landmark)
for i in range(len(results.pose_landmarks.landmark)):
x_pose = results.pose_landmarks.landmark[i].x
y_pose = results.pose_landmarks.landmark[i].y
z_pose = results.pose_landmarks.landmark[i].z
bpy.data.objects["Point."+str(1000+i)[1:]].location[x]=x_pose
bpy.data.objects["Point."+str(1000+i)[1:]].location[y]=y_pose
bpy.data.objects["Point."+str(1000+i)[1:]].location[z]=z_pose
if i == 10:
middle_point('Point.009','Point.010','Point.033')
bpy.data.objects["Point."+str(1000+33)[1:]].keyframe_insert(data_path="location", frame=idx)
if i == 12:
middle_point('Point.011','Point.012','Point.034')
bpy.data.objects["Point."+str(1000+34)[1:]].keyframe_insert(data_path="location", frame=idx)
if i == 24:
middle_point('Point.023','Point.024','Point.035')
bpy.data.objects["Point."+str(1000+35)[1:]].keyframe_insert(data_path="location", frame=idx)
bpy.data.objects["Point."+str(1000+i)[1:]].keyframe_insert(data_path="location", frame=idx)
#
# print('frame: ',idx,' landmark_id: ',i,'x: ', x_pose, ' - y: ',y_pose,' - z: ',z_pose)
except:
print('Error Frame: ',idx)
bpy.data.objects["Point."+str(1000+i)[1:]].location[x]=0
bpy.data.objects["Point."+str(1000+i)[1:]].location[y]=0
bpy.data.objects["Point."+str(1000+i)[1:]].location[z]=0
bpy.data.objects["Point."+str(1000+i)[1:]].keyframe_insert(data_path="location", frame=idx)
continue
holistic.close()
def get_video_frames(file_url):
vidcap = cv2.VideoCapture(file_url)
success, image = vidcap.read()
# array of objects with class 'numpy.ndarray'
frames = []
while success:
frames.append(image)
success, image = vidcap.read()
#
return frames
def create_dots(name, amount):
#remove Collection
if bpy.data.collections.find(name) >= 0:
collection = bpy.data.collections.get(name)
#
for obj in collection.objects:
bpy.data.objects.remove(obj, do_unlink=True)
bpy.data.collections.remove(collection)
#cria os pontos nuima collection chamada Points
#=====================================================
collection = bpy.data.collections.new(name)
bpy.context.scene.collection.children.link(collection)
#
layer_collection = bpy.context.view_layer.layer_collection.children[collection.name]
bpy.context.view_layer.active_layer_collection = layer_collection
#
for point in range(amount):
bpy.ops.mesh.primitive_plane_add(enter_editmode=True, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1))
bpy.ops.mesh.merge(type='CENTER')
bpy.ops.object.editmode_toggle()
bpy.context.active_object.name = name+'.'+str(1000+point)[1:]
#=====================================================
#==============================
#codes to size the bones
#==============================
def distance(point1, point2) -> float:
#Calculate distance between two points in 3D.
# return math.sqrt((point2[0] - point1[0]) ** 2 + (point2[1] - point1[1]) ** 2 + (point2[2] - point1[2]) ** 2)
return math.sqrt((point2.location[0] - point1.location[0]) ** 2 + (point2.location[1] - point1.location[1]) ** 2 + (point2.location[2] - point1.location[2]) ** 2)
def size_bone(point_name1, point_name2, bone):
p1 = bpy.data.objects[point_name1]
p2 = bpy.data.objects[point_name2]
#edit bones
if bpy.context.active_object.mode == 'EDIT':
bpy.context.object.data.edit_bones[bone].length= distance(p1,p2)
else:
bpy.ops.object.editmode_toggle()
bpy.context.object.data.edit_bones[bone].length= distance(p1,p2)
bpy.ops.object.editmode_toggle()
#path = r'D:\MOCAP\EasyMocap-master\demo_test\videos\1.mp4'
# path = r'D:\Video_editing\running e brack dance para mocap.mp4'
path = self.filepath
create_dots('Point',36)
get_landmarks('Name', get_video_frames(path))
import bpy
#===========
# selectign Scene Collection
scene_collection = bpy.context.view_layer.layer_collection
bpy.context.view_layer.active_layer_collection = scene_collection
#===================================
#creating bones
#====================================
bpy.ops.object.armature_add(enter_editmode=True, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) #cria armature e primeiro bone
#bpy.ops.object.editmode_toggle()
#bpy.data.armatures['Armature'].edit_bones.active = bpy.context.object.data.edit_bones['Bone']
obs = []
for ob in bpy.context.scene.objects:
if ob.type == 'ARMATURE':
obs.append(ob)
#obs
bpy.ops.armature.select_all(action='DESELECT')
#bpy.context.object.data.edit_bones['Bone'].select_tail=True
obs[len(obs)-1].data.edit_bones['Bone'].select_tail=True
bpy.ops.armature.bone_primitive_add()#Spine
#bpy.ops.armature.extrude_move()#Neck
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
#bpy.ops.armature.extrude_move()#Face
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Arm_L
#bpy.ops.armature.extrude_move()#Forearm_L
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Arm_R
#bpy.ops.armature.extrude_move()#Forearm_R
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Thigh_L
#bpy.ops.armature.extrude_move()#Leg_L
#bpy.ops.armature.extrude_move()#Foot_L
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Thigh_R
#bpy.ops.armature.extrude_move()#Leg_R
#bpy.ops.armature.extrude_move()#Foot_R
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
obs[len(obs)-1].data.edit_bones["Bone"].name = 'Root'
obs[len(obs)-1].data.edit_bones["Bone.001"].name = 'Spine'
obs[len(obs)-1].data.edit_bones["Bone.002"].name = 'Neck'
obs[len(obs)-1].data.edit_bones["Bone.003"].name = 'Face'
obs[len(obs)-1].data.edit_bones["Bone.004"].name = 'Arm_L'
obs[len(obs)-1].data.edit_bones["Bone.005"].name = 'Forearm_L'
obs[len(obs)-1].data.edit_bones["Bone.006"].name = 'Arm_R'
obs[len(obs)-1].data.edit_bones["Bone.007"].name = 'Forearm_R'
obs[len(obs)-1].data.edit_bones["Bone.008"].name = 'Thigh_L'
obs[len(obs)-1].data.edit_bones["Bone.009"].name = 'Leg_L'
obs[len(obs)-1].data.edit_bones["Bone.010"].name = 'Foot_L'
obs[len(obs)-1].data.edit_bones["Bone.011"].name = 'Thigh_R'
obs[len(obs)-1].data.edit_bones["Bone.012"].name = 'Leg_R'
obs[len(obs)-1].data.edit_bones["Bone.013"].name = 'Foot_R'
#Hierarquia
bpy.context.object.data.edit_bones["Spine"].parent = bpy.context.object.data.edit_bones["Root"]
bpy.context.object.data.edit_bones["Arm_L"].parent = bpy.context.object.data.edit_bones["Spine"]
bpy.context.object.data.edit_bones["Arm_R"].parent = bpy.context.object.data.edit_bones["Spine"]
bpy.context.object.data.edit_bones["Thigh_L"].parent = bpy.context.object.data.edit_bones["Root"]
bpy.context.object.data.edit_bones["Thigh_R"].parent = bpy.context.object.data.edit_bones["Root"]
bpy.ops.object.editmode_toggle()
from mathutils import Vector
import bpy
bpy.ops.object.select_all(action='DESELECT')
bpy.data.objects['Point.034'].select_set(True)
bpy.data.objects['Point.035'].select_set(True)
bpy.context.view_layer.objects.active = bpy.data.objects['Point.034']
obs = bpy.context.selected_objects
n = len(obs)
# print('n: ',n)
assert(n)
#scene.cursor.location = sum([o.matrix_world.translation for o in obs], Vector()) / n
#bpy.data.objects[p_middle].location = sum([o.matrix_world.translation for o in obs], Vector()) / n
x_subtract = abs(obs[0].matrix_world.translation.x - obs[1].matrix_world.translation.x)
y_subtract = abs(obs[0].matrix_world.translation.y - obs[1].matrix_world.translation.y)
z_subtract = abs(obs[0].matrix_world.translation.z - obs[1].matrix_world.translation.z)
max(x_subtract, y_subtract, z_subtract) #maior das medidas
unit = max(x_subtract, y_subtract, z_subtract)/3
unit = unit*multiplier
root_sz =unit/10
spine_sz =unit*3.5
neck_sz =unit
face_sz =unit
thigh_sz =unit*3
leg_sz =unit*2.5
foot_sz =unit #inclinado 45 graud pra frente
arm_sz =unit*1.5
forearm_sz =unit*1.5
#if bpy.context.active_object.mode != 'EDIT':
# bpy.ops.object.editmode_toggle()
#==========================================
#selecting and making the armature Active
#selecionando armature
#==========================================
bpy.ops.object.select_all(action='DESELECT')
#bpy.ops.armature.select_all(action='DESELECT')
obs = []
for ob in bpy.context.scene.objects:
if ob.type == 'ARMATURE':
obs.append(ob)
#obs
armature = obs[len(obs)-1].name
#bpy.data.objects[armature].select_set(True)
obs[len(obs)-1].select_set(True)
view_layer = bpy.context.view_layer
#Armature_obj = bpy.context.scene.objects[armature]
Armature_obj = obs[len(obs)-1]
view_layer.objects.active = Armature_obj
#converting to euler rotation
order = 'XYZ'
context = bpy.context
rig_object = context.active_object
for pb in rig_object.pose.bones:
pb.rotation_mode = order
bpy.ops.object.editmode_toggle()
#changing location
#resetting
bpy.context.object.data.edit_bones["Spine"].head.xy=0
bpy.context.object.data.edit_bones["Neck"].head.xy=0
bpy.context.object.data.edit_bones["Face"].head.xy=0
bpy.context.object.data.edit_bones["Arm_L"].head.xy=0
bpy.context.object.data.edit_bones["Forearm_L"].head.xy=0
bpy.context.object.data.edit_bones["Arm_R"].head.xy=0
bpy.context.object.data.edit_bones["Forearm_R"].head.xy=0
bpy.context.object.data.edit_bones["Thigh_L"].head.xy=0
bpy.context.object.data.edit_bones["Leg_L"].head.xy=0
bpy.context.object.data.edit_bones["Foot_L"].head.xy=0
bpy.context.object.data.edit_bones["Thigh_R"].head.xy=0
bpy.context.object.data.edit_bones["Leg_R"].head.xy=0
bpy.context.object.data.edit_bones["Foot_R"].head.xy=0
#tail
bpy.context.object.data.edit_bones["Face"].tail.xy=0
bpy.context.object.data.edit_bones["Forearm_L"].tail.xy=0
bpy.context.object.data.edit_bones["Forearm_R"].tail.xy=0
bpy.context.object.data.edit_bones["Foot_L"].tail.xy=0
bpy.context.object.data.edit_bones["Foot_R"].tail.xy=0
bpy.context.object.data.edit_bones["Root"].length = root_sz
bpy.context.object.data.edit_bones["Spine"].head.z = unit/2
bpy.context.object.data.edit_bones["Spine"].tail.z = spine_sz
bpy.context.object.data.edit_bones["Neck"].tail.z = spine_sz + neck_sz
bpy.context.object.data.edit_bones["Neck"].tail.y = neck_sz/3
bpy.context.object.data.edit_bones["Face"].tail.z = spine_sz + neck_sz
bpy.context.object.data.edit_bones["Face"].tail.y = face_sz*-1
bpy.context.object.data.edit_bones["Arm_L"].head.z= spine_sz
bpy.context.object.data.edit_bones["Arm_L"].head.x= unit/2
bpy.context.object.data.edit_bones["Forearm_L"].head.z= spine_sz
bpy.context.object.data.edit_bones["Forearm_L"].head.x= unit + arm_sz
bpy.context.object.data.edit_bones["Forearm_L"].tail.z= spine_sz
bpy.context.object.data.edit_bones["Forearm_L"].tail.x= unit + arm_sz + forearm_sz
bpy.context.object.data.edit_bones["Arm_R"].head.z= spine_sz
bpy.context.object.data.edit_bones["Arm_R"].head.x= (unit/2)*-1
bpy.context.object.data.edit_bones["Forearm_R"].head.z= unit/2 + spine_sz
bpy.context.object.data.edit_bones["Forearm_R"].head.x= (unit + arm_sz) *-1
bpy.context.object.data.edit_bones["Forearm_R"].tail.z= unit/2 + spine_sz
bpy.context.object.data.edit_bones["Forearm_R"].tail.x= (unit + arm_sz + forearm_sz) *-1
bpy.context.object.data.edit_bones["Thigh_L"].head.x= unit*3/4
bpy.context.object.data.edit_bones["Thigh_L"].head.z= (unit/5)*-1
bpy.context.object.data.edit_bones["Leg_L"].head.x= unit*3/4
bpy.context.object.data.edit_bones["Leg_L"].head.z= (unit/5 + thigh_sz)*-1
bpy.context.object.data.edit_bones["Foot_L"].head.x= unit*3/4
bpy.context.object.data.edit_bones["Foot_L"].head.z= (unit/5 + thigh_sz + leg_sz)*-1
bpy.context.object.data.edit_bones["Foot_L"].tail.x= unit*3/4
bpy.context.object.data.edit_bones["Foot_L"].tail.z= (unit/5 + thigh_sz + leg_sz + foot_sz/2)*-1
bpy.context.object.data.edit_bones["Foot_L"].tail.y= foot_sz/2*-1
bpy.context.object.data.edit_bones["Thigh_R"].head.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Thigh_R"].head.z= (unit/5)*-1
bpy.context.object.data.edit_bones["Leg_R"].head.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Leg_R"].head.z= (unit/5 + thigh_sz)*-1
bpy.context.object.data.edit_bones["Foot_R"].head.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Foot_R"].head.z= (unit/5 + thigh_sz + leg_sz)*-1
bpy.context.object.data.edit_bones["Foot_R"].tail.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Foot_R"].tail.z= (unit/5 + thigh_sz + leg_sz + foot_sz/2)*-1
bpy.context.object.data.edit_bones["Foot_R"].tail.y= foot_sz/2*-1
bpy.ops.object.editmode_toggle()
import bpy
#comecando configuração seguir movimentos pontos
#colocando em pose mode
obs = []
for ob in bpy.context.scene.objects:
if ob.type == 'ARMATURE':
obs.append(ob)
#obs
bpy.ops.object.mode_set(mode='POSE')
actual_bone = 'Root'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='COPY_LOCATION')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.035"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[1].target = bpy.data.objects["Point.011"]
#====
actual_bone = 'Spine'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.011"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[1].target = bpy.data.objects["Point.012"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[2].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[2].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[2].min_x = -0.349066
bpy.context.object.pose.bones[actual_bone].constraints[2].max_x = 0.349066
#y
bpy.context.object.pose.bones[actual_bone].constraints[2].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[2].min_y = -0.698132
bpy.context.object.pose.bones[actual_bone].constraints[2].max_y = 0.698132
#z
bpy.context.object.pose.bones[actual_bone].constraints[2].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[2].min_z = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[2].max_z = 0.174533
#=====
actual_bone = 'Neck'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.033"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 1.0472
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0.523599
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.349066
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.349066
#=====
actual_bone = 'Face'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.000"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0.872665
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.523599
#=====
actual_bone = 'Arm_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.013"]
#=====
actual_bone = 'Forearm_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.015"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -2.53073
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = -0.191986
#=====
actual_bone = 'Arm_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.014"]
#=====
actual_bone = 'Forearm_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.016"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = False
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = False
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0.191986
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 2.53073
#=====
actual_bone = 'Thigh_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.025"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -1.76278
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 1.3439
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.785398
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.174533
#=====
actual_bone = 'Leg_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.027"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = 0.0698132
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 2.0944
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
#=====
actual_bone = 'Foot_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.031"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0.523599
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
#=====
actual_bone = 'Thigh_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.026"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -1.76278
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 1.3439
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.785398
actual_bone = 'Leg_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.028"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = 0.0698132
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 2.0944
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
actual_bone = 'Foot_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.032"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0.523599
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
frames = len(get_video_frames(path))
bpy.context.scene.frame_end = frames
bpy.ops.nla.bake(frame_start=1, frame_end=frames, visual_keying=True, clear_constraints=True, clear_parents=True, bake_types={'POSE'})
bpy.ops.object.mode_set(mode='OBJECT')
#apagar collection points criada
collection = bpy.data.collections.get('Point')
#
for obj in collection.objects:
bpy.data.objects.remove(obj, do_unlink=True)
bpy.data.collections.remove(collection)
sk_value_prop = context.scene.sk_value_prop
if raw_bool == True:
print('raw_bool True - ',raw_bool)
x_original, y_original, z_original = helper_functions.get_rotations()
sk_value_prop.sk_root_rot_x = math.degrees(x_original)
sk_value_prop.sk_root_rot_y = math.degrees(y_original)
sk_value_prop.sk_root_rot_z = math.degrees(z_original)
#in this case both original and actual is the same, because there was no alteration on the angle
x_actual_deg = math.degrees(x_original)
y_actual_deg = math.degrees(y_original)
z_actual_deg = math.degrees(z_original)
sk_value_prop.sk_root_actual_rot_x = x_actual_deg
sk_value_prop.sk_root_actual_rot_y = y_actual_deg
sk_value_prop.sk_root_actual_rot_z = z_actual_deg
else:
print('raw_bool False - ',raw_bool)
x_deg, y_deg, z_deg = helper_functions.anim_to_origin()
#take the information of the rotation to the panel
print('result x: ',x_deg)
print('result y: ',y_deg)
print('result z: ',z_deg)
sk_value_prop.sk_root_rot_x = x_deg
sk_value_prop.sk_root_rot_y = y_deg
sk_value_prop.sk_root_rot_z = z_deg
return{'FINISHED'}
class Convert_axis(Operator):
    """Operator that converts the baked animation from one orientation axis to another."""
    bl_idname = "mocap.convert_axis"
    bl_label = "Convert animation axis"
    bl_description = "Convert Axis"

    def execute(self, context):
        props = context.scene.sk_value_prop
        src_axis = props.sk_from_axis
        dst_axis = props.sk_to_axis
        print('from: ', src_axis, ' ', 'to: ', dst_axis)
        print('from simplified: ', src_axis[-1:], ' ', 'to: ', dst_axis[-1:])
        # The last character of the enum identifier is the axis letter itself.
        helper_functions.rotate_orientation(src_axis[-1:], dst_axis[-1:])
        # Push the armature's resulting rotation back to the UI panel properties.
        rot_x, rot_y, rot_z = helper_functions.get_rotations()
        props.sk_root_actual_rot_x = math.degrees(rot_x)
        props.sk_root_actual_rot_y = math.degrees(rot_y)
        props.sk_root_actual_rot_z = math.degrees(rot_z)
        return {'FINISHED'}
class Reset_location(Operator):
    """Operator that moves the baked animation so the armature sits at the world origin."""
    bl_idname = "mocap.reset_location"
    bl_label = "Move animation to origin"
    bl_description = "Center Location"

    # Fixed: first parameter was misspelled 'sel'; by convention it must be 'self'
    # (consistent with Convert_axis above). Binding is positional, so behavior is unchanged.
    def execute(self, context):
        helper_functions.reset_loc()
        return {'FINISHED'}
class Reset_rotation(Operator):
    """Operator that resets the armature rotation back to its rest-pose orientation.

    After resetting, the resulting rotation (in degrees) is written back to the
    scene's sk_value_prop so the UI panel shows the current values.
    """
    bl_idname = "mocap.reset_rotation"
    # Fixed user-visible typo: "rotatio" -> "rotation".
    bl_label = "Reset rotation, to the Rest rotation position"
    bl_description = "Reset Rotation"

    # Fixed: first parameter was misspelled 'sel'; by convention it must be 'self'.
    def execute(self, context):
        helper_functions.reset_rot()
        sk_value_prop = context.scene.sk_value_prop
        # Report the armature's post-reset rotation back to the panel, in degrees.
        x_actual_deg, y_actual_deg, z_actual_deg = helper_functions.get_rotations()
        sk_value_prop.sk_root_actual_rot_x = math.degrees(x_actual_deg)
        sk_value_prop.sk_root_actual_rot_y = math.degrees(y_actual_deg)
        sk_value_prop.sk_root_actual_rot_z = math.degrees(z_actual_deg)
        return {'FINISHED'}
class Foot_high(Operator):
    """Operator that shifts the animation vertically so the feet touch the floor."""
    bl_idname = "mocap.foot_high"
    bl_label = "Move the animation so the feet touch the floor"
    bl_description = "Move the feet to touch the floor"

    # Fixed: first parameter was misspelled 'sel'; by convention it must be 'self'.
    def execute(self, context):
        helper_functions.foot_high()
        return {'FINISHED'}
class Compensate_Rotation(Operator):
    """Operator that applies a corrective rotation offset taken from the panel values.

    Reads the X/Y/Z compensation values from the scene's sk_value_prop and
    passes them to helper_functions.compensate_rot.
    """
    bl_idname = "mocap.compensate_rotation"
    bl_label = "compensate rotation"
    # Fixed user-visible typos: "rotatio acording" -> "rotation according".
    bl_description = "Compensate rotation according to value inserted"

    # Fixed: first parameter was misspelled 'sel'; by convention it must be 'self'.
    def execute(self, context):
        skvalue = context.scene.sk_value_prop
        helper_functions.compensate_rot(
            skvalue.sk_rot_compens_x,
            skvalue.sk_rot_compens_y,
            skvalue.sk_rot_compens_z,
        )
        return {'FINISHED'}
class Smooth_Bone(Operator):
    """Operator that smooths the animation F-curves of the currently active object."""
    bl_idname = "mocap.smooth_bones"
    bl_label = "Smooth Bones"
    bl_description = "Smooth the curves"

    # Fixed: first parameter was misspelled 'sel'; by convention it must be 'self'.
    def execute(self, context):
        # Operate on the currently active object (expected to be the armature).
        active_obj = bpy.context.object
        helper_functions.smooth_curves(active_obj)
        return {'FINISHED'}
########################################
##### MediaPipe Realtime
########################################
class Mediapipe_Pose_Prepare_Skeleton_RT(Operator):
bl_idname = "mocap.mediapipe_prepare_sk_rt"
bl_label = "Generate Pose using MediaPipe"
bl_description = "Generate Mocap data with MediaPipe"
def execute(self,context):
# import cv2
# import mediapipe as mp
import bpy
import sys
from mathutils import Vector
import math
multiplier = context.scene.sk_value_prop.sk_value
raw_bool = context.scene.sk_value_prop.sk_raw_bool
def middle_point(p1,p2,p_middle):
bpy.ops.object.select_all(action='DESELECT')
bpy.data.objects[p1].select_set(True)
bpy.data.objects[p2].select_set(True)
bpy.context.view_layer.objects.active = bpy.data.objects[p2]
obs = bpy.context.selected_objects
n = len(obs)
# print('n: ',n)
assert(n)
#scene.cursor.location = sum([o.matrix_world.translation for o in obs], Vector()) / n
bpy.data.objects[p_middle].location = sum([o.matrix_world.translation for o in obs], Vector()) / n
# def get_video_frames(file_url):
# vidcap = cv2.VideoCapture(file_url)
# success, image = vidcap.read()
# # array of objects with class 'numpy.ndarray'
# frames = []
# while success:
# frames.append(image)
# success, image = vidcap.read()
# #
# return frames
def create_dots(name, amount):
#remove Collection
if bpy.data.collections.find(name) >= 0:
collection = bpy.data.collections.get(name)
#
for obj in collection.objects:
bpy.data.objects.remove(obj, do_unlink=True)
bpy.data.collections.remove(collection)
#cria os pontos nuima collection chamada Points
#=====================================================
collection = bpy.data.collections.new(name)
bpy.context.scene.collection.children.link(collection)
#
layer_collection = bpy.context.view_layer.layer_collection.children[collection.name]
bpy.context.view_layer.active_layer_collection = layer_collection
#
for point in range(amount):
bpy.ops.mesh.primitive_plane_add(enter_editmode=True, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1))
bpy.ops.mesh.merge(type='CENTER')
bpy.ops.object.editmode_toggle()
bpy.context.active_object.name = name+'.'+str(1000+point)[1:]
#=====================================================
#==============================
#codes to size the bones
#==============================
def distance(point1, point2) -> float:
#Calculate distance between two points in 3D.
# return math.sqrt((point2[0] - point1[0]) ** 2 + (point2[1] - point1[1]) ** 2 + (point2[2] - point1[2]) ** 2)
return math.sqrt((point2.location[0] - point1.location[0]) ** 2 + (point2.location[1] - point1.location[1]) ** 2 + (point2.location[2] - point1.location[2]) ** 2)
def size_bone(point_name1, point_name2, bone):
p1 = bpy.data.objects[point_name1]
p2 = bpy.data.objects[point_name2]
#edit bones
if bpy.context.active_object.mode == 'EDIT':
bpy.context.object.data.edit_bones[bone].length= distance(p1,p2)
else:
bpy.ops.object.editmode_toggle()
bpy.context.object.data.edit_bones[bone].length= distance(p1,p2)
bpy.ops.object.editmode_toggle()
create_dots('Point',36)
import bpy
#===========
# selectign Scene Collection
scene_collection = bpy.context.view_layer.layer_collection
bpy.context.view_layer.active_layer_collection = scene_collection
#===================================
#creating bones
#====================================
bpy.ops.object.armature_add(enter_editmode=True, align='WORLD', location=(0, 0, 0), scale=(1, 1, 1)) #cria armature e primeiro bone
#bpy.ops.object.editmode_toggle()
#bpy.data.armatures['Armature'].edit_bones.active = bpy.context.object.data.edit_bones['Bone']
obs = []
for ob in bpy.context.scene.objects:
if ob.type == 'ARMATURE':
obs.append(ob)
#obs
bpy.ops.armature.select_all(action='DESELECT')
#bpy.context.object.data.edit_bones['Bone'].select_tail=True
obs[len(obs)-1].data.edit_bones['Bone'].select_tail=True
bpy.ops.armature.bone_primitive_add()#Spine
#bpy.ops.armature.extrude_move()#Neck
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
#bpy.ops.armature.extrude_move()#Face
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Arm_L
#bpy.ops.armature.extrude_move()#Forearm_L
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Arm_R
#bpy.ops.armature.extrude_move()#Forearm_R
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Thigh_L
#bpy.ops.armature.extrude_move()#Leg_L
#bpy.ops.armature.extrude_move()#Foot_L
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.bone_primitive_add()#Thigh_R
#bpy.ops.armature.extrude_move()#Leg_R
#bpy.ops.armature.extrude_move()#Foot_R
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
bpy.ops.armature.extrude_move(ARMATURE_OT_extrude={"forked":False}, TRANSFORM_OT_translate={"value":(0.0, 0.0, 0.1), "orient_type":'GLOBAL', "orient_matrix":((1, 0, 0), (0, 1, 0), (0, 0, 1)), "orient_matrix_type":'GLOBAL', "constraint_axis":(False, False, False), "mirror":True, "use_proportional_edit":False, "proportional_edit_falloff":'SMOOTH', "proportional_size":1, "use_proportional_connected":False, "use_proportional_projected":False, "snap":False, "snap_target":'CLOSEST', "snap_point":(0, 0, 0), "snap_align":False, "snap_normal":(0, 0, 0), "gpencil_strokes":False, "cursor_transform":False, "texture_space":False, "remove_on_cancel":False, "release_confirm":False, "use_accurate":False, "use_automerge_and_split":False})
obs[len(obs)-1].data.edit_bones["Bone"].name = 'Root'
obs[len(obs)-1].data.edit_bones["Bone.001"].name = 'Spine'
obs[len(obs)-1].data.edit_bones["Bone.002"].name = 'Neck'
obs[len(obs)-1].data.edit_bones["Bone.003"].name = 'Face'
obs[len(obs)-1].data.edit_bones["Bone.004"].name = 'Arm_L'
obs[len(obs)-1].data.edit_bones["Bone.005"].name = 'Forearm_L'
obs[len(obs)-1].data.edit_bones["Bone.006"].name = 'Arm_R'
obs[len(obs)-1].data.edit_bones["Bone.007"].name = 'Forearm_R'
obs[len(obs)-1].data.edit_bones["Bone.008"].name = 'Thigh_L'
obs[len(obs)-1].data.edit_bones["Bone.009"].name = 'Leg_L'
obs[len(obs)-1].data.edit_bones["Bone.010"].name = 'Foot_L'
obs[len(obs)-1].data.edit_bones["Bone.011"].name = 'Thigh_R'
obs[len(obs)-1].data.edit_bones["Bone.012"].name = 'Leg_R'
obs[len(obs)-1].data.edit_bones["Bone.013"].name = 'Foot_R'
#Hierarquia
bpy.context.object.data.edit_bones["Spine"].parent = bpy.context.object.data.edit_bones["Root"]
bpy.context.object.data.edit_bones["Arm_L"].parent = bpy.context.object.data.edit_bones["Spine"]
bpy.context.object.data.edit_bones["Arm_R"].parent = bpy.context.object.data.edit_bones["Spine"]
bpy.context.object.data.edit_bones["Thigh_L"].parent = bpy.context.object.data.edit_bones["Root"]
bpy.context.object.data.edit_bones["Thigh_R"].parent = bpy.context.object.data.edit_bones["Root"]
bpy.ops.object.editmode_toggle()
from mathutils import Vector
import bpy
bpy.ops.object.select_all(action='DESELECT')
bpy.data.objects['Point.034'].select_set(True)
bpy.data.objects['Point.035'].select_set(True)
bpy.context.view_layer.objects.active = bpy.data.objects['Point.034']
obs = bpy.context.selected_objects
n = len(obs)
# print('n: ',n)
assert(n)
#scene.cursor.location = sum([o.matrix_world.translation for o in obs], Vector()) / n
#bpy.data.objects[p_middle].location = sum([o.matrix_world.translation for o in obs], Vector()) / n
x_subtract = abs(obs[0].matrix_world.translation.x - obs[1].matrix_world.translation.x)
y_subtract = abs(obs[0].matrix_world.translation.y - obs[1].matrix_world.translation.y)
z_subtract = abs(obs[0].matrix_world.translation.z - obs[1].matrix_world.translation.z)
max(x_subtract, y_subtract, z_subtract) #maior das medidas
# unit = max(x_subtract, y_subtract, z_subtract)/3
unit=1
unit = unit*multiplier
root_sz =unit/10
spine_sz =unit*3.5
neck_sz =unit
face_sz =unit
thigh_sz =unit*3
leg_sz =unit*2.5
foot_sz =unit #inclinado 45 graud pra frente
arm_sz =unit*1.5
forearm_sz =unit*1.5
#if bpy.context.active_object.mode != 'EDIT':
# bpy.ops.object.editmode_toggle()
#==========================================
#selecting and making the armature Active
#selecionando armature
#==========================================
bpy.ops.object.select_all(action='DESELECT')
#bpy.ops.armature.select_all(action='DESELECT')
obs = []
for ob in bpy.context.scene.objects:
if ob.type == 'ARMATURE':
obs.append(ob)
#obs
armature = obs[len(obs)-1].name
#bpy.data.objects[armature].select_set(True)
obs[len(obs)-1].select_set(True)
view_layer = bpy.context.view_layer
#Armature_obj = bpy.context.scene.objects[armature]
Armature_obj = obs[len(obs)-1]
view_layer.objects.active = Armature_obj
#converting to euler rotation
order = 'XYZ'
context = bpy.context
rig_object = context.active_object
for pb in rig_object.pose.bones:
pb.rotation_mode = order
bpy.ops.object.editmode_toggle()
#changing location
#resetting
bpy.context.object.data.edit_bones["Spine"].head.xy=0
bpy.context.object.data.edit_bones["Neck"].head.xy=0
bpy.context.object.data.edit_bones["Face"].head.xy=0
bpy.context.object.data.edit_bones["Arm_L"].head.xy=0
bpy.context.object.data.edit_bones["Forearm_L"].head.xy=0
bpy.context.object.data.edit_bones["Arm_R"].head.xy=0
bpy.context.object.data.edit_bones["Forearm_R"].head.xy=0
bpy.context.object.data.edit_bones["Thigh_L"].head.xy=0
bpy.context.object.data.edit_bones["Leg_L"].head.xy=0
bpy.context.object.data.edit_bones["Foot_L"].head.xy=0
bpy.context.object.data.edit_bones["Thigh_R"].head.xy=0
bpy.context.object.data.edit_bones["Leg_R"].head.xy=0
bpy.context.object.data.edit_bones["Foot_R"].head.xy=0
#tail
bpy.context.object.data.edit_bones["Face"].tail.xy=0
bpy.context.object.data.edit_bones["Forearm_L"].tail.xy=0
bpy.context.object.data.edit_bones["Forearm_R"].tail.xy=0
bpy.context.object.data.edit_bones["Foot_L"].tail.xy=0
bpy.context.object.data.edit_bones["Foot_R"].tail.xy=0
bpy.context.object.data.edit_bones["Root"].length = root_sz
bpy.context.object.data.edit_bones["Spine"].head.z = unit/2
bpy.context.object.data.edit_bones["Spine"].tail.z = spine_sz
bpy.context.object.data.edit_bones["Neck"].tail.z = spine_sz + neck_sz
bpy.context.object.data.edit_bones["Neck"].tail.y = neck_sz/3
bpy.context.object.data.edit_bones["Face"].tail.z = spine_sz + neck_sz
bpy.context.object.data.edit_bones["Face"].tail.y = face_sz*-1
bpy.context.object.data.edit_bones["Arm_L"].head.z= spine_sz
bpy.context.object.data.edit_bones["Arm_L"].head.x= unit/2
bpy.context.object.data.edit_bones["Forearm_L"].head.z= spine_sz
bpy.context.object.data.edit_bones["Forearm_L"].head.x= unit + arm_sz
bpy.context.object.data.edit_bones["Forearm_L"].tail.z= spine_sz
bpy.context.object.data.edit_bones["Forearm_L"].tail.x= unit + arm_sz + forearm_sz
bpy.context.object.data.edit_bones["Arm_R"].head.z= spine_sz
bpy.context.object.data.edit_bones["Arm_R"].head.x= (unit/2)*-1
bpy.context.object.data.edit_bones["Forearm_R"].head.z= unit/2 + spine_sz
bpy.context.object.data.edit_bones["Forearm_R"].head.x= (unit + arm_sz) *-1
bpy.context.object.data.edit_bones["Forearm_R"].tail.z= unit/2 + spine_sz
bpy.context.object.data.edit_bones["Forearm_R"].tail.x= (unit + arm_sz + forearm_sz) *-1
bpy.context.object.data.edit_bones["Thigh_L"].head.x= unit*3/4
bpy.context.object.data.edit_bones["Thigh_L"].head.z= (unit/5)*-1
bpy.context.object.data.edit_bones["Leg_L"].head.x= unit*3/4
bpy.context.object.data.edit_bones["Leg_L"].head.z= (unit/5 + thigh_sz)*-1
bpy.context.object.data.edit_bones["Foot_L"].head.x= unit*3/4
bpy.context.object.data.edit_bones["Foot_L"].head.z= (unit/5 + thigh_sz + leg_sz)*-1
bpy.context.object.data.edit_bones["Foot_L"].tail.x= unit*3/4
bpy.context.object.data.edit_bones["Foot_L"].tail.z= (unit/5 + thigh_sz + leg_sz + foot_sz/2)*-1
bpy.context.object.data.edit_bones["Foot_L"].tail.y= foot_sz/2*-1
bpy.context.object.data.edit_bones["Thigh_R"].head.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Thigh_R"].head.z= (unit/5)*-1
bpy.context.object.data.edit_bones["Leg_R"].head.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Leg_R"].head.z= (unit/5 + thigh_sz)*-1
bpy.context.object.data.edit_bones["Foot_R"].head.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Foot_R"].head.z= (unit/5 + thigh_sz + leg_sz)*-1
bpy.context.object.data.edit_bones["Foot_R"].tail.x= unit*3/4*-1
bpy.context.object.data.edit_bones["Foot_R"].tail.z= (unit/5 + thigh_sz + leg_sz + foot_sz/2)*-1
bpy.context.object.data.edit_bones["Foot_R"].tail.y= foot_sz/2*-1
bpy.ops.object.editmode_toggle()
import bpy
#comecando configuração seguir movimentos pontos
#colocando em pose mode
obs = []
for ob in bpy.context.scene.objects:
if ob.type == 'ARMATURE':
obs.append(ob)
#obs
bpy.ops.object.mode_set(mode='POSE')
actual_bone = 'Root'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='COPY_LOCATION')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.035"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[1].target = bpy.data.objects["Point.011"]
#====
actual_bone = 'Spine'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.011"]
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[1].target = bpy.data.objects["Point.012"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[2].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[2].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[2].min_x = -0.349066
bpy.context.object.pose.bones[actual_bone].constraints[2].max_x = 0.349066
#y
bpy.context.object.pose.bones[actual_bone].constraints[2].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[2].min_y = -0.698132
bpy.context.object.pose.bones[actual_bone].constraints[2].max_y = 0.698132
#z
bpy.context.object.pose.bones[actual_bone].constraints[2].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[2].min_z = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[2].max_z = 0.174533
#=====
actual_bone = 'Neck'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.033"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 1.0472
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0.523599
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.349066
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.349066
#=====
actual_bone = 'Face'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.000"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0.872665
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.523599
#=====
actual_bone = 'Arm_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.013"]
#=====
actual_bone = 'Forearm_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.015"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -2.53073
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = -0.191986
#=====
actual_bone = 'Arm_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.014"]
#=====
actual_bone = 'Forearm_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.016"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = False
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = False
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0.191986
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 2.53073
#=====
actual_bone = 'Thigh_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.025"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -1.76278
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 1.3439
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.785398
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.174533
#=====
actual_bone = 'Leg_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.027"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = 0.0698132
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 2.0944
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
#=====
actual_bone = 'Foot_L'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.031"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0.523599
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
#=====
actual_bone = 'Thigh_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.026"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -1.76278
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 1.3439
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = -0.174533
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0.785398
actual_bone = 'Leg_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.028"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = 0.0698132
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 2.0944
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
actual_bone = 'Foot_R'
obs[len(obs)-1].data.bones.active = obs[len(obs)-1].pose.bones[actual_bone].bone
obs[len(obs)-1].pose.bones[actual_bone].bone.select = True
bpy.ops.pose.constraint_add(type='DAMPED_TRACK')
bpy.context.object.pose.bones[actual_bone].constraints[0].target = bpy.data.objects["Point.032"]
bpy.ops.pose.constraint_add(type='LIMIT_ROTATION')
bpy.context.object.pose.bones[actual_bone].constraints[1].owner_space = 'LOCAL'
#x
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_x = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_x = -0.523599
bpy.context.object.pose.bones[actual_bone].constraints[1].max_x = 0.523599
#y
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_y = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_y = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_y = 0
#z
bpy.context.object.pose.bones[actual_bone].constraints[1].use_limit_z = True
bpy.context.object.pose.bones[actual_bone].constraints[1].min_z = 0
bpy.context.object.pose.bones[actual_bone].constraints[1].max_z = 0
# frames = len(get_video_frames(path))
# bpy.context.scene.frame_end = frames
# bpy.ops.nla.bake(frame_start=1, frame_end=frames, visual_keying=True, clear_constraints=True, clear_parents=True, bake_types={'POSE'})
bpy.ops.object.mode_set(mode='OBJECT')
#apagar collection points criada
# collection = bpy.data.collections.get('Point')
# #
# for obj in collection.objects:
# bpy.data.objects.remove(obj, do_unlink=True)
# bpy.data.collections.remove(collection)
sk_value_prop = context.scene.sk_value_prop
return{'FINISHED'}
# class Mediapipe_Pose_estimation_RT(Operator, ImportHelper):
class Mediapipe_Pose_estimation_RT(Operator):
    """Modal operator that streams pose data into Blender in real time.

    A separate MediaPipe script running OUTSIDE Blender captures the pose and
    serves it over a local TCP socket.  This operator connects to that socket,
    polls it on a window-manager timer, and moves the 'Point.NNN' empties
    (optionally inserting location keyframes) so the constrained armature
    follows the live pose.
    """
    bl_idname = "mocap.mediapipe_pose_rt"
    bl_label = "Generate Pose using MediaPipe RealTime"
    bl_description = "Generate Mocap data with MediaPipe RealTime, you have to use separated script running outside Blender"
    # NOTE(review): these imports live in the class namespace; the method
    # bodies below presumably resolve bpy/json via module-level imports.
    import bpy
    import socket
    import json
    from datetime import datetime
    from mathutils import Vector
    import math
    ##################################
    #### Starting modal to get media pipe realtime
    ##################################
    _timer = None  # window-manager timer that drives the TIMER events
    _s = None      # TCP socket connected to the external MediaPipe script
    _frame = None  # frame index used when keyframes are being recorded
    def modal(self, context, event):
        """Handle one modal event.

        ESC / right-click cancels the operator.  On every TIMER tick one
        complete length-prefixed JSON message is read from the socket and
        applied to the point empties, including the three derived mid-points
        (Point.033/034/035).
        """
        socket_buffer = context.scene.sk_value_prop.sk_socket_buffer
        record_bool = context.scene.sk_value_prop.sk_record_bool
        # record_frame_start = context.scene.sk_value_prop.sk_record_frame_start
        import socket
        from mathutils import Vector
        # s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # s.connect((socket.gethostname(), 1234)) #gethostname is the local address,
        # Each message starts with a fixed-width ASCII length field.
        HEADERSIZE = 10
        if event.type in {'RIGHTMOUSE', 'ESC'}:
            self.cancel(context)
            return {'CANCELLED'}
        if event.type == 'TIMER':
            #############
            # begin of connection code: accumulate socket chunks until one
            # whole length-prefixed message has arrived.
            full_msg = b''
            nem_msg = True
            while True:
                # msg = self._s.recv(1024) #1024 is the buffer
                msg = self._s.recv(socket_buffer)
                if nem_msg:
                    # self._frame = self._frame + 1
                    print(f"new message length: {msg[:HEADERSIZE]}")
                    msglen = int(msg[:HEADERSIZE])
                    nem_msg = False
                full_msg += msg
                if len(full_msg)-HEADERSIZE >= msglen:
                    # Full payload received: decode the landmark list.
                    d = json.loads(full_msg[HEADERSIZE:].decode('utf-8'))
                    nem_msg = True
                    full_msg = b''
                    break
            # print('fim: ',d)
            # The sender transmits the string 'nada' ("nothing") when no pose
            # was detected for the current camera frame.
            if d != 'nada':
                # print('len d[0]: ', len(d[0]))
                print('Frame:',self._frame,'bone: ',d[1][0],' x: ',d[1][1],' y: ',d[1][2],' z: ',d[1][3])
                for i in range(len(d)):
                    # Each entry looks like [name, x, y, z]; move the matching
                    # empty 'Point.000' ... 'Point.0NN' (zero-padded index).
                    x_pose = d[i][1]
                    y_pose = d[i][2]
                    z_pose = d[i][3]
                    bpy.data.objects["Point."+str(1000+i)[1:]].location[0]=x_pose
                    bpy.data.objects["Point."+str(1000+i)[1:]].location[1]=y_pose
                    bpy.data.objects["Point."+str(1000+i)[1:]].location[2]=z_pose
                    if i == 10:
                        # middle_point('Point.009','Point.010','Point.033')
                        # Point.033 = midpoint of Point.009 and Point.010.
                        bpy.ops.object.select_all(action='DESELECT')
                        bpy.data.objects['Point.009'].select_set(True)
                        bpy.data.objects['Point.010'].select_set(True)
                        bpy.context.view_layer.objects.active = bpy.data.objects['Point.010']
                        obs = bpy.context.selected_objects
                        n = len(obs)
                        assert(n)
                        bpy.data.objects['Point.033'].location = sum([o.matrix_world.translation for o in obs], Vector()) / n
                        if record_bool == True:
                            bpy.data.objects["Point."+str(1000+33)[1:]].keyframe_insert(data_path="location", frame=self._frame)
                    if i == 12:
                        # middle_point('Point.011','Point.012','Point.034')
                        # Point.034 = midpoint of Point.011 and Point.012.
                        bpy.ops.object.select_all(action='DESELECT')
                        bpy.data.objects['Point.011'].select_set(True)
                        bpy.data.objects['Point.012'].select_set(True)
                        bpy.context.view_layer.objects.active = bpy.data.objects['Point.012']
                        obs = bpy.context.selected_objects
                        n = len(obs)
                        assert(n)
                        bpy.data.objects['Point.034'].location = sum([o.matrix_world.translation for o in obs], Vector()) / n
                        if record_bool == True:
                            bpy.data.objects["Point."+str(1000+34)[1:]].keyframe_insert(data_path="location", frame=self._frame)
                    if i == 24:
                        # middle_point('Point.023','Point.024','Point.035')
                        # Point.035 = midpoint of Point.023 and Point.024.
                        bpy.ops.object.select_all(action='DESELECT')
                        bpy.data.objects['Point.023'].select_set(True)
                        bpy.data.objects['Point.024'].select_set(True)
                        bpy.context.view_layer.objects.active = bpy.data.objects['Point.024']
                        obs = bpy.context.selected_objects
                        n = len(obs)
                        assert(n)
                        bpy.data.objects['Point.035'].location = sum([o.matrix_world.translation for o in obs], Vector()) / n
                        if record_bool == True:
                            bpy.data.objects["Point."+str(1000+35)[1:]].keyframe_insert(data_path="location", frame=self._frame)
                    if record_bool == True:
                        bpy.data.objects["Point."+str(1000+i)[1:]].keyframe_insert(data_path="location", frame=self._frame)
                # Frame counter advances once per applied pose message.
                self._frame = self._frame + 1
        # end connection code
        return {'PASS_THROUGH'}
    def execute(self, context):
        """Connect to the external pose server and start the modal timer."""
        import socket
        import bpy
        refresh_rate = context.scene.sk_value_prop.sk_refresh_rate
        self._frame = context.scene.sk_value_prop.sk_record_frame_start
        self._s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._s.connect((socket.gethostname(), 1235)) #gethostname is the local address,
        name_points = 'Point'
        #===========
        # selecting the Scene Collection so created data lands in it
        multiplier = 0.9
        scene_collection = bpy.context.view_layer.layer_collection
        bpy.context.view_layer.active_layer_collection = scene_collection
        wm = context.window_manager
        # self._timer = wm.event_timer_add(0.1, window=context.window)
        self._timer = wm.event_timer_add(refresh_rate, window=context.window)
        wm.modal_handler_add(self)
        print('MODAL!!!!!!!!!!!!!!!')
        return {'RUNNING_MODAL'}
    def cancel(self, context):
        """Stop the modal loop by removing the window-manager timer."""
        wm = context.window_manager
        wm.event_timer_remove(self._timer)
class Reload_sk_Mediapipe(Operator):
    """Operator that re-applies the bone proportions of the imported skeleton."""
    bl_idname = "mocap.import_mediapipe_reload"
    bl_label = "Reload Skeleton Easymocap"
    bl_description = "Reload SK EasyMOCAP"

    def execute(self, context):
        """Recompute every bone size from the UI multipliers and apply them."""
        bpy.ops.object.mode_set(mode='OBJECT')
        props = context.scene.sk_value_prop
        scale = props.sk_value
        # Reference unit: distance between two tracked points, scaled by the
        # global UI factor.
        unit = skeleton_import.size_ref_bone('Point.001', 'Point.008', 'Point.008')
        unit = unit * scale
        # Base proportions per bone, each further scaled by its own multiplier.
        root_sz = unit / 10
        spine_sz = unit * 3.5 * props.sk_spine_mulitplier
        neck_sz = unit * props.sk_neck_mulitplier
        face_sz = unit * props.sk_head_mulitplier
        thigh_sz = unit * 3 * props.sk_tigh_mulitplier
        leg_sz = unit * 2.5 * props.sk_leg_mulitplier
        foot_sz = unit * props.sk_foot_mulitplier  # foot leans 45 degrees forward
        arm_sz = unit * 1.5 * props.sk_arm_mulitplier
        forearm_sz = unit * 1.5 * props.sk_forearm_mulitplier
        skeleton_import.size_of_bones(unit, root_sz, spine_sz, neck_sz, face_sz,
                                      thigh_sz, leg_sz, foot_sz, arm_sz, forearm_sz)
        return {'FINISHED'}
# All operator/panel/property classes this add-on (un)registers.
classes = (Import_Data_easymocap, Test_PT_Panel, OT_TestOpenFilebrowser,Import_Data_frankmocap,Import_Data_vibe,Mediapipe_Pose_estimation,
           MySettings,Modify_PT_Panel,Convert_axis,Reset_location,Reset_rotation,Foot_high,Compensate_Rotation,Smooth_Bone,Mediapipe_Pose_estimation_RT,Mediapipe_Pose_Prepare_Skeleton_RT,
           Reload_sk_Mediapipe, Debug_PT_Panel)
# register, unregister = bpy.utils.register_classes_factory(classes)
def register():
    """Register every add-on class and attach the settings pointer."""
    for cls in classes:
        bpy.utils.register_class(cls)
    # Expose the add-on settings on every Scene.
    bpy.types.Scene.sk_value_prop = PointerProperty(type=MySettings)
def unregister():
    """Unregister classes in reverse order and drop the settings pointer."""
    for cls in reversed(classes):
        bpy.utils.unregister_class(cls)
    del bpy.types.Scene.sk_value_prop
# Allow running the add-on directly from Blender's text editor.
if __name__ == "__main__":
    register()
"[email protected]"
]
| |
3e9be943b29edc979e16250aaf978ea1d20d10ee | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /BokhFunYBvsvHEjfx_23.py | 9fe657485bb955529d5c399ff9aa5dfe1f3e1652 | []
| no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 114 | py |
def seven_boom(lst):
    """Return "Boom!" if any number in *lst* contains the digit 7.

    Otherwise return "there is no 7 in the list". Works for an empty
    list and for negative numbers. Uses any() with a generator so the
    scan short-circuits on the first match instead of first joining
    every number into one big string.
    """
    if any("7" in str(x) for x in lst):
        return "Boom!"
    return "there is no 7 in the list"
| [
"[email protected]"
]
| |
215ae92a7fe987232180e57debe40043791f172c | 386cf667134c1db3242823b33bd5537a462ce986 | /app/user/urls.py | 63d01ec10c4268b0bd2c2b07d1cf5c2bdea21b1d | [
"MIT"
]
| permissive | mukulkkumar/docker-travis-django | 795aaff47e54a211e16cc32104693e80c3634210 | de1d6471538ff2b86f97ee7742bb548875c0b39b | refs/heads/main | 2023-04-22T04:07:01.554172 | 2021-05-15T04:52:36 | 2021-05-15T04:52:36 | 364,581,976 | 1 | 0 | MIT | 2021-05-15T04:52:37 | 2021-05-05T13:15:13 | Python | UTF-8 | Python | false | false | 285 | py | from django.urls import path
from user import views
# URL namespace: reverse these as 'user:create', 'user:token', 'user:me'.
app_name = 'user'

urlpatterns = [
    # Account creation endpoint, handled by CreateUserView.
    path('create/', views.CreateUserView.as_view(), name='create'),
    # Auth-token endpoint, handled by CreateTokenView.
    path('token/', views.CreateTokenView.as_view(), name='token'),
    # Endpoint for the authenticated user's own record (ManageUserView).
    path('me/', views.ManageUserView.as_view(), name='me'),
]
| [
"[email protected]"
]
| |
cfe92e44ae6fb672278ce8b9e07d449a2724a880 | 0a0e0388727cf219717dc157416d3d1a9a043cb1 | /MyniaCairo/MyniaCairo/urls.py | 419062b7960196dc7ad8c63b0da79fcd3d8b684b | []
| no_license | beshoyAtefZaki/MyniaCairo | 24a7078a3aac7f53f38fe1723fd7c977d9f4f33a | 49e6140cbd951339419f08caf509e59e45656043 | refs/heads/master | 2020-06-30T00:25:04.212268 | 2019-08-08T11:10:51 | 2019-08-08T11:10:51 | 200,666,450 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 995 | py | """MyniaCairo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path ,include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
    # Django admin site.
    path('admin/', admin.site.urls),
    # All remaining URLs are delegated to the profiles app.
    path('', include('profiles.urls')),
]

# In development (DEBUG=True) also serve collected static files directly.
if settings.DEBUG:
    urlpatterns = urlpatterns + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| [
"[email protected]"
]
| |
188eb8c5c11d22b9bb3cb7f4979d1aa0b3255125 | dbe012dbedc967332ae58414473185055136d189 | /maskrcnn_benchmark/structures/bounding_box.py | b450da8827054cb4c651ba6949b6fdac5b95ee18 | [
"MIT"
]
| permissive | kevincao91/maskrcnn | 87561a023939a71d624252dd44f4c882b2dfa2a6 | a55f6ab82219329e353a20dd53c3f25f4375f537 | refs/heads/master | 2020-09-24T18:41:36.565752 | 2020-05-07T05:45:39 | 2020-05-07T05:45:39 | 225,819,004 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,755 | py | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch
# Transpose method codes accepted by BoxList.transpose(); they mirror
# PIL.Image's FLIP_LEFT_RIGHT / FLIP_TOP_BOTTOM constants.
FLIP_LEFT_RIGHT = 0
FLIP_TOP_BOTTOM = 1
class BoxList(object):
    """
    This class represents a set of bounding boxes.
    The bounding boxes are represented as a Nx4 Tensor.
    In order to uniquely determine the bounding boxes with respect
    to an image, we also store the corresponding image dimensions
    (width, height).
    They can contain extra information that is specific to each bounding box,
    such as labels, stored in ``extra_fields``.

    Two box encodings are supported:
      - "xyxy": (xmin, ymin, xmax, ymax)
      - "xywh": (xmin, ymin, width, height)
    Coordinates are treated as inclusive pixel indices, hence the
    ``TO_REMOVE = 1`` corrections in the conversions below.
    """

    def __init__(self, bbox, image_size, mode="xyxy"):
        """Wrap an Nx4 box tensor (or nested list) for an image of
        ``image_size`` = (width, height), validating shape and mode."""
        # Keep the boxes on whatever device they already live on.
        device = bbox.device if isinstance(bbox, torch.Tensor) else torch.device("cpu")
        bbox = torch.as_tensor(bbox, dtype=torch.float32, device=device)
        if bbox.ndimension() != 2:
            raise ValueError(
                "bbox should have 2 dimensions, got {}".format(bbox.ndimension())
            )
        if bbox.size(-1) != 4:
            raise ValueError(
                "last dimension of bbox should have a "
                "size of 4, got {}".format(bbox.size(-1))
            )
        if mode not in ("xyxy", "xywh"):
            raise ValueError("mode should be 'xyxy' or 'xywh'")

        self.bbox = bbox
        self.size = image_size  # (image_width, image_height)
        self.mode = mode
        # Per-box auxiliary data (labels, masks, scores, ...).
        self.extra_fields = {}

    def add_field(self, field, field_data):
        """Attach (or overwrite) per-box auxiliary data under ``field``."""
        self.extra_fields[field] = field_data

    def get_field(self, field):
        """Return the data stored under ``field`` (KeyError if missing)."""
        return self.extra_fields[field]

    def has_field(self, field):
        """Return True if ``field`` has been added to this BoxList."""
        return field in self.extra_fields

    def fields(self):
        """Return the list of extra-field names currently stored."""
        return list(self.extra_fields.keys())

    def _copy_extra_fields(self, bbox):
        """Copy all extra fields from another BoxList into this one."""
        for k, v in bbox.extra_fields.items():
            self.extra_fields[k] = v

    def convert(self, mode):
        """Return a BoxList with the boxes re-encoded in ``mode``.

        Returns ``self`` unchanged if already in the requested mode;
        extra fields are carried over by reference.
        """
        if mode not in ("xyxy", "xywh"):
            raise ValueError("mode should be 'xyxy' or 'xywh'")
        if mode == self.mode:
            return self
        # we only have two modes, so don't need to check
        # self.mode
        xmin, ymin, xmax, ymax = self._split_into_xyxy()
        if mode == "xyxy":
            bbox = torch.cat((xmin, ymin, xmax, ymax), dim=-1)
            bbox = BoxList(bbox, self.size, mode=mode)
        else:
            # +1 because coordinates are inclusive pixel indices.
            TO_REMOVE = 1
            bbox = torch.cat(
                (xmin, ymin, xmax - xmin + TO_REMOVE, ymax - ymin + TO_REMOVE), dim=-1
            )
            bbox = BoxList(bbox, self.size, mode=mode)
        bbox._copy_extra_fields(self)
        return bbox

    def _split_into_xyxy(self):
        """Return (xmin, ymin, xmax, ymax), each as an Nx1 tensor,
        regardless of the current encoding."""
        if self.mode == "xyxy":
            xmin, ymin, xmax, ymax = self.bbox.split(1, dim=-1)
            return xmin, ymin, xmax, ymax
        elif self.mode == "xywh":
            TO_REMOVE = 1
            xmin, ymin, w, h = self.bbox.split(1, dim=-1)
            # clamp(min=0) guards against degenerate negative sizes.
            return (
                xmin,
                ymin,
                xmin + (w - TO_REMOVE).clamp(min=0),
                ymin + (h - TO_REMOVE).clamp(min=0),
            )
        else:
            raise RuntimeError("Should not be here")

    def resize(self, size, *args, **kwargs):
        """
        Returns a resized copy of this bounding box

        :param size: The requested size in pixels, as a 2-tuple:
            (width, height).

        Non-tensor extra fields (e.g. masks) are resized recursively;
        tensor fields are carried over unchanged.
        """
        ratios = tuple(float(s) / float(s_orig) for s, s_orig in zip(size, self.size))
        if ratios[0] == ratios[1]:
            # Uniform scaling: a single multiply suffices.
            ratio = ratios[0]
            scaled_box = self.bbox * ratio
            bbox = BoxList(scaled_box, size, mode=self.mode)
            # bbox._copy_extra_fields(self)
            for k, v in self.extra_fields.items():
                if not isinstance(v, torch.Tensor):
                    v = v.resize(size, *args, **kwargs)
                bbox.add_field(k, v)
            return bbox

        # Anisotropic scaling: scale x and y coordinates independently.
        ratio_width, ratio_height = ratios
        xmin, ymin, xmax, ymax = self._split_into_xyxy()
        scaled_xmin = xmin * ratio_width
        scaled_xmax = xmax * ratio_width
        scaled_ymin = ymin * ratio_height
        scaled_ymax = ymax * ratio_height
        scaled_box = torch.cat(
            (scaled_xmin, scaled_ymin, scaled_xmax, scaled_ymax), dim=-1
        )
        bbox = BoxList(scaled_box, size, mode="xyxy")
        # bbox._copy_extra_fields(self)
        for k, v in self.extra_fields.items():
            if not isinstance(v, torch.Tensor):
                v = v.resize(size, *args, **kwargs)
            bbox.add_field(k, v)

        return bbox.convert(self.mode)

    def transpose(self, method):
        """
        Transpose bounding box (flip or rotate in 90 degree steps)

        :param method: One of :py:attr:`PIL.Image.FLIP_LEFT_RIGHT`,
          :py:attr:`PIL.Image.FLIP_TOP_BOTTOM`, :py:attr:`PIL.Image.ROTATE_90`,
          :py:attr:`PIL.Image.ROTATE_180`, :py:attr:`PIL.Image.ROTATE_270`,
          :py:attr:`PIL.Image.TRANSPOSE` or :py:attr:`PIL.Image.TRANSVERSE`.

        Only the two flips are implemented. Non-tensor extra fields are
        transposed recursively.
        """
        if method not in (FLIP_LEFT_RIGHT, FLIP_TOP_BOTTOM):
            raise NotImplementedError(
                "Only FLIP_LEFT_RIGHT and FLIP_TOP_BOTTOM implemented"
            )

        image_width, image_height = self.size
        xmin, ymin, xmax, ymax = self._split_into_xyxy()
        if method == FLIP_LEFT_RIGHT:
            # Mirror x about the image width; -1 for inclusive coordinates.
            TO_REMOVE = 1
            transposed_xmin = image_width - xmax - TO_REMOVE
            transposed_xmax = image_width - xmin - TO_REMOVE
            transposed_ymin = ymin
            transposed_ymax = ymax
        elif method == FLIP_TOP_BOTTOM:
            transposed_xmin = xmin
            transposed_xmax = xmax
            transposed_ymin = image_height - ymax
            transposed_ymax = image_height - ymin

        transposed_boxes = torch.cat(
            (transposed_xmin, transposed_ymin, transposed_xmax, transposed_ymax), dim=-1
        )
        bbox = BoxList(transposed_boxes, self.size, mode="xyxy")
        # bbox._copy_extra_fields(self)
        for k, v in self.extra_fields.items():
            if not isinstance(v, torch.Tensor):
                v = v.transpose(method)
            bbox.add_field(k, v)
        return bbox.convert(self.mode)

    def crop(self, box):
        """
        Crops a rectangular region from this bounding box. The box is a
        4-tuple defining the left, upper, right, and lower pixel
        coordinate.

        Boxes are translated into the crop's frame and clamped to its
        extent; non-tensor extra fields are cropped recursively.
        """
        xmin, ymin, xmax, ymax = self._split_into_xyxy()
        w, h = box[2] - box[0], box[3] - box[1]
        cropped_xmin = (xmin - box[0]).clamp(min=0, max=w)
        cropped_ymin = (ymin - box[1]).clamp(min=0, max=h)
        cropped_xmax = (xmax - box[0]).clamp(min=0, max=w)
        cropped_ymax = (ymax - box[1]).clamp(min=0, max=h)

        # TODO should I filter empty boxes here?
        # (Disabled: left as a reminder; boxes collapsed by the clamp above
        # are currently kept.)
        if False:
            is_empty = (cropped_xmin == cropped_xmax) | (cropped_ymin == cropped_ymax)

        cropped_box = torch.cat(
            (cropped_xmin, cropped_ymin, cropped_xmax, cropped_ymax), dim=-1
        )
        bbox = BoxList(cropped_box, (w, h), mode="xyxy")
        # bbox._copy_extra_fields(self)
        for k, v in self.extra_fields.items():
            if not isinstance(v, torch.Tensor):
                v = v.crop(box)
            bbox.add_field(k, v)
        return bbox.convert(self.mode)

    # Tensor-like methods

    def to(self, device):
        """Return a copy with boxes (and any movable fields) on ``device``."""
        bbox = BoxList(self.bbox.to(device), self.size, self.mode)
        for k, v in self.extra_fields.items():
            if hasattr(v, "to"):
                v = v.to(device)
            bbox.add_field(k, v)
        return bbox

    def __getitem__(self, item):
        """Index/slice boxes (and every extra field) with ``item``."""
        bbox = BoxList(self.bbox[item], self.size, self.mode)
        for k, v in self.extra_fields.items():
            bbox.add_field(k, v[item])
        return bbox

    def __len__(self):
        """Number of boxes."""
        return self.bbox.shape[0]

    def clip_to_image(self, remove_empty=True):
        """Clamp all coordinates into the image (in place) and, if
        ``remove_empty``, return a view without degenerate boxes."""
        TO_REMOVE = 1
        self.bbox[:, 0].clamp_(min=0, max=self.size[0] - TO_REMOVE)
        self.bbox[:, 1].clamp_(min=0, max=self.size[1] - TO_REMOVE)
        self.bbox[:, 2].clamp_(min=0, max=self.size[0] - TO_REMOVE)
        self.bbox[:, 3].clamp_(min=0, max=self.size[1] - TO_REMOVE)
        if remove_empty:
            box = self.bbox
            keep = (box[:, 3] > box[:, 1]) & (box[:, 2] > box[:, 0])
            return self[keep]
        return self

    def area(self):
        """Return a 1D tensor with each box's area (inclusive pixels)."""
        box = self.bbox
        if self.mode == "xyxy":
            TO_REMOVE = 1
            area = (box[:, 2] - box[:, 0] + TO_REMOVE) * (box[:, 3] - box[:, 1] + TO_REMOVE)
        elif self.mode == "xywh":
            area = box[:, 2] * box[:, 3]
        else:
            raise RuntimeError("Should not be here")

        return area

    def copy_with_fields(self, fields, skip_missing=False):
        """Return a copy of this BoxList carrying only the named extra
        ``fields``; missing fields raise unless ``skip_missing``."""
        bbox = BoxList(self.bbox, self.size, self.mode)
        if not isinstance(fields, (list, tuple)):
            fields = [fields]
        for field in fields:
            if self.has_field(field):
                bbox.add_field(field, self.get_field(field))
            elif not skip_missing:
                raise KeyError("Field '{}' not found in {}".format(field, self))
        return bbox

    def __repr__(self):
        s = self.__class__.__name__ + "("
        s += "num_boxes={}, ".format(len(self))
        s += "image_width={}, ".format(self.size[0])
        s += "image_height={}, ".format(self.size[1])
        s += "mode={})".format(self.mode)
        return s
if __name__ == "__main__":
    # Quick manual smoke test: resize two boxes and index one of them.
    bbox = BoxList([[0, 0, 10, 10], [0, 0, 5, 5]], (10, 10))
    s_bbox = bbox.resize((5, 5))
    print(s_bbox)
    print(s_bbox.bbox)

    k_bbox = s_bbox[[1]]
    print(k_bbox)
    print(k_bbox.bbox)
| [
"[email protected]"
]
| |
e92a538ccb84ab322c783ee5bcec0c38e5263b11 | 410029e722668b7b3d5ed0799ae052c4571ab466 | /.venv/bin/django-admin | cb8512b247887b074ffb84f3ec0c397fbf7f537b | []
| no_license | creechcorbin/ghostpost | 8c7cb48a468cb226e7d2c78d80f8dd845d742113 | 898eb28913f94bef48bc09511a67d6491f284a07 | refs/heads/master | 2022-12-04T11:51:42.907582 | 2020-08-21T18:45:02 | 2020-08-21T18:45:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 309 | #!/Users/corbincreech/Kenzie-Projects/ghostpost/.venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from django.core.management import execute_from_command_line
if __name__ == '__main__':
    # Strip the "-script.pyw"/".exe" suffix that Windows console-script
    # wrappers append to argv[0] so Django reports a clean program name,
    # then hand control to Django's command-line dispatcher.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(execute_from_command_line())
| [
"[email protected]"
]
| ||
f1833a5283b95fdf289b1c1117fb0e652ec8a137 | 77a8581bb042b1164a2aee2c581ebaeba1cd571e | /manager/migrations/0001_initial.py | ea059195deb4dda13ebe62bb251ec602d5ab19c1 | [
"Apache-2.0"
]
| permissive | EruDev/eru_manager | fb6f00d49c40113cf0a0871bc1aa0f771c23cb08 | 9bc7fb3af361c7de734bfa6c4e1562dd7f978500 | refs/heads/master | 2022-12-18T03:14:45.830681 | 2021-06-11T05:42:16 | 2021-06-11T05:42:16 | 139,976,816 | 18 | 5 | Apache-2.0 | 2022-12-08T02:15:21 | 2018-07-06T11:34:19 | SCSS | UTF-8 | Python | false | false | 896 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2018-06-10 23:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration for the manager app: creates the UseInfo table."""

    # First migration of this app; nothing to depend on.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='UseInfo',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Unique login name.
                ('name', models.CharField(max_length=128, unique=True, verbose_name='用户名')),
                ('password', models.CharField(max_length=128, verbose_name='密码')),
                ('email', models.CharField(max_length=128, verbose_name='邮箱')),
            ],
            options={
                'verbose_name': '用户表',
                'verbose_name_plural': '用户表',
            },
        ),
    ]
| [
"[email protected]"
]
| |
f5e58843cb02368ad358651d4056459f7b2f17f0 | 0b32ba2b4537e024f7edb7682446ca948366111c | /pygly/GlycanFactory.py | 8903265d3abe74822b21e0909187ad315f744b4e | []
| no_license | alternativeTime/PyGly | b459c737b2b24918314ad0e7ebc1696a7d4c5314 | 68675e6896bc7bfc625cda422d08b7ec102e74e5 | refs/heads/master | 2022-04-19T16:12:34.421317 | 2020-04-14T23:37:33 | 2020-04-14T23:37:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,082 | py |
from ReferenceTable import ReferenceTable
from GlycanFormatter import GlycoCTFormat
from MonoFactory import MonoFactory
from Monosaccharide import Anomer, Linkage
# Should structures specified by their oxford abbreviation use
# undetermined linkages for their antennae? Currently no, but
# perhaps. Right now we just get one linkage instantiation.
class GlycanFactory(ReferenceTable):
    """Factory for named Glycan structures.

    Structures are loaded from the reference table (each section provides
    GlycoCT text plus optional aliases); ``new()`` hands out fresh clones
    so callers may mutate the result. ``oxford2Glycan()`` additionally
    builds N-glycans from Oxford-style abbreviations (e.g. "FA2G2S1").
    """

    def __init__(self):
        # GlycoCT parser and monosaccharide factory used to build glycans.
        self.fmt = GlycoCTFormat()
        self.mf = MonoFactory()
        super(GlycanFactory, self).__init__()

    def new(self, key):
        """Return a fresh (cloned) Glycan registered under name/alias ``key``."""
        return self[key].clone()

    def parseSection(self, name, kv):
        """Parse one reference-table section into (alias, glycan) pairs.

        The glycan is built from the section's GlycoCT text (whitespace
        normalized to single newlines); the section name and any
        semicolon-separated 'Aliases' all map to the same Glycan object.
        """
        aliases = [name]
        g = self.fmt.toGlycan('\n'.join(kv['GlycoCT'].split()))
        # NOTE(review): when 'Aliases' is absent this still appends one
        # empty-string alias mapping to g — presumably harmless; confirm.
        aliases.extend(map(str.strip, kv.get('Aliases', '').split(';')))
        return [(a, g) for a in aliases]

    def add_mono(self, parent, name, parent_pos,
                 child_pos=1, anomer=Anomer.beta,
                 parent_type=Linkage.oxygenPreserved,
                 child_type=Linkage.oxygenLost):
        """Attach a new monosaccharide ``name`` to ``parent`` and return it.

        Defaults give a beta 1->parent_pos linkage with the usual
        oxygen-preserved/oxygen-lost link types.
        """
        m = self.mf.new(name)
        m.set_anomer(anomer)
        parent.add_child(m, parent_pos=parent_pos,
                         child_pos=child_pos,
                         parent_type=parent_type,
                         child_type=child_type)
        return m

    def oxford2Glycan(self, name):
        """Build an N-glycan from an Oxford abbreviation.

        Grammar (consumed left to right): optional 'F' (core fucose),
        'A<n>' antennae count (1-4), optional 'B' (bisecting GlcNAc),
        optional 'F<n>' antenna fucoses, optional 'G<n>' galactoses,
        optional 'S<n>' sialic acids. Known names short-circuit to the
        reference table.
        """
        if name in self:
            return self.new(name)
        p = 0
        # Optional leading 'F': core fucose -> start from the FM3 core.
        if name[p] == 'F':
            g = self.new('FM3')
            p += 1
        else:
            g = self.new('M3')
        r = g.root()
        # Locate the second core GlcNAc and the 1-3 / 1-6 mannose arms.
        # list(...) keeps this working on Python 3, where filter() returns
        # an iterator and is not indexable.
        glcnac2 = list(filter(lambda m: m.compatible(self.mf['GlcNAc']),
                              r.children()))[0]
        man1 = glcnac2.children()[0]
        man16 = [l.child() for l in man1.links() if l.parent_pos() == 6][0]
        man13 = [l.child() for l in man1.links() if l.parent_pos() == 3][0]
        # 'A<n>': number of antennae (GlcNAc branches), n in 1..4.
        assert (name[p] == 'A')
        nant = int(name[p+1])
        ant = [None]  # placeholder so antennae are 1-indexed below
        if nant in (1, 2, 3, 4):
            ant.append(self.add_mono(man13, 'GlcNAc', parent_pos=2))
        if nant in (2, 3, 4):
            ant.append(self.add_mono(man16, 'GlcNAc', parent_pos=2))
        if nant in (3, 4):
            ant.append(self.add_mono(man13, 'GlcNAc', parent_pos=4))
        if nant in (4,):
            ant.append(self.add_mono(man16, 'GlcNAc', parent_pos=6))
        p += 2
        if p >= len(name):
            return g
        # Optional 'B': bisecting GlcNAc on the core mannose.
        if name[p] == 'B':
            self.add_mono(man1, 'GlcNAc', 4)
            # BUG FIX: was `name[p] += 1`, which raises TypeError (strings
            # are immutable and str+int is invalid); the intent is to
            # advance the cursor past the consumed 'B'.
            p += 1
        if p >= len(name):
            return g
        # Optional 'F<n>': alpha1-6 fucose on the first n antennae.
        if name[p] == 'F':
            nfuc = int(name[p+1])
            assert (nfuc <= nant)
            for fi in range(1, nfuc+1):
                self.add_mono(ant[fi], 'Fuc', parent_pos=6, anomer=Anomer.alpha)
            p += 2
        if p >= len(name):
            return g
        # 'G<n>': beta1-4 galactose on the first n antennae.
        assert (name[p] == 'G')
        ngal = int(name[p+1])
        gal = [None]
        assert (ngal <= nant)
        for gi in range(1, ngal+1):
            gal.append(self.add_mono(ant[gi], 'Gal', parent_pos=4))
        p += 2
        if p >= len(name):
            return g
        # 'S<n>': alpha2-6 Neu5Ac on the first n galactoses.
        assert (name[p] == 'S')
        nsia = int(name[p+1])
        sia = [None]
        assert (nsia <= ngal)
        for si in range(1, nsia+1):
            sia.append(self.add_mono(gal[si], 'Neu5Ac', parent_pos=6,
                                     child_pos=2, anomer=Anomer.alpha))
        return g
| [
"[email protected]"
]
| |
3caf6a3c0c2152578cd3ce56cabc211fa6d8b6a5 | 3e1b46a7b4b71d24c40a53c9ceda310e4114ad91 | /allennlp_models/rc/transformer_qa/transformer_qa_predictor.py | 530ee5b38f8ae8a782dc3bff3c59453e24ecfd27 | [
"Apache-2.0"
]
| permissive | codehunk628/allennlp-models | dc1de94ec4607a05ddcb31a2e5a8af7bfaf9686e | 83a14c4f4bef0c3e99f47dd1f380b48cbbba0ba6 | refs/heads/master | 2022-07-06T18:41:42.443241 | 2020-05-08T21:55:27 | 2020-05-08T21:55:27 | 262,493,726 | 1 | 0 | Apache-2.0 | 2020-05-09T05:10:17 | 2020-05-09T05:10:17 | null | UTF-8 | Python | false | false | 3,662 | py | from typing import List, Dict, Any
from allennlp.models import Model
from overrides import overrides
from allennlp.common.util import JsonDict, sanitize
from allennlp.data import Instance, DatasetReader
from allennlp.predictors.predictor import Predictor
@Predictor.register("transformer_qa")
class TransformerQAPredictor(Predictor):
    """
    Predictor for the :class:`~allennlp_rc.models.TransformerQA` model, and any
    other model that takes a question and passage as input.

    A single question/context pair may expand to several instances (one per
    context window); batch prediction regroups them by question id and keeps
    the highest-scoring span per question.
    """

    def __init__(self, model: Model, dataset_reader: DatasetReader) -> None:
        super(TransformerQAPredictor, self).__init__(model, dataset_reader)
        # Monotonically increasing id assigned to each incoming question.
        self._next_qid = 1

    def predict(self, question: str, passage: str) -> JsonDict:
        """
        Make a machine comprehension prediction on the supplied input.
        See https://rajpurkar.github.io/SQuAD-explorer/ for more information about the machine comprehension task.

        Parameters
        ----------
        question : ``str``
            A question about the content in the supplied paragraph.  The question must be answerable by a
            span in the paragraph.
        passage : ``str``
            A paragraph of information relevant to the question.

        Returns
        -------
        A dictionary that represents the prediction made by the system.  The answer string will be under the
        "best_span_str" key.
        """
        return self.predict_json({"context": passage, "question": question})

    def predict_json(self, inputs: JsonDict) -> JsonDict:
        """Predict for one {"question", "context"} dict via the batch path."""
        results = self.predict_batch_json([inputs])
        assert len(results) == 1
        return results[0]

    @overrides
    def _json_to_instance(self, json_dict: JsonDict) -> Instance:
        # Deliberately unsupported: one JSON input maps to several instances.
        raise NotImplementedError(
            "This predictor maps a question to multiple instances. "
            "Please use _json_to_instances instead."
        )

    def _json_to_instances(self, json_dict: JsonDict) -> List[Instance]:
        """Expand one question/context dict into dataset-reader instances,
        all tagged with the same freshly assigned question id."""
        result = list(
            self._dataset_reader.make_instances(
                qid=str(self._next_qid),
                question=json_dict["question"],
                answers=[],
                context=json_dict["context"],
                first_answer_offset=None,
            )
        )
        self._next_qid += 1
        return result

    @overrides
    def _batch_json_to_instances(self, json_dicts: List[JsonDict]) -> List[Instance]:
        """Flatten the per-question instance lists into one batch."""
        instances = []
        for json_dict in json_dicts:
            instances.extend(self._json_to_instances(json_dict))
        return instances

    @overrides
    def predict_batch_json(self, inputs: List[JsonDict]) -> List[JsonDict]:
        """Predict for a batch of question/context dicts; one result each."""
        instances = self._batch_json_to_instances(inputs)
        result = self.predict_batch_instance(instances)
        assert len(result) == len(inputs)
        return result

    @overrides
    def predict_batch_instance(self, instances: List[Instance]) -> List[JsonDict]:
        """Run the model and keep, per question id, the output with the
        highest ``best_span_scores``."""
        outputs = self._model.forward_on_instances(instances)

        # group outputs with the same question id
        qid_to_output: Dict[str, Dict[str, Any]] = {}
        for instance, output in zip(instances, outputs):
            qid = instance["metadata"]["id"]
            output["id"] = qid
            output["answers"] = instance["metadata"]["answers"]
            if qid in qid_to_output:
                old_output = qid_to_output[qid]
                if old_output["best_span_scores"] < output["best_span_scores"]:
                    qid_to_output[qid] = output
            else:
                qid_to_output[qid] = output

        return [sanitize(o) for o in qid_to_output.values()]
"[email protected]"
]
| |
eb8e6e115f2490e2dbf8a872be81ed5f9929010a | f097c3488bcfd1e5d0a566f41cbac8980795aa0f | /tests/academics/models/logistics/test_instructor.py | 6aa08e826353df34aba76235d9be2a7a1f5e23d5 | [
"MIT"
]
| permissive | compserv/hknweb | fdce5d5a8e5402ce64f6d93adcea9b43fc920874 | 76f91d2c118bd017d3b714b805d08b5c49c5693e | refs/heads/master | 2023-04-29T12:58:00.253960 | 2023-02-16T01:47:17 | 2023-02-16T01:47:17 | 110,480,397 | 21 | 113 | MIT | 2023-09-03T01:46:27 | 2017-11-13T00:10:26 | Python | UTF-8 | Python | false | false | 330 | py | from django.test import TestCase
from tests.academics.utils import ModelFactory
class InstructorModelTests(TestCase):
def setUp(self):
instructor_id = "my instructor id"
instructor = ModelFactory.create_instructor(instructor_id)
self.instructor = instructor
def test_basic(self):
pass
| [
"[email protected]"
]
| |
c7d5b5f11fa82336e84e969ac3395ffe1c5bfe22 | 19acab66e4986a0b690fa643796d7d6736fe8b2c | /xPOO/filtering/filtsig.py | c77ab976dbaaf8227d62c0d85855d1926cfeeece | []
| no_license | gitter-badger/brainpipe | 184e11b12a0c5ad7ed6c22957a56b9f70b2d7487 | b381894190e6887d7814ca88f0dbb3e2448c759f | refs/heads/master | 2020-12-24T10:03:51.397655 | 2016-01-26T22:45:24 | 2016-01-26T22:45:24 | 50,481,078 | 0 | 0 | null | 2016-01-27T04:17:45 | 2016-01-27T04:17:45 | null | UTF-8 | Python | false | false | 8,563 | py | """
Design a filter, filt a signal, extract the phase, amplitude or power
"""
import numpy as n
from scipy.signal import filtfilt, butter, bessel, hilbert, hilbert2, detrend
from .filtsup import fir_order, fir1, morlet
__all__ = [
'filtDesign', 'filtvec',
]
__author__ = 'Etienne Combrisson'
class filtDesign(object):
"""Design a filter
Parameters
----------
filtname : string, optional [def : 'fir1']
Name of the filter. Possible values are:
- 'fir1' : Window-based FIR filter design
- 'butter' : butterworth filter
- 'bessel' : bessel filter
cycle : int, optional [def : 3]
Number of cycle to use for the filter. This parameter
is only avaible for the 'fir1' method
order : int, optional [def : 3]
Order of the 'butter' or 'bessel' filter
axis : int, optional [def : 0]
Filter accross the dimension 'axis'
"""
def __init__(self, filtname='fir1', cycle=3, order=3, axis=0):
if filtname not in ['fir1', 'butter', 'bessel', 'wavelet']:
raise ValueError('No "filtname" called "'+str(filtname)+'"'
' is defined. Choose between "fir1", "butter", '
'"bessel"')
self.filtname = filtname
self.cycle = cycle
self.order = order
self.axis = axis
def _getFiltDesign(self, sf, f, npts):
"""Get the designed filter
sf : sample frequency
f : frequency vector/list [ex : f = [2,4]]
npts : number of points
"""
if type(f) != n.ndarray:
f = n.array(f)
if self.filtname == 'fir1':
fOrder = fir_order(sf, npts, f[0], cycle=self.cycle)
b, a = fir1(fOrder, f/(sf / 2))
elif self.filtname == 'butter':
b, a = butter(self.order, [(2*f[0])/sf,
(2*f[1])/sf], btype='bandpass')
fOrder = None
elif self.filtname == 'bessel':
b, a = bessel(self.order, [(2*f[0])/sf,
(2*f[1])/sf], btype='bandpass')
fOrder = None
def filSignal(x): return filtfilt(b, a, x, padlen=fOrder,
axis=self.axis)
return filSignal
class filtvec(filtDesign):
"""Design a filter
Parameters
----------
method : string
Method to transform the signal. Possible values are:
- 'hilbert' : apply a hilbert transform to each column
- 'hilbert1' : hilbert transform to a whole matrix
- 'hilbert2' : 2D hilbert transform
- 'wavelet' : wavelet transform
- 'filter' : filtered signal
kind : string
Type of information to extract to the transformed signal.
Possible values are:
- 'signal' : return the transform signal
- 'phase' : phase of the the transform signal
- 'amplitude' : amplitude of the transform signal
- 'power' : power of the transform signal
filtname : string, optional [def : 'fir1']
Name of the filter. Possible values are:
- 'fir1' : Window-based FIR filter design
- 'butter' : butterworth filter
- 'bessel' : bessel filter
cycle : int, optional [def : 3]
Number of cycle to use for the filter. This parameter
is only avaible for the 'fir1' method
order : int, optional [def : 3]
Order of the 'butter' or 'bessel' filter
axis : int, optional [def : 0]
Filter accross the dimension 'axis'
dtrd : bool, optional [def : Flase]
Detrend the signal
wltWidth : int, optional [def : 7]
Width of the wavelet
wltCorr : int, optional [def : 3]
Correction of the edgde effect of the wavelet
Method
----------
getMeth : get the list of methods
sf : sample frequency
f : frequency vector/list [ex : f = [ [2,4], [5,7], [8,13] ]]
npts : number of points
-> Return a list of methods. The length of the list depend on the
length of the frequency list "f".
applyMeth : apply the list of methods
x : array signal, [x] = npts x ntrials
fMeth : list of methods
-> Return a 3D array nFrequency x npts x ntrials
"""
def __init__(self, method, kind, filtname='fir1', cycle=3, order=3,
axis=0, dtrd=False, wltWidth=7, wltCorr=3):
if method not in ['hilbert', 'hilbert1', 'hilbert2', 'wavelet',
'filter']:
raise ValueError('No "method" called "'+str(method)+'" is defined.'
' Choose between "hilbert", "hilbert1", '
'"hilbert2", "wavelet", "filter"')
if kind not in ['signal', 'phase', 'amplitude', 'power']:
raise ValueError('No "kind" called "'+str(self.kind)+'"'
' is defined. Choose between "signal", "phase", '
'"amplitude", "power"')
self.method = method
self.kind = kind
self.wltWidth = wltWidth
self.wltCorr = wltCorr
self.dtrd = dtrd
self.filtname = filtname
self.cycle = cycle
self.order = order
self.axis = axis
super().__init__(filtname=filtname, cycle=cycle, order=order,
axis=axis)
def _getTransform(self, sf, f, npts):
fDesign = self._getFiltDesign(sf, f, npts)
if self.method == 'hilbert': # Hilbert method
def hilb(x):
xH = n.zeros(x.shape)*1j
xF = fDesign(x)
for k in range(0, x.shape[1]):
xH[:, k] = hilbert(xF[:, k])
return xH
return hilb
elif self.method == 'hilbert1': # Hilbert method 1
def hilb1(x): return hilbert(fDesign(x))
return hilb1
elif self.method == 'hilbert2': # Hilbert method 2
def hilb2(x): return hilbert2(fDesign(x))
return hilb2
elif self.method == 'wavelet': # Wavelet method
def wav(x): return morlet(x, sf, (f[0]+f[1])/2,
wavelet_width=self.wltWidth)
return wav
elif self.method == 'filter': # Filter the signal
def fm(x): return fDesign(x)
return fm
def _getKind(self):
if self.kind == 'signal': # Unmodified signal
def sig_k(x): return x
return sig_k
elif self.kind == 'phase': # phase of the filtered signal
def phase_k(x): return n.angle(x)
return phase_k
elif self.kind == 'amplitude': # amplitude of the filtered signal
def amp_k(x): return abs(x)
return amp_k
elif self.kind == 'power': # power of the filtered signal
def pow_k(x): return n.square(abs(x))
return pow_k
def getMeth(self, sf, f, npts):
"""Get the methods
sf : sample frequency
f : frequency vector/list [ ex : f = [[2,4],[5,7]] ]
npts : number of points
-> Return a list of methods
"""
if type(f[0]) == int:
f = [f]
xKind = self._getKind()
fmeth = []
for k in f:
def fme(x, fce=k): return xKind(self._getTransform(
sf, fce, npts)(x))
fmeth.append(fme)
return fmeth
def applyMeth(self, x, fMeth):
"""Apply the methods
x : array signal
fMeth : list of methods
-> 3D array of the transform signal
"""
npts, ntrial = x.shape
nFce = len(fMeth)
xf = n.zeros((nFce, npts, ntrial))
# Detrend the signal :
if self.dtrd:
x = detrend(x, axis=0)
# Apply methods :
for k in range(0, nFce): # For each frequency in the tuple
xf[k, ...] = fMeth[k](x)
# Correction for the wavelet (due to the wavelet width):
if (self.method == 'wavelet') and (self.wltCorr is not None):
w = 3*self.wltWidth
xf[:, 0:w, :] = xf[:, w+1:2*w+1, :]
xf[:, npts-w:npts, :] = xf[:, npts-2*w-1:npts-w-1, :]
return xf
| [
"[email protected]"
]
| |
b96f9396c26d6c99d8a20b9366d3bccc8f4187df | a034d4ba39789e4a351112c46dd04a38180cd06c | /appengine/findit/infra_api_clients/swarming/swarming_task_request.py | 278735e0edeb5230cee5099b1d451c223d7ae774 | [
"BSD-3-Clause"
]
| permissive | asdfghjjklllllaaa/infra | 050ad249ab44f264b4e2080aa9537ce74aafb022 | 8f63af54e46194cd29291813f2790ff6e986804d | refs/heads/master | 2023-01-10T21:55:44.811835 | 2019-07-01T14:03:32 | 2019-07-01T14:03:32 | 194,691,941 | 1 | 0 | BSD-3-Clause | 2023-01-07T07:12:37 | 2019-07-01T14:45:29 | Python | UTF-8 | Python | false | false | 5,119 | py | # Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from libs.list_of_basestring import ListOfBasestring
from libs.structured_object import StructuredObject
class SwarmingTaskInputsRef(StructuredObject):
  """Contains information on the locations of the binaries to run against."""
  # A hash representing the isolated input pointing to the binaries to test.
  isolated = basestring
  # The url to the server the isolated inputs reside on.
  isolatedserver = basestring
  # Namespace on the isolate server under which the inputs are stored.
  namespace = basestring
class SwarmingTaskProperties(StructuredObject):
  """Fields populated in swarming task requests."""
  # Named caches to mount into the task's working directory.
  caches = list
  # Command line for the task, if given explicitly.
  command = basestring
  # Environment variable prefix entries.
  env_prefixes = list
  # Bot dimensions the task must be scheduled on.
  dimensions = list
  # Environment variables set for the task.
  env = list

  # The maximum amount of time the swarming task is allowed to run before being
  # terminated, returned as a string representation of an int.
  execution_timeout_secs = basestring

  # Extra command-line arguments appended to the task command.
  extra_args = ListOfBasestring

  # Grace period before hard-kill, as a string representation of an int.
  grace_period_secs = basestring

  # Whether identical requests may reuse a previous result.
  idempotent = bool

  # Information pointing to the location of the test binaries.
  inputs_ref = SwarmingTaskInputsRef

  # I/O timeout, as a string representation of an int.
  io_timeout_secs = basestring
class SwarmingTaskRequest(StructuredObject):
  """Represents a task request on Swarming server."""

  # The created timestamp according to Swarming, returned as a string
  # representation of a timestamp.
  created_ts = basestring

  # How long the request may sit pending, as a string representation of an int.
  expiration_secs = basestring

  # The name of the swarming task.
  name = basestring
  # Task id of the parent task, if any.
  parent_task_id = basestring

  # The priority of the swarming task. The lower the number, the higher the
  # priority, represented as a string.
  priority = basestring

  # Service account the task runs as.
  service_account = basestring
  # Free-form tags attached to the request.
  tags = ListOfBasestring
  # User on whose behalf the task runs.
  user = basestring
  # Execution properties (command, dimensions, timeouts, inputs, ...).
  properties = SwarmingTaskProperties

  # Pub/Sub parameters used for task-completion notification.
  pubsub_topic = basestring
  pubsub_auth_token = basestring
  pubsub_userdata = basestring
  @staticmethod
  def GetSwarmingTaskRequestTemplate():
    """Returns a template SwarmingTaskRequest object with default values.

    All timeout/priority fields are the string representations Swarming
    expects; callers fill in name, dimensions, inputs_ref, etc.
    """
    return SwarmingTaskRequest(
        created_ts=None,
        expiration_secs='3600',
        name='',
        parent_task_id='',
        priority='150',
        properties=SwarmingTaskProperties(
            caches=[],
            command=None,
            dimensions=[],
            env=[],
            env_prefixes=[],
            execution_timeout_secs='3600',
            extra_args=ListOfBasestring(),
            grace_period_secs='30',
            io_timeout_secs='1200',
            idempotent=True,
            inputs_ref=SwarmingTaskInputsRef(
                isolated=None, isolatedserver=None, namespace=None)),
        pubsub_auth_token=None,
        pubsub_topic=None,
        pubsub_userdata=None,
        service_account=None,
        tags=ListOfBasestring(),
        user='')
@classmethod
def FromSerializable(cls, data):
"""Deserializes the given data into a SwarmingTaskRequest.
Because Swarming frequently adds new fields to task requests, maintaining
a strict 1:1 mapping between Findit and Swarming is not feasible. Instead
when deserializing a swarming task request, only consider the fields that
are necessary.
Args:
data (dict): The dict mapping from defined attributes to their values.
Returns:
An instance of the given class with attributes set to the given data.
"""
properties = data.get('properties', {})
inputs_ref = properties.get('inputs_ref', {})
return SwarmingTaskRequest(
created_ts=data.get('created_ts'),
expiration_secs=str(data.get('expiration_secs')),
name=data.get('name'),
parent_task_id=data.get('parent_task_id'),
priority=str(data.get('priority')),
properties=SwarmingTaskProperties(
caches=properties.get('caches'),
command=properties.get('command'),
dimensions=properties.get('dimensions') or [],
env=properties.get('env') or [],
env_prefixes=properties.get('env_prefixes') or [],
execution_timeout_secs=str(
properties.get('execution_timeout_secs')),
extra_args=ListOfBasestring.FromSerializable(
properties.get('extra_args') or []),
grace_period_secs=str(properties.get('grace_period_secs')),
io_timeout_secs=str(properties.get('io_timeout_secs')),
idempotent=properties.get('idempotent'),
inputs_ref=SwarmingTaskInputsRef(
isolated=inputs_ref.get('isolated'),
isolatedserver=inputs_ref.get('isolatedserver'),
namespace=inputs_ref.get('namespace'))),
pubsub_auth_token=data.get('pubsub_auth_token'),
pubsub_topic=data.get('pubsub_topic'),
pubsub_userdata=data.get('pubsub_userdata'),
service_account=data.get('service_account'),
tags=ListOfBasestring.FromSerializable(data.get('tags') or []),
user=data.get('user'))
| [
"[email protected]"
]
| |
ad346a7474716451000c9a1b097d3ada0ac109a9 | d13a069fda8ce2e0a202eb43266af7558355cdc6 | /ROJASCUBAS/app24.py | 24c86c1d8f04cbe20399b5b3f06f7386c6fc1e6f | []
| no_license | CARLOSC10/T09_LIZA.DAMIAN_ROJAS.CUBAS | 879aba53c1db9ed2bfc4c37da3bf1cbd9df14d97 | 357ec4a3c266fa4ddf8d13b1ecb2af0feb604755 | refs/heads/master | 2020-11-27T04:52:52.004005 | 2019-12-20T18:10:12 | 2019-12-20T18:10:12 | 229,309,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 182 | py | import os
import libreria
cliente=os.sys.argv[1]
total_pagar=float(os.sys.argv[2])
consumo_energia=float(os.sys.argv[3])
libreria.MOSTRAR_RECIBO(cliente,total_pagar,consumo_energia)
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.