Dataset columns (name: type, observed range):

blob_id: string, length 40
directory_id: string, length 40
path: string, length 3 to 616
content_id: string, length 40
detected_licenses: list, length 0 to 112
license_type: string, 2 classes
repo_name: string, length 5 to 115
snapshot_id: string, length 40
revision_id: string, length 40
branch_name: string, 777 classes
visit_date: timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38
revision_date: timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00
committer_date: timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06
github_id: int64, 4.92k to 681M, contains nulls
star_events_count: int64, 0 to 209k
fork_events_count: int64, 0 to 110k
gha_license_id: string, 22 classes
gha_event_created_at: timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50, contains nulls
gha_created_at: timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19, contains nulls
gha_language: string, 149 classes
src_encoding: string, 26 classes
language: string, 1 class
is_vendor: bool, 2 classes
is_generated: bool, 2 classes
length_bytes: int64, 3 to 10.2M
extension: string, 188 classes
content: string, length 3 to 10.2M
authors: list, length 1
author_id: string, length 1 to 132

Each record below gives its metadata values in this column order (pipe-separated), followed by the file content and the authors list.
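As a rough illustration of how a row with this schema can be consumed, the sketch below represents one record as a plain Python dict keyed by the column names above and filters on its metadata fields. This is only a minimal sketch under that assumption: the `row` values are copied from the first record, the `keep` helper is hypothetical, and the file content is elided.

# Minimal sketch (assumption): one dataset row represented as a plain dict
# keyed by the columns listed above. Values are taken from the first record;
# the file content itself is elided.
row = {
    "blob_id": "d56b4958a050b2475eab08b22b64f06f36724f1c",
    "path": "/tests/test_forex_strategy.py",
    "detected_licenses": ["MIT"],
    "license_type": "permissive",
    "repo_name": "mj3428/OnePy",
    "language": "Python",
    "is_vendor": False,
    "is_generated": False,
    "length_bytes": 5956,
    "extension": "py",
    "content": "...",  # full source text of the file, elided here
}

def keep(row):
    """Hypothetical filter: permissively licensed, human-written .py files."""
    return (
        row["license_type"] == "permissive"
        and not row["is_vendor"]
        and not row["is_generated"]
        and row["extension"] == "py"
    )

print(keep(row))  # -> True for the example row above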
d56b4958a050b2475eab08b22b64f06f36724f1c
|
c0973d6939ef419ed3d261d95167d537499a553a
|
/tests/test_forex_strategy.py
|
34ee96c1d1ee8c48e63da8ba1979bb1dabdcf0dc
|
[
"MIT"
] |
permissive
|
mj3428/OnePy
|
0c6e4be9b4bb36ae66b566dfa85cd44bae2a07de
|
8dc13fc21502daa5786aecaa4451ccba32fc8a14
|
refs/heads/master
| 2020-04-05T10:28:33.550915 | 2018-11-08T04:07:05 | 2018-11-08T04:07:05 | 134,518,682 | 0 | 0 |
MIT
| 2018-05-23T05:38:12 | 2018-05-23T05:38:11 | null |
UTF-8
|
Python
| false | false | 5,956 |
py
|
from collections import defaultdict
import OnePy as op
from OnePy.custom_module.cleaner_talib import Talib
class DemoTestStrategy(op.StrategyBase):
def __init__(self):
super().__init__()
self.params = dict(sma1=25,
sma2=9)
self.set_params(self.params)
def set_params(self, params: dict):
self.params = params
self.sma1 = Talib(ind='SMA', frequency='H1',
params=dict(timeperiod=params['sma1']),
buffer_day=20).calculate
self.sma2 = Talib(ind='SMA', frequency='D',
params=dict(timeperiod=params['sma2']),
buffer_day=20).calculate
def handle_bar(self):
for ticker in self.env.tickers:
sma1 = self.sma1(ticker)
sma2 = self.sma2(ticker)
if sma1 > sma2:
self.buy(1, ticker, takeprofit=10, stoploss=10)
self.buy(1, ticker, takeprofit_pct=0.01, trailingstop=10)
self.buy(1, ticker, price_pct=0.1, takeprofit_pct=0.01)
self.short(1, ticker, takeprofit=10, trailingstop_pct=0.03)
self.short(1, ticker, stoploss_pct=0.02)
else:
self.sell(1, ticker, price_pct=0.1)
self.sell(99, ticker)
self.cover(3, ticker, price_pct=0.02)
self.cancel_tst(ticker, 'long', takeprofit=True)
self.cancel_pending(ticker, 'long', above_price=0)
START, END = '2016-01-05', '2016-01-21'
FREQUENCY = 'M30'
TICKER_LIST = ['EUR_USD']
INITIAL_CASH = 2000
go = op.backtest.forex(TICKER_LIST, FREQUENCY,
INITIAL_CASH, START, END, 'oanda')
DemoTestStrategy()
# forward_analysis(go, START, END, 2, 3)
# go.forward_analysis.run(START, 3, 2, 5)
# go.show_today_signals()
# go.sunny()
# go.output.save_result('backtest_forex.pkl')
# go.output.summary2()
# go.output.analysis.trade_analysis()
# go.output.plot('EUR_USD')
# go.output.plot(TICKER_LIST, 'plotly')
# || Initializing OnePy
# || =============== OnePy initialized successfully! ===============
# || Starting the journey to find the OnePiece~~~
# || cleaners warning: a weekend may have been hit, so next is unavailable
# || cleaners warning: a weekend may have been hit, so next is unavailable
# || cleaners warning: a weekend may have been hit, so next is unavailable
# || cleaners warning: a weekend may have been hit, so next is unavailable
# ||
# ||
# || +--------------------------------+
# || |Fromdate | 2016-01-05|
# || |Todate | 2016-01-21|
# || |Initial_Value | $2000.00|
# || |Final_Value | $1991.33|
# || |Total_Return | -0.433%|
# || |Max_Drawdown | 2.725%|
# || |Max_Duration | 14 days|
# || |Max_Drawdown_Date | 2016-01-20|
# || |Sharpe_Ratio | -1.37|
# || +--------------------------------+
# || +---------------------------------------+
# || |Start_date | 2016-01-05|
# || |End_date | 2016-01-21|
# || |Initial_balance | $2000.00|
# || |End_balance | $1991.33|
# || |Total_return | -0.43%|
# || |Total_net_pnl | -$8.67|
# || |Total_commission | $0.19|
# || |Total_trading_days | 15 days|
# || |Max_drawdown | 2.73%|
# || |Max_drawdown_date | 2016-01-20|
# || |Max_duration_in_drawdown | 14 days|
# || |Max_margin | $14.76|
# || |Max_win_holding_pnl | $13.26|
# || |Max_loss_holding_pnl | -$37.73|
# || |Sharpe_ratio | -1.37|
# || |Sortino_ratio | -1.96|
# || |Number_of_trades | 1264|
# || |Number_of_daily_trades | 84.27|
# || |Number_of_profit_days | 15 days|
# || |Number_of_loss_days | 0 days|
# || |Avg_daily_pnl | -$0.58|
# || |Avg_daily_commission | $0.01|
# || |Avg_daily_return | -0.03%|
# || |Avg_daily_std | -0.03%|
# || |Annual_compound_return | -7.52%|
# || |Annual_average_return | -7.82%|
# || |Annual_std | -0.48%|
# || |Annual_pnl | -$145.61|
# || +---------------------------------------+
# || All Trades Long Trades Short Trades
# || Total_number_of_trades 1264 632 632
# || Total_net_pnl -$7.84 -$11.54 $3.69
# || Ratio_avg_win_avg_loss 0.85 0.75 0.87
# || Profit_factor 0.69 0.29 1.43
# || Percent_profitable 45.02% 28.16% 61.87%
# || Number_of_winning_trades 569 178 391
# || Number_of_losing_trades 693 454 239
# || Max_holding_period 4.85 days 4.85 days 4.62 days
# || Max_consecutive_winning_trade 126 47 126
# || Max_consecutive_losing_trade 104 102 95
# || Largest_winning_trade $0.11 $0.11 $0.10
# || Largest_losing_trade -$0.12 -$0.10 -$0.12
# || Gross_profit $17.39 $4.75 $12.64
# || Gross_loss -$25.04 -$16.19 -$8.85
# || Gross_commission $0.19 $0.09 $0.09
# || Expectancy_adjusted_ratio -0.17 -0.51 0.16
# || Expectancy -$0.01 -$0.02 $0.01
# || Avg_winning_trade $0.03 $0.03 $0.03
# || Avg_net_pnl_per_trade -$0.01 -$0.02 $0.01
# || Avg_losing_trade -$0.04 -$0.04 -$0.04
# || Avg_holding_period 2.14 days 1.96 days 3.10 days
# || python tests/test_forex_strategy.py 4.84s user 0.35s system 92% cpu 5.622 total
# || [Finished in 5 seconds]
|
[
"[email protected]"
] | |
036fab2dffd9e95949f40381d15eede2b578ec55
|
000f57fa43ecf9f5353ca80ced3ad505698dbecb
|
/imagelib/images/admin.py
|
a94d0a8d37ef907f4ed79f5c646191a01efdafd6
|
[] |
no_license
|
kamal0072/imagegallary
|
846e9ef43f6e0c42c98a4a4ad5cb22faef295936
|
91effde764710fd9bfc31b7dec238d143833e31e
|
refs/heads/master
| 2023-05-02T07:32:35.805297 | 2021-05-21T05:35:05 | 2021-05-21T05:35:05 | 359,541,161 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 182 |
py
|
from django.contrib import admin
from .models import MyImage
@admin.register(MyImage)
class MyImageAdmin(admin.ModelAdmin):
list_display=['id','first_name','photo','date']
|
[
"[email protected]"
] | |
a890bdf5f55e412c518b273f855b36f0315200bf
|
e7a87d9eca87d8be7b23b3a57c1d49f0ad6d20bc
|
/django_evolution/compat/models.py
|
7c108ce3cd7000cd75d3b0a8dab6f26b6b38c3a9
|
[
"BSD-2-Clause"
] |
permissive
|
beanbaginc/django-evolution
|
19a775a223b61861f503925216fb236b822122c0
|
756eedeacc41f77111a557fc13dee559cb94f433
|
refs/heads/master
| 2023-06-22T07:25:32.401292 | 2022-11-10T03:23:50 | 2022-11-10T03:23:50 | 14,189,401 | 22 | 13 | null | 2015-01-07T01:15:08 | 2013-11-07T00:04:43 |
Python
|
UTF-8
|
Python
| false | false | 9,727 |
py
|
"""Compatibility functions for model-related operations.
This provides functions for working with models or importing moved fields.
These translate to the various versions of Django that are supported.
"""
from __future__ import unicode_literals
from django.db import models
from django.db.models.fields import related
try:
# Django >= 1.7
from django.apps.registry import apps
from django.contrib.contenttypes.fields import (GenericForeignKey,
GenericRelation)
cache = None
all_models = apps.all_models
get_model = apps.get_model
_get_models = None
except ImportError:
# Django < 1.7
from django.db.models.loading import (cache, get_model,
get_models as _get_models)
from django.contrib.contenttypes.generic import (GenericForeignKey,
GenericRelation)
all_models = cache.app_models
apps = None
try:
# Django >= 1.8
from django.core.exceptions import FieldDoesNotExist
except ImportError:
# Django < 1.8
from django.db.models.fields import FieldDoesNotExist
def get_models(app_mod=None, include_auto_created=False):
"""Return the models belonging to an app.
Args:
app_mod (module, optional):
The application module.
include_auto_created (bool, optional):
Whether to return auto-created models (such as many-to-many
models) in the results.
Returns:
list:
The list of modules belonging to the app.
"""
if apps:
# Django >= 1.7
if app_mod is None:
return apps.get_models(include_auto_created=include_auto_created)
for app_config in apps.get_app_configs():
if app_config.models_module is app_mod:
return [
model
for model in app_config.get_models(
include_auto_created=include_auto_created)
if not model._meta.abstract
]
return []
else:
# Django < 1.7
models = _get_models(app_mod,
include_auto_created=include_auto_created)
if app_mod is not None:
# Avoids a circular import.
from django_evolution.utils.apps import get_app_name
app_mod_name = get_app_name(app_mod)
models = [
model
for model in models
if model.__module__.startswith(app_mod_name)
]
return models
def set_model_name(model, name):
"""Set the name of a model.
Args:
model (django.db.models.Model):
The model to set the new name on.
name (str):
The new model name.
"""
if hasattr(model._meta, 'model_name'):
# Django >= 1.7
model._meta.model_name = name
else:
# Django < 1.7
model._meta.module_name = name
def get_model_name(model):
"""Return the model's name.
Args:
model (django.db.models.Model):
The model for which to return the name.
Returns:
str: The model's name.
"""
if hasattr(model._meta, 'model_name'):
# Django >= 1.7
return model._meta.model_name
else:
# Django < 1.7
return model._meta.module_name
def get_field_is_hidden(field):
"""Return whether a field is hidden.
Version Added:
2.2
Args:
field (django.db.models.Field):
The field to check.
Returns:
bool:
``True`` if the field is hidden. ``False`` if it is not.
"""
if hasattr(field, 'hidden'):
# Django >= 1.8
return field.hidden
else:
# Django < 1.8
if hasattr(field, 'rel'):
return field.rel.is_hidden()
else:
return field.is_hidden()
def get_field_is_many_to_many(field):
"""Return whether a field is a Many-to-Many field.
Version Added:
2.2
Args:
field (django.db.models.Field):
The field to check.
Returns:
bool:
``True`` if the field is a Many-to-Many field. ``False`` if it is not.
"""
if hasattr(field, 'many_to_many'):
# Django >= 1.8
return field.many_to_many
else:
# Django < 1.8
return isinstance(field, (models.ManyToManyField,
related.ManyToManyRel))
def get_field_is_relation(field):
"""Return whether a field is a relation.
A field is a relation if it's an object like a
:py:class:`django.db.models.ForeignKey` or
:py:class:`django.db.models.ManyToManyField`, or if it's a relation
utility field like
:py:class:`django.db.models.fields.related.ForeignObjectRel` or
:py:class:`django.db.models.fields.related.ManyToOneRel`.
Version Added:
2.2
Args:
field (django.db.models.Field or
django.db.models.fields.related.ForeignObjectRel):
The field to check.
Returns:
bool:
``True`` if the field is a relation. ``False`` if it is not.
"""
if hasattr(field, 'is_relation'):
# Django >= 1.8
return field.is_relation
else:
# Django < 1.8
return (getattr(field, 'rel', None) is not None or
isinstance(field, (related.ForeignObjectRel,
related.ManyToManyRel)))
def get_rel_target_field(field):
"""Return the target field for a field's relation.
Warning:
Despite the name, this should only be called on a
:py:class:`ForeignKey` and not on a relation, in order to avoid
consistency issues in the data returned on Django >= 1.7.
Args:
field (django.db.models.Field):
The relation field.
Returns:
django.db.models.Field:
The field on the other end of the relation.
"""
if hasattr(field, 'target_field'):
# Django >= 1.7
return field.target_field
else:
# Django < 1.7
return field.related_field
def get_remote_field(field):
"""Return the remote field for a relation.
This will be an intermediary field, such as:
* :py:class:`django.db.models.fields.related.ForeignObjectRel`
* :py:class:`django.db.models.fields.related.ManyToOneRel`
* :py:class:`django.db.models.fields.related.OneToOneRel`
* :py:class:`django.db.models.fields.related.ManyToManyRel`
This is equivalent to ``rel`` prior to Django 1.9 and ``remote_field``
in 1.9 onward.
Version Changed:
2.2:
On Django < 1.9, a main relation field (like
:py:class:`django.db.models.ForeignKey`) will return the utility
relation, matching the behavior on >= 1.9.
Args:
field (django.db.models.Field):
The relation field.
Returns:
django.db.models.Field:
The remote field on the relation.
"""
if hasattr(field, 'remote_field'):
# Django >= 1.9
return field.remote_field
else:
# Django < 1.9
if hasattr(field, 'rel'):
return field.rel
elif isinstance(field, related.ManyToManyRel):
return getattr(field.to, field.related_name).related.field
elif isinstance(field, related.ForeignObjectRel):
return field.field
raise NotImplementedError('Unsupported field/relation type: %r'
% field)
def get_remote_field_model(rel):
"""Return the model a relation is pointing to.
This is equivalent to ``rel.to`` prior to Django 1.9 and
``remote_field.model`` in 1.9 onward.
Args:
rel (object):
The relation object. This is expected to be the result of a
:py:func:`get_remote_field` call.
Returns:
type:
The model the relation points to. This should be a subclass of
:py:meth:`django.db.models.Model`.
"""
if hasattr(rel, 'model'):
# Django >= 1.9
return rel.model
else:
# Django < 1.9
return rel.to
def get_remote_field_related_model(rel):
"""Return the model a relation is pointing from.
Version Added:
2.2
Args:
rel (object):
The relation object. This is expected to be the result of a
:py:func:`get_remote_field` call.
Returns:
type:
The model the relation points to. This should be a subclass of
:py:meth:`django.db.models.Model`.
"""
if hasattr(rel, 'related_model'):
# Django >= 1.9
return rel.related_model
else:
# Django < 1.9
if isinstance(rel, models.ForeignKey):
return rel.rel.get_related_field().model
elif isinstance(rel, models.ManyToManyField):
return rel.rel.to
elif isinstance(rel, related.ManyToOneRel):
return rel.field.model
elif isinstance(rel, related.ManyToManyRel):
return getattr(rel.to, rel.related_name).related.model
elif isinstance(rel, related.ForeignObjectRel):
return rel.get_related_field().model
raise NotImplementedError('Unsupported field/relation type: %r' % rel)
__all__ = [
'FieldDoesNotExist',
'GenericForeignKey',
'GenericRelation',
'all_models',
'get_field_is_hidden',
'get_field_is_many_to_many',
'get_field_is_relation',
'get_model',
'get_models',
'get_model_name',
'get_rel_target_field',
'get_remote_field',
'get_remote_field_model',
'get_remote_field_related_model',
'set_model_name',
]
|
[
"[email protected]"
] | |
2225a843e02ca5a21e862f98265377310449758d
|
cd90bbc775cbce9a7e0bc46cbb9437e3961e587f
|
/misc/advent/2017/23/b.py
|
4d6d170eea60181e64e6b15187262a04afe567f4
|
[] |
no_license
|
llimllib/personal_code
|
7b3f0483589e2928bf994184e3413f4b887e1f0c
|
4d4662d53e0ac293dea8a4208ccca4a1f272e64a
|
refs/heads/master
| 2023-09-05T04:02:05.075388 | 2023-09-01T12:34:09 | 2023-09-01T12:34:09 | 77,958 | 9 | 16 | null | 2023-08-16T13:54:39 | 2008-11-19T02:04:46 |
HTML
|
UTF-8
|
Python
| false | false | 1,685 |
py
|
from collections import defaultdict
import sys
def go(inp):
registers = defaultdict(int)
registers['a'] = 1
instructions = list(i.strip() for i in inp)
ptr = 0
i = 0
muls = 0
while ptr < len(instructions):
if i % 1000000 == 0:
pass
#print(registers)
inst, a, b = instructions[ptr].split(" ")
# print(ptr, inst, a, b)
if instructions[ptr] == "jnz g 2" and registers['d'] * registers['e'] == registers['b']:
print(registers)
if inst == "set":
try:
registers[a] = int(b)
except ValueError:
                # this case is not discussed in the documentation?
registers[a] = registers[b]
elif inst == "mul":
muls += 1
try:
registers[a] *= int(b)
except ValueError:
registers[a] *= registers[b]
elif inst == "sub":
try:
registers[a] -= int(b)
except ValueError:
registers[a] -= registers[b]
elif inst == "jnz":
try:
val = int(a)
except ValueError:
val = registers[a]
if val != 0:
try:
ptr += int(b)
continue
except ValueError:
# also not discussed in the docs
ptr += registers[b]
continue
else:
raise TypeError("ohno")
# print(registers)
ptr += 1
i += 1
print(registers['h'])
if __name__ == "__main__":
go(open('jimmied.txt'))
|
[
"[email protected]"
] | |
35971ac7e0caac70225bae1f0be790575d72bcc8
|
1be0090ac65ee3c4ad12c9152886169d15fd6d0d
|
/tests/test_shelter.py
|
17ad185362119c38c433f2483fbbc094c649d1f4
|
[] |
no_license
|
jreiher2003/Puppy-Adoption
|
ec652a5df68fd428605fc154b2e6fb1bf0f5d752
|
0d0bea549d174d903db9de1ca5be79412f333ea3
|
refs/heads/master
| 2021-01-10T09:26:38.652807 | 2016-04-03T18:32:17 | 2016-04-03T18:32:17 | 51,519,990 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,836 |
py
|
import unittest
from base import BaseTestCase
from app.models import Shelter
class TestShelterCase(BaseTestCase):
def test_shelter_new_page(self):
response = self.client.get('/new-shelter', content_type='html/text')
self.assertEqual(response.status_code, 200)
self.assertIn(b'Add a shelter', response.data)
def test_shelter_add_new(self):
response = self.client.post('/new-shelter', data=dict(name='Greatshelter', address="321 Notreal st.", city="Nocity", state="Alabama", zipCode=54321, website="http://www.notreal.com", maximum_capacity=6, current_capacity=2), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertIn(b"<strong>Congrats</strong> You just created a new shelter named <u>Greatshelter</u>.", response.data)
def test_shelter_add_new_error(self):
response = self.client.post('/new-shelter', data=dict(name='Greatshelter'),follow_redirects=True)
self.assertIn(b"This field is required.", response.data)
def test_shelter_database(self):
shelter = Shelter.query.filter_by(id=1).one()
self.assertEqual(shelter.id, 1)
self.assertEqual(shelter.name, 'Testshelter')
self.assertEqual(shelter.address, '123 Fake st.')
self.assertEqual(shelter.city, 'Fake')
self.assertEqual(shelter.state, 'Florida')
self.assertEqual(shelter.zipCode, '12345')
self.assertEqual(shelter.website, 'http://test.com')
self.assertEqual(shelter.maximum_capacity, 10)
self.assertEqual(shelter.current_capacity, 5)
def test_shelter_edit_page(self):
response = self.client.get('/1/testshelter/edit/', content_type='html/text')
self.assertEqual(response.status_code, 200)
self.assertIn(b'Edit a shelter', response.data)
def test_shelter_edit_post(self):
response = self.client.post('/1/testshelter/edit/', data=dict(name="Testshelter", address="123 Fake st.", city="Fake", state="Florida", zipCode="12345", website="http://test.com", maximum_capacity=10, current_capacity=6), follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertIn(b'<strong>Update</strong> on <u>Testshelter</u>.', response.data)
def test_shelter_delete_page(self):
response = self.client.get('/1/testshelter/delete/', content_type='html/text')
self.assertEqual(response.status_code, 200)
self.assertIn(b'Are you sure you want to close down <mark>Testshelter</mark>?', response.data)
def test_shelter_delete_post(self):
response = self.client.post('/1/testshelter/delete/', follow_redirects=True)
self.assertEqual(response.status_code, 200)
self.assertIn(b'<strong>Successfully</strong> deleted shelter <u>Testshelter</u>.', response.data)
|
[
"[email protected]"
] | |
c5cf16f641ee307011a7892379838fa61d48d9d0
|
2aace9bb170363e181eb7520e93def25f38dbe5c
|
/build/idea-sandbox/system/python_stubs/cache/55af9580d6a61621d0d6de6e2bbfd43d14841968ee1dd01113aeb5bd2473a4cc/pyexpat/model.py
|
e5b10af34a44317d9c92488636663935e2daa366
|
[] |
no_license
|
qkpqkp/PlagCheck
|
13cb66fd2b2caa2451690bb72a2634bdaa07f1e6
|
d229904674a5a6e46738179c7494488ca930045e
|
refs/heads/master
| 2023-05-28T15:06:08.723143 | 2021-06-09T05:36:34 | 2021-06-09T05:36:34 | 375,235,940 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 523 |
py
|
# encoding: utf-8
# module pyexpat.model
# from C:\Users\Doly\Anaconda3\lib\site-packages\tables\indexesextension.cp37-win_amd64.pyd
# by generator 1.147
""" Constants used to interpret content model information. """
# no imports
# Variables with simple values
XML_CQUANT_NONE = 0
XML_CQUANT_OPT = 1
XML_CQUANT_PLUS = 3
XML_CQUANT_REP = 2
XML_CTYPE_ANY = 2
XML_CTYPE_CHOICE = 5
XML_CTYPE_EMPTY = 1
XML_CTYPE_MIXED = 3
XML_CTYPE_NAME = 4
XML_CTYPE_SEQ = 6
__loader__ = None
__spec__ = None
# no functions
# no classes
|
[
"[email protected]"
] | |
acd76f51af16046ea805a215a3013626165e5c91
|
e440cbf65b3b3a390d851df876ecb859ddaf2c5b
|
/marl/environments/particles/multiagent/scenarios/simple_spread.py
|
df3cdec30af0fe28579adbe711aea76c550598db
|
[
"MIT"
] |
permissive
|
wangjie-ubuntu/badger-2019
|
9ead42f9f4d4ebf2a74f02cf0a53de88cbe96a43
|
ccd8f428ad8aafad24f16d8e36ea31f6ab403dda
|
refs/heads/master
| 2022-08-12T07:49:36.928571 | 2020-01-07T16:04:33 | 2020-01-07T16:04:33 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,012 |
py
|
import numpy as np
from marl.environments.particles.multiagent.core import World, Agent, Landmark
from marl.environments.particles.multiagent.scenarios.custom.configurable_scenario import ConfigurableScenario
class Scenario(ConfigurableScenario):
""" Cooperative Navigation task in the https://arxiv.org/pdf/1706.02275.pdf
3 agents, 3 landmarks, each agent should sit on one landmark, implicit communication.
"""
num_landmarks: int
def __init__(self):
        # parameters overridden by the setup
super().__init__(num_agents=3)
self.num_landmarks = 3
def setup(self,
num_agents: int,
num_landmarks: int,
rollout_length: int):
super().setup()
self.num_agents = num_agents
self.num_landmarks = num_landmarks
self.episode_length = rollout_length
def make_world(self):
world = World()
# set any world properties first
world.dim_c = 2
# num_agents = 3
# num_landmarks = 3
world.collaborative = True
# add emergent_comm
world.agents = [Agent() for i in range(self.num_agents)]
for i, agent in enumerate(world.agents):
agent.name = 'agent %d' % i
agent.collide = True
agent.silent = True
agent.size = 0.15
# add landmarks
world.landmarks = [Landmark() for i in range(self.num_landmarks)]
for i, landmark in enumerate(world.landmarks):
landmark.name = 'landmark %d' % i
landmark.collide = False
landmark.movable = False
# make initial conditions
self.reset_world(world)
return world
def reset_world(self, world):
super().reset_world(world)
# random properties for emergent_comm
for i, agent in enumerate(world.agents):
agent.color = np.array([0.35, 0.35, 0.85])
# random properties for landmarks
for i, landmark in enumerate(world.landmarks):
landmark.color = np.array([0.25, 0.25, 0.25])
# set random initial states
for agent in world.agents:
agent.state.p_pos = np.random.uniform(-1, +1, world.dim_p)
agent.state.p_vel = np.zeros(world.dim_p)
agent.state.c = np.zeros(world.dim_c)
for i, landmark in enumerate(world.landmarks):
landmark.state.p_pos = np.random.uniform(-1, +1, world.dim_p)
landmark.state.p_vel = np.zeros(world.dim_p)
def benchmark_data(self, agent, world):
rew = 0
collisions = 0
occupied_landmarks = 0
min_dists = 0
for l in world.landmarks:
dists = [np.sqrt(np.sum(np.square(a.state.p_pos - l.state.p_pos))) for a in world.agents]
min_dists += min(dists)
rew -= min(dists)
if min(dists) < 0.1:
occupied_landmarks += 1
if agent.collide:
for a in world.agents:
if self.is_collision(a, agent):
rew -= 1
collisions += 1
return (rew, collisions, min_dists, occupied_landmarks)
def is_collision(self, agent1, agent2):
delta_pos = agent1.state.p_pos - agent2.state.p_pos
dist = np.sqrt(np.sum(np.square(delta_pos)))
dist_min = agent1.size + agent2.size
return True if dist < dist_min else False
def reward(self, agent, world):
# Agents are rewarded based on minimum agent distance to each landmark, penalized for collisions
rew = 0
for l in world.landmarks:
dists = [np.sqrt(np.sum(np.square(a.state.p_pos - l.state.p_pos))) for a in world.agents]
rew -= min(dists)
if agent.collide:
for a in world.agents:
if self.is_collision(a, agent):
rew -= 1
return rew
def observation(self, agent, world):
""" Composes the observation in the following manner, for each agent it is:
[vel_x, vel_y, pos_x, pos_y, [landmark positions], [other agent positions], comm. from others]
Communication not used now.
"""
super().observation(agent, world)
# get positions of all landmarks in this agent's reference frame
entity_pos = []
for entity in world.landmarks: # world.entities:
entity_pos.append(entity.state.p_pos - agent.state.p_pos)
# not used
# landmark colors
entity_color = []
for entity in world.landmarks: # world.entities:
entity_color.append(entity.color)
# communication and position of all other emergent_comm
comm = []
other_pos = []
for other in world.agents:
if other is agent: continue
comm.append(other.state.c)
other_pos.append(other.state.p_pos - agent.state.p_pos)
return np.concatenate([agent.state.p_vel] + [agent.state.p_pos] + entity_pos + other_pos + comm)
|
[
"[email protected]"
] | |
825a7169ffa219fdca77c3aebe9c57b21a2abdec
|
e7031386a884ae8ed568d8c219b4e5ef1bb06331
|
/venv/bin/django-admin.py
|
a886afcced915c9780aef5e25cbf66219dc35f94
|
[] |
no_license
|
ikbolpm/ultrashop-backend
|
a59c54b8c4d31e009704c3bf0e963085477092cf
|
290fa0ecdad40ec817867a019bff2ce82f08d6fe
|
refs/heads/dev
| 2022-11-30T21:49:17.965273 | 2020-09-24T10:16:12 | 2020-09-24T10:16:12 | 147,561,738 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 155 |
py
|
#!/var/www/sites/ultrashop/html/venv/bin/python3
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
|
[
"[email protected]"
] | |
43b7e368be72ded30b0f5741ec6aaeae9d297fc1
|
e535f59053b545b493c93c9945aa054ad1335178
|
/tests/test_scripts/test_gen_namespace.py
|
eedf7912de2a7e4ff6f8882cfc227d6842658f13
|
[
"CC0-1.0"
] |
permissive
|
pabloalarconm/linkml
|
9308669d5baba2a2c60fe79f31f737e87ed59295
|
5ef4b2f0e89698ffc0db693fdba68d1306438749
|
refs/heads/main
| 2023-08-25T14:41:58.419628 | 2021-10-02T02:04:06 | 2021-10-02T02:04:06 | 411,990,387 | 0 | 0 |
CC0-1.0
| 2021-09-30T08:49:50 | 2021-09-30T08:49:50 | null |
UTF-8
|
Python
| false | false | 1,107 |
py
|
import unittest
from types import ModuleType
import click
from linkml.generators import namespacegen
from tests.test_scripts.environment import env
from tests.utils.clicktestcase import ClickTestCase
from tests.utils.filters import metadata_filter
from tests.utils.python_comparator import compare_python
class GenNamespaceTestCase(ClickTestCase):
testdir = "gennamespace"
click_ep = namespacegen.cli
prog_name = "gen-namespace"
env = env
def test_help(self):
self.do_test("--help", 'help')
def test_meta(self):
self.maxDiff = None
self.do_test([], 'meta_namespaces.py', filtr=metadata_filter,
comparator=lambda exp, act: compare_python(exp, act, self.env.expected_path('meta_namespaces.py')))
self.do_test('-f py', 'meta_namespaces.py', filtr=metadata_filter,
comparator=lambda exp, act: compare_python(exp, act, self.env.expected_path('meta_namespaces.py')))
self.do_test('-f xsv', 'meta_error', expected_error=click.exceptions.BadParameter)
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
1440f3b31a140e37da83c49b68ae17d0efb1a5a3
|
38374bd02b3d88f26e3419fd94cebf292fa8460a
|
/jecta.py
|
175470b3eb92aea6391e5733f7f49d8026c0433d
|
[] |
no_license
|
thisismyrobot/jecta
|
fa6a78624a7f33f9804f297e377eb5ab9c84f85d
|
53339112229b35f24df1d30abac695e904276c12
|
refs/heads/master
| 2020-05-18T18:19:40.011909 | 2010-01-20T03:45:31 | 2010-01-20T03:45:31 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,136 |
py
|
import gtk
import signals
import widgets
import gobject
import database
class Jecta(object):
""" The Jecta application.
"""
def __init__(self):
#create signals
gobject.type_register(signals.Sender)
gobject.signal_new("jecta_data_received",
signals.Sender,
gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE,
(gobject.TYPE_STRING,))
gobject.signal_new("jecta_tag_and_data_received",
signals.Sender,
gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE,
(gobject.TYPE_STRING, gobject.TYPE_STRING))
gobject.signal_new("jecta_get_tag_for_data",
signals.Sender,
gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE,
(gobject.TYPE_STRING,))
gobject.signal_new("jecta_add_to_db",
signals.Sender,
gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE,
(gobject.TYPE_STRING, gobject.TYPE_STRING))
gobject.signal_new("jecta_dropper_clicked",
signals.Sender,
gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE,
())
gobject.signal_new("jecta_get_search_tag",
signals.Sender,
gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE,
())
gobject.signal_new("jecta_search_string_updated",
signals.Sender,
gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE,
(gobject.TYPE_STRING,))
gobject.signal_new("jecta_search_db",
signals.Sender,
gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE,
(gobject.TYPE_STRING,))
gobject.signal_new("jecta_search_results_ready",
signals.Sender,
gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,))
gobject.signal_new("jecta_display_search_results",
signals.Sender,
gobject.SIGNAL_RUN_FIRST,
gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,))
#create the signal sender
sender = signals.Sender()
#create the controller
signals.Controller(sender)
#create windows, provide signal sender
drop_target = widgets.Dropper(sender)
widgets.Tagger(sender)
widgets.Searcher(sender)
database.Database(sender)
#show drop target
drop_target.show()
gtk.main()
if __name__ == "__main__":
Jecta()
|
[
"[email protected]"
] | |
3893f258c5c51873d6deb0b0d1e87221aca5ff0f
|
4f0385a90230c0fe808e8672bb5b8abcceb43783
|
/LNH/day5-teacher/7 包的使用/xxx/yyy/aaa/bbb/m3.py
|
02d6ad5d10dc921920dad9cbd99a58d08fcd69df
|
[] |
no_license
|
lincappu/pycharmlearningproject
|
4084dab7adde01db9fa82a12769a67e8b26b3382
|
b501523e417b61373688ba12f11b384166baf489
|
refs/heads/master
| 2023-07-10T05:21:15.163393 | 2023-06-29T14:02:35 | 2023-06-29T14:02:35 | 113,925,289 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 86 |
py
|
# from aaa.ccc.m4 import f4
from ..ccc.m4 import f4
def f3():
print('f3')
f4()
|
[
"[email protected]"
] | |
fd2c3d282da1505204c3bfa937bae13dff575513
|
cb69392e87f4faa67adb45b8f7937d834922dc60
|
/rl_utils/actor_critic.py
|
45ffb2478732af03108fa9b9cb5a871c1d06f36d
|
[] |
no_license
|
maxme1/rl
|
76c65a363f9d735af70ede4d3bf5b39e61a5ab41
|
d0506d96f30884259b88222a00b60f56d2b8e7a3
|
refs/heads/master
| 2021-01-24T13:28:26.617952 | 2020-05-04T13:13:58 | 2020-05-04T13:13:58 | 123,175,509 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,037 |
py
|
import torch
import torch.nn.functional as functional
from torch.autograd import Variable
from torch import nn
from rl_utils.interfaces.base import to_var
from rl_utils.utils import View
def calculate_loss(agent, memory, prepare_batch, gamma, entropy_weight, value_weight):
states, actions, rewards, done = prepare_batch(memory)
# TODO: add func for all this:
b, e = to_var(states[::2]), to_var(states[1::2], volatile=True)
actions = to_var(actions)
rewards = to_var(rewards).float()
done = torch.ByteTensor(done).cuda()
prob_logits, value = agent(b)
prob = functional.softmax(prob_logits, -1)
log_prob = functional.log_softmax(prob_logits, -1)
entropy = -(log_prob * prob).sum(1, keepdim=True)
log_prob = log_prob.gather(1, actions)
final_values = agent(e)[1]
final_values[done] = 0
final_values.volatile = False
cumulative_reward = final_values * gamma + rewards
value_loss = functional.mse_loss(value, cumulative_reward)
delta_t = cumulative_reward.data - value.data
policy_loss = - log_prob * Variable(delta_t) - entropy_weight * entropy
return policy_loss.mean() + value_weight * value_loss
def get_action(predict: Variable):
# predict == (prob_logits, value)
return functional.softmax(predict[0], -1).multinomial().data.cpu()[0]
class ActorCritic(nn.Module):
def __init__(self, input_channels, n_actions, n_features=3136):
super().__init__()
self.main_path = nn.Sequential(
nn.Conv2d(input_channels, 32, kernel_size=8, stride=4),
nn.ReLU(),
nn.Conv2d(32, 64, kernel_size=4, stride=2),
nn.ReLU(),
nn.Conv2d(64, 64, kernel_size=3, stride=1),
nn.ReLU(),
View(n_features),
nn.Linear(n_features, 512),
nn.ReLU(),
)
self.probs = nn.Linear(512, n_actions)
self.value = nn.Linear(512, 1)
def forward(self, x):
x = self.main_path(x)
return self.probs(x), self.value(x)
|
[
"[email protected]"
] | |
a1ef391297dbf1e19b7dd5ada859142e7a247bfb
|
2c38c2ea0328b75ba96a36346f71bd8ddeda3d35
|
/qa/cancel_direct_offline.py
|
93b77931436d654efd4d1274206fca4b23f65979
|
[
"MIT"
] |
permissive
|
TheButterZone/openbazaar-go
|
c6b76e6b7d4cb608f09c6f4dd5d62b97d5b1758d
|
afa185e7a929eb4ee659c53859a73b1dd53b3ae0
|
refs/heads/master
| 2021-06-27T06:24:54.645852 | 2017-09-09T03:21:30 | 2017-09-09T03:21:30 | 102,985,074 | 1 | 1 | null | 2017-09-09T21:08:08 | 2017-09-09T21:08:08 | null |
UTF-8
|
Python
| false | false | 7,643 |
py
|
import requests
import json
import time
from collections import OrderedDict
from test_framework.test_framework import OpenBazaarTestFramework, TestFailure
class CancelDirectOfflineTest(OpenBazaarTestFramework):
def __init__(self):
super().__init__()
self.num_nodes = 3
def run_test(self):
alice = self.nodes[0]
bob = self.nodes[1]
# generate some coins and send them to bob
time.sleep(4)
api_url = bob["gateway_url"] + "wallet/address"
r = requests.get(api_url)
if r.status_code == 200:
resp = json.loads(r.text)
address = resp["address"]
elif r.status_code == 404:
raise TestFailure("CancelDirectOfflineTest - FAIL: Address endpoint not found")
else:
raise TestFailure("CancelDirectOfflineTest - FAIL: Unknown response")
self.send_bitcoin_cmd("sendtoaddress", address, 10)
time.sleep(20)
# post listing to alice
with open('testdata/listing.json') as listing_file:
listing_json = json.load(listing_file, object_pairs_hook=OrderedDict)
api_url = alice["gateway_url"] + "ob/listing"
r = requests.post(api_url, data=json.dumps(listing_json, indent=4))
if r.status_code == 404:
raise TestFailure("CancelDirectOfflineTest - FAIL: Listing post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("CancelDirectOfflineTest - FAIL: Listing POST failed. Reason: %s", resp["reason"])
time.sleep(4)
# get listing hash
api_url = alice["gateway_url"] + "ipns/" + alice["peerId"] + "/listings.json"
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CancelDirectOfflineTest - FAIL: Couldn't get listing index")
resp = json.loads(r.text)
listingId = resp[0]["hash"]
# bob fetch listing to cache
api_url = bob["gateway_url"] + "ipfs/" + listingId
requests.get(api_url)
# shutdown alice
api_url = alice["gateway_url"] + "ob/shutdown"
requests.post(api_url, data="")
time.sleep(4)
# bob send order
with open('testdata/order_direct.json') as order_file:
order_json = json.load(order_file, object_pairs_hook=OrderedDict)
order_json["items"][0]["listingHash"] = listingId
api_url = bob["gateway_url"] + "ob/purchase"
r = requests.post(api_url, data=json.dumps(order_json, indent=4))
if r.status_code == 404:
raise TestFailure("CancelDirectOfflineTest - FAIL: Purchase post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("CancelDirectOfflineTest - FAIL: Purchase POST failed. Reason: %s", resp["reason"])
resp = json.loads(r.text)
orderId = resp["orderId"]
payment_address = resp["paymentAddress"]
payment_amount = resp["amount"]
if resp["vendorOnline"] == True:
raise TestFailure("CancelDirectOfflineTest - FAIL: Purchase returned vendor is online")
# check the purchase saved correctly
api_url = bob["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CancelDirectOfflineTest - FAIL: Couldn't load order from Bob")
resp = json.loads(r.text)
if resp["state"] != "AWAITING_PAYMENT":
raise TestFailure("CancelDirectOfflineTest - FAIL: Bob purchase saved in incorrect state")
if resp["funded"] == True:
raise TestFailure("CancelDirectOfflineTest - FAIL: Bob incorrectly saved as funded")
# fund order
spend = {
"address": payment_address,
"amount": payment_amount,
"feeLevel": "NORMAL"
}
api_url = bob["gateway_url"] + "wallet/spend"
r = requests.post(api_url, data=json.dumps(spend, indent=4))
if r.status_code == 404:
raise TestFailure("CancelDirectOfflineTest - FAIL: Spend post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("CancelDirectOfflineTest - FAIL: Purchase POST failed. Reason: %s", resp["reason"])
time.sleep(20)
# check bob detected payment
api_url = bob["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CancelDirectOfflineTest - FAIL: Couldn't load order from Bob")
resp = json.loads(r.text)
if len(resp["paymentAddressTransactions"]) <= 0:
raise TestFailure("CancelDirectOfflineTest - FAIL: Bob failed to detect his payment")
if resp["funded"] == False:
raise TestFailure("CancelDirectOfflineTest - FAIL: Bob incorrectly saved as unfunded")
if resp["state"] != "PENDING":
raise TestFailure("CancelDirectOfflineTest - FAIL: Bob purchase saved in incorrect state")
# bob cancel order
api_url = bob["gateway_url"] + "ob/ordercancel"
cancel = {"orderId": orderId}
r = requests.post(api_url, data=json.dumps(cancel, indent=4))
if r.status_code == 404:
raise TestFailure("CancelDirectOfflineTest - FAIL: Spend post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("CancelDirectOfflineTest - FAIL: Cancel POST failed. Reason: %s", resp["reason"])
time.sleep(4)
# bob check order canceled correctly
api_url = bob["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CancelDirectOfflineTest - FAIL: Couldn't load order from Bob")
resp = json.loads(r.text)
if resp["state"] != "CANCELED":
raise TestFailure("CancelDirectOfflineTest - FAIL: Bob failed to save as canceled")
if "refundAddressTransaction" not in resp or resp["refundAddressTransaction"] == {}:
raise TestFailure("CancelDirectOfflineTest - FAIL: Bob failed to detect outgoing payment")
# startup alice again
self.start_node(alice)
self.send_bitcoin_cmd("generate", 1)
time.sleep(45)
# check alice detected order
api_url = alice["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CancelDirectOfflineTest - FAIL: Couldn't load order from Alice %s", r.status_code)
resp = json.loads(r.text)
if resp["state"] != "CANCELED":
raise TestFailure("CancelDirectOfflineTest - FAIL: Alice failed to detect order cancellation")
# Check the funds moved into bob's wallet
api_url = bob["gateway_url"] + "wallet/balance"
r = requests.get(api_url)
if r.status_code == 200:
resp = json.loads(r.text)
confirmed = int(resp["confirmed"])
#unconfirmed = int(resp["unconfirmed"])
if confirmed <= 50 - payment_amount:
raise TestFailure("CancelDirectOfflineTest - FAIL: Bob failed to receive the multisig payout")
else:
raise TestFailure("CancelDirectOfflineTest - FAIL: Failed to query Bob's balance")
print("CancelDirectOfflineTest - PASS")
if __name__ == '__main__':
print("Running CancelDirectOfflineTest")
CancelDirectOfflineTest().main(["--regtest", "--disableexchangerates"])
|
[
"[email protected]"
] | |
7d912e6cab995381e5e36e40a34f11956d518293
|
fd67592b2338105e0cd0b3503552d188b814ad95
|
/test/test_models/test_voice_campaign.py
|
39c97218396b76330848b847fb334efdb7d0abde
|
[] |
no_license
|
E-goi/sdk-python
|
175575fcd50bd5ad426b33c78bdeb08d979485b7
|
5cba50a46e1d288b5038d18be12af119211e5b9f
|
refs/heads/master
| 2023-04-29T20:36:02.314712 | 2023-04-18T07:42:46 | 2023-04-18T07:42:46 | 232,095,340 | 5 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,039 |
py
|
# coding: utf-8
"""
APIv3 (New)
# Introduction This is our new version of API. We invite you to start using it and give us your feedback # Getting Started E-goi can be integrated with many environments and programming languages via our REST API. We've created a developer focused portal to give your organization a clear and quick overview of how to integrate with E-goi. The developer portal focuses on scenarios for integration and flow of events. We recommend familiarizing yourself with all of the content in the developer portal, before start using our rest API. The E-goi APIv3 is served over HTTPS. To ensure data privacy, unencrypted HTTP is not supported. Request data is passed to the API by POSTing JSON objects to the API endpoints with the appropriate parameters. BaseURL = api.egoiapp.com # RESTful Services This API supports 5 HTTP methods: * <b>GET</b>: The HTTP GET method is used to **read** (or retrieve) a representation of a resource. * <b>POST</b>: The POST verb is most-often utilized to **create** new resources. * <b>PATCH</b>: PATCH is used for **modify** capabilities. The PATCH request only needs to contain the changes to the resource, not the complete resource * <b>PUT</b>: PUT is most-often utilized for **update** capabilities, PUT-ing to a known resource URI with the request body containing the newly-updated representation of the original resource. * <b>DELETE</b>: DELETE is pretty easy to understand. It is used to **delete** a resource identified by a URI. # Authentication We use a custom authentication method, you will need a apikey that you can find in your account settings. Below you will see a curl example to get your account information: #!/bin/bash curl -X GET 'https://api.egoiapp.com/my-account' \\ -H 'accept: application/json' \\ -H 'Apikey: <YOUR_APY_KEY>' Here you can see a curl Post example with authentication: #!/bin/bash curl -X POST 'http://api.egoiapp.com/tags' \\ -H 'accept: application/json' \\ -H 'Apikey: <YOUR_APY_KEY>' \\ -H 'Content-Type: application/json' \\ -d '{`name`:`Your custom tag`,`color`:`#FFFFFF`}' # SDK Get started quickly with E-goi with our integration tools. Our SDK is a modern open source library that makes it easy to integrate your application with E-goi services. * <a href='https://github.com/E-goi/sdk-java'>Java</a> * <a href='https://github.com/E-goi/sdk-php'>PHP</a> * <a href='https://github.com/E-goi/sdk-python'>Python</a> * <a href='https://github.com/E-goi/sdk-ruby'>Ruby</a> * <a href='https://github.com/E-goi/sdk-javascript'>Javascript</a> * <a href='https://github.com/E-goi/sdk-csharp'>C#</a> # Stream Limits Stream limits are security mesures we have to make sure our API have a fair use policy, for this reason, any request that creates or modifies data (**POST**, **PATCH** and **PUT**) is limited to a maximum of **20MB** of content length. If you arrive to this limit in one of your request, you'll receive a HTTP code **413 (Request Entity Too Large)** and the request will be ignored. To avoid this error in importation's requests, it's advised the request's division in batches that have each one less than 20MB. # Timeouts Timeouts set a maximum waiting time on a request's response. Our API, sets a default timeout for each request and when breached, you'll receive an HTTP **408 (Request Timeout)** error code. You should take into consideration that response times can vary widely based on the complexity of the request, amount of data being analyzed, and the load on the system and workspace at the time of the query. 
When dealing with such errors, you should first attempt to reduce the complexity and amount of data under analysis, and only then, if problems are still occurring ask for support. For all these reasons, the default timeout for each request is **10 Seconds** and any request that creates or modifies data (**POST**, **PATCH** and **PUT**) will have a timeout of **60 Seconds**. Specific timeouts may exist for specific requests, these can be found in the request's documentation. # Callbacks A callback is an asynchronous API request that originates from the API server and is sent to the client in response to a previous request sent by that client. The API will make a **POST** request to the address defined in the URL with the information regarding the event of interest and share data related to that event. <a href='/usecases/callbacks/' target='_blank'>[Go to callbacks documentation]</a> ***Note:*** Only http or https protocols are supported in the Url parameter. <security-definitions/> # noqa: E501
The version of the OpenAPI document: 3.0.0
Generated by: https://openapi-generator.tech
"""
import unittest
import egoi_api
from egoi_api.model.voice_campaign import VoiceCampaign
from egoi_api import configuration
class TestVoiceCampaign(unittest.TestCase):
"""VoiceCampaign unit test stubs"""
_configuration = configuration.Configuration()
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
20743807dd344294a34fe1864a3dfc8fd8b498f9
|
745e6021a466797a04435cc4060836c955985b89
|
/apps/tickets/models.py
|
088b985aef25e7db82c4887619e2575e91c95e2b
|
[] |
no_license
|
anykate/ticketapi
|
f32cf5ed31065afab76a5765f583115f7e76eca5
|
de816dea453c4fc5b8fc04f9296c9e0ab91749aa
|
refs/heads/master
| 2020-07-03T03:00:15.230698 | 2019-08-11T16:31:34 | 2019-08-11T16:31:34 | 201,763,827 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,922 |
py
|
from django.db import models
import uuid
from django.contrib.auth.models import User
from django.utils.text import slugify
# Create your models here.
def generate_ticket_id():
return str(uuid.uuid4()).split("-")[-1] # generate unique ticket id
class Category(models.Model):
name = models.CharField(max_length=200)
slug = models.SlugField(
editable=False,
unique=True,
max_length=255
)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
def __str__(self):
return self.name
def _get_unique_slug(self):
slug = slugify(self.name)
unique_slug = slug
num = 1
while Category.objects.filter(slug=unique_slug).exists():
unique_slug = f'{slug}-{num}'
num += 1
return unique_slug
def save(self, *args, **kwargs):
if not self.slug:
self.slug = self._get_unique_slug()
super(Category, self).save(*args, **kwargs)
class Meta:
verbose_name_plural = 'categories'
ordering = ["-created"]
class Ticket(models.Model):
title = models.CharField(max_length=255)
user = models.ForeignKey(User, on_delete=models.CASCADE)
content = models.TextField()
category = models.ForeignKey(Category, on_delete=models.CASCADE)
ticket_id = models.CharField(max_length=255, blank=True)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
def __str__(self):
return f"{self.title} - {self.ticket_id}"
def save(self, *args, **kwargs):
if len(self.ticket_id.strip(" ")) == 0:
self.ticket_id = generate_ticket_id()
# Call the real save() method
super(Ticket, self).save(*args, **kwargs)
class Meta:
ordering = ["-created"]
|
[
"[email protected]"
] | |
be5c5e9e4c35cc10ddd8cbc5b8d36c8c0df48973
|
9680ba23fd13b4bc0fc3ce0c9f02bb88c6da73e4
|
/Brian Heinold (243) ile Python/p10611b_sınav.py
|
a7c32a2bf1b3f9310b20a51945735eec99e834d5
|
[] |
no_license
|
mnihatyavas/Python-uygulamalar
|
694091545a24f50a40a2ef63a3d96354a57c8859
|
688e0dbde24b5605e045c8ec2a9c772ab5f0f244
|
refs/heads/master
| 2020-08-23T19:12:42.897039 | 2020-04-24T22:45:22 | 2020-04-24T22:45:22 | 216,670,169 | 0 | 0 | null | null | null | null |
ISO-8859-9
|
Python
| false | false | 759 |
py
|
# coding:iso-8859-9 Türkçe
teklif1 = "Sayın "
teklif2 = "\n\nSize yeni Platin Artı İkramiyeli kartımızı %47.99 gibi\nçok özel bir tanıtım indirimiyle sunmaktan gurur duyuyorum.\n"
teklif3 = ", böyle bir teklif kimseye her gün pek sık yapılmaz;\nbu yüzden +90-800-314-1592 ücretsiz numaramızı hemen\naramanızı şiddetle tavsiye ediyorum.\nBöylesi indirimli tanıtım kampanya indirimini çok uzun süre devam\nettiremeyiz, "
teklif4 = ", bu yüzden hiç vakit yitirmeden\nhemen bizi aramalısınız!.."
giriş = input ("Açık ad soyadınızı giriniz: ")
if len(giriş) > 0:
try: ad = giriş[:giriş.index(' ')]
except ValueError: ad = giriş
print (teklif1, giriş, teklif2, ad, teklif3, ad, teklif4, sep="")
|
[
"[email protected]"
] | |
7e7b6b0a507848ab4173ff7b66c5f17459c3d342
|
8130c34d546c323d6d5d2ca6b4a67330af08828f
|
/.history/menu_app/views_20210104161734.py
|
1036941bea0c7c7320204735b5813ff6004c6e42
|
[] |
no_license
|
lienusrob/final
|
ba2dad086fc97b21b537ef12df834dfadd222943
|
f2726e31f1d51450e4aed8c74021c33679957b28
|
refs/heads/master
| 2023-02-15T01:36:54.463034 | 2021-01-07T12:47:05 | 2021-01-07T12:47:05 | 327,279,792 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,812 |
py
|
from .models import Cart, CartItem, MenuItem, ItemsCategory, Order, generate_order_id
from account_app.models import Profile
from .forms import AddToCartForm
from django.views.generic import ListView
from django.shortcuts import render, get_object_or_404, redirect
from django.urls import reverse
from django.utils import timezone
class MenuListView(ListView):
model = MenuItem
template_name = 'items/menu_list.html'
def menu_list_view(request):
item_list = MenuItem.objects.all()
context = {'item_list': item_list,
'item_categories':reversed(ItemsCategory.objects.all()),
'item_categories_side_nav':reversed(ItemsCategory.objects.all())}
return render(request, 'menu_app/menu_list.html', context)
def home(request):
category_menu = ItemsCategory.objects.all()
context = {'category_menu': category_menu}
return render (request, 'homepage.html', context)
def menu_item_detail(request, **kwargs):
item = MenuItem.objects.filter(id=kwargs.get('pk')).first()
context = {'item':item}
return render(request, 'menu_app/item_details.html', context)
def new_order_info(request):
user_profile = get_object_or_404(Profile, user=request.user)
order, created = Order.objects.get_or_create(customer=user_profile.user, is_ordered=False)
if created:
order.ref_code = generate_order_id()
order.save()
context = {'order':order}
return render(request, 'items/order_info.html', context)
def cart (request):
cart = Cart.objects.get(user = request.user, current = True)
cart_items = CartItem.objects.filter(cart = cart)
context = {'cart_items':cart_items}
return render (request, 'menu_app/cart.html', context )
def menu_details(request, name):
category = ItemsCategory.objects.get(name=name)
menu_details = MenuItem.objects.filter(category=category)
context = {'menu_details':menu_details, 'category':name, 'user':request.user}
if request.method=="POST":
form = AddToCartForm(request.POST or None)
form.cart = Cart.objects.get_or_create(user=request.user, current=True)
form.save()
#messages.success(request, "Item" "added to cart successfully!, please go to cart and check for items.")
return render(request, ('menu_app/menu_list.html'), context)
def cart(request):
cart = Cart.objects.get(user=request.user, current=True)
cart_items = CartItem.objects.filter(cart=cart)
#extras = Extras.objects.all()
context = {'cart_items':cart_items}
return render(request, 'menu_app/cart.html', context)
def view_cart(request):
"""A View that renders the cart contents page"""
return render(request, "cart.html")
def add_to_cart(request, id):
"""Add a quantity of the specified product to the cart"""
quantity = int(request.POST.get('quantity'))
cart = request.session.get('cart', {})
if id in cart:
cart[id] = int(cart[id]) + quantity
else:
cart[id] = cart.get(id, quantity)
request.session['cart'] = cart
return redirect('homepage')
def adjust_cart(request, id):
quantity = int(request.POST.get('quantity'))
cart = request.session.get('cart', {})
if quantity > 0:
cart[id] = quantity
else:
cart.pop(id)
request.session['cart'] = cart
return redirect('view_cart')
def orders (request):
cart = Cart.objects.get(user=request.user, current = True)
    cart_items = CartItem.objects.filter(cart__pk=cart.pk)
if request.method == "POST":
for key, value in request.POST.items():
            if key == "csrfmiddlewaretoken":
continue
        cart.current = False
cart.date_ordered= timezone.now()
cart.save()
order= orders (cart = cart)
|
[
"[email protected]"
] | |
91603cf08d37714b4e52f6dd5ab7176a319eee9d
|
038131f491c44ff30e9f403cb46ff5e5c91a5528
|
/amuse_util/data/__init__.py
|
b5a3ed9db49480c30fe8fd7fee51a4dee234e689
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
nstarman/amuse_util
|
3d94ec44f21d5bf0da8b97c50c90f180245fecc1
|
5086b5db10fe96e3d797a5ed59b76fefb41a61b1
|
refs/heads/master
| 2023-04-07T11:43:19.697900 | 2021-06-25T18:54:18 | 2021-06-25T18:54:18 | 239,362,836 | 2 | 1 |
NOASSERTION
| 2021-06-25T18:54:19 | 2020-02-09T19:39:31 |
Python
|
UTF-8
|
Python
| false | false | 1,039 |
py
|
# -*- coding: utf-8 -*-
# see LICENSE.rst
# ----------------------------------------------------------------------------
#
# TITLE : Data
# AUTHOR : Nathaniel Starkman
# PROJECT : amuse_util
#
# ----------------------------------------------------------------------------
"""Data Management.
Often data is packaged poorly and it can be difficult to understand how
the data should be read.
DON'T PANIC.
This module provides functions to read the contained data.
"""
__author__ = "Nathaniel Starkman"
# __credits__ = [""]
# __all__ = [
# ""
# ]
###############################################################################
# IMPORTS
###############################################################################
# CODE
###############################################################################
def function():
"""Docstring."""
pass
# /def
# ------------------------------------------------------------------------
###############################################################################
# END
|
[
"[email protected]"
] | |
0a1ddca836a6ecb459147efae425ba4ceb743d4d
|
db68e4cf7ae7c9880aecdcee48c8b41aecc6eb65
|
/torch/distributions/gamma.py
|
d9db0d8883e0f9463883409e8db28837eb9a4149
|
[
"BSD-2-Clause"
] |
permissive
|
mbp28/pytorch
|
175c8e1821dd6e4fda9d1d3f9e2edbd604bf4150
|
d450895a74b84672b02f3fbbaa7ccbdd9b6a3335
|
refs/heads/master
| 2021-01-25T09:08:33.799198 | 2017-12-14T17:31:58 | 2017-12-14T17:31:58 | 93,779,621 | 0 | 0 | null | 2017-06-08T18:26:15 | 2017-06-08T18:26:15 | null |
UTF-8
|
Python
| false | false | 1,814 |
py
|
from numbers import Number
import torch
from torch.autograd import Variable, Function
from torch.autograd.function import once_differentiable
from torch.distributions.distribution import Distribution
from torch.distributions.utils import expand_n, broadcast_all
class _StandardGamma(Function):
@staticmethod
def forward(ctx, alpha):
x = torch._C._standard_gamma(alpha)
ctx.save_for_backward(x, alpha)
return x
@staticmethod
@once_differentiable
def backward(ctx, grad_output):
x, alpha = ctx.saved_tensors
grad = torch._C._standard_gamma_grad(x, alpha)
return grad_output * grad
def _standard_gamma(alpha):
if not isinstance(alpha, Variable):
return torch._C._standard_gamma(alpha)
return _StandardGamma.apply(alpha)
class Gamma(Distribution):
r"""
Creates a Gamma distribution parameterized by shape `alpha` and rate `beta`.
Example::
>>> m = Gamma(torch.Tensor([1.0]), torch.Tensor([1.0]))
>>> m.sample() # Gamma distributed with shape alpha=1 and rate beta=1
0.1046
[torch.FloatTensor of size 1]
Args:
alpha (float or Tensor or Variable): shape parameter of the distribution
beta (float or Tensor or Variable): rate = 1 / scale of the distribution
"""
has_rsample = True
def __init__(self, alpha, beta):
self.alpha, self.beta = broadcast_all(alpha, beta)
def sample(self):
return _standard_gamma(self.alpha) / self.beta
def sample_n(self, n):
return _standard_gamma(expand_n(self.alpha, n)) / self.beta
def log_prob(self, value):
return (self.alpha * torch.log(self.beta) +
(self.alpha - 1) * torch.log(value) -
self.beta * value - torch.lgamma(self.alpha))
|
[
"[email protected]"
] | |
dfd950972cd9f6e06897317ef128b10a9abaf3ea
|
0f9f8e8478017da7c8d408058f78853d69ac0171
|
/python3/l0114_flatten_binary_tree_to_linked_list.py
|
bb97c41a16aab1a7c00df25c40d8b7ddb637cd41
|
[] |
no_license
|
sprax/1337
|
dc38f1776959ec7965c33f060f4d43d939f19302
|
33b6b68a8136109d2aaa26bb8bf9e873f995d5ab
|
refs/heads/master
| 2022-09-06T18:43:54.850467 | 2020-06-04T17:19:51 | 2020-06-04T17:19:51 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 822 |
py
|
from common import TreeNode
import common
class Solution:
def flatten(self, root: TreeNode) -> None:
"""
Do not return anything, modify root in-place instead.
"""
def flatten_and_return_end(root: TreeNode) -> TreeNode:
if not root:
return None
if not root.left and not root.right:
return root
if not root.left:
return flatten_and_return_end(root.right)
flatten_end = flatten_and_return_end(root.left)
if root.right:
flatten_end.right = root.right
flatten_end = flatten_and_return_end(root.right)
root.right = root.left
root.left = None
return flatten_end
flatten_and_return_end(root)
|
[
"[email protected]"
] | |
ce9c98fb960f27a183c05a296f22d83dfe5b8df4
|
ef3a7391b0a5c5d8e276355e97cbe4de621d500c
|
/venv/Lib/site-packages/caffe2/python/ideep/conv_transpose_test.py
|
be35dbd8a38205287c81fd5bdb172505e25bed67
|
[
"Apache-2.0"
] |
permissive
|
countBMB/BenjiRepo
|
143f6da5d198ea6f06404b4559e1f4528b71b3eb
|
79d882263baaf2a11654ca67d2e5593074d36dfa
|
refs/heads/master
| 2022-12-11T07:37:04.807143 | 2019-12-25T11:26:29 | 2019-12-25T11:26:29 | 230,090,428 | 1 | 1 |
Apache-2.0
| 2022-12-08T03:21:09 | 2019-12-25T11:05:59 |
Python
|
UTF-8
|
Python
| false | false | 2,701 |
py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import unittest
import numpy as np
from hypothesis import assume, given, settings
import hypothesis.strategies as st
from caffe2.python import core, workspace
import caffe2.python.hypothesis_test_util as hu
import caffe2.python.ideep_test_util as mu
@unittest.skipIf(not workspace.C.use_mkldnn, "No MKLDNN support.")
class ConvTransposeTest(hu.HypothesisTestCase):
@given(stride=st.integers(1, 2),
pad=st.integers(0, 3),
kernel=st.integers(1, 5),
adj=st.integers(0, 2),
size=st.integers(7, 10),
input_channels=st.integers(1, 8),
output_channels=st.integers(1, 8),
batch_size=st.integers(1, 3),
use_bias=st.booleans(),
training_mode=st.booleans(),
compute_dX=st.booleans(),
**mu.gcs)
@settings(max_examples=2, timeout=100)
def test_convolution_transpose_gradients(self, stride, pad, kernel, adj,
size, input_channels,
output_channels, batch_size,
use_bias, training_mode,
compute_dX, gc, dc):
training = 1 if training_mode else 0
assume(adj < stride)
X = np.random.rand(
batch_size, input_channels, size, size).astype(np.float32) - 0.5
w = np.random.rand(
input_channels, output_channels, kernel, kernel)\
.astype(np.float32) - 0.5
b = np.random.rand(output_channels).astype(np.float32) - 0.5
op = core.CreateOperator(
"ConvTranspose",
["X", "w", "b"] if use_bias else ["X", "w"],
["Y"],
stride=stride,
kernel=kernel,
pad=pad,
adj=adj,
training_mode=training,
no_gradient_to_input=not compute_dX,
)
inputs = [X, w, b] if use_bias else [X, w]
self.assertDeviceChecks(dc, op, inputs, [0], threshold=0.001)
if training_mode:
if use_bias and compute_dX:
# w, b, X
outputs_to_check = [1, 2, 0]
elif use_bias:
# w, b
outputs_to_check = [1, 2]
elif compute_dX:
# w, X
outputs_to_check = [1, 0]
else:
# w
outputs_to_check = [1]
for i in outputs_to_check:
self.assertGradientChecks(gc, op, inputs, i, [0])
if __name__ == "__main__":
unittest.main()
|
[
"[email protected]"
] | |
3d4e3f8f4bb86020735ae4da37c49cd476e9fd41
|
1b12e6096c47312b67fa6ff223216945d2efb70c
|
/sandbox/ipython/mydemo.py
|
79b0812a7041ad301e881c180f2f641b25d1d440
|
[
"Apache-2.0"
] |
permissive
|
rboman/progs
|
6e3535bc40f78d692f1f63b1a43193deb60d8d24
|
03eea35771e37d4b3111502c002e74014ec65dc3
|
refs/heads/master
| 2023-09-02T17:12:18.272518 | 2023-08-31T15:40:04 | 2023-08-31T15:40:04 | 32,989,349 | 5 | 2 |
Apache-2.0
| 2022-06-22T10:58:38 | 2015-03-27T14:04:01 |
MATLAB
|
UTF-8
|
Python
| false | false | 127 |
py
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from IPython.lib.demo import Demo
mydemo = Demo('myscript.py')
#mydemo()
|
[
"[email protected]"
] | |
22f210847b373c906655532a6d7e88e2ba23996e
|
e25dca25850ee8ee4ff929cba26ad66bfc6f15bb
|
/slowedml/phylip.py
|
9fd44efaa6e268a049b531a3634bd36fb64ad3cf
|
[] |
no_license
|
argriffing/slowedml
|
704db5a4d15b8a5b5eb464e480bc7dd45ad56dc5
|
02907cd08210e4cf550885eb42ec5372b0f45c72
|
refs/heads/master
| 2021-01-01T17:52:02.647617 | 2013-02-19T16:34:12 | 2013-02-19T16:34:12 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,501 |
py
|
"""
Read a tiny subset of phylip interleaved alignment files.
"""
def read_interleaved_codon_alignment(fin):
"""
Yield columns of the alignment.
This function is not as stream-oriented as its interface may suggest.
In particular, it eats tons of memory for no good reason.
@param fin: a stream open for reading
"""
# read rows of string sequences
rows = []
for line in fin:
row = line.split()
if row:
rows.append(row)
# init the list of columns
col_list = []
# get the number of taxa and the total number of nucleotides
header_row = rows[0]
s_ntaxa, s_nnucs = rows[0][:2]
ntaxa = int(s_ntaxa)
nnucs = int(s_nnucs)
# read the taxon names from the first paragraph
taxon_names = []
for row in rows[1:1+ntaxa]:
taxon_names.append(row[0])
# check that the data is in the expected format
if len(rows) % ntaxa == 1:
        nparagraphs = (len(rows) - 1) // ntaxa  # integer division so range() below gets an int
else:
raise Exception
# yield a column consisting of the taxon names
yield tuple(taxon_names)
# go through the input rows, paragraph by paragraph
for i in range(nparagraphs):
# the first paragraph has taxon names prefixed to its rows
paragraph = rows[i*ntaxa + 1 : (i+1)*ntaxa + 1]
if i == 0:
paragraph = [row[1:] for row in paragraph]
# convert the paragraph into codon columns
for column in zip(*paragraph):
yield column
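# --- Illustrative usage sketch (editor's addition). ---
# 'alignment.phy' is a hypothetical interleaved phylip file; the first yielded
# "column" is the tuple of taxon names, every following one is a codon column.
if __name__ == '__main__':
    with open('alignment.phy') as fin:
        columns = read_interleaved_codon_alignment(fin)
        taxon_names = next(columns)
        print(taxon_names)
        for column in columns:
            print(column)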
|
[
"[email protected]"
] | |
0e34cbf3b1e05ed06cc5806383597034a2a8b89e
|
6b2a8dd202fdce77c971c412717e305e1caaac51
|
/solutions_5662291475300352_0/Python/Astrae/1C.py
|
6537ddd1a9860397cfa68b4bd67388263edcca3f
|
[] |
no_license
|
alexandraback/datacollection
|
0bc67a9ace00abbc843f4912562f3a064992e0e9
|
076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf
|
refs/heads/master
| 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,074 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 10 21:34:08 2015
@author: Fred
"""
import numpy as np
from math import floor
def main(ifn='C-small-1-attempt0.in',ofn='output.txt'):
    with open(ifn) as inf: # open the input file and alias it as inf
        with open(ofn,'w') as ouf: # create the output file we will write to
            noc = int(inf.readline().strip()) # read the first line,
            # usually the number of test cases
            # .strip() removes the surrounding whitespace
            for tnoc in range(noc): # loop over the test cases
                ouf.write("Case #%d: " %(tnoc+1)) # case header written to the output file
                # start by reading the number of tribes (groups of walkers)
                i=int(inf.readline().strip()) # read the count for this case
L1=[0]*i
L2=[0]*i
L3=[0]*i
for k in range(i):
a=inf.readline().strip().split(' ')
L1[k]=int(a[0])
L2[k]=int(a[1])
L3[k]=int(a[2])
                print(L1)
                print(L2)
                print(L3)
                # number of walkers
                N=sum(L2)
                print(N)
                goal=[] # time each walker needs to finish one full lap
                for k in range(len(L1)): # for each group of walkers
                    pos=L1[k]
                    for j in range(L2[k]): # for each walker
                        v=float(360)/(L3[k]+j) # speed of the j-th walker of the group
                        goal=goal+[(360.-L1[k])/v]
                print(goal)
                # next, for each walker, work out how many laps it completes in a time t
                nbrtour=[] # number of laps completed by each walker
                t=max(goal) # minimal time
                for k in range(len(L1)): # for each group of walkers
                    pos=L1[k]
                    for j in range(L2[k]): # for each walker
                        v=float(360)/(L3[k]+j) # speed of the j-th walker of the group
                        nbrtour=nbrtour+[int(floor(((v*t+L1[k]))/360))]
                print(nbrtour)
                resultat=0
                if N==1: # if there is a single walker
                    resultat=0
                elif N==2:
                    if max(nbrtour)>1:
                        resultat=1
                ouf.write("%d\n" %resultat) # write the result then go to a new line
|
[
"[email protected]"
] | |
0d408e89af86671233cb41aa984c3d6dcb8a796c
|
e72ff96f633d065d23d16f550f0f7efc8901e10c
|
/blog/migrations/0001_initial.py
|
7f6a73d3cac557af73584b5dc07e3005f01e3c4f
|
[
"MIT"
] |
permissive
|
mhadiahmed/eCommerce
|
5e9801978a62829e27566bfbc358cc3e2bb600ae
|
68e6013d7d66b2a44e256b65956c507bdd7d1bd1
|
refs/heads/master
| 2021-01-19T11:08:51.437886 | 2018-08-28T15:08:34 | 2018-08-28T15:08:34 | 87,930,716 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,227 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-01-04 19:47
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=120)),
('Type', models.CharField(choices=[('Null', 'Null'), ('Phone', 'Phone'), ('Car', 'Car'), ('Laptop', 'Laptop'), ('jops', 'Jops'), ('Electronic', 'Electronic'), ('Clothes', 'Clothes'), ('Makeup', 'Makeup'), ('Furnishings', 'Furnishings'), ('books', 'books'), ('sports', 'sports'), ('Property', 'Property'), ('Other', 'Other')], default='Null', max_length=120)),
('company', models.CharField(max_length=120)),
('dis', models.TextField(default='in here you w,ll write all the discribtion about your product')),
('image', models.ImageField(blank=True, height_field='height_field', null=True, upload_to='', width_field='width_field')),
('width_field', models.IntegerField(default=0)),
('height_field', models.IntegerField(default=0)),
('case', models.CharField(choices=[('Null', 'Null'), ('New', 'New'), ('Old', 'Old'), ('Second Hand', 'Second Hand'), ('Other', 'Other')], default=99, max_length=120)),
('price', models.BigIntegerField(default=0)),
('address', models.CharField(max_length=120)),
('draft', models.BooleanField(default=False)),
('date', models.DateTimeField(auto_now=True)),
('puplis', models.DateTimeField(auto_now_add=True)),
('auth', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['-date', '-puplis'],
},
),
]
|
[
"[email protected]"
] | |
5bc79658513a89728f29d61dcd7edb8969d4b19e
|
bcf74743a974159566d2d6a1a4158088a64df760
|
/wcivf/apps/people/migrations/0010_auto_20170306_1206.py
|
11dff493427a935a0ac0823cf8d0b7bb3d7f99b0
|
[] |
no_license
|
madcsaba/WhoCanIVoteFor
|
16d8b946f236358285de34c9248cde81be15433c
|
bb658e3f7f705fe81265149d1a50e33ba04c3fec
|
refs/heads/master
| 2021-04-06T20:19:20.254734 | 2018-03-12T10:30:08 | 2018-03-12T10:30:08 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 488 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-06 12:06
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('elections', '0015_auto_20170304_1354'),
('people', '0009_auto_20170303_1823'),
]
operations = [
migrations.AlterUniqueTogether(
name='personpost',
unique_together=set([('person', 'post', 'election')]),
),
]
|
[
"[email protected]"
] | |
5dfa55cdd8c387c3fe142aa742ed535aea613cf9
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/303/usersdata/299/100765/submittedfiles/minha_bib.py
|
a1cc038e961e8aaf3777812018c47bdcb9fef29c
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,608 |
py
|
# -*- coding: utf-8 -*-
#START HERE BELOW
def sorteio(x,y):
import random
sort=random.randint(0,2)
if sort==0:
return(0)
if sort==2:
return(2)
def sorteio2(x,y):
import random
sort=random.randint(0,2)
if sort==0:
return(0)
if sort==2:
return(2)
else:
return(1)
def verificar_vitoria(x):
    cont=0 # mismatch counter, must start at zero before the checks below
    #victory on a row
    for i in range(1,3,1):
        for j in range(1,3,1):
            if x[i-1][j-1]==x[i][j]==" ":
                cont+=1
    #row analysis
    for i in range(1,3,1):
        for j in range(1,3,1):
            if x[i][j-1]==x[i][j]:
                cont+=0
            else:
                cont+=1
    #victory on a column
    for j in range(1,3,1):
        for i in range(1,3,1):
            if x[i-1][j]==x[i][j]:
                cont+=0
            else:
                cont+=1
    #victory on a diagonal
    if x[0][0]==x[1][1] and x[1][1]==x[2][2]:
        cont+=0
    elif x[2][0]==x[1][1] and x[1][1]==x[0][2]:
        cont+=0
    else:
        cont+=1
    if cont==0:
        return True
    else:
        return False
def maquinainteligente(x):
from minha_bib import sorteio2
    #rows
    #first
if x[0][0]==x[0][2]:
return '01'
elif x[0][1]==x[0][2]:
return '00'
elif x[0][0]==x[0][1]:
return '02'
    #second
elif x[1][0]==x[1][1]:
return '12'
elif x[1][1]==x[1][2]:
return '10'
elif x[1][0]==x[1][2]:
return'11'
    #third
elif x[2][0]==x[2][1]:
return '22'
elif x[2][1]==x[2][2]:
return '20'
elif x[2][0]==x[2][2]:
return '21'
    #columns
    #first
elif x[0][0]==x[1][0]:
return '20'
elif x[1][0]==x[2][0]:
return '00'
elif x[0][0]==x[2][0]:
return '10'
    #second
elif x[0][1]==x[1][1]:
return '21'
elif x[1][1]==x[2][1]:
return '01'
elif x[0][1]==x[2][1]:
return '11'
    #third
elif x[0][2]==x[1][2]:
return '22'
elif x[1][2]==x[2][2]:
return '02'
elif x[0][2]==x[2][2]:
return '12'
#diagonal
elif x[0][0]==x[1][1]:
return '22'
elif x[1][1]==x[2][2]:
return '00'
elif x[0][0]==x[2][2]:
return '11'
    #second
elif x[2][0]==x[1][1]:
return '02'
elif x[1][1]==x[0][2]:
return '20'
elif x[2][0]==x[0][2]:
return '11'
else:
return '11'
|
[
"[email protected]"
] | |
68ce3ac1f75f709080e7a2848a47b98cf44a14ce
|
c426f4d6f7e5b9f9a6527ae48b2a8932f6e9a000
|
/mayan/apps/storage/admin.py
|
072790cdd14a67e38c195d64ac8f06a24de0519f
|
[
"Apache-2.0"
] |
permissive
|
telsch/Mayan-EDMS
|
208d6dbc08e2fa76f3132c8761f247e02768333e
|
5f2bdfbf1f3d109e403b2d1f3f81b79bdb9cc35f
|
refs/heads/master
| 2023-03-21T13:02:40.721699 | 2021-03-14T05:02:54 | 2021-03-14T05:02:54 | 254,727,379 | 0 | 1 | null | 2020-04-10T20:15:32 | 2020-04-10T20:15:31 | null |
UTF-8
|
Python
| false | false | 280 |
py
|
from django.contrib import admin
from .models import SharedUploadedFile
@admin.register(SharedUploadedFile)
class SharedUploadedFileAdmin(admin.ModelAdmin):
date_hierarchy = 'datetime'
list_display = ('file', 'filename', 'datetime',)
readonly_fields = list_display
|
[
"[email protected]"
] | |
ecd4f5beea18b9c1d6195018fcc353f0c32f5dbf
|
c7b669b7352c9eee3d17f9e57b2fc0df0907f3eb
|
/Day01/ex06/my_sort.py
|
fba457e1793d06ff3df89dfcb97e89091546e0d6
|
[] |
no_license
|
m1n-q/django-inside
|
89da9514c7b4d6d2d883df720b317ce4ea536590
|
9b70915cd798285de096974f9eb271f338756250
|
refs/heads/main
| 2023-07-18T00:18:15.327135 | 2021-09-02T09:22:31 | 2021-09-02T09:22:31 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 760 |
py
|
def sort_name():
d = {
'Hendrix': '1942',
'Allman': '1946',
'King': '1925',
'Clapton': '1945',
'Johnson': '1911',
'Berry': '1926',
'Vaughan': '1954',
'Cooder': '1947',
'Page': '1944',
'Richards': '1943',
'Hammett': '1962',
'Cobain': '1967',
'Garcia': '1942',
'Beck': '1944',
'Santana': '1947',
'Ramone': '1948',
'White': '1975',
'Frusciante': '1970',
'Thompson': '1949',
'Burton': '1939',
}
l = []
for k, v in d.items():
l.append((v, k))
l.sort()
for y, n in l:
print(n)
if __name__ == '__main__':
sort_name()
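# Editor's note: an equivalent, arguably more idiomatic version of the loop above
# sorts the items directly by (birth year, surname) and avoids the temporary list:
#     for name, year in sorted(d.items(), key=lambda kv: (kv[1], kv[0])):
#         print(name)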
|
[
"[email protected]"
] | |
934096cedb5dd7e57b0fe700e2d0d53edde2c14a
|
da85d4caf3e5e1c9df8839fafd51f960f02daadd
|
/FabricUI/_version.py
|
df068b6b98498c361cd2a5f5a6b5c6ac6cdc9472
|
[
"Apache-2.0"
] |
permissive
|
shuaih7/FabricUI
|
6efe58f3dbefebbd49607094a28bf2d7bc9314ca
|
6501e8e6370d1f90174002f5768b5ef63e8412bc
|
refs/heads/main
| 2023-04-13T10:07:42.090043 | 2021-04-13T02:55:12 | 2021-04-13T02:55:12 | 314,152,777 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 192 |
py
|
""" package version """
VERSION_TAG = "1.0.0"
__version__ = VERSION_TAG
def print_version():
""" Print full version information """
print("FabricUI version: {0}".format(VERSION_TAG))
|
[
"[email protected]"
] | |
c896e55601b73e9c564bb1308954f7f623f82312
|
e44169033ae1dd01397b5ceeccb84ee6fc7a0009
|
/challenge5/shiyanlou/shiyanlou/spiders/github.py
|
1d4243d0864ba0a909fa0ffca404952c01495a62
|
[] |
no_license
|
Yao-Phoenix/data_challenge
|
6d4f7bf666c92bdc19c04cdfd4d17ba8767a6056
|
287ae2c1b786596b0c9b5a36d54426e38970ba76
|
refs/heads/master
| 2021-02-20T23:55:41.852331 | 2020-03-31T02:19:05 | 2020-03-31T02:19:05 | 245,347,232 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,004 |
py
|
# -*- coding: utf-8 -*-
import scrapy
from shiyanlou.items import ShiyanlouItem
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import Rule
class GithubSpider(scrapy.spiders.CrawlSpider):
name = 'github'
allowed_domains = ['github.com']
start_urls = ['https://github.com/shiyanlou?tab=repositories']
rules = (Rule(
LinkExtractor(
allow='https://github.com/shiyanlou\?after=*'),
callback='parse_item',
follow=True),
Rule(
LinkExtractor(
allow='https://github.com/shiyanlou\?tab=repositories$'),
callback='parse_item',
follow=True),)
def parse_item(self, response):
item = ShiyanlouItem()
for data in response.css('li.col-12'):
item['repo_name'] = data.xpath('.//h3/a/text()').extract_first().strip()
item['update_time'] = data.xpath('.//relative-time/@datetime'
).extract_first()
yield item
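# Editor's note (sketch, not part of the original spider): inside a standard Scrapy
# project this spider would normally be launched from the command line, e.g.
#     scrapy crawl github -o repos.json
# which relies on ShiyanlouItem (imported above) declaring the 'repo_name' and
# 'update_time' fields filled in by parse_item().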
|
[
"[email protected]"
] | |
449bf91a1aa96f17143c3ca9ec1f7cc62665bbc9
|
3562aee827438794ec17250f033818fa699d9c56
|
/config/settings/local.py
|
166824d1e6eeb7bd8df3b95e2a43af9cd26e27af
|
[
"MIT"
] |
permissive
|
feastkl-app/attendance-tracker-v2
|
509d51142a9882e7b591b1349bce944bf2274f55
|
38670cbf6e1f1a44becc94fb3c90ec33fda68b9b
|
refs/heads/master
| 2021-05-02T03:47:35.547933 | 2018-02-10T04:49:21 | 2018-02-10T04:49:21 | 120,903,586 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,039 |
py
|
"""
Local settings for Attendance Tracker v2 project.
- Run in Debug mode
- Use console backend for emails
- Add Django Debug Toolbar
- Add django-extensions as app
"""
from .base import * # noqa
# DEBUG
# ------------------------------------------------------------------------------
DEBUG = env.bool('DJANGO_DEBUG', default=True)
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
SECRET_KEY = env('DJANGO_SECRET_KEY', default='ogk7kTeUPzZhqOIgpsLT2c3KMUknJFkge60M0cqz6cM6zSih5s')
# Mail settings
# ------------------------------------------------------------------------------
EMAIL_PORT = 1025
EMAIL_HOST = 'localhost'
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND',
default='django.core.mail.backends.console.EmailBackend')
# CACHING
# ------------------------------------------------------------------------------
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
# django-debug-toolbar
# ------------------------------------------------------------------------------
MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware', ]
INSTALLED_APPS += ['debug_toolbar', ]
INTERNAL_IPS = ['127.0.0.1', '10.0.2.2', ]
DEBUG_TOOLBAR_CONFIG = {
'DISABLE_PANELS': [
'debug_toolbar.panels.redirects.RedirectsPanel',
],
'SHOW_TEMPLATE_CONTEXT': True,
}
# django-extensions
# ------------------------------------------------------------------------------
INSTALLED_APPS += ['django_extensions', ]
# TESTING
# ------------------------------------------------------------------------------
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# Your local stuff: Below this line define 3rd party library settings
# ------------------------------------------------------------------------------
|
[
"[email protected]"
] | |
7521983485a9ed8ab0dbbdc43354ae8de7901ef3
|
a2eaa3decc385dea227da8a99203f767f32cf941
|
/scientific_expedition/call_to_home.py
|
514d1f866aac2f91ffa05e36acff4c7404569275
|
[] |
no_license
|
vlad-bezden/py.checkio
|
94db32111eeeb2cd90c7b3c4606ea72cf2bb6678
|
6cd870ca3056cc9dcdce0ad520c27e92311719b3
|
refs/heads/master
| 2021-07-01T18:39:35.955671 | 2020-10-05T00:56:38 | 2020-10-05T00:56:38 | 93,111,389 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,912 |
py
|
"""Call to Home
https://py.checkio.org/en/mission/calls-home/
Nicola believes that Sophia calls to Home too much and her
phone bill is much too expensive. He took the bills for Sophia's
calls from the last few days and wants to calculate how much it costs.
The bill is represented as an array with information about the calls.
Help Nicola to calculate the cost for each of Sophia calls.
Each call is represented as a string with date, time and duration
of the call in seconds in the follow format:
"YYYY-MM-DD hh:mm:ss duration"
The date and time in this information are the start of the call.
Space-Time Communications Co. has several rules on how
to calculate the cost of calls:
First 100 (one hundred) minutes in one day are priced at 1 coin per minute;
After 100 minutes in one day, each minute costs 2 coins per minute;
All calls are rounded up to the nearest minute.
For example 59 sec ≈ 1 min, 61 sec ≈ 2 min;
Calls count on the day when they began.
For example if a call was started 2014-01-01 23:59:59,
then it counted to 2014-01-01;
For example:
2014-01-01 01:12:13 181
2014-01-02 20:11:10 600
2014-01-03 01:12:13 6009
2014-01-03 12:13:55 200
First day -- 181s≈4m -- 4 coins;
Second day -- 600s=10m -- 10 coins;
Third day -- 6009s≈101m + 200s≈4m -- 100 + 5 * 2 = 110 coins;
Total -- 124 coins.
Input: Information about calls as a tuple of strings.
Output: The total cost as an integer.
Precondition: 0 < len(calls) ≤ 30
0 < call_duration ≤ 7200
The bill is sorted by datetime.
"""
from itertools import groupby
from math import ceil
from typing import Tuple
def total_cost(calls: Tuple[str, ...]) -> int:
"""Calculate total cost.
ceil(int(m[20:]) / 60) - rounds to the nearest minute
max(mins, mins * 2 - 100) - calculates cost
"""
return sum(
max(mins, mins * 2 - 100)
for mins in (
sum(ceil(int(m[20:]) / 60) for m in t)
for _, t in groupby(calls, lambda i: i[:10])
)
)
if __name__ == "__main__":
result = total_cost(
(
"2014-01-01 01:12:13 181",
"2014-01-02 20:11:10 600",
"2014-01-03 01:12:13 6009",
"2014-01-03 12:13:55 200",
)
)
assert result == 124, "Base example"
result = total_cost(
(
"2014-02-05 01:00:00 1",
"2014-02-05 02:00:00 1",
"2014-02-05 03:00:00 1",
"2014-02-05 04:00:00 1",
)
)
assert result == 4, "Short calls but money..."
result = total_cost(
(
"2014-02-05 01:00:00 60",
"2014-02-05 02:00:00 60",
"2014-02-05 03:00:00 60",
"2014-02-05 04:00:00 6000",
)
)
assert result == 106, "Precise calls"
print("PASSED!!!")
|
[
"[email protected]"
] | |
c8a369c28a65789362fbf679e7d7872192bb9b8e
|
e21837ee462fb31a088bd903ecdbb96631020d0a
|
/Arcade/The core/Forest Edge/concatenateArray.py
|
dcf7dcfd033db1a5cb4ec723c39db8792f792b53
|
[] |
no_license
|
delta94/Code_signal-
|
7965ee96a858425c65c7a51a47833d80c8e6d8d3
|
1383a528b4353b7b8db4a6634ea0caa2b5895f9d
|
refs/heads/master
| 2020-07-31T05:38:22.158742 | 2019-04-12T18:46:16 | 2019-04-12T18:46:16 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 286 |
py
|
"""
Given two arrays of integers a and b, obtain the array formed by the elements of a followed by the elements of b.
Example
For a = [2, 2, 1] and b = [10, 11], the output should be
concatenateArrays(a, b) = [2, 2, 1, 10, 11].
"""
def concatenateArrays(a, b):
return a + b
|
[
"[email protected]"
] | |
44eb9d21029ed8f03f9e1da60e1eca309c0cd116
|
a70da47d4a09865bf6c44b8d61069c2724cdc9a4
|
/friendface/web/external.py
|
29f7f98911492854bd69af5dd5c3b88bfab24078
|
[
"BSD-2-Clause"
] |
permissive
|
eblade/friendface
|
f1251fd59894c2330567e3872c565049c2c13471
|
152e7d743cf7a4f75c95a24176127e901cfcb652
|
refs/heads/master
| 2021-01-15T15:30:58.870507 | 2016-01-19T07:55:51 | 2016-01-19T07:55:51 | 48,060,039 | 0 | 0 | null | 2016-01-19T07:33:43 | 2015-12-15T18:00:41 |
JavaScript
|
UTF-8
|
Python
| false | false | 1,122 |
py
|
# -*- coding: utf-8 -*-
from bottle import HTTPResponse
from .api import Api
class ExternalApi(Api):
def __init__(self, session, app):
self.session = session
self.app = app
# Set Up External Endpoints
app.route('/m/<message_id>', 'GET', self.get_message_by_id)
app.route('/b/<message_id>', 'GET', self.get_branch_by_id)
# override
def get_thread_name(self):
return 'external_api'
def get_message_by_id(self, message_id):
message = self.session.get_message(message_id)
if message is None:
return HTTPResponse('Unknown message', 404)
body, headers = message.to_http()
raise HTTPResponse(
body=body,
status=200,
headers=headers,
)
def get_branch_by_id(self, message_id):
branch = self.session.get_branch(message_id)
if branch is None:
return HTTPResponse('Unknown branch', 404)
raise HTTPResponse(
branch.to_uri_list(),
status=200,
headers={'Content-Type': 'text/uri-list'},
)
|
[
"[email protected]"
] | |
0ad751b2366c9467449cd21b0531e918cb224cde
|
954ceac52dfe831ed7c2b302311a20bb92452727
|
/python/tvm/driver/tvmc/runner.py
|
216f3bb2653bc3676b0066490964781385287abf
|
[
"Apache-2.0",
"LLVM-exception",
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"Zlib",
"Unlicense",
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
tqchen/tvm
|
a0e4aefe8b8dccbdbe6f760549bed6e9545ad4a1
|
678d01dd4a4e75ef6186ce356bb1a20e584a7b24
|
refs/heads/main
| 2023-08-10T02:21:48.092636 | 2023-02-25T18:22:10 | 2023-02-25T18:22:10 | 100,638,323 | 23 | 8 |
Apache-2.0
| 2023-02-20T16:28:46 | 2017-08-17T19:30:37 |
Python
|
UTF-8
|
Python
| false | false | 26,134 |
py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Provides support to run compiled networks both locally and remotely.
"""
from contextlib import ExitStack
import logging
import pathlib
from typing import Dict, Optional, Union
from tarfile import ReadError
import argparse
import sys
import json
import numpy as np
import tvm
from tvm import rpc
from tvm.runtime import vm
from tvm.autotvm.measure import request_remote
from tvm.contrib import graph_executor as executor
from tvm.contrib.debugger import debug_executor
from tvm.runtime import profiler_vm
from tvm.relay.param_dict import load_param_dict
from . import TVMCException
from .arguments import TVMCSuppressedArgumentParser
from .project import (
get_project_options,
get_and_check_options,
get_project_dir,
)
from .main import register_parser
from .model import TVMCPackage, TVMCResult
from .result_utils import get_top_results
from .tracker import tracker_host_port_from_cli
try:
import tvm.micro.project as project
from tvm.micro.project import TemplateProjectError
from tvm.micro.project_api.client import ProjectAPIServerNotFoundError
SUPPORT_MICRO = True
except (ImportError, AttributeError) as exception:
SUPPORT_MICRO = False
# pylint: disable=invalid-name
logger = logging.getLogger("TVMC")
@register_parser
def add_run_parser(subparsers, main_parser, json_params):
"""Include parser for 'run' subcommand"""
    # Use conflict_handler='resolve' to allow '--list-options' option to be properly overridden when
# augmenting the parser with the micro device options (i.e. when '--device micro').
parser = subparsers.add_parser("run", help="run a compiled module", conflict_handler="resolve")
parser.set_defaults(func=drive_run)
# TODO --device needs to be extended and tested to support other targets,
# like 'webgpu', etc (@leandron)
parser.add_argument(
"--device",
choices=["cpu", "cuda", "cl", "metal", "vulkan", "rocm", "micro"],
default="cpu",
help="target device to run the compiled module. Defaults to 'cpu'",
)
parser.add_argument(
"--fill-mode",
choices=["zeros", "ones", "random"],
default="random",
help="fill all input tensors with values. In case --inputs/-i is provided, "
"they will take precedence over --fill-mode. Any remaining inputs will be "
"filled using the chosen fill mode. Defaults to 'random'",
)
parser.add_argument("-i", "--inputs", help="path to the .npz input file")
parser.add_argument("-o", "--outputs", help="path to the .npz output file")
parser.add_argument(
"--print-time",
action="store_true",
help="record and print the execution time(s). Enabling print-time will result "
" in (1 + repeat * number) executions of the model. (non-micro devices only)",
)
parser.add_argument(
"--print-top",
metavar="N",
type=int,
help="print the top n values and indices of the output tensor",
)
parser.add_argument(
"--profile",
action="store_true",
help="generate profiling data from the runtime execution. "
"Using --profile requires the Graph Executor Debug enabled on TVM. "
"Profiling may also have an impact on inference time, "
"making it take longer to be generated. (non-micro devices only)",
)
parser.add_argument(
"--end-to-end",
action="store_true",
help="Measure data transfers as well as model execution. This can provide a "
"more realistic performance measurement in many cases. Requires "
"'--print-time' to be specified.",
)
parser.add_argument(
"--repeat",
metavar="N",
type=int,
default=1,
help="How many times to repeat the run. Requires '--print-time' to be "
"specified. Defaults to '1'",
)
parser.add_argument(
"--number",
metavar="N",
type=int,
default=1,
help="The number of runs to measure within each repeat. Requires "
"'--print-time' to be specified. Defaults to '1'",
)
parser.add_argument(
"--rpc-key",
help="the RPC tracker key of the target device. (non-micro devices only)",
)
parser.add_argument(
"--rpc-tracker",
help="hostname (required) and port (optional, defaults to 9090) of the RPC tracker, "
"e.g. '192.168.0.100:9999'. (non-micro devices only)",
)
parser.add_argument(
"PATH",
help="path to the compiled module file or to the project directory if '--device micro' "
"is selected.",
)
parser.add_argument(
"--list-options",
action="store_true",
help="show all run options and option choices when '--device micro' is selected. "
"(micro devices only)",
)
disposable_parser = TVMCSuppressedArgumentParser(main_parser)
try:
known_args, _ = disposable_parser.parse_known_args()
except TVMCException:
return
if vars(known_args).get("device") != "micro":
# No need to augment the parser for micro targets.
return
if SUPPORT_MICRO is False:
sys.exit(
"'--device micro' is not supported. "
"Please build TVM with micro support (USE_MICRO ON)!"
)
project_dir = get_project_dir(known_args.PATH)
try:
project_ = project.GeneratedProject.from_directory(project_dir, None)
except ProjectAPIServerNotFoundError:
sys.exit(f"Error: Project API server not found in {project_dir}!")
except TemplateProjectError:
sys.exit(
f"Error: Project directory error. That usually happens when model.tar is not found."
)
project_info = project_.info()
options_by_method = get_project_options(project_info)
mlf_path = project_info["model_library_format_path"]
parser.formatter_class = (
argparse.RawTextHelpFormatter
) # Set raw help text so customized help_text format works
parser.set_defaults(valid_options=options_by_method["open_transport"], mlf_path=mlf_path)
required = any([opt["required"] for opt in options_by_method["open_transport"]])
nargs = "+" if required else "*"
help_text_by_option = [opt["help_text"] for opt in options_by_method["open_transport"]]
help_text = "\n\n".join(help_text_by_option) + "\n\n"
parser.add_argument(
"--project-option", required=required, metavar="OPTION=VALUE", nargs=nargs, help=help_text
)
parser.add_argument(
"--list-options",
action="help",
help="show this help message with platform-specific options and exit.",
)
for one_entry in json_params:
parser.set_defaults(**one_entry)
def drive_run(args):
"""Invoke runner module with command line arguments
Parameters
----------
args: argparse.Namespace
Arguments from command line parser.
"""
path = pathlib.Path(args.PATH)
options = None
project_dir = None
if args.device == "micro":
# If it's a micro device, then grab the model.tar path from Project API instead.
# args.PATH will be used too since it points to the project directory. N.B.: there is no
        # way to determine the model.tar path from the project dir or vice-versa (each platform
        # is free to put model.tar wherever it's convenient).
project_dir = path
path = pathlib.Path(args.mlf_path)
# Check for options unavailable for micro targets.
if args.rpc_key or args.rpc_tracker:
raise TVMCException(
"--rpc-key and/or --rpc-tracker can't be specified for micro targets."
)
if args.device != "micro":
raise TVMCException(
f"Device '{args.device}' not supported. "
"Only device 'micro' is supported to run a model in MLF, "
"i.e. when '--device micro'."
)
if args.profile:
raise TVMCException("--profile is not currently supported for micro devices.")
if args.print_time:
raise TVMCException("--print-time is not currently supported for micro devices.")
# Get and check options for micro targets.
options = get_and_check_options(args.project_option, args.valid_options)
else:
        # Check for options only available for micro targets.
if args.list_options:
            raise TVMCException(
                "--list-options is only available on micro targets, i.e. when '--device micro'."
)
try:
tvmc_package = TVMCPackage(package_path=path, project_dir=project_dir)
except IsADirectoryError:
raise TVMCException(f"File {path} must be an archive, not a directory.")
except FileNotFoundError:
raise TVMCException(f"File {path} does not exist.")
except ReadError:
raise TVMCException(f"Could not read model from archive {path}!")
rpc_hostname, rpc_port = tracker_host_port_from_cli(args.rpc_tracker)
try:
inputs = np.load(args.inputs) if args.inputs else {}
except IOError as ex:
raise TVMCException("Error loading inputs file: %s" % ex)
result = run_module(
tvmc_package,
args.device,
hostname=rpc_hostname,
port=rpc_port,
rpc_key=args.rpc_key,
inputs=inputs,
fill_mode=args.fill_mode,
benchmark=args.print_time,
repeat=args.repeat,
number=args.number,
profile=args.profile,
end_to_end=args.end_to_end,
options=options,
)
if args.print_time:
stat_table = result.format_times()
# print here is intentional
print(stat_table)
if args.print_top:
top_results = get_top_results(result, args.print_top)
# print here is intentional
print(top_results)
if args.outputs:
# Save the outputs
result.save(args.outputs)
def get_input_info(graph_str: str, params: Dict[str, tvm.nd.NDArray]):
"""Return the 'shape' and 'dtype' dictionaries for the input
tensors of a compiled module.
.. note::
We can't simply get the input tensors from a TVM graph
because weight tensors are treated equivalently. Therefore, to
find the input tensors we look at the 'arg_nodes' in the graph
(which are either weights or inputs) and check which ones don't
appear in the params (where the weights are stored). These nodes
are therefore inferred to be input tensors.
.. note::
There exists a more recent API to retrieve the input information
directly from the module. However, this isn't supported when using
with RPC due to a lack of support for Array and Map datatypes.
Therefore, this function exists only as a fallback when RPC is in
use. If RPC isn't being used, please use the more recent API.
Parameters
----------
graph_str : str
JSON graph of the module serialized as a string.
params : dict
Parameter dictionary mapping name to value.
Returns
-------
shape_dict : dict
Shape dictionary - {input_name: tuple}.
dtype_dict : dict
dtype dictionary - {input_name: dtype}.
"""
shape_dict = {}
dtype_dict = {}
params_dict = load_param_dict(params)
param_names = [k for (k, v) in params_dict.items()]
graph = json.loads(graph_str)
for node_id in graph["arg_nodes"]:
node = graph["nodes"][node_id]
# If a node is not in the params, infer it to be an input node
name = node["name"]
if name not in param_names:
shape_dict[name] = graph["attrs"]["shape"][1][node_id]
dtype_dict[name] = graph["attrs"]["dltype"][1][node_id]
return shape_dict, dtype_dict
def generate_tensor_data(shape: tuple, dtype: str, fill_mode: str):
"""Generate data to produce a tensor of given shape and dtype.
Random data generation depends on the dtype. For int8 types,
random integers in the range 0->255 are generated. For all other
types, random floats are generated in the range -1->1 and then
cast to the appropriate dtype.
This is used to quickly generate some data to input the models, as
a way to check that compiled module is sane for running.
Parameters
----------
shape : tuple
The shape of the tensor.
dtype : str
The dtype of the tensor.
fill_mode : str
The fill-mode to use, either "zeros", "ones" or "random".
Returns
-------
tensor : np.array
The generated tensor as a np.array.
"""
if fill_mode == "zeros":
tensor = np.zeros(shape=shape, dtype=dtype)
elif fill_mode == "ones":
tensor = np.ones(shape=shape, dtype=dtype)
elif fill_mode == "random":
if "int8" in dtype:
tensor = np.random.randint(128, size=shape, dtype=dtype)
else:
tensor = np.random.uniform(-1, 1, size=shape).astype(dtype)
else:
raise TVMCException("unknown fill-mode: {}".format(fill_mode))
return tensor
def make_inputs_dict(
shape_dict: tvm.container.Map,
dtype_dict: tvm.container.Map,
inputs: Optional[Dict[str, np.ndarray]] = None,
fill_mode: str = "random",
):
"""Make the inputs dictionary for a graph.
Use data from 'inputs' where specified. For input tensors
where no data has been given, generate data according to the
chosen fill-mode.
Parameters
----------
shape_dict : Map
Shape dictionary - {input_name: tuple}.
dtype_dict : Map
dtype dictionary - {input_name: dtype}.
inputs : dict, optional
A dictionary that maps input names to numpy values.
fill_mode : str, optional
The fill-mode to use when generating tensor data.
Can be either "zeros", "ones" or "random".
Returns
-------
inputs_dict : dict
Complete inputs dictionary - {input_name: np.array}.
"""
logger.debug("creating inputs dict")
if inputs is None:
inputs = {}
# First check all the keys in inputs exist in the graph
for input_name in inputs:
if input_name not in shape_dict.keys():
raise TVMCException(
"the input tensor '{}' is not in the graph. Expected inputs: '{}'".format(
input_name, list(shape_dict.keys())
)
)
# Now construct the input dict, generating tensors where no
# data already exists in 'inputs'
inputs_dict = {}
for input_name in shape_dict:
if input_name in inputs.keys():
logger.debug("setting input '%s' with user input data", input_name)
inputs_dict[input_name] = inputs[input_name]
else:
            # container.ShapeTuple -> tuple
shape = tuple(shape_dict[input_name])
# container.String -> str
dtype = str(dtype_dict[input_name])
logger.debug(
"generating data for input '%s' (shape: %s, dtype: %s), using fill-mode '%s'",
input_name,
shape,
dtype,
fill_mode,
)
data = generate_tensor_data(shape, dtype, fill_mode)
inputs_dict[input_name] = data
return inputs_dict
def run_module(
tvmc_package: TVMCPackage,
device: str,
hostname: Optional[str] = None,
port: Union[int, str] = 9090,
rpc_key: Optional[str] = None,
inputs: Optional[Dict[str, np.ndarray]] = None,
fill_mode: str = "random",
benchmark: bool = False,
repeat: int = 10,
number: int = 10,
profile: bool = False,
end_to_end: bool = False,
options: dict = None,
):
"""Run a compiled graph executor module locally or remotely with
optional input values.
If input tensors are not specified explicitly, they can be filled
with zeroes, ones or random data.
Parameters
----------
tvmc_package: TVMCPackage
The compiled model package object that will be run.
device: str,
the device (e.g. "cpu" or "cuda") to be targeted by the RPC
session, local or remote).
hostname : str, optional
The hostname of the target device on which to run.
port : int, optional
The port of the target device on which to run.
rpc_key : str, optional
The tracker key of the target device. If this is set, it
will be assumed that remote points to a tracker.
inputs : dict, optional
A dictionary that maps input names to numpy values. If not provided,
inputs will be generated using the fill_mode argument.
fill_mode : str, optional
The fill-mode to use when generating data for input tensors.
Valid options are "zeros", "ones" and "random".
Defaults to "random".
benchmark : bool, optional
Whether to benchmark the execution of the module. Enabling benchmark will
result in (1 + repeat * number) executions of the model.
repeat : int, optional
How many times to repeat the run. Requires `benchmark` to be set to True.
number : int, optional
The number of runs to measure within each repeat.
Requires `benchmark` to be set to True.
profile : bool
Whether to profile the run with the debug executor.
end_to_end : bool
Whether to measure the time of memory copies as well as model
execution. Turning this on can provide a more realistic estimate
of how long running the model in production would take.
Requires `benchmark` to be set to True.
Returns
-------
TVMCResult
The results of the run, including the output data.
"""
if not isinstance(tvmc_package, TVMCPackage):
raise TVMCException(
"This model doesn't seem to have been compiled yet. "
"Try calling tvmc.compile on the model before running it."
)
with ExitStack() as stack:
# Currently only two package formats are supported: "classic" and
# "mlf". The later can only be used for micro targets, i.e. with microTVM.
if device == "micro":
if tvmc_package.type != "mlf":
raise TVMCException(f"Model {tvmc_package.package_path} is not a MLF archive.")
project_dir = get_project_dir(tvmc_package.project_dir)
# This is guaranteed to work since project_dir was already checked when
# building the dynamic parser to accommodate the project options, so no
# checks are in place when calling GeneratedProject.
project_ = project.GeneratedProject.from_directory(project_dir, options)
else:
if tvmc_package.type == "mlf":
raise TVMCException(
"You're trying to run a model saved using the Model Library Format (MLF). "
"MLF can only be used to run micro device ('--device micro')."
)
if hostname:
if isinstance(port, str):
port = int(port)
# Remote RPC
if rpc_key:
logger.debug("Running on remote RPC tracker with key %s.", rpc_key)
session = request_remote(rpc_key, hostname, port, timeout=1000)
else:
logger.debug("Running on remote RPC with no key.")
session = rpc.connect(hostname, port)
elif device == "micro":
# Remote RPC (running on a micro target)
logger.debug("Running on remote RPC (micro target).")
try:
session = tvm.micro.Session(project_.transport())
stack.enter_context(session)
except:
raise TVMCException("Could not open a session with the micro target.")
else:
# Local
logger.debug("Running a local session.")
session = rpc.LocalSession()
# Micro targets don't support uploading a model. The model to be run
# must be already flashed into the micro target before one tries
# to run it. Hence skip model upload for micro targets.
if device != "micro":
session.upload(tvmc_package.lib_path)
lib = session.load_module(tvmc_package.lib_name)
# TODO expand to other supported devices, as listed in tvm.rpc.client (@leandron)
logger.debug("Device is %s.", device)
if device == "cuda":
dev = session.cuda()
elif device == "cl":
dev = session.cl()
elif device == "metal":
dev = session.metal()
elif device == "vulkan":
dev = session.vulkan()
elif device == "rocm":
dev = session.rocm()
elif device == "micro":
dev = session.device
lib = session.get_system_lib()
else:
assert device == "cpu"
dev = session.cpu()
if tvmc_package.type == "vm":
assert inputs is not None, "vm runner requires inputs to be provided as a dict"
input_tensor = {}
for e, i in inputs.items():
input_tensor[e] = tvm.nd.array(i, dev)
if profile:
logger.debug("Creating vm with profile enabled.")
exe = profiler_vm.VirtualMachineProfiler(lib, dev)
res = exe.profile(**input_tensor, func_name="main")
# This print is intentional
print(res)
else:
exe = vm.VirtualMachine(lib, dev)
exe_outputs = exe.invoke("main", **input_tensor)
if benchmark:
times = exe.benchmark(
dev,
**input_tensor,
func_name="main",
repeat=repeat,
number=number,
end_to_end=end_to_end,
)
else:
exe.run(**input_tensor)
times = []
# Special handling if the output only has a single value
if not isinstance(exe_outputs, list):
exe_outputs = [exe_outputs]
outputs = {}
for i, val in enumerate(exe_outputs):
output_name = "output_{}".format(i)
outputs[output_name] = val.numpy()
else:
# TODO(gromero): Adjust for micro targets.
if profile:
logger.debug("Creating runtime with profiling enabled.")
module = debug_executor.create(tvmc_package.graph, lib, dev, dump_root="./prof")
else:
if device == "micro":
logger.debug("Creating runtime (micro) with profiling disabled.")
module = tvm.micro.create_local_graph_executor(tvmc_package.graph, lib, dev)
else:
logger.debug("Creating runtime with profiling disabled.")
module = executor.create(tvmc_package.graph, lib, dev)
logger.debug("Loading params into the runtime module.")
module.load_params(tvmc_package.params)
logger.debug("Collecting graph input shape and type:")
if isinstance(session, tvm.rpc.client.RPCSession):
# RPC does not support datatypes such as Array and Map,
# fallback to obtaining input information from graph json.
shape_dict, dtype_dict = get_input_info(tvmc_package.graph, tvmc_package.params)
else:
shape_dict, dtype_dict = module.get_input_info()
logger.debug("Graph input shape: %s", shape_dict)
logger.debug("Graph input type: %s", dtype_dict)
inputs_dict = make_inputs_dict(shape_dict, dtype_dict, inputs, fill_mode)
logger.debug("Setting inputs to the module.")
module.set_input(**inputs_dict)
# Run must be called explicitly if profiling
if profile:
logger.info("Running the module with profiling enabled.")
report = module.profile()
# This print is intentional
print(report)
if not benchmark or device == "micro":
# TODO(gromero): Fix time_evaluator() for micro targets. Once it's
# fixed module.benchmark() can be used instead and this if/else can
# be removed.
module.run()
times = []
else:
# Call the benchmarking function of the executor.
# Optionally measure e2e data transfers from the
# CPU to device memory overheads (e.g. PCIE
# overheads if the device is a discrete GPU).
if end_to_end:
dev = session.cpu()
times = module.benchmark(dev, number=number, repeat=repeat, end_to_end=end_to_end)
logger.debug("Collecting the output tensors.")
num_outputs = module.get_num_outputs()
outputs = {}
for i in range(num_outputs):
output_name = "output_{}".format(i)
outputs[output_name] = module.get_output(i).numpy()
return TVMCResult(outputs, times)
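# --- Illustrative usage sketch (editor's addition, not part of TVMC itself). ---
# "compiled_module.tar" is a hypothetical archive produced beforehand by the TVMC
# compile step; the call below simply exercises run_module() as documented above.
def _example_local_run():
    package = TVMCPackage(package_path="compiled_module.tar")
    result = run_module(
        package,
        device="cpu",
        fill_mode="zeros",  # fill all inputs with zeros instead of random data
        benchmark=True,     # (1 + repeat * number) executions, timings collected
        repeat=3,
        number=2,
    )
    print(result.format_times())  # same table drive_run() prints with --print-time
    return result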
|
[
"[email protected]"
] | |
3c6719eb8f4251d97d68d4d931cb3d06c0651282
|
3de87aebbd1db2b1a241415d169e4d338216975d
|
/tests/filter_integration_tests/test_filters_with_mongo_storage.py
|
ada988c2f1602bb7d1285a65a7b018f3fe8c2f74
|
[
"BSD-3-Clause"
] |
permissive
|
SeppPenner/ChatterBot
|
42543b49343ac7336371ddbf5dbd70177b2725c1
|
4a1201c0697ebc787cbf348e372ced1fdceb4efb
|
refs/heads/master
| 2021-05-11T07:37:56.466278 | 2020-02-20T08:58:38 | 2020-02-20T08:58:38 | 118,025,056 | 1 | 0 |
BSD-3-Clause
| 2020-02-20T08:58:39 | 2018-01-18T19:00:50 |
Python
|
UTF-8
|
Python
| false | false | 981 |
py
|
from tests.base_case import ChatBotMongoTestCase
class RepetitiveResponseFilterTestCase(ChatBotMongoTestCase):
"""
Test case for the RepetitiveResponseFilter class.
"""
def test_filter_selection(self):
"""
Test that repetitive responses are filtered out of the results.
"""
from chatterbot.filters import RepetitiveResponseFilter
from chatterbot.trainers import ListTrainer
self.chatbot.filters = (RepetitiveResponseFilter(), )
self.chatbot.set_trainer(ListTrainer)
self.chatbot.train([
'Hello',
'Hi',
'Hello',
'Hi',
'Hello',
'Hi, how are you?',
'I am good.'
])
first_response = self.chatbot.get_response('Hello')
second_response = self.chatbot.get_response('Hello')
self.assertEqual(first_response.text, 'Hi')
self.assertEqual(second_response.text, 'Hi, how are you?')
|
[
"[email protected]"
] | |
50af79ea848568965634294186fcface1d2ec00d
|
5a1f77b71892745656ec9a47e58a078a49eb787f
|
/8_Cloudrip_Mountain/413-Medic_School/medical_school.py
|
89f7c27ff1d706f7e9c4bb89b27fe72157f9d8c4
|
[
"MIT"
] |
permissive
|
ripssr/Code-Combat
|
78776e7e67c033d131e699dfeffb72ca09fd798e
|
fbda1ac0ae4a2e2cbfce21492a2caec8098f1bef
|
refs/heads/master
| 2020-06-11T20:17:59.817187 | 2019-07-21T09:46:04 | 2019-07-21T09:46:04 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 545 |
py
|
def startsWith(phrase, word):
if len(word) > len(phrase):
return False
for i in range(len(word)):
if phrase[i] != word[i]:
return False
return True
def onHear(event):
if startsWith(event.speaker.id, "Exp"):
potion = pet.findNearestByType("potion")
if potion:
pet.fetch(potion)
pet.moveXY(28, 34)
pet.on("hear", onHear)
while True:
nearest = hero.findNearest(hero.findByType("mushroom"))
if nearest:
hero.moveXY(nearest.pos.x, nearest.pos.y)
|
[
"[email protected]"
] | |
181382f3f67c9810cdef2e21360bfe18a6ffac71
|
8df87b22b2689b93bbe418f2a7c7a69a2bebbd90
|
/other_solutions/exception2.py
|
c0caeb7214d4bb038af405a05f0b4ef331c03595
|
[] |
no_license
|
SHUHAIB-AREEKKAN/automate_boring_stuff-Solutions
|
e3f25ec85e5a2b99f431430081c55c12c09c007b
|
5df77b8b41d43f442671dd9595313f80c88be1d0
|
refs/heads/master
| 2021-01-25T08:27:42.810089 | 2017-06-08T15:43:28 | 2017-06-08T15:43:28 | 93,765,978 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 198 |
py
|
def spam(inputs):
return 42 / inputs
try:
print(spam(2))
print(spam(3))
print(spam(0))
print(spam(4))
except ZeroDivisionError:
    print("a zero division error occurred")
|
[
"[email protected]"
] | |
69cbebcc23d85d8a69d474bc7321a1c3cd7515e4
|
bc28f8fe941caf281261afa1641868e743ecb5ab
|
/Commonlounge/ZCO12001.py
|
f10af53a2fed4813735914c2dbd168b8f2dafbd9
|
[] |
no_license
|
anubhavshrimal/CompetitiveProgrammingInPython
|
9fc6949fb3cd715cfa8544c17a63ffbe52677b37
|
2692c446d49ec62d4967ed78a7973400db7ce981
|
refs/heads/master
| 2021-07-05T08:17:15.182154 | 2018-05-29T02:26:25 | 2018-05-29T02:26:25 | 60,554,340 | 7 | 6 | null | 2021-05-24T17:46:16 | 2016-06-06T19:18:27 |
Python
|
UTF-8
|
Python
| false | false | 721 |
py
|
n = int(input())
seq = input().split()
depth = 0
start_index = 0
curr_seq = 0
# maximum depth value
max_depth = 0
# start index of max depth
depth_start = 0
# maximum sequence length
max_seq_len = 0
# start index of max sequence
max_seq_start = 0
for i, b in enumerate(seq, start=1):
if b == '1':
depth += 1
if depth == 1:
start_index = i
elif b == '2':
depth -= 1
curr_seq += 1
if max_depth < depth:
max_depth = depth
depth_start = i
if depth == 0:
if curr_seq > max_seq_len:
max_seq_len = curr_seq
max_seq_start = start_index
curr_seq = 0
print(max_depth, depth_start, max_seq_len, max_seq_start)
|
[
"[email protected]"
] | |
a1a63d340e315a9235d8ab66fb99ae5cbec6881f
|
f6c40808efe1cf9e5f2c76a29a1dd1bf86df8d4d
|
/prism/code_gen.py
|
77e98eddfaae72979089800d4f65eb489f600553
|
[] |
no_license
|
sahabi/versynth
|
55709f3a19ab229f9a995830a47add83f9cc6fc4
|
9983344c03a1ee887d9b145655497b9862663212
|
refs/heads/master
| 2021-07-21T00:03:52.295043 | 2017-10-31T20:01:56 | 2017-10-31T20:01:56 | 109,047,869 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,998 |
py
|
x_size = 4
y_size = 4
prism_code = "mdp\n"
prism_code += "module grid\n"
prism_code += " o_state : [0..{}] init 0;\n".format(x_size*y_size-1)  # states run from 0 to x_size*y_size-1
#prism_code += " a_x : [0..{}];\n".format(x_size-1)
#prism_code += " a_y : [0..{}];\n".format(y_size-1)
def stay(i,j,role):
return "({0}_state' = {state})".format(role,state=i+(x_size*j))
def left(i,j,role):
return "({0}_state' = {state})".format(role,state=(i-1)+(j*x_size))
def right(i,j,role):
return "({0}_state' = {state})".format(role,state=(i+1)+(x_size*j))
def up(i,j,role):
return "({0}_state' = {state})".format(role,state=i+((j+1)*x_size))
def down(i,j,role):
return "({0}_state' = {state})".format(role,state=i+((j-1)*x_size))
def current(i,j,role="o"):
return " [] {0}_state = {state} -> ".format(role,state=i+(j*x_size))
def is_bot_left(x,y):
return x == 0 == j
def is_top_right(x,y):
return x == x_size-1 and y == y_size-1
def is_top_left(x,y):
return x == 0 and y == y_size-1
def is_bot_right(x,y):
return x == x_size-1 and y == 0
def is_bot(y):
return y == 0
def is_top(y):
return y == y_size-1
def is_right(x):
return x == x_size-1
def is_left(x):
return x == 0
for i in range(x_size):
for j in range(y_size):
if is_bot_left(i,j):
prism_code += current(i,j,role="o")+\
"1/3 : "+right(i,j,role="o")+\
"+ 1/3 : "+up(i,j,role="o")+\
"+ 1/3 : "+stay(i,j,role="o")+\
";\n"
elif is_top_right(i,j):
prism_code += current(i,j,role="o")+\
"1/3 : "+left(i,j,role="o")+\
"+ 1/3 : "+down(i,j,role="o")+\
"+ 1/3 : "+stay(i,j,role="o")+\
";\n"
elif is_top_left(i,j):
prism_code += current(i,j,role="o")+\
"1/3 : "+right(i,j,role="o")+\
"+ 1/3 : "+down(i,j,role="o")+\
"+ 1/3 : "+stay(i,j,role="o")+\
";\n"
elif is_bot_right(i,j):
prism_code += current(i,j,role="o")+\
"1/3 : "+left(i,j,role="o")+\
"+ 1/3 : "+up(i,j,role="o")+\
"+ 1/3 : "+stay(i,j,role="o")+\
";\n"
elif is_bot(j):
prism_code += current(i,j,role="o")+\
"1/4 : "+left(i,j,role="o")+\
"+ 1/4 : "+up(i,j,role="o")+\
"+ 1/4 : "+stay(i,j,role="o")+\
"+ 1/4 : "+right(i,j,role="o")+\
";\n"
elif is_top(j):
prism_code += current(i,j,role="o")+\
"1/4 : "+left(i,j,role="o")+\
"+ 1/4 : "+down(i,j,role="o")+\
"+ 1/4 : "+stay(i,j,role="o")+\
"+ 1/4 : "+right(i,j,role="o")+\
";\n"
elif is_left(i):
prism_code += current(i,j,role="o")+\
"1/4 : "+down(i,j,role="oo")+\
"+ 1/4 : "+up(i,j,role="o")+\
"+ 1/4 : "+stay(i,j,role="o")+\
"+ 1/4 : "+right(i,j,role="o")+\
";\n"
elif is_right(i):
prism_code += current(i,j,role="o")+\
"1/4 : "+left(i,j,role="o")+\
"+ 1/4 : "+down(i,j,role="o")+\
"+ 1/4 : "+stay(i,j,role="o")+\
"+ 1/4 : "+up(i,j,role="o")+\
";\n"
else:
prism_code += current(i,j,role="o")+\
"1/5 : "+left(i,j,role="o")+\
"+ 1/5 : "+down(i,j,role="o")+\
"+ 1/5 : "+stay(i,j,role="o")+\
"+ 1/5 : "+up(i,j,role="o")+\
"+ 1/5 : "+right(i,j,role="o")+\
";\n"
prism_code = prism_code.format(y=j,py=j+1,ny=j-1,nx=i-1,px=i+1,x=i)
prism_code += "endmodule"
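# Editor's note: for x_size = y_size = 4 the loop above emits guarded commands such as
# the one below for the bottom-left cell (i = 0, j = 0):
#   [] o_state = 0 -> 1/3 : (o_state' = 1)+ 1/3 : (o_state' = 4)+ 1/3 : (o_state' = 0);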
print(prism_code)
|
[
"[email protected]"
] | |
8b7797c26c13e1431cec28e5848ed994de0e8abc
|
f7dd190a665a4966db33dcc1cc461dd060ca5946
|
/venv/Lib/site-packages/django/core/management/commands/startproject.py
|
28867788a63c8555e1302fde581cfed0a80073fb
|
[] |
no_license
|
Darwin939/macmeharder_back
|
2cc35e2e8b39a82c8ce201e63d9f6a9954a04463
|
8fc078333a746ac7f65497e155c58415252b2d33
|
refs/heads/main
| 2023-02-28T12:01:23.237320 | 2021-02-02T17:37:33 | 2021-02-02T17:37:33 | 328,173,062 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 688 |
py
|
from django.core.management.templates import TemplateCommand
from ..utils import get_random_secret_key
class Command(TemplateCommand):
help = (
"Creates a Django project directory structure for the given project "
"name in the current directory or optionally in the given directory."
)
missing_args_message = "You must provide a project name."
def handle(self, **options):
project_name = options.pop('name')
target = options.pop('directory')
# Create a random SECRET_KEY to put it in the apps settings.
options['secret_key'] = get_random_secret_key()
super().handle('project', project_name, target, **options)
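# Editor's note (not part of Django itself): this management command backs the familiar
# CLI invocation, e.g.
#     django-admin startproject mysite            # create ./mysite/
#     django-admin startproject mysite /srv/app   # create it in the given directory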
|
[
"[email protected]"
] | |
58a6d22d2cdddb49ec012d5c542d653ca9e1b958
|
2038ede147bf85734f64f7be110e64db0725c0d3
|
/gerapy/server/core/config.py
|
5d0205438643bf0d32473c81797ccc644e5cdeec
|
[
"MIT"
] |
permissive
|
hantmac/Gerapy
|
24d17388af57208113199657b7c9c30fe1513b0b
|
41b1221dba2b89b89ca98f6bfbcc045cafdac469
|
refs/heads/master
| 2020-04-26T01:51:19.997122 | 2019-06-03T03:10:58 | 2019-06-03T03:10:58 | 173,216,808 | 0 | 0 |
MIT
| 2019-03-01T01:51:31 | 2019-03-01T01:51:30 | null |
UTF-8
|
Python
| false | false | 327 |
py
|
import configparser
from os.path import join
def config(path, section, option, name='scrapy.cfg', default=None):
try:
cf = configparser.ConfigParser()
cfg_path = join(path, name)
cf.read(cfg_path)
return cf.get(section, option)
except configparser.NoOptionError:
return default
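# --- Illustrative usage sketch (editor's addition). ---
# '/path/to/project' is a hypothetical Scrapy project directory containing a
# scrapy.cfg with a [deploy] section; a missing option falls back to `default`.
if __name__ == '__main__':
    print(config('/path/to/project', 'deploy', 'project', default='unknown'))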
|
[
"[email protected]"
] | |
00bfd978db46af137fd5ad6cca417d3e8013d999
|
c65319b258ce8c629bb11d31ed62f77a134c448a
|
/plugins/uvwgen/UVWGenExplicit.py
|
d9eb9a9bede36d3de02814a843d108946d5dfa13
|
[] |
no_license
|
Zaurio/vb30
|
cacf3c254eaa7337e35e95572dfa2b83d4899ea9
|
3d75476c99a3dc91dec226bfb87f01aa598b0824
|
refs/heads/master
| 2020-04-05T23:31:26.220823 | 2015-06-22T12:03:09 | 2015-06-22T12:03:09 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,438 |
py
|
#
# V-Ray For Blender
#
# http://chaosgroup.com
#
# Author: Andrei Izrantcev
# E-Mail: [email protected]
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# All Rights Reserved. V-Ray(R) is a registered trademark of Chaos Software.
#
import bpy
TYPE = 'UVWGEN'
ID = 'UVWGenExplicit'
NAME = 'Explicit'
DESC = ""
PluginParams = (
{
'attr' : 'u',
'desc' : "the U input",
'type' : 'FLOAT_TEXTURE',
'default' : 1.0,
},
{
'attr' : 'v',
'desc' : "the V input",
'type' : 'FLOAT_TEXTURE',
'default' : 1.0,
},
{
'attr' : 'w',
'desc' : "the W input",
'type' : 'FLOAT_TEXTURE',
'default' : 1.0,
},
{
'attr' : 'uvw',
'desc' : "",
'type' : 'TEXTURE',
'default' : (0.0, 0.0, 0.0),
},
)
|
[
"[email protected]"
] | |
a8aa36f535dfb27aa46194530122ca010d024e83
|
12f83344cdfe561db39ad9106dbf263ccd919f7e
|
/Projects/miami_metro/debra/migrations/0060_auto__add_field_wishlistitem_savings__add_field_wishlistitem_promo_app.py
|
3fd817f0a47abe299891869197379b687f649af5
|
[] |
no_license
|
TopWebGhost/Angular-Influencer
|
ebcd28f83a77a92d240c41f11d82927b98bcea9e
|
2f15c4ddd8bbb112c407d222ae48746b626c674f
|
refs/heads/master
| 2021-01-19T10:45:47.039673 | 2016-12-05T01:59:26 | 2016-12-05T01:59:26 | 82,214,998 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 23,411 |
py
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'WishlistItem.savings'
db.add_column('debra_wishlistitem', 'savings', self.gf('django.db.models.fields.FloatField')(default='0', max_length=10), keep_default=False)
# Adding field 'WishlistItem.promo_applied'
db.add_column('debra_wishlistitem', 'promo_applied', self.gf('django.db.models.fields.related.ForeignKey')(default=None, to=orm['debra.Promoinfo'], null=True, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'WishlistItem.savings'
db.delete_column('debra_wishlistitem', 'savings')
# Deleting field 'WishlistItem.promo_applied'
db.delete_column('debra_wishlistitem', 'promo_applied_id')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'debra.brands': {
'Meta': {'object_name': 'Brands'},
'domain_name': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'logo_blueimg_url': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'logo_img_url': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'shopstyle_id': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'})
},
'debra.categories': {
'Meta': {'object_name': 'Categories'},
'brand': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['debra.Brands']", 'symmetrical': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '100'})
},
'debra.categorymodel': {
'Meta': {'object_name': 'CategoryModel'},
'categoryId': ('django.db.models.fields.IntegerField', [], {'default': "'-111'", 'max_length': '50'}),
'categoryName': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'default': "'0'", 'to': "orm['debra.ProductModel']"})
},
'debra.colorsizemodel': {
'Meta': {'object_name': 'ColorSizeModel'},
'color': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '50'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'default': "'0'", 'to': "orm['debra.ProductModel']"}),
'size': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '50'})
},
'debra.combinationofuserops': {
'Meta': {'object_name': 'CombinationOfUserOps'},
'combination_id': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'how_many_out_of_stock': ('django.db.models.fields.IntegerField', [], {'default': "'0'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_out_of_stock': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'task_id': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'tracking_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'user_id': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'user_selection': ('django.db.models.fields.related.ForeignKey', [], {'default': "'2'", 'to': "orm['debra.UserOperations']", 'null': 'True', 'blank': 'True'})
},
'debra.items': {
'Meta': {'object_name': 'Items'},
'brand': ('django.db.models.fields.related.ForeignKey', [], {'default': "'1'", 'to': "orm['debra.Brands']"}),
'cat1': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '100'}),
'cat2': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '100'}),
'cat3': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '100'}),
'cat4': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '100'}),
'cat5': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '100'}),
'gender': ('django.db.models.fields.CharField', [], {'default': "'A'", 'max_length': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'img_url_lg': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'img_url_md': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'img_url_sm': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'insert_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'pr_colors': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '600'}),
'pr_currency': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'pr_id': ('django.db.models.fields.IntegerField', [], {'default': '-1', 'max_length': '100'}),
'pr_instock': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '10'}),
'pr_retailer': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'pr_sizes': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '600'}),
'pr_url': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'price': ('django.db.models.fields.FloatField', [], {'default': "'20.00'", 'max_length': '10'}),
'product_model_key': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['debra.ProductModel']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'saleprice': ('django.db.models.fields.FloatField', [], {'default': "'10.00'", 'max_length': '10'})
},
'debra.pricingtasks': {
'Meta': {'object_name': 'PricingTasks'},
'combination_id': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'enqueue_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'finish_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'free_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'num_items': ('django.db.models.fields.IntegerField', [], {'default': "'1'"}),
'price': ('django.db.models.fields.FloatField', [], {'default': "'-11.0'", 'max_length': '10'}),
'proc_done': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'saleprice': ('django.db.models.fields.FloatField', [], {'default': "'-11.0'", 'max_length': '10'}),
'start_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'task_id': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'unique': 'True', 'max_length': '200'}),
'user_id': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'user_notify': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'debra.productmodel': {
'Meta': {'object_name': 'ProductModel'},
'brand': ('django.db.models.fields.related.ForeignKey', [], {'default': "'1'", 'to': "orm['debra.Brands']"}),
'cat1': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '25'}),
'cat10': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '25'}),
'cat2': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '25'}),
'cat3': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '25'}),
'cat4': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '25'}),
'cat5': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '25'}),
'cat6': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '25'}),
'cat7': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '25'}),
'cat8': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '25'}),
'cat9': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '25'}),
'description': ('django.db.models.fields.TextField', [], {'default': "'Nil'"}),
'err_text': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'gender': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'idx': ('django.db.models.fields.IntegerField', [], {'default': "'-11'", 'max_length': '10'}),
'img_url': ('django.db.models.fields.URLField', [], {'default': "'Nil'", 'max_length': '200'}),
'insert_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'price': ('django.db.models.fields.FloatField', [], {'default': "'-11.0'", 'max_length': '10'}),
'prod_url': ('django.db.models.fields.URLField', [], {'default': "'Nil'", 'max_length': '300'}),
'promo_text': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'saleprice': ('django.db.models.fields.FloatField', [], {'default': "'-11.0'", 'max_length': '10'})
},
'debra.promoinfo': {
'Meta': {'object_name': 'Promoinfo'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'd': ('django.db.models.fields.DateField', [], {}),
'exclude_category': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'free_shipping_lower_bound': ('django.db.models.fields.FloatField', [], {'default': '10000'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'item_category': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '100'}),
'promo_disc_amount': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'promo_disc_lower_bound': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'promo_disc_perc': ('django.db.models.fields.FloatField', [], {'default': '0'}),
'promo_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'sex_category': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'store': ('django.db.models.fields.related.ForeignKey', [], {'default': "'1'", 'to': "orm['debra.Brands']"}),
'validity': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'where_avail': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'debra.promorawtext': {
'Meta': {'object_name': 'PromoRawText'},
'data_source': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'insert_date': ('django.db.models.fields.DateField', [], {}),
'raw_text': ('django.db.models.fields.TextField', [], {}),
'store': ('django.db.models.fields.related.ForeignKey', [], {'default': "'1'", 'to': "orm['debra.Brands']"})
},
'debra.promotionapplied': {
'Meta': {'object_name': 'PromotionApplied'},
'combination_id': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'promo': ('django.db.models.fields.related.ForeignKey', [], {'default': "'0'", 'to': "orm['debra.Promoinfo']", 'null': 'True', 'blank': 'True'}),
'savings': ('django.db.models.fields.FloatField', [], {'default': "'0.0'", 'max_length': '10'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'default': "'0'", 'to': "orm['debra.PricingTasks']", 'null': 'True', 'blank': 'True'})
},
'debra.ssitemstats': {
'Meta': {'object_name': 'SSItemStats'},
'brand': ('django.db.models.fields.related.ForeignKey', [], {'default': "'1'", 'to': "orm['debra.Brands']"}),
'category': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '10'}),
'gender': ('django.db.models.fields.CharField', [], {'default': "'A'", 'max_length': '10'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'price': ('django.db.models.fields.FloatField', [], {'default': "'-111.00'", 'max_length': '10'}),
'price_selection_metric': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'sale_cnt': ('django.db.models.fields.IntegerField', [], {'default': "'-11'", 'max_length': '10'}),
'saleprice': ('django.db.models.fields.FloatField', [], {'default': "'-111.00'", 'max_length': '10'}),
'tdate': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2012, 4, 8, 21, 38, 3, 945562)'}),
'total_cnt': ('django.db.models.fields.IntegerField', [], {'default': "'-11'", 'max_length': '10'})
},
'debra.storespecificitemcategory': {
'Meta': {'object_name': 'StoreSpecificItemCategory'},
'age_group': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '10'}),
'brand': ('django.db.models.fields.related.ForeignKey', [], {'default': "'1'", 'to': "orm['debra.Brands']"}),
'categoryName': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '100'}),
'gender': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '10'}),
'hash_val': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '33'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'debra.useridmap': {
'Meta': {'object_name': 'UserIdMap'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.CharField', [], {'default': "'-11.11.11.11'", 'max_length': '50'}),
'user_id': ('django.db.models.fields.IntegerField', [], {'default': "'-1111'", 'unique': 'True', 'max_length': '50'})
},
'debra.useroperations': {
'Meta': {'object_name': 'UserOperations'},
'calculated_price': ('django.db.models.fields.FloatField', [], {'default': "'-11.0'", 'max_length': '10'}),
'color': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'how_many_out_of_stock': ('django.db.models.fields.IntegerField', [], {'default': "'0'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'img_url': ('django.db.models.fields.URLField', [], {'default': "'Nil'", 'max_length': '1000'}),
'item': ('django.db.models.fields.related.ForeignKey', [], {'default': "'0'", 'to': "orm['debra.ProductModel']", 'null': 'True', 'blank': 'True'}),
'item_out_of_stock': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'operator_type': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'quantity': ('django.db.models.fields.IntegerField', [], {'default': "'-1'", 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'size': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'user_id': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'debra.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'about_me': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'access_token': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'blog_url': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'facebook_id': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True', 'null': 'True', 'blank': 'True'}),
'facebook_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'facebook_profile_url': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'raw_data': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}),
'website_url': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
'debra.wishlistitem': {
'Meta': {'object_name': 'WishlistItem'},
'calculated_price': ('django.db.models.fields.FloatField', [], {'default': "'-11.0'", 'max_length': '10'}),
'combination_id': ('django.db.models.fields.CharField', [], {'default': "'Nil'", 'max_length': '200'}),
'how_many_out_of_stock': ('django.db.models.fields.IntegerField', [], {'default': "'0'"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_buylist': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'item_out_of_stock': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'promo_applied': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['debra.Promoinfo']", 'null': 'True', 'blank': 'True'}),
'savings': ('django.db.models.fields.FloatField', [], {'default': "'0'", 'max_length': '10'}),
'user_id': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'user_selection': ('django.db.models.fields.related.ForeignKey', [], {'default': "'2'", 'to': "orm['debra.UserOperations']", 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['debra']
|
[
"[email protected]"
] | |
901e7e049f8ca3cfa56f942c4c0861432e4ad6ed
|
e63ab09f227459380c317aa1694cffd04255c807
|
/cheshire3/web/oai_utils.py
|
21df4073e90df00a0f54a32261ab276d67906c4c
|
[
"ICU",
"X11"
] |
permissive
|
bitwhite/cheshire3
|
91a0d2f8d2e79ac277ac4f7a3bea9efa911ce3d6
|
ca27bc2600d217e36a429ccfe064f11d9b200193
|
refs/heads/master
| 2021-05-27T03:50:09.456813 | 2013-10-10T13:47:16 | 2013-10-10T13:47:16 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,915 |
py
|
import sys
import urllib
import datetime
from lxml import etree
from cheshire3.record import LxmlRecord
# cheshire3.web package
from cheshire3.web.www_utils import cgi_encode
from cheshire3.web.sru_utils import fetch_data
# oaipmh package
from oaipmh.common import Header
NS_OAIPMH = 'http://www.openarchives.org/OAI/2.0/'
NS_XSI = 'http://www.w3.org/2001/XMLSchema-instance'
NS_OAIDC = 'http://www.openarchives.org/OAI/2.0/oai_dc/'
NS_DC = "http://purl.org/dc/elements/1.1/"
nsmap = {
None: NS_OAIPMH,
'xsi': NS_XSI,
'oai_dc': NS_OAIDC,
'dc': NS_DC
}
def headerFromLxmlElement(et):
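    """Build an oaipmh Header from the <header> element of an OAI-PMH record (identifier plus UTC datestamp)."""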
identifier = et.xpath('string(//oai:identifier)', namespaces={'oai': NS_OAIPMH})
datestamp = et.xpath('string(//oai:datestamp)', namespaces={'oai': NS_OAIPMH})
datestamp = datetime.datetime.strptime(datestamp, '%Y-%m-%dT%H:%M:%SZ')
return Header(identifier, datestamp, [], None)
def getRecord(baseUrl, metadataPrefix, identifier):
"""Return (Header, metadata, about) tuple of record with specified identifier from the specified OAI-PMH server."""
args = {'verb': "GetRecord",
'metadataPrefix': metadataPrefix,
'identifier': identifier}
params = urllib.urlencode(args)
url = "{0}?{1}".format(baseUrl, params)
data = fetch_data(url)
try:
tree = etree.fromstring(data)
except:
sys.stderr.write(url + '\n')
sys.stderr.write(data + '\n')
sys.stderr.flush()
raise
hEl = tree.xpath('//oai:record[1]/oai:header', namespaces={'oai': NS_OAIPMH})[0]
header = headerFromLxmlElement(hEl)
recEl = tree.xpath('//oai:record[1]/oai:metadata/*', namespaces={'oai': NS_OAIPMH})[0]
recString = etree.tostring(recEl)
rec = LxmlRecord(recEl, xml=recString, docId=identifier, byteCount=len(recString))
return (header, rec, None)
def listIdentifiers(baseUrl, metadataPrefix, set=None, from_=None, until=None, cursor=0, batch_size=10):
"""Return a list of Headers with the given parameters from the specified OAI-PMH server."""
args = {'verb': "ListIdentifiers",
'metadataPrefix': metadataPrefix
}
if set is not None:
args['set'] = set
if from_ is not None:
args['from'] = str(from_)
if until is not None:
args['until'] = str(until)
params = urllib.urlencode(args)
url = "{0}?{1}".format(baseUrl, params)
data = fetch_data(url)
headers = []
while data is not None:
try:
tree = etree.fromstring(data)
except:
sys.stderr.write(url + '\n')
sys.stderr.write(data + '\n')
sys.stderr.flush()
raise
for h in tree.xpath('//oai:header', namespaces={'oai': NS_OAIPMH}):
headers.append(headerFromLxmlElement(h))
resTok = tree.xpath('string(//oai:resumptionToken)', namespaces={'oai': NS_OAIPMH})
if resTok:
params = urllib.urlencode({'verb': "ListIdentifiers",
'resumptionToken': resTok})
url = "{0}?{1}".format(baseUrl, params)
data = fetch_data(url)
else:
break
return headers
def listRecords(baseUrl, metadataPrefix, set=None, from_=None, until=None, cursor=0, batch_size=10):
"""Return a list of (Header, metadata, about) tuples for records which match the given parameters from the specified OAI-PMH server."""
args = {'verb': "ListRecords",
'metadataPrefix': metadataPrefix
}
if set is not None:
args['set'] = set
if from_ is not None:
args['from'] = str(from_)
if until is not None:
args['until'] = str(until)
params = urllib.urlencode(args)
url = "{0}?{1}".format(baseUrl, params)
data = fetch_data(url)
records = []
i = 0
while (data is not None):
try:
tree = etree.fromstring(data)
except:
print url
print data
raise
for recEl in tree.xpath('//oai:record', namespaces={'oai': NS_OAIPMH}):
if i < cursor:
i+=1
continue
hEl = recEl.xpath('//oai:header', namespaces={'oai': NS_OAIPMH})[0]
header = headerFromLxmlElement(hEl)
mdEl = recEl.xpath('//oai:metadata/*', namespaces={'oai': NS_OAIPMH})[0]
recString = etree.tostring(mdEl)
rec = LxmlRecord(mdEl, xml=recString, docId=header.identifier(), byteCount=len(recString))
records.append((header, rec, None))
i+=1
if (len(headers) >= batch_size):
return headers
resTok = tree.xpath('string(//oai:resumptionToken)', namespaces={'oai': NS_OAIPMH})
if resTok:
data = fetch_data(url + '&resumptionToken=' + cgi_encode(resTok))
else:
break
return records
|
[
"[email protected]"
] | |
7c1a3cc65ce60f6935f521faff84138422688e97
|
51f2492a5c207e3664de8f6b2d54bb93e313ca63
|
/codejam/2018-qualification/b-gen.py
|
ab3085506c3ea30bcf7c2b081167f31b1478bfea
|
[
"WTFPL",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
abeaumont/competitive-programming
|
23c5aabd587d7bb15a61efd3428838cb934233dd
|
a24c9b89941a59d344b51dc1010de66522b1a0dd
|
refs/heads/master
| 2023-09-01T09:50:58.267361 | 2023-07-31T18:00:10 | 2023-07-31T18:00:10 | 117,589,708 | 618 | 262 |
WTFPL
| 2023-07-12T17:36:20 | 2018-01-15T20:00:56 |
C++
|
UTF-8
|
Python
| false | false | 297 |
py
|
#!/usr/bin/env python3
# https://codejam.withgoogle.com/2018/challenges/00000000000000cb/dashboard/00000000000079cb
# Input generator
import random
t = 100
print(t)
for i in range(t):
n = random.randint(3, 100)
print(n)
print(' '.join([str(random.randint(0, 100)) for j in range(n)]))
|
[
"[email protected]"
] | |
a906710b0674f1b2a9c5ead838e6511aa3907a37
|
c3e872e0d019da55317b0ef5993f66bde8a6c021
|
/model/ppy.py
|
9d7719c09d775447ae061f2678a63aae17fe6428
|
[] |
no_license
|
liyonghelpme/wanderEmpire
|
0a10b4bbf61b89e4e4eeefc59ad794d02682684b
|
dea41cbabe04edd99c0412ebbc3f5d0f45142f09
|
refs/heads/master
| 2016-09-06T06:27:09.366029 | 2011-09-23T14:25:17 | 2011-09-23T14:25:17 | 2,444,406 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 222 |
py
|
from sqlalchemy import Table, Column
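# Plain mapping record: links a local user id (uid) to a Papaya platform friend id, plus level and user kind.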
class Papayafriend(object):
def __init__(self,uid,papayaid,lev,user_kind):
self.uid=uid
self.papayaid=papayaid
        self.lev=lev
self.user_kind=user_kind
|
[
"[email protected]"
] | |
fde9051a268cde6daf399cea7a982312c8d7b75c
|
d109f8d0597c20ad69fe2d07809bdf74cf942339
|
/antinex_core/send_results_to_broker.py
|
522f209c69f55faf51674f6a18575a0ebbefb4a1
|
[
"Apache-2.0"
] |
permissive
|
eos21/antinex-core
|
f565b472ad09a24e267b2ce94521045ca51b7ccf
|
6d179f84300a642867997b55b1f7c5a1b4f8cfa0
|
refs/heads/master
| 2020-04-17T18:13:02.218873 | 2018-12-07T20:06:17 | 2018-12-07T20:06:17 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,860 |
py
|
import json
import pandas as pd
from celery import Celery
from spylunking.log.setup_logging import build_colorized_logger
from antinex_utils.consts import SUCCESS
from antinex_utils.consts import ERROR
from antinex_utils.consts import FAILED
log = build_colorized_logger(
name='send_results')
def send_results_to_broker(
loc,
final_results):
"""send_results_to_broker
:param loc: api-generated dictionary for where to send the results
:param final_results: prediction results from the worker
"""
log.info((
'sending back={}').format(
loc))
status = ERROR
org_model = None
org_rounded = None
org_train_scaler = None
org_test_scaler = None
org_train_scaler_dataset = None
org_test_scaler_dataset = None
    if len(final_results) > 0:
if final_results["data"]["sample_predictions"]:
final_results["data"]["sample_predictions"] = json.loads(
pd.Series(
final_results["data"]["sample_predictions"]).to_json(
orient="records"))
if final_results["data"]["rounded"]:
final_results["data"]["rounded"] = json.loads(
pd.Series(
final_results["data"]["rounded"]).to_json(
orient="records"))
final_results["data"].pop("predictions", None)
final_results["data"]["model_json"] = \
final_results["data"]["model"].to_json()
# remove values that cannot be serialized to json (for now)
org_model = final_results["data"].pop("model", None)
org_rounded = final_results["data"].pop("rounded_predictions", None)
org_train_scaler = final_results["data"].pop("scaler_train", None)
org_test_scaler = final_results["data"].pop("scaler_test", None)
org_train_scaler_dataset = final_results["data"].pop(
"scaled_train_dataset", None)
org_test_scaler_dataset = final_results["data"].pop(
"scaled_test_dataset", None)
source = loc["source"]
auth_url = loc["auth_url"]
ssl_options = loc["ssl_options"]
queue_name = loc["queue"]
task_name = loc["task_name"]
delivery_mode = loc["delivery_mode"]
manifest = loc["manifest"]
retries = loc.get(
"retries",
100000)
log.debug(("CORERES - sending response back to source={} "
"ssl={} queue={} task={}")
.format(
source,
ssl_options,
queue_name,
task_name))
send_data_to_rest_api = {
"results": final_results,
"manifest": manifest
}
broker_settings = {
"broker_url": auth_url,
"broker_use_ssl": ssl_options
}
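        # Spin up a short-lived Celery app pointed at the caller-supplied broker just to publish the results as a task.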
try:
app = Celery("core-publish-results")
log.info(("creating celery app auth={} ssl={}")
.format(
auth_url,
ssl_options))
app.conf.update(
**broker_settings)
# Celery task routing and queue
log.info(("sending response queue={} task={} retries={}")
.format(
queue_name,
task_name,
retries))
task_id = app.send_task(
task_name,
args=[send_data_to_rest_api],
queue=queue_name,
delivery_mode=delivery_mode,
retries=retries)
log.info(("task.id={}")
.format(
task_id))
except Exception as e:
log.info(("Failed to publish to core req={} with ex={}")
.format(
str(final_results)[0:32],
e))
# try/ex
status = SUCCESS
log.info(("send_results_to_broker - done"))
else:
log.info(("CORERES - nothing to send back final_results={} ")
.format(
final_results))
status = FAILED
# publish to the core if enabled
# put this back into the results
if org_model:
final_results["data"]["model"] = org_model
if org_rounded:
final_results["data"]["rounded_predictions"] = org_rounded
# could be improved by checking assignment with a list
final_results["data"]["scaler_train"] = org_train_scaler
final_results["data"]["scaler_test"] = org_test_scaler
final_results["data"]["scaled_train_dataset"] = org_train_scaler_dataset
final_results["data"]["scaled_test_dataset"] = org_test_scaler_dataset
return status
# end of send_results_to_broker
|
[
"[email protected]"
] | |
66d3256171b55d2e6dedd66068e4d467e0c34f3c
|
2937d60b7f5259b4899ba5af08146bd874529a67
|
/67 Instnce variable.py
|
87ab62b129147867f1ad9f8bd4412eaf9e5157d1
|
[] |
no_license
|
gourav47/Let-us-learn-python
|
9a2302265cb6c47e74863359c79eef5a3078358a
|
b324f2487de65b2f073b54c8379c1b9e9aa36298
|
refs/heads/master
| 2021-06-27T03:33:27.483992 | 2021-01-07T12:26:16 | 2021-01-07T12:26:16 | 204,323,390 | 1 | 1 | null | 2020-07-19T14:25:12 | 2019-08-25T16:53:56 |
Python
|
UTF-8
|
Python
| false | false | 241 |
py
|
class Account:
## def __init__(self,a,b):
## self.accno=a
## self.balance=b
def f1(self,a,b):
self.accno=a
self.balance=b
acc1=Account()
acc1.accno=102
acc1.balance=6000
print(acc1.__dict__)
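# __dict__ shows the attributes attached to this instance: {'accno': 102, 'balance': 6000}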
|
[
"[email protected]"
] | |
3e09f44e76f9a68074a58682c8b16158a488ad9f
|
18f8abb90efece37949f5b5758c7752b1602fb12
|
/py/django_tools/django-cms/cms/tests/placeholder.py
|
ed60ddcc850be233b62ac7d98434cf5826bc64ba
|
[
"BSD-3-Clause"
] |
permissive
|
marceltoben/evandrix.github.com
|
caa7d4c2ef84ba8c5a9a6ace2126e8fd6db1a516
|
abc3fbfb34f791f84e9a9d4dc522966421778ab2
|
refs/heads/master
| 2021-08-02T06:18:12.953567 | 2011-08-23T16:49:33 | 2011-08-23T16:49:33 | 2,267,457 | 3 | 5 | null | 2021-07-28T11:39:25 | 2011-08-25T11:18:56 |
C
|
UTF-8
|
Python
| false | false | 22,168 |
py
|
# -*- coding: utf-8 -*-
from __future__ import with_statement
from cms.api import add_plugin
from cms.exceptions import DuplicatePlaceholderWarning
from cms.models.placeholdermodel import Placeholder
from cms.plugins.text.models import Text
from cms.plugin_rendering import render_placeholder
from cms.plugins.text.cms_plugins import TextPlugin
from cms.test_utils.fixtures.fakemlng import FakemlngFixtures
from cms.test_utils.testcases import CMSTestCase
from cms.test_utils.util.context_managers import (SettingsOverride,
UserLoginContext)
from cms.test_utils.util.mock import AttributeObject
from cms.utils.placeholder import PlaceholderNoAction, MLNGPlaceholderActions
from cms.utils.plugins import get_placeholders
from django.conf import settings
from django.contrib import admin
from django.contrib.auth.models import User, Permission
from django.core.urlresolvers import reverse
from django.http import HttpResponseForbidden, HttpResponse
from django.template import TemplateSyntaxError, Template
from django.template.context import Context, RequestContext
from project.fakemlng.models import Translations
from project.placeholderapp.models import (Example1, Example2, Example3,
Example4, Example5)
class PlaceholderTestCase(CMSTestCase):
def setUp(self):
u = User(username="test", is_staff = True, is_active = True, is_superuser = True)
u.set_password("test")
u.save()
self._login_context = self.login_user_context(u)
self._login_context.__enter__()
def tearDown(self):
self._login_context.__exit__(None, None, None)
def test_placeholder_scanning_extend(self):
placeholders = get_placeholders('placeholder_tests/test_one.html')
self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'three']))
def test_placeholder_scanning_include(self):
placeholders = get_placeholders('placeholder_tests/test_two.html')
self.assertEqual(sorted(placeholders), sorted([u'child', u'three']))
def test_placeholder_scanning_double_extend(self):
placeholders = get_placeholders('placeholder_tests/test_three.html')
self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'new_three']))
def test_placeholder_scanning_complex(self):
placeholders = get_placeholders('placeholder_tests/test_four.html')
self.assertEqual(sorted(placeholders), sorted([u'new_one', u'child', u'four']))
def test_placeholder_scanning_super(self):
placeholders = get_placeholders('placeholder_tests/test_five.html')
self.assertEqual(sorted(placeholders), sorted([u'one', u'extra_one', u'two', u'three']))
def test_placeholder_scanning_nested(self):
placeholders = get_placeholders('placeholder_tests/test_six.html')
self.assertEqual(sorted(placeholders), sorted([u'new_one', u'new_two', u'new_three']))
def test_placeholder_scanning_duplicate(self):
placeholders = self.assertWarns(DuplicatePlaceholderWarning, "Duplicate placeholder found: `one`", get_placeholders, 'placeholder_tests/test_seven.html')
self.assertEqual(sorted(placeholders), sorted([u'one']))
def test_placeholder_scanning_extend_outside_block(self):
placeholders = get_placeholders('placeholder_tests/outside.html')
self.assertEqual(sorted(placeholders), sorted([u'new_one', u'two', u'base_outside']))
def test_fieldsets_requests(self):
response = self.client.get(reverse('admin:placeholderapp_example1_add'))
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse('admin:placeholderapp_example2_add'))
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse('admin:placeholderapp_example3_add'))
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse('admin:placeholderapp_example4_add'))
self.assertEqual(response.status_code, 200)
response = self.client.get(reverse('admin:placeholderapp_example5_add'))
self.assertEqual(response.status_code, 200)
def test_fieldsets(self):
request = self.get_request('/')
admins = [
(Example1, 2),
(Example2, 3),
(Example3, 3),
(Example4, 3),
(Example5, 4),
]
for model, fscount in admins:
ainstance = admin.site._registry[model]
fieldsets = ainstance.get_fieldsets(request)
form = ainstance.get_form(request, None)
phfields = ainstance._get_placeholder_fields(form)
self.assertEqual(len(fieldsets), fscount, (
"Asserting fieldset count for %s. Got %s instead of %s: %s. "
"Using %s." % (model.__name__, len(fieldsets),
fscount, fieldsets, ainstance.__class__.__name__)
))
for label, fieldset in fieldsets:
fields = list(fieldset['fields'])
for field in fields:
if field in phfields:
self.assertTrue(len(fields) == 1)
self.assertTrue('plugin-holder' in fieldset['classes'])
self.assertTrue('plugin-holder-nopage' in fieldset['classes'])
phfields.remove(field)
self.assertEqual(phfields, [])
def test_page_only_plugins(self):
ex = Example1(
char_1='one',
char_2='two',
char_3='tree',
char_4='four'
)
ex.save()
response = self.client.get(reverse('admin:placeholderapp_example1_change', args=(ex.pk,)))
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, 'InheritPagePlaceholderPlugin')
def test_inter_placeholder_plugin_move(self):
ex = Example5(
char_1='one',
char_2='two',
char_3='tree',
char_4='four'
)
ex.save()
ph1 = ex.placeholder_1
ph2 = ex.placeholder_2
ph1_pl1 = add_plugin(ph1, TextPlugin, 'en', body='ph1 plugin1').cmsplugin_ptr
ph1_pl2 = add_plugin(ph1, TextPlugin, 'en', body='ph1 plugin2').cmsplugin_ptr
ph1_pl3 = add_plugin(ph1, TextPlugin, 'en', body='ph1 plugin3').cmsplugin_ptr
ph2_pl1 = add_plugin(ph2, TextPlugin, 'en', body='ph2 plugin1').cmsplugin_ptr
ph2_pl2 = add_plugin(ph2, TextPlugin, 'en', body='ph2 plugin2').cmsplugin_ptr
ph2_pl3 = add_plugin(ph2, TextPlugin, 'en', body='ph2 plugin3').cmsplugin_ptr
response = self.client.post(reverse('admin:placeholderapp_example5_move_plugin'), {
'placeholder': ph2.slot,
'placeholder_id': str(ph2.pk),
'plugin_id': str(ph1_pl2.pk),
'ids': "_".join([str(p.pk) for p in [ph2_pl1, ph1_pl2, ph2_pl2, ph2_pl3]])
})
self.assertEqual(response.status_code, 200)
self.assertEqual([ph1_pl1, ph1_pl3], list(ph1.cmsplugin_set.order_by('position')))
self.assertEqual([ph2_pl1, ph1_pl2, ph2_pl2, ph2_pl3], list(ph2.cmsplugin_set.order_by('position')))
def test_placeholder_scanning_fail(self):
self.assertRaises(TemplateSyntaxError, get_placeholders, 'placeholder_tests/test_eleven.html')
def test_placeholder_tag(self):
template = Template("{% load placeholder_tags %}{% render_placeholder placeholder %}")
ctx = Context()
self.assertEqual(template.render(ctx), "")
request = self.get_request('/')
rctx = RequestContext(request)
self.assertEqual(template.render(rctx), "")
placeholder = Placeholder.objects.create(slot="test")
rctx['placeholder'] = placeholder
self.assertEqual(template.render(rctx), "")
self.assertEqual(placeholder.cmsplugin_set.count(), 0)
add_plugin(placeholder, "TextPlugin", settings.LANGUAGES[0][0], body="test")
self.assertEqual(placeholder.cmsplugin_set.count(), 1)
rctx = RequestContext(request)
placeholder = self.reload(placeholder)
rctx['placeholder'] = placeholder
self.assertEqual(template.render(rctx).strip(), "test")
def test_placeholder_context_leaking(self):
TEST_CONF = {'test': {'extra_context': {'width': 10}}}
ph = Placeholder.objects.create(slot='test')
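        # A Context whose push/pop are no-ops, so variables set while rendering leak out and can be asserted on below.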
class NoPushPopContext(Context):
def push(self):
pass
pop = push
context = NoPushPopContext()
context['request'] = self.get_request()
with SettingsOverride(CMS_PLACEHOLDER_CONF=TEST_CONF):
render_placeholder(ph, context)
self.assertTrue('width' in context)
self.assertEqual(context['width'], 10)
ph.render(context, None)
self.assertTrue('width' in context)
self.assertEqual(context['width'], 10)
def test_placeholder_scanning_nested_super(self):
placeholders = get_placeholders('placeholder_tests/nested_super_level1.html')
self.assertEqual(sorted(placeholders), sorted([u'level1', u'level2', u'level3', u'level4']))
class PlaceholderActionTests(FakemlngFixtures, CMSTestCase):
def test_placeholder_no_action(self):
actions = PlaceholderNoAction()
self.assertEqual(actions.get_copy_languages(), [])
self.assertFalse(actions.copy())
def test_mlng_placeholder_actions_get_copy_languages(self):
actions = MLNGPlaceholderActions()
fr = Translations.objects.get(language_code='fr')
de = Translations.objects.get(language_code='de')
en = Translations.objects.get(language_code='en')
fieldname = 'placeholder'
fr_copy_languages = actions.get_copy_languages(
fr.placeholder, Translations, fieldname
)
de_copy_languages = actions.get_copy_languages(
de.placeholder, Translations, fieldname
)
en_copy_languages = actions.get_copy_languages(
en.placeholder, Translations, fieldname
)
EN = ('en', 'English')
FR = ('fr', 'French')
self.assertEqual(fr_copy_languages, [EN])
self.assertEqual(de_copy_languages, [EN, FR])
self.assertEqual(en_copy_languages, [FR])
def test_mlng_placeholder_actions_copy(self):
actions = MLNGPlaceholderActions()
fr = Translations.objects.get(language_code='fr')
de = Translations.objects.get(language_code='de')
self.assertEqual(fr.placeholder.cmsplugin_set.count(), 1)
self.assertEqual(de.placeholder.cmsplugin_set.count(), 0)
new_plugins = actions.copy(de.placeholder, 'fr', 'placeholder', Translations, 'de')
self.assertEqual(len(new_plugins), 1)
de = self.reload(de)
fr = self.reload(fr)
self.assertEqual(fr.placeholder.cmsplugin_set.count(), 1)
self.assertEqual(de.placeholder.cmsplugin_set.count(), 1)
def test_mlng_placeholder_actions_empty_copy(self):
actions = MLNGPlaceholderActions()
fr = Translations.objects.get(language_code='fr')
de = Translations.objects.get(language_code='de')
self.assertEqual(fr.placeholder.cmsplugin_set.count(), 1)
self.assertEqual(de.placeholder.cmsplugin_set.count(), 0)
new_plugins = actions.copy(fr.placeholder, 'de', 'placeholder', Translations, 'fr')
self.assertEqual(len(new_plugins), 0)
de = self.reload(de)
fr = self.reload(fr)
self.assertEqual(fr.placeholder.cmsplugin_set.count(), 1)
self.assertEqual(de.placeholder.cmsplugin_set.count(), 0)
def test_mlng_placeholder_actions_no_placeholder(self):
actions = MLNGPlaceholderActions()
Translations.objects.filter(language_code='nl').update(placeholder=None)
de = Translations.objects.get(language_code='de')
nl = Translations.objects.get(language_code='nl')
self.assertEqual(nl.placeholder, None)
self.assertEqual(de.placeholder.cmsplugin_set.count(), 0)
okay = actions.copy(de.placeholder, 'nl', 'placeholder', Translations, 'de')
self.assertEqual(okay, False)
de = self.reload(de)
nl = self.reload(nl)
nl = Translations.objects.get(language_code='nl')
de = Translations.objects.get(language_code='de')
class PlaceholderModelTests(CMSTestCase):
def get_mock_user(self, superuser):
return AttributeObject(
is_superuser=superuser,
has_perm=lambda string: False,
)
def get_mock_request(self, superuser=True):
return AttributeObject(
superuser=superuser,
user=self.get_mock_user(superuser)
)
def test_check_placeholder_permissions_ok_for_superuser(self):
ph = Placeholder.objects.create(slot='test', default_width=300)
result = ph.has_change_permission(self.get_mock_request(True))
self.assertTrue(result)
def test_check_placeholder_permissions_nok_for_user(self):
ph = Placeholder.objects.create(slot='test', default_width=300)
result = ph.has_change_permission(self.get_mock_request(False))
self.assertFalse(result)
def test_check_unicode_rendering(self):
ph = Placeholder.objects.create(slot='test', default_width=300)
result = unicode(ph)
self.assertEqual(result,u'test')
def test_excercise_get_attached_model(self):
ph = Placeholder.objects.create(slot='test', default_width=300)
result = ph._get_attached_model()
self.assertEqual(result, None) # Simple PH - no model
def test_excercise_get_attached_field_name(self):
ph = Placeholder.objects.create(slot='test', default_width=300)
result = ph._get_attached_field_name()
self.assertEqual(result, None) # Simple PH - no field name
def test_excercise_get_attached_models_notplugins(self):
ex = Example1(
char_1='one',
char_2='two',
char_3='tree',
char_4='four'
)
ex.save()
ph = ex.placeholder
result = list(ph._get_attached_models())
self.assertEqual(result, [Example1]) # Simple PH - Example1 model
add_plugin(ph, TextPlugin, 'en', body='en body')
result = list(ph._get_attached_models())
self.assertEqual(result, [Example1]) # Simple PH still one Example1 model
def test_excercise_get_attached_fields_notplugins(self):
ex = Example1(
char_1='one',
char_2='two',
char_3='tree',
char_4='four',
)
ex.save()
ph = ex.placeholder
result = [f.name for f in list(ph._get_attached_fields())]
self.assertEqual(result, ['placeholder']) # Simple PH - placeholder field name
add_plugin(ph, TextPlugin, 'en', body='en body')
result = [f.name for f in list(ph._get_attached_fields())]
self.assertEqual(result, ['placeholder']) # Simple PH - still one placeholder field name
class PlaceholderAdminTest(CMSTestCase):
placeholderconf = {'test': {
'limits': {
'global': 2,
'TextPlugin': 1,
}
}
}
def get_placeholder(self):
return Placeholder.objects.create(slot='test')
def get_admin(self):
admin.autodiscover()
return admin.site._registry[Example1]
def get_post_request(self, data):
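        # QueryDicts are immutable by default; unlock POST just long enough to inject the data, then flag the request as a POST.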
request = self.get_request()
request.POST._mutable = True
request.POST.update(data)
request.POST._mutable = False
request.method = 'POST'
request.environ['METHOD'] = 'POST'
return request
def test_global_limit(self):
placeholder = self.get_placeholder()
admin = self.get_admin()
data = {
'plugin_type': 'LinkPlugin',
'placeholder': placeholder.pk,
'language': 'en',
}
superuser = self.get_superuser()
with UserLoginContext(self, superuser):
with SettingsOverride(CMS_PLACEHOLDER_CONF=self.placeholderconf):
request = self.get_post_request(data)
response = admin.add_plugin(request) # first
self.assertEqual(response.status_code, 200)
response = admin.add_plugin(request) # second
self.assertEqual(response.status_code, 200)
response = admin.add_plugin(request) # third
self.assertEqual(response.status_code, 400)
self.assertEqual(response.content, "This placeholder already has the maximum number of plugins.")
def test_type_limit(self):
placeholder = self.get_placeholder()
admin = self.get_admin()
data = {
'plugin_type': 'TextPlugin',
'placeholder': placeholder.pk,
'language': 'en',
}
superuser = self.get_superuser()
with UserLoginContext(self, superuser):
with SettingsOverride(CMS_PLACEHOLDER_CONF=self.placeholderconf):
request = self.get_post_request(data)
response = admin.add_plugin(request) # first
self.assertEqual(response.status_code, 200)
response = admin.add_plugin(request) # second
self.assertEqual(response.status_code, 400)
self.assertEqual(response.content, "This placeholder already has the maximum number (1) of TextPlugin plugins.")
class PlaceholderPluginPermissionTests(PlaceholderAdminTest):
def _testuser(self):
u = User(username="test", is_staff = True, is_active = True, is_superuser = False)
u.set_password("test")
u.save()
return u
def _create_example(self):
ex = Example1(
char_1='one',
char_2='two',
char_3='tree',
char_4='four'
)
ex.save()
self._placeholder = ex.placeholder
def _create_plugin(self):
self._plugin = add_plugin(self._placeholder, 'TextPlugin', 'en')
def _give_permission(self, user, model, permission_type, save=True):
codename = '%s_%s' % (permission_type, model._meta.object_name.lower())
user.user_permissions.add(Permission.objects.get(codename=codename))
def _delete_permission(self, user, model, permission_type, save=True):
codename = '%s_%s' % (permission_type, model._meta.object_name.lower())
user.user_permissions.remove(Permission.objects.get(codename=codename))
def _post_request(self, user):
data = {
'plugin_type': 'TextPlugin',
'placeholder': self._placeholder.pk,
'language': 'en',
}
request = self.get_post_request(data)
request.user = self.reload(user)
return request
def test_plugin_add_requires_permissions(self):
"""User wants to add a plugin to the example app placeholder but has no permissions"""
self._create_example()
normal_guy = self._testuser()
admin = self.get_admin()
request = self._post_request(normal_guy)
response = admin.add_plugin(request)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
# The user gets the permission only for the plugin
self._give_permission(normal_guy, Text, 'add')
request = self._post_request(normal_guy)
response = admin.add_plugin(request)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
# the user gets the permission only for the app
self._delete_permission(normal_guy, Text, 'add')
self._give_permission(normal_guy, Example1, 'add')
request = self._post_request(normal_guy)
response = admin.add_plugin(request)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
# user gets permissions for the plugin and the app
self._give_permission(normal_guy, Text, 'add')
request = self._post_request(normal_guy)
response = admin.add_plugin(request)
self.assertEqual(response.status_code, HttpResponse.status_code)
def test_plugin_edit_requires_permissions(self):
"""User wants to edi a plugin to the example app placeholder but has no permissions"""
self._create_example()
self._create_plugin()
normal_guy = self._testuser()
admin = self.get_admin()
request = self._post_request(normal_guy)
response = admin.edit_plugin(request, self._plugin.id)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
# The user gets the permission only for the plugin
self._give_permission(normal_guy, Text, 'change')
request = self._post_request(normal_guy)
response = admin.edit_plugin(request, self._plugin.id)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
# the user gets the permission only for the app
self._delete_permission(normal_guy, Text, 'change')
self._give_permission(normal_guy, Example1, 'change')
request = self._post_request(normal_guy)
response = admin.edit_plugin(request, self._plugin.id)
self.assertEqual(response.status_code, HttpResponseForbidden.status_code)
# user gets permissions for the plugin and the app
self._give_permission(normal_guy, Text, 'change')
request = self._post_request(normal_guy)
response = admin.edit_plugin(request, self._plugin.id)
# It looks like it breaks here because of a missing csrf token in the request
# I have no idea how to fix this
self.assertEqual(response.status_code, HttpResponse.status_code)
|
[
"[email protected]"
] | |
3865fb102c37c18c4c7802879d66bab9f72e9e9a
|
c60d956f0def89f2afdc014fce2218deef6096f0
|
/tools/aws/nuke_clusters.py
|
5d87866522ffe08ae7dff467cb0015f36b531904
|
[
"Apache-2.0"
] |
permissive
|
criteo-forks/marathon
|
9691af7936ac624c0f3d8b3e51e140b6ce9276ff
|
02137d3b4fd9a56fc56966312a300982fb690300
|
refs/heads/criteo/1.9.109
| 2023-03-15T23:12:32.960763 | 2023-03-10T15:39:00 | 2023-03-10T15:39:00 | 50,347,255 | 1 | 10 |
Apache-2.0
| 2023-09-12T22:55:56 | 2016-01-25T11:48:51 |
Scala
|
UTF-8
|
Python
| false | false | 1,282 |
py
|
#!/usr/bin/env python3
import boto3
import logging
from botocore.exceptions import ClientError
from logging import config
logging.config.fileConfig('logging.conf')
logger = logging.getLogger(__name__)
def delete_stacks():
logger.info('Deleting stacks..')
cloudformation = boto3.resource('cloudformation')
for stack in cloudformation.stacks.all():
stack.delete()
logger.info('Done.')
def delete_volumes():
logger.info('Delete volumes.')
ec2 = boto3.resource('ec2')
for volume in ec2.volumes.all():
try:
volume.delete()
except ClientError:
logger.exception('Could not delete volume %s', volume.id)
logger.info('Done.')
def delete_key_pairs():
logger.info('Delete key pairs.')
ec2 = boto3.resource('ec2')
for pair in ec2.key_pairs.all():
pair.delete()
logger.info('Done.')
def nuke_clusters():
delete_stacks()
delete_volumes()
delete_key_pairs()
if __name__ == "__main__":
confirmation = input('You are about to nuke all test clusters. Enter "I know what I\'m doing" to continue:')
if confirmation == 'I know what I\'m doing':
boto3.setup_default_session(region_name='us-west-2')
nuke_clusters()
|
[
"[email protected]"
] | |
918bd485c6049381bb9201f1b48e27856578b348
|
03e3138f99f275d15d41a5c5bfb212f85d64d02e
|
/source/res/scripts/client/gui/prb_control/formatters/invites.py
|
c62271d89af244c3b541ba1d42afa71d2a772a56
|
[] |
no_license
|
TrenSeP/WorldOfTanks-Decompiled
|
e428728e7901146d0b599d02c930d70532232a97
|
1faa748acec1b7e435b657fd054ecba23dd72778
|
refs/heads/1.4.1
| 2020-04-27T08:07:49.813023 | 2019-03-05T17:37:06 | 2019-03-05T17:37:06 | 174,159,837 | 1 | 0 | null | 2019-03-06T14:33:33 | 2019-03-06T14:24:36 |
Python
|
UTF-8
|
Python
| false | false | 8,575 |
py
|
# Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/client/gui/prb_control/formatters/invites.py
from constants import PREBATTLE_TYPE_NAMES, PREBATTLE_TYPE
from constants import QUEUE_TYPE_NAMES
from debug_utils import LOG_ERROR
from gui import makeHtmlString
from gui.Scaleform.locale.INVITES import INVITES as I18N_INVITES
from gui.prb_control.formatters import getPrebattleFullDescription
from gui.prb_control.formatters import getBattleSessionStartTimeString
from gui.prb_control import prbDispatcherProperty, prbAutoInvitesProperty, prbInvitesProperty
from gui.prb_control.settings import PRB_INVITE_STATE
from helpers import dependency
from helpers import i18n, html
from messenger.ext import passCensor
from skeletons.gui.lobby_context import ILobbyContext
def getPrbName(prbType, lowercase=False):
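    """Resolve a prebattle type ID to its name via PREBATTLE_TYPE_NAMES (optionally lowercased); 'N/A' if unknown."""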
try:
prbName = PREBATTLE_TYPE_NAMES[prbType]
if lowercase:
prbName = prbName.lower()
except KeyError:
LOG_ERROR('Prebattle name not found', prbType)
prbName = 'N/A'
return prbName
def getPreQueueName(queueType, lowercase=False):
try:
queueName = QUEUE_TYPE_NAMES[queueType]
if lowercase:
queueName = queueName.lower()
except KeyError:
LOG_ERROR('PreQueue name not found', queueType)
queueName = 'N/A'
return queueName
def getPrbInviteStateName(state):
try:
stateName = PRB_INVITE_STATE.getKeyByValue(state)
except KeyError:
LOG_ERROR('State of prebattle invite not found', state)
stateName = 'N/A'
return stateName
@dependency.replace_none_kwargs(lobbyContext=ILobbyContext)
def getAcceptNotAllowedText(prbType, peripheryID, isInviteActive=True, isAlreadyJoined=False, lobbyContext=None):
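    """Return localized text explaining why an invite cannot be accepted (already joined, or hosted on another periphery); empty string if acceptance is allowed."""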
key, kwargs = None, {}
if lobbyContext is not None:
isAnotherPeriphery = lobbyContext.isAnotherPeriphery(peripheryID)
else:
isAnotherPeriphery = False
if isInviteActive:
if isAlreadyJoined:
key = I18N_INVITES.invites_prebattle_alreadyjoined(getPrbName(prbType))
elif isAnotherPeriphery:
host = lobbyContext.getPeripheryName(peripheryID)
if host:
key = I18N_INVITES.invites_prebattle_acceptnotallowed('otherPeriphery')
kwargs = {'host': host}
else:
key = I18N_INVITES.invites_prebattle_acceptnotallowed('undefinedPeriphery')
if key:
text = i18n.makeString(key, **kwargs)
else:
text = ''
return text
@dependency.replace_none_kwargs(lobbyContext=ILobbyContext)
def getLeaveOrChangeText(funcState, invitePrbType, peripheryID, lobbyContext=None):
key, kwargs = None, {}
if lobbyContext is not None:
isAnotherPeriphery = lobbyContext.isAnotherPeriphery(peripheryID)
else:
isAnotherPeriphery = False
if funcState.doLeaveToAcceptInvite(invitePrbType):
if funcState.isInLegacy() or funcState.isInUnit():
entityName = getPrbName(funcState.entityTypeID)
elif funcState.isInPreQueue():
entityName = getPreQueueName(funcState.entityTypeID)
else:
LOG_ERROR('Can not resolve name of entity', funcState)
return ''
if isAnotherPeriphery:
key = I18N_INVITES.invites_note_change_and_leave(entityName)
kwargs = {'host': lobbyContext.getPeripheryName(peripheryID) or ''}
else:
key = I18N_INVITES.invites_note_leave(entityName)
elif isAnotherPeriphery:
key = I18N_INVITES.INVITES_NOTE_SERVER_CHANGE
kwargs = {'host': lobbyContext.getPeripheryName(peripheryID) or ''}
if key:
text = i18n.makeString(key, **kwargs)
else:
text = ''
return text
class InviteFormatter(object):
def getCtx(self, invite):
return {'sender': invite.senderFullName,
'receiver': invite.receiverFullName}
def getNote(self, invite):
pass
def getText(self, invite):
pass
class PrbInviteHtmlTextFormatter(InviteFormatter):
@prbDispatcherProperty
def prbDispatcher(self):
return None
@prbInvitesProperty
def prbInvites(self):
return None
def getIconName(self, invite):
return '{0:>s}InviteIcon'.format(getPrbName(invite.type, True))
def getTitle(self, invite):
if invite.senderFullName:
creatorName = makeHtmlString('html_templates:lobby/prebattle', 'inviteTitleCreatorName', ctx={'name': invite.senderFullName})
else:
creatorName = ''
return makeHtmlString('html_templates:lobby/prebattle', 'inviteTitle', ctx={'sender': creatorName}, sourceKey=getPrbName(invite.type))
def getComment(self, invite):
comment = passCensor(invite.comment)
return '' if not comment else makeHtmlString('html_templates:lobby/prebattle', 'inviteComment', {'comment': i18n.makeString(I18N_INVITES.INVITES_COMMENT, comment=html.escape(comment))})
def getNote(self, invite):
note = ''
if self.prbInvites.canAcceptInvite(invite):
if self.prbDispatcher:
note = getLeaveOrChangeText(self.prbDispatcher.getFunctionalState(), invite.type, invite.peripheryID)
else:
note = getAcceptNotAllowedText(invite.type, invite.peripheryID, invite.isActive(), invite.alreadyJoined)
if note:
note = makeHtmlString('html_templates:lobby/prebattle', 'inviteNote', {'note': note})
return note
def getState(self, invite):
key = I18N_INVITES.invites_state(getPrbInviteStateName(invite.getState()))
if not key:
return ''
state = i18n.makeString(key)
if state:
state = makeHtmlString('html_templates:lobby/prebattle', 'inviteState', {'state': state})
return state
def getText(self, invite):
result = []
text = self.getTitle(invite)
if text:
result.append(text)
text = self.getComment(invite)
if text:
result.append(text)
text = self.getNote(invite)
if text:
result.append(text)
text = self.getState(invite)
if text:
result.append(text)
return ''.join(result)
class PrbExternalBattleInviteHtmlTextFormatter(PrbInviteHtmlTextFormatter):
def getComment(self, invite):
comment = passCensor(invite.comment)
return '' if not comment else makeHtmlString('html_templates:lobby/prebattle', 'inviteComment', {'comment': html.escape(comment)})
def getPrbInviteHtmlFormatter(invite):
return PrbExternalBattleInviteHtmlTextFormatter() if invite.type == PREBATTLE_TYPE.EXTERNAL else PrbInviteHtmlTextFormatter()
class PrbInviteTitleFormatter(InviteFormatter):
def getText(self, _):
return i18n.makeString(I18N_INVITES.GUI_TITLES_INVITE)
class AutoInviteTextFormatter(InviteFormatter):
@prbDispatcherProperty
def prbDispatcher(self):
return None
@prbAutoInvitesProperty
def prbAutoInvites(self):
return None
def getNote(self, invite):
note = ''
if self.prbAutoInvites.canAcceptInvite(invite):
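            # NOTE: the guard below re-tests prbAutoInvites, which the canAcceptInvite call
            # above already implies; PrbInviteHtmlTextFormatter.getNote checks prbDispatcher
            # at this point, which is the property actually dereferenced on the next line.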
if self.prbAutoInvites:
note = getLeaveOrChangeText(self.prbDispatcher.getFunctionalState(), invite.prbType, invite.peripheryID)
else:
note = getAcceptNotAllowedText(invite.prbType, invite.peripheryID)
return note
def getText(self, invite):
return u'%s, %s' % (unicode(getPrebattleFullDescription(invite.description), 'utf-8'), unicode(getBattleSessionStartTimeString(invite.startTime), 'utf-8'))
class _PrbInviteInfo(object):
def as_dict(self):
raise NotImplementedError
class PrbAutoInviteInfo(_PrbInviteInfo):
def __init__(self, prbID):
self.__prbID = prbID
@prbAutoInvitesProperty
def prbAutoInvites(self):
return None
def getID(self):
return self.__prbID
def getTitle(self):
return PrbInviteTitleFormatter().getText(None)
def as_dict(self):
invite = self.prbAutoInvites.getInvite(self.__prbID)
canAccept = self.prbAutoInvites.canAcceptInvite(invite)
formatter = AutoInviteTextFormatter()
result = {'id': self.__prbID,
'text': formatter.getText(invite),
'comment': '',
'note': formatter.getNote(invite),
'canAccept': canAccept,
'canDecline': True,
'isAcceptVisible': True,
'isDeclineVisible': False}
return result
|
[
"[email protected]"
] | |
3f3be8c3c30ea19eda73851aba2fd32ab658a2b6
|
80b7f2a10506f70477d8720e229d7530da2eff5d
|
/ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocolstack/smdnsoptions_100fbc7c6145703d8f1639aa93d794c3.py
|
124140725e5532adad4991e8901e006f80681c36
|
[
"MIT"
] |
permissive
|
OpenIxia/ixnetwork_restpy
|
00fdc305901aa7e4b26e4000b133655e2d0e346a
|
c8ecc779421bffbc27c906c1ea51af3756d83398
|
refs/heads/master
| 2023-08-10T02:21:38.207252 | 2023-07-19T14:14:57 | 2023-07-19T14:14:57 | 174,170,555 | 26 | 16 |
MIT
| 2023-02-02T07:02:43 | 2019-03-06T15:27:20 |
Python
|
UTF-8
|
Python
| false | false | 9,202 |
py
|
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
if sys.version_info >= (3, 5):
from typing import List, Any, Union
class SmDnsOptions(Base):
"""Port group settings for SM_DNS Plugin
The SmDnsOptions class encapsulates a list of smDnsOptions resources that are managed by the user.
A list of resources can be retrieved from the server using the SmDnsOptions.find() method.
The list can be managed by using the SmDnsOptions.add() and SmDnsOptions.remove() methods.
"""
__slots__ = ()
_SDM_NAME = "smDnsOptions"
_SDM_ATT_MAP = {
"ObjectId": "objectId",
"PcpuLogLevel": "pcpuLogLevel",
}
_SDM_ENUM_MAP = {}
def __init__(self, parent, list_op=False):
super(SmDnsOptions, self).__init__(parent, list_op)
@property
def ObjectId(self):
# type: () -> str
"""
Returns
-------
- str: Unique identifier for this object
"""
return self._get_attribute(self._SDM_ATT_MAP["ObjectId"])
@property
def PcpuLogLevel(self):
# type: () -> str
"""
Returns
-------
- str: PCPU log level
"""
return self._get_attribute(self._SDM_ATT_MAP["PcpuLogLevel"])
@PcpuLogLevel.setter
def PcpuLogLevel(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["PcpuLogLevel"], value)
def update(self, PcpuLogLevel=None):
# type: (str) -> SmDnsOptions
"""Updates smDnsOptions resource on the server.
Args
----
- PcpuLogLevel (str): PCPU log level
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, PcpuLogLevel=None):
# type: (str) -> SmDnsOptions
"""Adds a new smDnsOptions resource on the server and adds it to the container.
Args
----
- PcpuLogLevel (str): PCPU log level
Returns
-------
- self: This instance with all currently retrieved smDnsOptions resources using find and the newly added smDnsOptions resources available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
"""Deletes all the contained smDnsOptions resources in this instance from the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(self, ObjectId=None, PcpuLogLevel=None):
# type: (str, str) -> SmDnsOptions
"""Finds and retrieves smDnsOptions resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve smDnsOptions resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all smDnsOptions resources from the server.
Args
----
- ObjectId (str): Unique identifier for this object
- PcpuLogLevel (str): PCPU log level
Returns
-------
- self: This instance with matching smDnsOptions resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of smDnsOptions data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the smDnsOptions resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def CustomProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the customProtocolStack operation on the server.
Create custom protocol stack under /vport/protocolStack
customProtocolStack(Arg2=list, Arg3=enum, async_operation=bool)
---------------------------------------------------------------
- Arg2 (list(str)): List of plugin types to be added in the new custom stack
- Arg3 (str(kAppend | kMerge | kOverwrite)): Append, merge or overwrite existing protocol stack
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute(
"customProtocolStack", payload=payload, response_object=None
)
def DisableProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> Union[str, None]
"""Executes the disableProtocolStack operation on the server.
Disable a protocol under protocolStack using the class name
disableProtocolStack(Arg2=string, async_operation=bool)string
-------------------------------------------------------------
- Arg2 (str): Protocol class name to disable
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns str: Status of the exec
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self.href}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute(
"disableProtocolStack", payload=payload, response_object=None
)
def EnableProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> Union[str, None]
"""Executes the enableProtocolStack operation on the server.
Enable a protocol under protocolStack using the class name
enableProtocolStack(Arg2=string, async_operation=bool)string
------------------------------------------------------------
- Arg2 (str): Protocol class name to enable
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns str: Status of the exec
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self.href}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute(
"enableProtocolStack", payload=payload, response_object=None
)
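# --- Illustrative usage sketch (assumption; not part of the generated file) ---
# The find()/add()/update() pattern documented above is common to ixnetwork_restpy
# resource classes. `protocol_stack` below stands for an already retrieved
# /vport/protocolStack node from a live session; how that node is obtained
# (session setup, vport selection) is outside this file and assumed here.
def _example_usage(protocol_stack):
    options = SmDnsOptions(protocol_stack)
    options.add(PcpuLogLevel='debug')              # create one resource on the server
    found = options.find(PcpuLogLevel='^debug$')   # anchored regex => exact match, per the find() docstring
    found.update(PcpuLogLevel='info')              # push an attribute change back to the server
    return found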
|
[
"[email protected]"
] | |
4101f33c930b8f6e102d452e033ff5be1edb8a6f
|
a57fc2270ecd7738a0e569f2ec24069333dbb797
|
/createTable.py
|
a8a0062e8920e42c8622fc8189b6bf46cfdba540
|
[] |
no_license
|
1751660300/flaskProject_BYSJ
|
495a3a35d3a712f29b7cbf5c0e6ae79c7c3cd07f
|
20eb182020f345c8759a5932d0bbac72a69f38b5
|
refs/heads/master
| 2023-03-30T23:07:58.098939 | 2021-04-11T12:03:48 | 2021-04-11T12:03:48 | 347,541,601 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 302 |
py
|
# -*- coding:utf-8 -*-
from views import db
from views import init_app
from flask_migrate import Migrate, MigrateCommand
from flask_script import Manager
app = init_app()
db.create_all(app=app)
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
manager.run()
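# --- Usage note (assumption, based on Flask-Script/Flask-Migrate conventions) ---
# manager.run() dispatches on the command line, so this script is typically driven as:
#   python createTable.py db init
#   python createTable.py db migrate
#   python createTable.py db upgrade
# while db.create_all(app=app) above already creates any missing tables directly.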
|
[
"[email protected]"
] | |
bef150c80b2f9c2c777b6f7c223061c7d747b293
|
b18f92a6a41a3d83e77848460d4a3f17e4fe677a
|
/algorithms/sorting/2_sort_vectors/solution/test_solution.py
|
88123a6d0bb419b9aa7bc5f05cf6421ae424e231
|
[] |
no_license
|
ByteAcademyCo/Exercises
|
de71b885a498ead8296e6107836f9a06ac399d4f
|
8332d0473ab35ee1d2975b384afda45c77ef943d
|
refs/heads/master
| 2022-05-25T23:01:59.466480 | 2022-03-14T13:12:10 | 2022-03-14T13:12:10 | 252,842,407 | 1 | 109 | null | 2022-03-14T13:12:11 | 2020-04-03T21:09:47 |
Python
|
UTF-8
|
Python
| false | false | 480 |
py
|
def test_solution():
import solution
vects1 = [((1, 3), (2, 6)), ((1, 5), (3, 4)), ((2, 6), (2, 9))]
ret1 = [((1, 5), (3, 4)), ((2, 6), (2, 9)), ((1, 3), (2, 6))]
vects2 = [((2, 6), (2, 9)), ((1, 3), (2, 6))] #sorted order
vects3 = [((1, 8), (2, 4))]
assert solution.sort_vectors(vects1) == ret1
assert solution.sort_vectors([]) == []
assert solution.sort_vectors(vects2) == vects2
assert solution.sort_vectors(vects3) == vects3
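# --- Reference sketch (assumption) ---
# solution.py is not shown here, but the expected outputs above are consistent with
# sorting the vectors by Euclidean length, ascending. The helper below is one minimal
# implementation that satisfies the assertions; it is an illustration, not the graded solution.
import math

def _reference_sort_vectors(vects):
    def length(v):
        (x1, y1), (x2, y2) = v
        return math.hypot(x2 - x1, y2 - y1)
    return sorted(vects, key=length)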
|
[
"[email protected]"
] | |
13ac2a126ba946d53dfa9976210f006f90500292
|
44bfa7c4c299558a0d8e948b117ac4a25c719b3b
|
/PaddleCV/tracking/pytracking/libs/tensorlist.py
|
24d6e3d2a9488e7c75286233e5c10929fa339f35
|
[
"Apache-2.0",
"GPL-3.0-only"
] |
permissive
|
littletomatodonkey/models
|
408b53a1948df677685b61e9be24e86a513ffc94
|
a60babdf382aba71fe447b3259441b4bed947414
|
refs/heads/develop
| 2022-10-21T18:46:19.606900 | 2020-04-27T14:36:08 | 2020-04-27T14:36:08 | 230,208,446 | 5 | 3 |
Apache-2.0
| 2021-12-20T12:23:37 | 2019-12-26T06:29:32 |
Python
|
UTF-8
|
Python
| false | false | 8,759 |
py
|
import functools
import numpy as np
from paddle.fluid import layers
from pytracking.libs.paddle_utils import clone as clone_fn
from pytracking.libs.paddle_utils import detach as detach_fn
from pytracking.libs.paddle_utils import PTensor
def matmul(a, b):
if isinstance(a, PTensor) or isinstance(b, PTensor):
return layers.matmul(a, b)
else:
return np.matmul(a, b)
class TensorList(list):
"""Container mainly used for lists of paddle tensors. Extends lists with paddle functionality."""
def __init__(self, list_of_tensors=list()):
super(TensorList, self).__init__(list_of_tensors)
def __getitem__(self, item):
if isinstance(item, int):
return super(TensorList, self).__getitem__(item)
elif isinstance(item, (tuple, list)):
return TensorList(
[super(TensorList, self).__getitem__(i) for i in item])
else:
return TensorList(super(TensorList, self).__getitem__(item))
def __add__(self, other):
if TensorList._iterable(other):
return TensorList([e1 + e2 for e1, e2 in zip(self, other)])
return TensorList([e + other for e in self])
def __radd__(self, other):
if TensorList._iterable(other):
return TensorList([e2 + e1 for e1, e2 in zip(self, other)])
return TensorList([other + e for e in self])
def __iadd__(self, other):
if TensorList._iterable(other):
for i, e2 in enumerate(other):
self[i] += e2
else:
for i in range(len(self)):
self[i] += other
return self
def __sub__(self, other):
if TensorList._iterable(other):
return TensorList([e1 - e2 for e1, e2 in zip(self, other)])
return TensorList([e - other for e in self])
def __rsub__(self, other):
if TensorList._iterable(other):
return TensorList([e2 - e1 for e1, e2 in zip(self, other)])
return TensorList([other - e for e in self])
def __isub__(self, other):
if TensorList._iterable(other):
for i, e2 in enumerate(other):
self[i] -= e2
else:
for i in range(len(self)):
self[i] -= other
return self
def __mul__(self, other):
if TensorList._iterable(other):
return TensorList([e1 * e2 for e1, e2 in zip(self, other)])
return TensorList([e * other for e in self])
def __rmul__(self, other):
if TensorList._iterable(other):
return TensorList([e2 * e1 for e1, e2 in zip(self, other)])
return TensorList([other * e for e in self])
def __imul__(self, other):
if TensorList._iterable(other):
for i, e2 in enumerate(other):
self[i] *= e2
else:
for i in range(len(self)):
self[i] *= other
return self
def __truediv__(self, other):
if TensorList._iterable(other):
return TensorList([e1 / e2 for e1, e2 in zip(self, other)])
return TensorList([e / other for e in self])
def __rtruediv__(self, other):
if TensorList._iterable(other):
return TensorList([e2 / e1 for e1, e2 in zip(self, other)])
return TensorList([other / e for e in self])
def __itruediv__(self, other):
if TensorList._iterable(other):
for i, e2 in enumerate(other):
self[i] /= e2
else:
for i in range(len(self)):
self[i] /= other
return self
def __matmul__(self, other):
if TensorList._iterable(other):
return TensorList([matmul(e1, e2) for e1, e2 in zip(self, other)])
return TensorList([matmul(e, other) for e in self])
def __rmatmul__(self, other):
if TensorList._iterable(other):
return TensorList([matmul(e2, e1) for e1, e2 in zip(self, other)])
return TensorList([matmul(other, e) for e in self])
def __imatmul__(self, other):
if TensorList._iterable(other):
for i, e2 in enumerate(other):
self[i] = matmul(self[i], e2)
else:
for i in range(len(self)):
self[i] = matmul(self[i], other)
return self
def __mod__(self, other):
if TensorList._iterable(other):
return TensorList([e1 % e2 for e1, e2 in zip(self, other)])
return TensorList([e % other for e in self])
def __rmod__(self, other):
if TensorList._iterable(other):
return TensorList([e2 % e1 for e1, e2 in zip(self, other)])
return TensorList([other % e for e in self])
def __pos__(self):
return TensorList([+e for e in self])
def __neg__(self):
return TensorList([-e for e in self])
def __le__(self, other):
if TensorList._iterable(other):
return TensorList([e1 <= e2 for e1, e2 in zip(self, other)])
return TensorList([e <= other for e in self])
def __ge__(self, other):
if TensorList._iterable(other):
return TensorList([e1 >= e2 for e1, e2 in zip(self, other)])
return TensorList([e >= other for e in self])
def view(self, *args):
def reshape(x):
if isinstance(x, PTensor):
return layers.reshape(x, args)
else:
return np.reshape(x, args)
return self.apply(reshape)
def clone(self):
def _clone(x):
if isinstance(x, PTensor):
return clone_fn(x)
else:
return x.copy()
return self.apply(_clone)
def detach(self):
return self.apply(detach_fn)
def sqrt(self):
def _sqrt(x):
if isinstance(x, PTensor):
return layers.sqrt(x)
else:
return np.sqrt(x)
return self.apply(_sqrt)
def abs(self):
def _abs(x):
if isinstance(x, PTensor):
return layers.abs(x)
else:
return np.abs(x)
return self.apply(_abs)
def size(self, axis=None):
def get_size(x):
if axis is None:
return x.shape
else:
return x.shape[axis]
return self.apply(get_size)
def concat(self, other):
return TensorList(super(TensorList, self).__add__(other))
def copy(self):
return TensorList(super(TensorList, self).copy())
def unroll(self):
if not any(isinstance(t, TensorList) for t in self):
return self
new_list = TensorList()
for t in self:
if isinstance(t, TensorList):
new_list.extend(t.unroll())
else:
new_list.append(t)
return new_list
def attribute(self, attr: str, *args):
return TensorList([getattr(e, attr, *args) for e in self])
def apply(self, fn):
return TensorList([fn(e) for e in self])
def __getattr__(self, name):
for e in self:
if not hasattr(e, name):
                raise AttributeError('\'{}\' object has no attribute \'{}\''.
format(type(e), name))
def apply_attr(*args, **kwargs):
return TensorList([getattr(e, name)(*args, **kwargs) for e in self])
return apply_attr
@staticmethod
def _iterable(a):
return isinstance(a, (TensorList, list))
def tensor_operation(op):
def islist(a):
return isinstance(a, TensorList)
@functools.wraps(op)
def oplist(*args, **kwargs):
if len(args) == 0:
raise ValueError(
'Must be at least one argument without keyword (i.e. operand).')
if len(args) == 1:
if islist(args[0]):
return TensorList([op(a, **kwargs) for a in args[0]])
else:
# Multiple operands, assume max two
if islist(args[0]) and islist(args[1]):
return TensorList(
[op(a, b, *args[2:], **kwargs) for a, b in zip(*args[:2])])
if islist(args[0]):
return TensorList([op(a, *args[1:], **kwargs) for a in args[0]])
if islist(args[1]):
return TensorList(
[op(args[0], b, *args[2:], **kwargs) for b in args[1]])
# None of the operands are lists
return op(*args, **kwargs)
return oplist
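# --- Illustrative usage (minimal sketch) ---
# TensorList broadcasts arithmetic element-wise over its members, and the
# @tensor_operation decorator lets a plain function accept either single arrays
# or TensorLists. The example below only exercises the numpy code paths.
if __name__ == '__main__':
    a = TensorList([np.ones(3), 2 * np.ones((2, 2))])
    b = a * 10 + 1                # scalar ops apply to every member
    print(b.size())               # [(3,), (2, 2)]

    @tensor_operation
    def scaled_sum(x, w):
        return (x * w).sum()

    print(scaled_sum(a, 2.0))     # [6.0, 16.0]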
|
[
"[email protected]"
] | |
2609cc5c2f2c7cc888d4a7c1af8ddfda9309ec6a
|
dd97a08267b2197a73c7b19f630ab2742ada837c
|
/python/ThirteenTeV/HSCPgluino_M_400_TuneCUETP8M1_13TeV_pythia8_cff.py
|
b26dbd233525388cb46d453f05695b303eb269d9
|
[] |
no_license
|
Mohammed2/genproductions
|
48dc93d15c070a02c9ce7c70060909587115e8f8
|
9e18bbd3be45e57b0ecaf3cbea94c8f50df939fa
|
refs/heads/master
| 2020-06-16T16:59:50.959353 | 2017-06-06T16:43:27 | 2017-06-06T16:43:27 | 92,869,604 | 1 | 0 | null | 2017-05-30T19:40:46 | 2017-05-30T19:40:46 | null |
UTF-8
|
Python
| false | false | 2,892 |
py
|
FLAVOR = 'gluino'
COM_ENERGY = 13000. # GeV
MASS_POINT = 400 # GeV
PROCESS_FILE = 'SimG4Core/CustomPhysics/data/RhadronProcessList.txt'
PARTICLE_FILE = 'Configuration/Generator/data/particles_%s_%d_GeV.txt' % (FLAVOR, MASS_POINT)
SLHA_FILE ='Configuration/Generator/data/HSCP_%s_%d_SLHA.spc' % (FLAVOR, MASS_POINT)
PDT_FILE = 'Configuration/Generator/data/hscppythiapdt%s%d.tbl' % (FLAVOR, MASS_POINT)
USE_REGGE = False
import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
pythiaPylistVerbosity = cms.untracked.int32(0),
filterEfficiency = cms.untracked.double(-1),
pythiaHepMCVerbosity = cms.untracked.bool(False),
comEnergy = cms.double(COM_ENERGY),
crossSection = cms.untracked.double(-1),
maxEventsToPrint = cms.untracked.int32(0),
SLHAFileForPythia8 = cms.string('%s' % SLHA_FILE),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CUEP8M1SettingsBlock,
processParameters = cms.vstring(
'SUSY:all = off',
'SUSY:gg2gluinogluino = on',
'SUSY:qqbar2gluinogluino = on',
'RHadrons:allow = on',
'RHadrons:allowDecay = off',
'RHadrons:setMasses = on',
'RHadrons:probGluinoball = 0.1',
),
parameterSets = cms.vstring(
'pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters'
)
)
)
generator.hscpFlavor = cms.untracked.string(FLAVOR)
generator.massPoint = cms.untracked.int32(MASS_POINT)
generator.particleFile = cms.untracked.string(PARTICLE_FILE)
generator.slhaFile = cms.untracked.string(SLHA_FILE)
generator.processFile = cms.untracked.string(PROCESS_FILE)
generator.pdtFile = cms.FileInPath(PDT_FILE)
generator.useregge = cms.bool(USE_REGGE)
dirhadrongenfilter = cms.EDFilter("MCParticlePairFilter",
Status = cms.untracked.vint32(1, 1),
MinPt = cms.untracked.vdouble(0., 0.),
MinP = cms.untracked.vdouble(0., 0.),
MaxEta = cms.untracked.vdouble(100., 100.),
MinEta = cms.untracked.vdouble(-100, -100),
ParticleCharge = cms.untracked.int32(0),
ParticleID1 = cms.untracked.vint32(1000993,1009213,1009313,1009323,1009113,1009223,1009333,1091114,1092114,1092214,1092224,1093114,1093214,1093224,1093314,1093324,1093334),
ParticleID2 = cms.untracked.vint32(1000993,1009213,1009313,1009323,1009113,1009223,1009333,1091114,1092114,1092214,1092224,1093114,1093214,1093224,1093314,1093324,1093334)
)
ProductionFilterSequence = cms.Sequence(generator*dirhadrongenfilter)
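# Usage note (assumption): like other CMSSW generator fragments, this file is meant to be
# pulled in by cmsDriver.py for the GEN step, with ProductionFilterSequence as the entry
# point that chains the Pythia8 generator and the R-hadron pair filter defined above.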
|
[
"[email protected]"
] | |
7a8b705e9a22b4dc776e0782342c3bd796ecaff1
|
60811c2d3f81b77f3b870b1ec0ace4b8f1bad19d
|
/tuple/pgm4.py
|
ba4f148175fd96bbb07f798554aa75225e2566e9
|
[] |
no_license
|
APARNAS1998/luminardjango1
|
b85c249dacb4d5e819d338e19fd8af48a2ea393e
|
8bd91a38223910c14270e0e21c2d890dc16e2117
|
refs/heads/master
| 2023-08-09T12:49:49.201051 | 2021-09-15T08:39:25 | 2021-09-15T08:39:25 | 403,527,408 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 86 |
py
|
mytuple=3,4.6,'dog'
print(mytuple)
a,b,c =mytuple
print(a)#3
print(b)#4.6
print(c)#dog
|
[
"[email protected]"
] | |
3da0c80029968573c5c25265c28b5924d3c20582
|
edd0dc36452366de13a04ec5ced3c502af336b06
|
/zhe/brainvision.py
|
5dea8ce8fa90e105fe77d3264dae7647075d6dcb
|
[] |
no_license
|
expertanalytics/Elektrosjokk
|
2de0a4d504d8cac40d07ce72c081a3fc3a69aaf6
|
325e731c04950d0c19f47bab1f65d8a3eb3a79ec
|
refs/heads/master
| 2021-09-19T02:05:17.726598 | 2021-08-05T09:15:31 | 2021-08-05T09:15:31 | 79,484,610 | 0 | 0 | null | 2018-10-25T12:19:09 | 2017-01-19T18:58:28 |
Python
|
UTF-8
|
Python
| false | false | 5,465 |
py
|
# License: BSD 3-clause
# Author: Boris Reuderink
#very little modification from the original script
#the regular expression stim_code = int(re.match(r'S\s*(\d+)', mdesc).group(1))
#now it matches correctly also markers without spaces ex: "S102"
# Adapted for python3 by Jakob Schreiner
import logging
import re
import itertools
import numpy as np
from pathlib import Path
from collections import namedtuple
from configparser import SafeConfigParser
from typing import (
Iterable,
List,
Tuple,
Optional
)
# TODO:
# - add encoding of commas (\1)
# - verify units for resolution in UTF8
log = logging.getLogger('__main__')
HeaderData = namedtuple(
"DataSpec", (
"sample_rate",
"channel_label_list",
"channel_resolution_list",
"eeg_file_name",
"marker_file_name"
))
MarkerData = namedtuple(
"MarkerData", (
"name",
"type",
"description",
"duration",
"channels"
))
def read_header(file_name: Path) -> HeaderData:
"""Read the data header.
The sample_rate, channel labels, channel resolution, eeg and marker file_names are
returned as a namedtuple.
Arguments:
file_name: Path to header file.
"""
with open(file_name) as file_handle:
# setup config reader
header = "Brain Vision Data Exchange Header File Version 1.0"
assert file_handle.readline().strip() == header
        # Break when reaching [Comment]
lines = itertools.takewhile( lambda x: '[Comment]' not in x, file_handle.readlines())
cfg = SafeConfigParser()
cfg.readfp(lines)
        # Sampling interval is given in microseconds; 1e6/interval converts it to a rate in Hz
sample_rate = 1e6/cfg.getfloat('Common Infos', 'SamplingInterval')
num_channels = cfg.getint('Common Infos', 'NumberOfChannels')
log.info(f"Found sample rate of {sample_rate:.2f} Hz, {num_channels:d} channels.")
# check binary format
assert cfg.get('Common Infos', 'DataOrientation') == 'MULTIPLEXED'
assert cfg.get('Common Infos', 'DataFormat') == 'BINARY'
assert cfg.get('Binary Infos', 'BinaryFormat') == 'INT_16'
# load channel labels
channel_label_list = ["UNKNOWN"]*num_channels
channel_resolution_list = [np.nan]*num_channels
for chan, props in cfg.items('Channel Infos'):
n = int(re.match(r'ch(\d+)', chan).group(1))
name, refname, resolution, unit = props.split(',')[:4]
del refname
channel_label_list[n - 1] = name
channel_resolution_list[n - 1] = float(resolution)
# locate EEG and marker files
eeg_file_name = cfg.get('Common Infos', 'DataFile')
marker_file_name = cfg.get('Common Infos', 'MarkerFile')
return HeaderData(
sample_rate=sample_rate,
channel_label_list=channel_label_list,
channel_resolution_list=channel_resolution_list,
eeg_file_name=eeg_file_name,
marker_file_name=marker_file_name
)
def read_eeg(file_name: Path, channel_resolution: Iterable[float]) -> np.ndarray:
"""Read the binary data file.
The eeg file must follow the specifications from the header (.vhdr).
Arguments:
file_name: Name of binary data file
channel_resolution: The resolution of each channel
Returns:
The eeg channels scaled by their respective resolution
"""
_channel_resolution = np.asarray(channel_resolution, dtype="f8")
num_channels = _channel_resolution.size
with open(file_name, 'rb') as file_handle:
raw = file_handle.read()
    size = len(raw)//2  # BinaryFormat is INT_16, i.e. two bytes per sample
ints = np.ndarray(
shape=(num_channels, size//num_channels),
dtype='<i2',
order='F',
buffer=raw
)
return ints*_channel_resolution[:, None]
def read_markers(file_name: Path) -> List[MarkerData]:
"""Parse the marker header and return the each key-value pair.
Arguments:
file_name: Path to marker header (*.vmrk).
"""
with open(file_name) as file_handle:
header = "Brain Vision Data Exchange Marker File, Version 1.0"
assert file_handle.readline().strip() == header
cfg = SafeConfigParser()
cfg.readfp(file_handle)
events = []
for marker, info in cfg.items("Marker Infos"):
events.append(MarkerData(*info.split(",")[:5]))
return events
def read_brainvis_triplet(
header_file_name: str,
marker_file_name: Optional[str] = None,
eeg_file_name: Optional[str] = None
) -> Tuple[HeaderData, List[MarkerData], np.ndarray]:
""" Read BrainVision Recorder header file, locate and read the marker and EEG file.
Returns a header dictionary, a matrix with events and the raw EEG.
This is a convenience wrapper around `read_header`, `read_eeg` and `read_markers`.
"""
header_path = Path(header_file_name)
assert header_path.exists(), header_path
header_spec = read_header(header_path)
    if marker_file_name is None:
        marker_file_name = header_path.parent / header_spec.marker_file_name
    marks = read_markers(marker_file_name)
if eeg_file_name is None:
eeg_file_name = header_path.parent / header_spec.eeg_file_name
X = read_eeg(eeg_file_name, header_spec.channel_resolution_list)
return header_spec, marks, X
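# --- Illustrative usage (minimal sketch) ---
# "recording.vhdr" is an assumed file name; any BrainVision header whose .vmrk and
# .eeg companions sit next to it will do. The triplet reader returns the parsed
# header spec, the marker list and the EEG matrix (channels x samples).
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    header, markers, eeg = read_brainvis_triplet("recording.vhdr")
    print(header.sample_rate, len(header.channel_label_list), eeg.shape)
    for marker in markers[:5]:
        print(marker.type, marker.description)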
|
[
"[email protected]"
] | |
a99bebca4185df88160a3362c3cb41f270204030
|
fb118c335e893e92b7e88ab9af3e9b355c9ed2e0
|
/brain/session/data/dataset_to_dict.py
|
c052f8f35e28a330c5c76f6b678a2866edc3bc4c
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
mtamillow/machine-learning
|
65b4a170ef7d171c01c05d8fb964f5c81f74ad5d
|
f21a7ecabdb0f168b40ba49730b59f908e8bdf72
|
refs/heads/master
| 2021-01-24T01:35:47.652869 | 2016-06-10T03:09:45 | 2016-06-10T03:09:45 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,950 |
py
|
#!/usr/bin/python
'''@save_entity
Note: the term 'dataset' used throughout various comments in this file,
synonymously implies the user supplied 'file upload(s)', and XML url
references.
'''
from brain.converter.convert_dataset import Convert_Dataset
def dataset_dictionary(id_entity, upload):
'''@dataset_dictionary
This method converts the supplied csv, or xml file upload(s) to a uniform
dict object.
@flag_append, when false, indicates the neccessary 'dataset' was not
properly defined, causing this method to 'return', which essentially
stops the execution of the current session.
@upload, uploaded dataset(s).
'''
flag_append = True
dataset = []
observation_labels = []
list_error = []
try:
# web-interface: define flag to convert to dataset to json
if upload['dataset']['file_upload']:
for val in upload['dataset']['file_upload']:
# reset file-pointer
val['file'].seek(0)
# csv to dict
if val['type'] == 'csv':
try:
# conversion
converter = Convert_Dataset(val['file'])
converted = converter.csv_to_dict()
count_features = converter.get_feature_count()
labels = converter.get_observation_labels()
# assign observation labels
observation_labels.append(labels)
# build new (relevant) dataset
dataset.append({
'id_entity': id_entity,
'premodel_dataset': converted,
'count_features': count_features
})
except Exception as error:
list_error.append(error)
flag_append = False
# json to dict
elif val['type'] == 'json':
try:
# conversion
converter = Convert_Dataset(val['file'])
converted = converter.json_to_dict()
count_features = converter.get_feature_count()
labels = converter.get_observation_labels()
# assign observation labels
observation_labels.append(labels)
# build new (relevant) dataset
dataset.append({
'id_entity': id_entity,
'premodel_dataset': converted,
'count_features': count_features
})
except Exception as error:
list_error.append(error)
flag_append = False
# xml to dict
elif val['type'] == 'xml':
try:
# conversion
converter = Convert_Dataset(val['file'])
converted = converter.xml_to_dict()
count_features = converter.get_feature_count()
labels = converter.get_observation_labels()
# assign observation labels
observation_labels.append(labels)
# build new (relevant) dataset
dataset.append({
'id_entity': id_entity,
'premodel_dataset': converted,
'count_features': count_features
})
except Exception as error:
list_error.append(error)
flag_append = False
if not flag_append:
return False
# programmatic-interface
elif upload['dataset']['json_string']:
# conversion
dataset_json = upload['dataset']['json_string']
converter = Convert_Dataset(dataset_json, True)
converted = converter.json_to_dict()
count_features = converter.get_feature_count()
observation_labels.append(dataset_json.keys())
# build dataset
dataset.append({
'id_entity': id_entity,
'premodel_dataset': converted,
'count_features': count_features
})
except Exception as error:
list_error.append(error)
print error
# return results
if list_error:
return {
'dataset': dataset,
'observation_labels': observation_labels,
'error': list_error
}
else:
return {
'dataset': dataset,
'observation_labels': observation_labels,
'error': None
}
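# --- Illustrative call (assumption) ---
# A sketch of the programmatic-interface branch above: 'json_string' carries an
# already parsed dataset and 'file_upload' is left falsy. The payload layout and
# the id_entity value are placeholders for illustration only.
def _example_call():
    upload = {
        'dataset': {
            'file_upload': None,
            'json_string': {'feature-1': [1.0, 2.0], 'feature-2': [3.0, 4.0]},
        }
    }
    result = dataset_dictionary(id_entity=1, upload=upload)
    # result['dataset'] holds the converted dataset(s), result['observation_labels']
    # the dataset keys, and result['error'] is None unless a conversion step failed.
    return result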
|
[
"[email protected]"
] | |
6232db56563263b899bb280c8b3ac86e38fa9ecc
|
2ee8c8ccb844bea31c21d7493a1d3504925f2b4f
|
/uwnet/whitenoise.py
|
ea6d6b9eb154c036143798f994a678927a45772e
|
[
"MIT"
] |
permissive
|
mhdella/uwnet
|
11b3273677d2b32d63e17d9df116b2065fc626ae
|
24427547c485247e5019b9f8afa6843a6be603e4
|
refs/heads/master
| 2020-07-05T01:12:10.970962 | 2019-06-18T07:14:10 | 2019-06-18T07:14:10 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,238 |
py
|
import argparse
import logging
import dask.array as da
import numpy as np
import xarray as xr
import torch
from uwnet.thermo import compute_apparent_source
logger = logging.getLogger('whitenoise')
def get_error(model):
from src.data import open_data
data = open_data("training")
data = data.isel(time=slice(0, 100)).compute()
srcs = model.predict(data)
q2 = compute_apparent_source(data.QT, data.FQT * 86400)
q1 = compute_apparent_source(data.SLI, data.FSLI * 86400)
return xr.Dataset({
'QT': q2 - srcs.QT,
'SLI': q1 - srcs.SLI
}).dropna('time')
def cholesky_factor(C):
    # np.stack needs a sequence; passing a generator is deprecated/rejected by newer numpy
    return np.stack([np.linalg.cholesky(C[i]) for i in range(C.shape[0])])
class WhiteNoiseModel(object):
"""Generate random noise with correct covariance structure
"""
def fit(self, error):
self.time_step_ = float(error.time[1] - error.time[0])
X = da.concatenate([error.QT.data, error.SLI.data], axis=1)
# compute covariance
nz = X.shape[0]
nx = X.shape[-1]
n = nx * nz
C = da.einsum('tzyx,tfyx->yzf', X, X) / n
C = C.compute()
# shape is
# (y, feat, feat)
self.Q_ = cholesky_factor(C) * np.sqrt(self.time_step_)
return self
def __call__(self, state):
"""
Parameters
----------
state : dict
Returns
-------
tend : dict
physical tendencies
"""
sli_key = "liquid_ice_static_energy"
qt_key = "total_water_mixing_ratio"
nx = state[qt_key].shape[-1]
dt = state['dt'] / 86400
logger.info(f"Computing white noise tendency with {dt} days")
y = self.Q_.shape[0]
z = self.Q_.shape[1]
        # scaling the standard-normal draw by sqrt(dt) gives an increment
        # W = Q sqrt(dt) N, i.e. noise whose variance grows linearly with dt
N = np.random.randn(y, nx, z) * np.sqrt(dt)
W = np.einsum('yzf,yxf->zyx', self.Q_, N)
dqt, dsli = np.split(W, 2, 0)
# perform time step
qt = state[qt_key] + dqt
sli = state[sli_key] + dsli
return {qt_key: qt, sli_key: sli}
def fit(model):
model = torch.load(model)
error = get_error(model)
return WhiteNoiseModel().fit(error)
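# --- Illustrative usage (minimal sketch) ---
# "model.torch" and the array shapes below are assumptions: fit() expects a saved
# torch model plus the training data opened inside get_error(), and the fitted
# object maps a state dict to perturbed state fields.
def _example_usage():
    noise = fit("model.torch")
    ny, nz2 = noise.Q_.shape[0], noise.Q_.shape[1]   # y points and stacked (QT, SLI) levels
    nx = 128                                         # arbitrary x extent for the sketch
    state = {
        "total_water_mixing_ratio": np.zeros((nz2 // 2, ny, nx)),
        "liquid_ice_static_energy": np.zeros((nz2 // 2, ny, nx)),
        "dt": 1800.0,                                # time step in seconds
    }
    return noise(state)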
|
[
"[email protected]"
] | |
a706ffafd91a2edbbc8f35fd4a95d5ca568abbae
|
74983098c5de53007bde6052a631845c781b5ba8
|
/forrester/forrester10/forrester.py
|
109a0941d6cab03cc0fed480ab326aa1bcf164aa
|
[] |
no_license
|
numairmansur/Experiments
|
94ccdd60f4c2cf538fab41556ac72405656c9d77
|
592f39916461c7a9f7d400fa26f849043d1377ed
|
refs/heads/master
| 2021-04-29T12:39:16.845074 | 2017-02-15T07:36:47 | 2017-02-15T07:36:47 | 78,043,284 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 691 |
py
|
import numpy as np
import sys
import math
import time
import csv
from hpolib.benchmarks.synthetic_functions import Forrester
from time import gmtime, strftime
def main(job_id, params):
print '!!! Entered Main !!!'
print 'Anything printed here will end up in the output directory for job #:', str(job_id)
print params
f = Forrester()
res = f.objective_function([params['x']])
print res
#with open('/home/numair/Downloads/testEnvFortheCluster/Experiments/forrester/run1.csv','a') as csvfile:
with open('/home/mansurm/Experiments/forrester/run10.csv','a') as csvfile:
writer = csv.writer(csvfile, delimiter=',')
writer.writerow([res['main'][0]])
return res['main'][0]
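# Illustrative call (assumption): a Spearmint-style driver imports this module and calls
# main(job_id, params) with a params dict, e.g. main(1, {'x': 0.5}); the objective value
# appended to run10.csv is also returned to the optimizer.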
|
[
"[email protected]"
] | |
21aa7fda2068d5bc4eb2aff0f958e5958d8d9296
|
6f57761c60582c546423a2a08c769f18236fd153
|
/benchmarks/test_correlation_MG3_SD.py
|
595a1199b439267520a573446a1e282cbfec6b4b
|
[
"BSD-3-Clause",
"LGPL-2.0-or-later",
"BSD-2-Clause"
] |
permissive
|
LSSTDESC/CCL
|
30644922fead0b017c1056e628bec23cf6bc4dfb
|
29d46978445678d86a4bee485cb29d30246ff64a
|
refs/heads/master
| 2023-09-03T17:03:17.012019 | 2023-08-08T11:01:33 | 2023-08-08T11:01:33 | 57,389,367 | 118 | 68 |
BSD-3-Clause
| 2023-08-30T13:25:25 | 2016-04-29T14:08:38 |
C
|
UTF-8
|
Python
| false | false | 7,232 |
py
|
import os
import numpy as np
import pyccl as ccl
from pyccl.modified_gravity import MuSigmaMG
from scipy.interpolate import interp1d
import pytest
@pytest.fixture(scope='module', params=['fftlog', 'bessel'])
def corr_method(request):
errfacs = {'fftlog': 0.21, 'bessel': 0.05}
return request.param, errfacs[request.param]
@pytest.fixture(scope='module')
def set_up(request):
dirdat = os.path.dirname(__file__) + '/data'
h0 = 0.67702026367187500
logA = 3.05 # log(10^10 A_s)
# scale dependent MG cosmology
ccl.gsl_params.LENSING_KERNEL_SPLINE_INTEGRATION = False
ccl.gsl_params.INTEGRATION_LIMBER_EPSREL = 2.5E-5
ccl.gsl_params.INTEGRATION_EPSREL = 2.5E-5
cosmo = ccl.Cosmology(Omega_c=0.12/h0**2, Omega_b=0.0221/h0**2, Omega_k=0,
h=h0, A_s=np.exp(logA)/10**10, n_s=0.96, Neff=3.046,
m_nu=0.0, w0=-1, wa=0, T_CMB=2.7255,
mg_parametrization=MuSigmaMG(
mu_0=0.1, sigma_0=0.1,
c1_mg=1.1, c2_mg=1.1, lambda_mg=1),
transfer_function='boltzmann_isitgr',
matter_power_spectrum='linear')
# Ell-dependent correction factors
# Set up array of ells
fl = {}
lmax = 10000
nls = (lmax - 400)//20+141
ells = np.zeros(nls)
ells[:101] = np.arange(101)
ells[101:121] = ells[100] + (np.arange(20) + 1) * 5
ells[121:141] = ells[120] + (np.arange(20) + 1) * 10
ells[141:] = ells[140] + (np.arange(nls - 141) + 1) * 20
fl['lmax'] = lmax
fl['ells'] = ells
# Load dNdz's
z1, pz1 = np.loadtxt(dirdat + "/bin1_histo.txt", unpack=True)
z2, pz2 = np.loadtxt(dirdat + "/bin2_histo.txt", unpack=True)
# Set up the linear galaxy bias as used in generating benchmarks
bz1 = 1.45*np.ones_like(pz1)
bz2 = 1.55*np.ones_like(pz2)
# Initialize tracers
trc = {}
trc['g1'] = ccl.NumberCountsTracer(cosmo, has_rsd=False,
dndz=(z1, pz1), bias=(z1, bz1))
trc['g2'] = ccl.NumberCountsTracer(cosmo, has_rsd=False,
dndz=(z2, pz2), bias=(z2, bz2))
trc['l1'] = ccl.WeakLensingTracer(cosmo, dndz=(z1, pz1))
trc['l2'] = ccl.WeakLensingTracer(cosmo, dndz=(z2, pz2))
# Read benchmarks
bms = {}
bms['dd_11'] = np.loadtxt(dirdat+'/wtheta_linear_predictionSD.dat')[0:15]
bms['dd_22'] = np.loadtxt(dirdat+'/wtheta_linear_predictionSD.dat')[15:30]
bms['dl_11'] = np.loadtxt(dirdat+'/gammat_linear_predictionSD.dat')[0:15]
bms['dl_12'] = np.loadtxt(dirdat+'/gammat_linear_predictionSD.dat')[15:30]
bms['dl_21'] = np.loadtxt(dirdat+'/gammat_linear_predictionSD.dat')[30:45]
bms['dl_22'] = np.loadtxt(dirdat+'/gammat_linear_predictionSD.dat')[45:60]
bms['ll_11_p'] = np.loadtxt(dirdat+'/Xip_linear_predictionSD.dat')[0:15]
bms['ll_12_p'] = np.loadtxt(dirdat+'/Xip_linear_predictionSD.dat')[15:30]
bms['ll_22_p'] = np.loadtxt(dirdat+'/Xip_linear_predictionSD.dat')[30:45]
bms['ll_11_m'] = np.loadtxt(dirdat+'/Xim_linear_predictionSD.dat')[0:15]
bms['ll_12_m'] = np.loadtxt(dirdat+'/Xim_linear_predictionSD.dat')[15:30]
bms['ll_22_m'] = np.loadtxt(dirdat+'/Xim_linear_predictionSD.dat')[30:45]
theta = np.loadtxt(dirdat+'/theta_corr_MG.dat')
bms['theta'] = theta
# Read error bars
ers = {}
d = np.loadtxt("benchmarks/data/sigma_clustering_Nbin5",
unpack=True)
ers['dd_11'] = interp1d(d[0], d[1],
fill_value=d[1][0],
bounds_error=False)(theta)
ers['dd_22'] = interp1d(d[0], d[2],
fill_value=d[2][0],
bounds_error=False)(theta)
d = np.loadtxt("benchmarks/data/sigma_ggl_Nbin5",
unpack=True)
ers['dl_12'] = interp1d(d[0], d[1],
fill_value=d[1][0],
bounds_error=False)(theta)
ers['dl_11'] = interp1d(d[0], d[2],
fill_value=d[2][0],
bounds_error=False)(theta)
ers['dl_22'] = interp1d(d[0], d[3],
fill_value=d[3][0],
bounds_error=False)(theta)
ers['dl_21'] = interp1d(d[0], d[4],
fill_value=d[4][0],
bounds_error=False)(theta)
d = np.loadtxt("benchmarks/data/sigma_xi+_Nbin5",
unpack=True)
# We cut the largest theta angle from xip because of issues
# with the benchmark.
ers['ll_11_p'] = interp1d(d[0], d[1],
fill_value=d[1][0],
bounds_error=False)(theta)
ers['ll_22_p'] = interp1d(d[0], d[2],
fill_value=d[2][0],
bounds_error=False)(theta)
ers['ll_12_p'] = interp1d(d[0], d[3],
fill_value=d[3][0],
bounds_error=False)(theta)
d = np.loadtxt("benchmarks/data/sigma_xi-_Nbin5",
unpack=True)
ers['ll_11_m'] = interp1d(d[0], d[1],
fill_value=d[1][0],
bounds_error=False)(theta)
ers['ll_22_m'] = interp1d(d[0], d[2],
fill_value=d[2][0],
bounds_error=False)(theta)
ers['ll_12_m'] = interp1d(d[0], d[3],
fill_value=d[3][0],
bounds_error=False)(theta)
ccl.gsl_params.reload()
return cosmo, trc, bms, ers, fl
@pytest.mark.parametrize("t1,t2,bm,er,kind,pref",
[('g1', 'g1', 'dd_11', 'dd_11', 'NN', 1),
('g2', 'g2', 'dd_22', 'dd_22', 'NN', 1),
('g1', 'l1', 'dl_11', 'dl_11', 'NG', 1),
('g1', 'l2', 'dl_12', 'dl_12', 'NG', 1),
('g2', 'l1', 'dl_21', 'dl_21', 'NG', 1),
('g2', 'l2', 'dl_22', 'dl_22', 'NG', 1),
('l1', 'l1', 'll_11_p', 'll_11_p', 'GG+', 1),
('l1', 'l2', 'll_12_p', 'll_12_p', 'GG+', 1),
('l2', 'l2', 'll_22_p', 'll_22_p', 'GG+', 1),
('l1', 'l1', 'll_11_m', 'll_11_m', 'GG-', 1),
('l1', 'l2', 'll_12_m', 'll_12_m', 'GG-', 1),
('l2', 'l2', 'll_22_m', 'll_22_m', 'GG-', 1)])
def test_xi(set_up, corr_method, t1, t2, bm, er, kind, pref):
cosmo, trcs, bms, ers, fls = set_up
method, errfac = corr_method
    # Compute the angular power spectrum for this tracer pair and interpolate onto integer ells
cl = ccl.angular_cl(cosmo, trcs[t1], trcs[t2], fls['ells'])
ell = np.arange(fls['lmax'])
cli = interp1d(fls['ells'], cl, kind='cubic')(ell)
# Our benchmarks have theta in arcmin
# but CCL requires it in degrees:
theta_deg = bms['theta'] / 60.
xi = ccl.correlation(cosmo, ell=ell, C_ell=cli, theta=theta_deg, type=kind,
method=method)
xi *= pref
print(xi)
assert np.all(np.fabs(xi - bms[bm]) < ers[er] * errfac)
|
[
"[email protected]"
] | |
a689274e77fed8cb988eeed9338bc8ba75695894
|
ba596595866771383f0d178421031fcf43fab16f
|
/plugin.video.pakindia/default.py
|
a1289475eb471d5e70aa8e45cd31cac529dd74f5
|
[] |
no_license
|
kevintone/tdbaddon
|
ab7e5aea48a369dffd8046c068d95ef26ddc7d9a
|
10cbfceb375e1f5af4ade29a62c433cfa36cd883
|
refs/heads/master
| 2021-01-16T22:51:36.801741 | 2016-06-21T16:27:51 | 2016-06-21T16:27:51 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,136 |
py
|
import urllib,urllib2,sys,re,xbmcplugin,xbmcgui,xbmcaddon,xbmc,os
import json
ADDON = xbmcaddon.Addon(id='plugin.video.pakindia')
datapath = xbmc.translatePath(ADDON.getAddonInfo('profile'))
pak = os.path.join(datapath, "pak")
def CATEGORIES():
aa=open(pak).read()
match=re.compile('<programCategory>(.+?)</programCategory.+?<categoryImage>(.+?)</categoryImage>',re.DOTALL).findall(aa)
uniques =[]
print match
for name , iconimage in match:
if name not in uniques:
uniques.append(name)
addDir(name,name,1,iconimage)
xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_VIDEO_TITLE)
setView('movies', 'default')
def GetContent(url):
aa=open(pak).read()
link=aa.split('<items>')
for p in link:
try:
name=re.compile('<programTitle>(.+?)</programTitle>').findall(p)[0]
URL=re.compile('<programURL>(.+?)</programURL>').findall(p)[0]
iconimage=re.compile('<programImage>(.+?)</programImage>').findall(p)[0]
if '<programCategory>'+url in p:
addDir(name,URL,200,iconimage)
except:pass
def OPEN_URL(url):
req = urllib2.Request(url)
req.add_header('User-Agent', 'Pak%20TV/1.0 CFNetwork/758.2.8 Darwin/15.662')
response = urllib2.urlopen(req)
link=response.read()
response.close()
return link
def auth():
import base64
import time
TIME = time.time()
second= str(TIME).split('.')[0]
first =int(second)+69296929
token=base64.b64encode('%s@2nd2@%s' % (str(first),second))
DATA_URL='https://app.dynns.com/keys/Pak.php?token='+token
request = urllib2.Request(DATA_URL)
base64string = 'ZGlsZGlsZGlsOlBAa2lzdEBu'
request.add_header("User-Agent",ADDON.getSetting('pakuser'))
request.add_header("Authorization", "Basic %s" % base64string)
return urllib2.urlopen(request).read()
def getletter():
import string
letters='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
import random
return random.choice(string.letters)
def getlanguage():
language=['en', 'en_gb', 'en_us', 'en_ca', 'en_in', 'fr', 'fr_CA', 'es', 'es_MX', 'pt', 'pt_BR', 'it', 'de', 'zh_Hans', 'zh_Hant', 'zh_HK', 'nl', 'ja', 'ko', 'vi', 'ru', 'sv', 'da', 'fi', 'nb', 'tr', 'el', 'id', 'ms', 'th', 'hi', 'hu', 'pl', 'cs', 'sk', 'uk', 'hr', 'ca', 'ro', 'he', 'ar']
import random
return random.choice(language)
def getuser():
from random import randint
number='1.0.0.%s%s%s' % (getletter().upper(),randint(0,20),randint(0,20))
agent='AppleCoreMedia/1.0.0.13E238 (iPhone; U; CPU OS 9_3_1 like Mac OS X; en_gb)'
return agent #% (number,randint(0,20),randint(0,20),randint(0,20),getlanguage())
def PLAY_STREAM(name,url,iconimage):
url =url+auth()
liz = xbmcgui.ListItem(name, iconImage='DefaultVideo.png', thumbnailImage=iconimage)
liz.setInfo(type='Video', infoLabels={'Title':name})
liz.setProperty("IsPlayable","true")
liz.setPath(url+'|User-Agent='+getuser())
xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, liz)
def get_params():
param=[]
paramstring=sys.argv[2]
if len(paramstring)>=2:
params=sys.argv[2]
cleanedparams=params.replace('?','')
if (params[len(params)-1]=='/'):
params=params[0:len(params)-2]
pairsofparams=cleanedparams.split('&')
param={}
for i in range(len(pairsofparams)):
splitparams={}
splitparams=pairsofparams[i].split('=')
if (len(splitparams))==2:
param[splitparams[0]]=splitparams[1]
return param
def addDir(name,url,mode,iconimage):
u=sys.argv[0]+"?url="+urllib.quote_plus(url)+"&mode="+str(mode)+"&name="+urllib.quote_plus(name)+"&iconimage="+urllib.quote_plus(iconimage)
ok=True
liz=xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
liz.setInfo( type="Video", infoLabels={ "Title": name} )
menu = []
if mode ==200:
liz.setProperty("IsPlayable","true")
ok=xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=False)
else:
menu.append(('Play All Videos','XBMC.RunPlugin(%s?name=%s&mode=2001&iconimage=None&url=%s)'% (sys.argv[0],name,url)))
liz.addContextMenuItems(items=menu, replaceItems=False)
xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]),url=u,listitem=liz,isFolder=True)
return ok
def setView(content, viewType):
if content:
xbmcplugin.setContent(int(sys.argv[1]), content)
if ADDON.getSetting('auto-view') == 'true':#<<<----see here if auto-view is enabled(true)
xbmc.executebuiltin("Container.SetViewMode(%s)" % ADDON.getSetting(viewType) )#<<<-----then get the view type
params=get_params()
url=None
name=None
mode=None
iconimage=None
try:
url=urllib.unquote_plus(params["url"])
except:
pass
try:
name=urllib.unquote_plus(params["name"])
except:
pass
try:
iconimage=urllib.unquote_plus(params["iconimage"])
except:
pass
try:
mode=int(params["mode"])
except:
pass
print "Mode: "+str(mode)
print "URL: "+str(url)
print "Name: "+str(name)
print "IconImage: "+str(iconimage)
#these are the modes which tells the plugin where to go
if mode==None or url==None or len(url)<1:
print ""
CATEGORIES()
elif mode==1:
print ""+url
GetContent(url)
elif mode==200:
PLAY_STREAM(name,url,iconimage)
elif mode==2001:
playall(name,url)
xbmcplugin.endOfDirectory(int(sys.argv[1]))
|
[
"[email protected]"
] | |
38737caf77f0aa21cdc1b89494ab8bf166d318f0
|
277f976227c7590f6de5e7991d8fbed23b6646fe
|
/euler/cleaned_solutions/p88.py
|
4d72ae9ef50c34c20cca638763abee936759bad8
|
[] |
no_license
|
domspad/euler
|
ca19aae72165eb4d08104ef7a2757115cfdb9a18
|
a4901403e442b376c2edd987a1571ab962dadab2
|
refs/heads/master
| 2021-01-17T14:04:39.198658 | 2016-07-25T23:40:10 | 2016-07-25T23:40:10 | 54,561,463 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,387 |
py
|
#~90 mins before running + 60 mins running? need to run again...
from math import sqrt
from collections import defaultdict
def primes_lt3(N):
"""
Return all primes less than N > 0 int
"""
    # trial-divide each candidate i by 2..sqrt(i); any divisor rules it out
primes = [ i for i in xrange(2,N)
if not any( ( i % p == 0 for p in xrange(2,int(sqrt(i))+1) ) )]
return primes
PRIMES = primes_lt3(10000)
def prime_fact_upto(n):
"""
n > 1, return prime facts of all integers up to n
12 --> {2:2, 3:1}
"""
PRIME_FACTS = [defaultdict(int),defaultdict(int)]
for i in xrange(2,n):
if i in PRIMES:
new_pf = defaultdict(int)
new_pf[i] = 1
PRIME_FACTS.append(new_pf)
else:
for p in PRIMES:
if p > sqrt(i) + 1: # not one found!
new_pf = defaultdict(int)
new_pf[i] = 1
PRIME_FACTS.append(new_pf)
break
elif i % p == 0:
new_pf = PRIME_FACTS[i/p].copy()
new_pf[p] += 1
PRIME_FACTS.append(new_pf)
break
return PRIME_FACTS
# PRIME_FACTS = prime_fact_upto(30)
from itertools import combinations
from collections_extended import bag,frozenbag
from collections import defaultdict
N = 12500
#from http://stackoverflow.com/questions/19368375/set-partitions-in-python
def partition(collection):
if len(collection) == 1:
yield [ collection ]
return
first = collection[0]
for smaller in partition(collection[1:]):
# insert `first` in each of the subpartition's subsets
for n, subset in enumerate(smaller):
yield smaller[:n] + [[ first ] + subset] + smaller[n+1:]
# put `first` in its own subset
yield [ [ first ] ] + smaller
def trans_pf_dict_to_bag(pf_dict):
return sum(([k]*pf_dict[k] for k in pf_dict),[])
def calc_k(bag):
prod = reduce(lambda x,y: x*y, bag, 1)
return prod - sum(bag) + len(bag)
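# Worked example for calc_k: the factor bag {2, 2, 3} has product 12, sum 7 and 3 members,
# so k = 12 - 7 + 3 = 8; padding with k - 3 = 5 ones gives 2*2*3 = 2+2+3+1+1+1+1+1 = 12,
# i.e. 12 is a product-sum number for set size k = 8.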
found_ks = defaultdict(int)
FOUND_KS = set()
for e,pf in enumerate(prime_fact_upto(N)):
if e < 2:
continue
if e % 100 == 0:
print e
partitions = partition(trans_pf_dict_to_bag(pf))
bags = set(frozenbag(map(lambda x: reduce(lambda y,z: y*z, x, 1), p)) for p in partitions)
for k in map(calc_k, bags):
FOUND_KS.add(k)
if found_ks[k] == 0:
found_ks[k] = e
print sum(set((found_ks[i] for i in xrange(2,12001))))
########################################################################
#dp method?
N = 100
K = N/2
# prod_partitions = [[(,)]*K for i in xrange(N)]
|
[
"[email protected]"
] | |
06bf555808d01c943cc92f5dce4c87da0ba3687e
|
43e900f11e2b230cdc0b2e48007d40294fefd87a
|
/Amazon/OnlineAssessment/min_priority.py
|
7a2fa3bd70e6d543510c6edb459380fb66dfbf84
|
[] |
no_license
|
DarkAlexWang/leetcode
|
02f2ed993688c34d3ce8f95d81b3e36a53ca002f
|
89142297559af20cf990a8e40975811b4be36955
|
refs/heads/master
| 2023-01-07T13:01:19.598427 | 2022-12-28T19:00:19 | 2022-12-28T19:00:19 | 232,729,581 | 3 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 639 |
py
|
import collections
class Solution:
def minPriority(self, arr):
n = len(arr)
dic = collections.defaultdict()
for val in arr:
dic[val] = dic.get(val, 0) + 1
dic = dict(sorted(dic.items()))
print(dic)
mp = {}
priority = 1
for val in dic.keys():
mp[val] = priority
priority += 1
for i in range(n):
arr[i] = mp[arr[i]]
return arr
if __name__ == "__main__":
solution = Solution()
ans1 = solution.minPriority([1, 4, 8, 4])
ans2 = solution.minPriority([2, 9, 3, 2, 3])
print(ans1)
print(ans2)
|
[
"[email protected]"
] | |
44b403f9672aec21e320efe0bce3f2ada25ce3ba
|
1eab574606dffb14a63195de994ee7c2355989b1
|
/ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/multicastleafrange_dgvyl211bhrpy2fzdexlywzsyw5nzq.py
|
f4b10235203bb7e6cbbb3ed37963471e5842f470
|
[
"MIT"
] |
permissive
|
steiler/ixnetwork_restpy
|
56b3f08726301e9938aaea26f6dcd20ebf53c806
|
dd7ec0d311b74cefb1fe310d57b5c8a65d6d4ff9
|
refs/heads/master
| 2020-09-04T12:10:18.387184 | 2019-11-05T11:29:43 | 2019-11-05T11:29:43 | 219,728,796 | 0 | 0 | null | 2019-11-05T11:28:29 | 2019-11-05T11:28:26 | null |
UTF-8
|
Python
| false | false | 9,934 |
py
|
# MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class MulticastLeafRange(Base):
"""Configures the multicast leaf range values.
The MulticastLeafRange class encapsulates a list of multicastLeafRange resources that is be managed by the user.
A list of resources can be retrieved from the server using the MulticastLeafRange.find() method.
The list can be managed by the user by using the MulticastLeafRange.add() and MulticastLeafRange.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'multicastLeafRange'
def __init__(self, parent):
super(MulticastLeafRange, self).__init__(parent)
@property
def GroupTrafficRange(self):
"""An instance of the GroupTrafficRange class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.grouptrafficrange_yw5nzs9ncm91cfryywzmawnsyw5nzq.GroupTrafficRange)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.grouptrafficrange_yw5nzs9ncm91cfryywzmawnsyw5nzq import GroupTrafficRange
return GroupTrafficRange(self)
@property
def OpaqueValueElement(self):
"""An instance of the OpaqueValueElement class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.opaquevalueelement_w5nzs9vcgfxdwvwywx1zuvszw1lbnq.OpaqueValueElement)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.opaquevalueelement_w5nzs9vcgfxdwvwywx1zuvszw1lbnq import OpaqueValueElement
return OpaqueValueElement(self)
@property
def ContinuousIncrOpaqueValuesAcrossRoot(self):
"""It signifies the continuous increment of opaque values across root.
Returns:
bool
"""
return self._get_attribute('continuousIncrOpaqueValuesAcrossRoot')
@ContinuousIncrOpaqueValuesAcrossRoot.setter
def ContinuousIncrOpaqueValuesAcrossRoot(self, value):
self._set_attribute('continuousIncrOpaqueValuesAcrossRoot', value)
@property
def Enabled(self):
"""If true, enables the protocol.
Returns:
bool
"""
return self._get_attribute('enabled')
@Enabled.setter
def Enabled(self, value):
self._set_attribute('enabled', value)
@property
def LabelValueStart(self):
"""The first label in the range of labels.
Returns:
number
"""
return self._get_attribute('labelValueStart')
@LabelValueStart.setter
def LabelValueStart(self, value):
self._set_attribute('labelValueStart', value)
@property
def LabelValueStep(self):
"""The label value increment step for more than 1 range.
Returns:
number
"""
return self._get_attribute('labelValueStep')
@LabelValueStep.setter
def LabelValueStep(self, value):
self._set_attribute('labelValueStep', value)
@property
def LspCountPerRoot(self):
"""This is to specify how many different LSPs are created per Root.
Returns:
number
"""
return self._get_attribute('lspCountPerRoot')
@LspCountPerRoot.setter
def LspCountPerRoot(self, value):
self._set_attribute('lspCountPerRoot', value)
@property
def LspType(self):
"""The type of multicast LSP.
Returns:
str(p2mp)
"""
return self._get_attribute('lspType')
@property
def RootAddrCount(self):
"""The root address count for this Multicast FEC range.
Returns:
number
"""
return self._get_attribute('rootAddrCount')
@RootAddrCount.setter
def RootAddrCount(self, value):
self._set_attribute('rootAddrCount', value)
@property
def RootAddrStep(self):
"""The Root Address increment step. This is applicable only if Root Address Count is greater than 1.
Returns:
str
"""
return self._get_attribute('rootAddrStep')
@RootAddrStep.setter
def RootAddrStep(self, value):
self._set_attribute('rootAddrStep', value)
@property
def RootAddress(self):
"""The root address of the multicast LSP.
Returns:
str
"""
return self._get_attribute('rootAddress')
@RootAddress.setter
def RootAddress(self, value):
self._set_attribute('rootAddress', value)
def update(self, ContinuousIncrOpaqueValuesAcrossRoot=None, Enabled=None, LabelValueStart=None, LabelValueStep=None, LspCountPerRoot=None, RootAddrCount=None, RootAddrStep=None, RootAddress=None):
"""Updates a child instance of multicastLeafRange on the server.
Args:
ContinuousIncrOpaqueValuesAcrossRoot (bool): It signifies the continuous increment of opaque values across root.
Enabled (bool): If true, enables the protocol.
LabelValueStart (number): The first label in the range of labels.
LabelValueStep (number): The label value increment step for more than 1 range.
LspCountPerRoot (number): This is to specify how many different LSPs are created per Root.
RootAddrCount (number): The root address count for this Multicast FEC range.
RootAddrStep (str): The Root Address increment step. This is applicable only if Root Address Count is greater than 1.
RootAddress (str): The root address of the multicast LSP.
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
self._update(locals())
def add(self, ContinuousIncrOpaqueValuesAcrossRoot=None, Enabled=None, LabelValueStart=None, LabelValueStep=None, LspCountPerRoot=None, RootAddrCount=None, RootAddrStep=None, RootAddress=None):
"""Adds a new multicastLeafRange node on the server and retrieves it in this instance.
Args:
ContinuousIncrOpaqueValuesAcrossRoot (bool): It signifies the continuous increment of opaque values across root.
Enabled (bool): If true, enables the protocol.
LabelValueStart (number): The first label in the range of labels.
LabelValueStep (number): The label value increment step for more than 1 range.
LspCountPerRoot (number): This is to specify how many different LSPs are created per Root.
RootAddrCount (number): The root address count for this Multicast FEC range.
RootAddrStep (str): The Root Address increment step. This is applicable only if Root Address Count is greater than 1.
RootAddress (str): The root address of the multicast LSP.
Returns:
self: This instance with all currently retrieved multicastLeafRange data using find and the newly added multicastLeafRange data available through an iterator or index
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
return self._create(locals())
def remove(self):
"""Deletes all the multicastLeafRange data in this instance from server.
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(self, ContinuousIncrOpaqueValuesAcrossRoot=None, Enabled=None, LabelValueStart=None, LabelValueStep=None, LspCountPerRoot=None, LspType=None, RootAddrCount=None, RootAddrStep=None, RootAddress=None):
"""Finds and retrieves multicastLeafRange data from the server.
All named parameters support regex and can be used to selectively retrieve multicastLeafRange data from the server.
By default the find method takes no parameters and will retrieve all multicastLeafRange data from the server.
Args:
ContinuousIncrOpaqueValuesAcrossRoot (bool): It signifies the continuous increment of opaque values across root.
Enabled (bool): If true, enables the protocol.
LabelValueStart (number): The first label in the range of labels.
LabelValueStep (number): The label value increment step for more than 1 range.
LspCountPerRoot (number): This is to specify how many different LSPs are created per Root.
LspType (str(p2mp)): The type of multicast LSP.
RootAddrCount (number): The root address count for this Multicast FEC range.
RootAddrStep (str): The Root Address increment step. This is applicable only if Root Address Count is greater than 1.
RootAddress (str): The root address of the multicast LSP.
Returns:
self: This instance with matching multicastLeafRange data retrieved from the server available through an iterator or index
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
return self._select(locals())
def read(self, href):
"""Retrieves a single instance of multicastLeafRange data from the server.
Args:
href (str): An href to the instance to be retrieved
Returns:
self: This instance with the multicastLeafRange data from the server available through an iterator or index
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
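# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the generated module). It exercises only the
# methods defined above; `parent` stands in for whatever node exposes a
# multicastLeafRange child in a live ixnetwork_restpy session, and obtaining it
# (test platform, session, vport, protocol stack) is not shown here.
# ---------------------------------------------------------------------------
def _example_manage_leaf_ranges(parent):
    """Add, look up, update and delete multicastLeafRange resources."""
    leaf_ranges = MulticastLeafRange(parent)
    leaf_ranges.add(Enabled=True, RootAddress='1.1.1.1', RootAddrCount=2,
                    LspCountPerRoot=1, LabelValueStart=1000, LabelValueStep=1)
    found = leaf_ranges.find(Enabled=True)   # named arguments support regex matching
    found.update(LspCountPerRoot=4)          # push the changed attribute to the server
    found.remove()                           # delete every matched range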
|
[
"[email protected]"
] | |
05933d6e8971beb73e7caacabd14fe5c9e0081de
|
18fa0ad57cd9c26bc2622ead61b88c81e017e2e8
|
/CNN_DNN_tensorflow/tensor_model_boost/input_data.py
|
801fad1042f51ce2267f9539d59d5cf67b1cf64a
|
[] |
no_license
|
weihhh/ECG_pro
|
45da18fad4709009cd4766a870fac7c5d5514a92
|
1e013cbb7352ad896661412f036fd9c6242a6001
|
refs/heads/master
| 2021-05-04T13:52:17.259815 | 2018-07-20T02:39:16 | 2018-07-20T02:39:16 | 120,323,445 | 2 | 4 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,456 |
py
|
from sklearn.model_selection import train_test_split  # train/test data split
from collections import Counter
import pickle
from sklearn import preprocessing
from sklearn.metrics import classification_report,accuracy_score  # model accuracy, precision, recall, f1_score
from tensor_ecg_model import tensor_dataset
def read_data_sets():
    #unpickle
    with open(r'D:\aa_work\ECG\svm\data.pickle','rb') as f:
        ECG_data,ECG_annotation=pickle.load(f)  # array of arrays, shapes (*, 300) and (*, 1); * = number of samples
    print('Raw data shape: ',ECG_data.shape,'Raw label shape: ',ECG_annotation.shape)
    annotation_counts=Counter(ECG_annotation.flatten())
    print('Class overview: ',annotation_counts)
    # normalization
    ECG_data=preprocessing.scale(ECG_data)
    x_train,x_validation,y_train,y_validation=train_test_split(ECG_data,ECG_annotation.flatten(),test_size=0.5)
    print('Training set size: {}, test set size: {}'.format(x_train.shape,x_validation.shape))
    train_dataset=tensor_dataset(x_train,y_train)
    validation_dataset=tensor_dataset(x_validation,y_validation)
    return train_dataset,validation_dataset
def main():
    train_dataset,validation_dataset=read_data_sets()
    print('Training set: {}, {}'.format(train_dataset._images.shape,train_dataset._labels.shape))
    print('Validation set: {}, {}'.format(validation_dataset._images.shape,validation_dataset._labels.shape))
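def _make_demo_pickle(path='demo_data.pickle', n_samples=10):
    """Hedged helper, not part of the original script: writes a pickle file in the
    (data, annotation) layout that read_data_sets() expects -- arrays of shape
    (n_samples, 300) and (n_samples, 1). The random values are placeholders, not
    real ECG beats, and the path is an example."""
    import numpy as np
    demo_data = np.random.randn(n_samples, 300)
    demo_labels = np.random.randint(0, 5, size=(n_samples, 1))
    with open(path, 'wb') as f:
        pickle.dump((demo_data, demo_labels), f)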
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
51a0ae98eefc137ce5989545f2a927f293bdb19b
|
bbb137a11a91a30701bab30708ef7ddec24b6a2a
|
/models2Project/models2App/apps.py
|
42b1120906f9536737ea68d84715a3b4c63603f5
|
[
"Apache-2.0"
] |
permissive
|
cs-fullstack-2019-spring/django-models-cw-DB225
|
f6462a46b033f456766a14e39498d740c6179c9d
|
04656f885adaa47e84c339827fcd4216c53b0a94
|
refs/heads/master
| 2020-04-24T04:41:58.104155 | 2019-02-22T02:58:27 | 2019-02-22T02:58:27 | 171,713,602 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 95 |
py
|
from django.apps import AppConfig
class Models2AppConfig(AppConfig):
name = 'models2App'
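# Hedged note (not part of the original file): for Django to pick up this
# AppConfig, the app must be listed in the project's settings.py, e.g.
#
#     INSTALLED_APPS = [
#         ...
#         'models2App.apps.Models2AppConfig',   # or simply 'models2App'
#     ]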
|
[
"[email protected]"
] | |
5b8e0722c567a62b5772d325d53476f5dc7e763f
|
bc441bb06b8948288f110af63feda4e798f30225
|
/ucpro_sdk/model/metadata_center/stream_metrics_schema_pb2.py
|
c00bdc5b8bb6d992da17621de3f2b1535aefb23a
|
[
"Apache-2.0"
] |
permissive
|
easyopsapis/easyops-api-python
|
23204f8846a332c30f5f3ff627bf220940137b6b
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
refs/heads/master
| 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null |
UTF-8
|
Python
| false | true | 5,147 |
py
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: stream_metrics_schema.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from ucpro_sdk.model.metadata_center import stream_metrics_schema_field_pb2 as ucpro__sdk_dot_model_dot_metadata__center_dot_stream__metrics__schema__field__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='stream_metrics_schema.proto',
package='metadata_center',
syntax='proto3',
serialized_options=_b('ZIgo.easyops.local/contracts/protorepo-models/easyops/model/metadata_center'),
serialized_pb=_b('\n\x1bstream_metrics_schema.proto\x12\x0fmetadata_center\x1a\x41ucpro_sdk/model/metadata_center/stream_metrics_schema_field.proto\"\xc8\x01\n\x13StreamMetricsSchema\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0b\n\x03org\x18\x02 \x01(\x05\x12\x0f\n\x07version\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12=\n\ndimensions\x18\x05 \x03(\x0b\x32).metadata_center.StreamMetricsSchemaField\x12:\n\x07metrics\x18\x06 \x03(\x0b\x32).metadata_center.StreamMetricsSchemaFieldBKZIgo.easyops.local/contracts/protorepo-models/easyops/model/metadata_centerb\x06proto3')
,
dependencies=[ucpro__sdk_dot_model_dot_metadata__center_dot_stream__metrics__schema__field__pb2.DESCRIPTOR,])
_STREAMMETRICSSCHEMA = _descriptor.Descriptor(
name='StreamMetricsSchema',
full_name='metadata_center.StreamMetricsSchema',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='metadata_center.StreamMetricsSchema.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='org', full_name='metadata_center.StreamMetricsSchema.org', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='version', full_name='metadata_center.StreamMetricsSchema.version', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='metadata_center.StreamMetricsSchema.name', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dimensions', full_name='metadata_center.StreamMetricsSchema.dimensions', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='metrics', full_name='metadata_center.StreamMetricsSchema.metrics', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=116,
serialized_end=316,
)
_STREAMMETRICSSCHEMA.fields_by_name['dimensions'].message_type = ucpro__sdk_dot_model_dot_metadata__center_dot_stream__metrics__schema__field__pb2._STREAMMETRICSSCHEMAFIELD
_STREAMMETRICSSCHEMA.fields_by_name['metrics'].message_type = ucpro__sdk_dot_model_dot_metadata__center_dot_stream__metrics__schema__field__pb2._STREAMMETRICSSCHEMAFIELD
DESCRIPTOR.message_types_by_name['StreamMetricsSchema'] = _STREAMMETRICSSCHEMA
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
StreamMetricsSchema = _reflection.GeneratedProtocolMessageType('StreamMetricsSchema', (_message.Message,), {
'DESCRIPTOR' : _STREAMMETRICSSCHEMA,
'__module__' : 'stream_metrics_schema_pb2'
# @@protoc_insertion_point(class_scope:metadata_center.StreamMetricsSchema)
})
_sym_db.RegisterMessage(StreamMetricsSchema)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
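# Hedged usage sketch (not part of the generated file): building and serializing
# a StreamMetricsSchema with the standard protobuf message API. The field values
# are placeholders; dimensions/metrics hold StreamMetricsSchemaField messages
# whose own fields live in the imported *_field_pb2 module and are not shown here.
def _example_roundtrip():
    msg = StreamMetricsSchema(id='cpu.schema', org=1, version=2, name='cpu')
    msg.dimensions.add()               # append an empty StreamMetricsSchemaField
    payload = msg.SerializeToString()  # wire-format bytes
    parsed = StreamMetricsSchema()
    parsed.ParseFromString(payload)    # round-trip back into a message object
    return parsed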
|
[
"[email protected]"
] | |
f02c685f1d84a5172d78157a3535cdc42dd04fb0
|
21e7542abf4dbb3fdfbe7b021860c70cdf8b5ee8
|
/dnacauldron/Filter.py
|
722d767b55440ec6d38933bd8af2117736905069
|
[
"MIT"
] |
permissive
|
Edinburgh-Genome-Foundry/DnaCauldron
|
8a4b4b71481a4b8f6c944b7a542f735a27040fc1
|
21d9f96926dd3f6920e8d9783127e7115948eae3
|
refs/heads/master
| 2023-08-31T14:25:16.897465 | 2023-07-28T17:01:39 | 2023-07-28T17:01:39 | 91,811,105 | 44 | 9 |
MIT
| 2023-07-18T22:12:47 | 2017-05-19T13:53:31 |
Python
|
UTF-8
|
Python
| false | false | 3,236 |
py
|
"""
Filters applied in methods such as ``mix.compute_circular_assemblies`` in order
to filter out circular assemblies which would have the wrong marker, or
restriction sites of the digestion enzyme (these are unstable)
"""
from Bio import Restriction
from Bio.Seq import Seq
from .biotools import record_is_linear
class NoRestrictionSiteFilter:
"""Filters to ignore fragments and final assemblies containing a given
restriction site
"""
def __init__(self, enzyme_name):
self.enzyme_name = enzyme_name
self.enzyme = Restriction.__dict__[enzyme_name]
def __call__(self, seqrecord):
is_linear = record_is_linear(seqrecord, default=True)
if is_linear:
            # Shameful hack so that sites of enzymes that cut outside of the
            # sequence (but whose recognition site lies inside it) are still detected
seq = "AAAAAA" + Seq(str(seqrecord.seq)) + "AAAAAA"
else:
seq = seqrecord.seq
return (self.enzyme.search(seq, linear=is_linear) == [])
def __repr__(self):
return ("NoRestriction(%s)" % self.enzyme_name)
def __str__(self):
return ("NoRestriction(%s)" % self.enzyme_name)
class NoPatternFilter:
"""Filters to ignore fragments and final assemblies whose DNA sequence
contains the given pattern.
The pattern must be an exact sequence of DNA.
"""
# TODO: regular expressions
def __init__(self, pattern):
self.pattern = pattern
def __call__(self, seqrecord):
        return seqrecord.seq.find(self.pattern) == -1
class TextSearchFilter:
"""Filters to ignore assemblies containing or not containing some text.
The text will be looked for in every feature of the construct.
Constructs which do NOT have the text pattern in at least one feature will
be filtered out, unless ``is_forbidden`` is set to True, at which case
constructs which DO have the text pattern will be filtered out.
"""
def __init__(self, text, is_forbidden=False):
self.text = text
self.is_forbidden = is_forbidden
@staticmethod
def gather_all_feature_text(feature):
"""Return a single string of all text in the feature (+qualifiers)."""
return " ".join(
[feature.type] +
list(map(str, feature.qualifiers.keys())) +
list(map(str, feature.qualifiers.values()))
)
def gather_all_texts(self, seqrecord):
"""Return a single string of all texts in all record features."""
return " ".join([self.gather_all_feature_text(feature)
for feature in seqrecord.features] +
list(map(str, seqrecord.annotations)))
def __call__(self, seqrecord):
all_texts = self.gather_all_texts(seqrecord)
text_found = self.text in all_texts
if self.is_forbidden:
return not text_found
else:
return text_found
class FragmentSetContainsPartsFilter:
def __init__(self, part_names):
self.mandatory_part_names = set(part_names)
def __call__(self, fragments):
fragments = set([f.original_part.id for f in fragments])
return fragments >= self.mandatory_part_names
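# Hedged usage sketch (not part of the module): applying the filters above to a
# plain Biopython record. BsaI is only an example enzyme, the sequence is a
# placeholder rather than a real part, and the function name is hypothetical.
def _example_filter_usage():
    from Bio.SeqRecord import SeqRecord
    record = SeqRecord(Seq("ATGCATGCATGCATGC"), id="demo_part")
    no_bsai = NoRestrictionSiteFilter("BsaI")
    no_gfp = TextSearchFilter("GFP", is_forbidden=True)
    # Keep the record only if every filter accepts it.
    return all(f(record) for f in (no_bsai, no_gfp))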
|
[
"[email protected]"
] | |
7189c697d9a266365e1e83739f9a2ba79c9ee6bd
|
272ae95716e530d538937ded59ec5b6e0b6d4db8
|
/섹션 2/2. K번째 수/AA.py
|
7e7fb5d0ad9b617148e4fe6e520d0a3c4d09457b
|
[] |
no_license
|
gogoheejun/algorithm
|
83a1cb30bff5c349f53be16764e517a46e99cf1c
|
39e999abf7170f434a7ac6e1f698f066e55aca03
|
refs/heads/main
| 2023-06-22T13:06:32.135917 | 2021-07-25T15:46:19 | 2021-07-25T15:46:19 | 383,379,782 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 237 |
py
|
import sys
# sys.stdin = open("input.txt", "rt")
T = int(input())
for t in range(T):
n, s, e, k = map(int, input().split())
a = list(map(int, input().split()))
a = a[s-1:e]
a.sort()
print("#%d %d" % (t+1, a[k-1]))
|
[
"[email protected]"
] | |
853bbbaec74dc86334d689deb94adad51a9e766f
|
4917066aaf02a14a5a00308eacb3cfaafa185f51
|
/production/arrplastic.py
|
8cc75a743984a555012d0d928aa014aafab6ad6f
|
[
"MIT"
] |
permissive
|
markusritschel/thesis
|
d7bf28a7807aa1e3d2ecd1d3d67f1866a8640e1b
|
effc196ce062223e47fcfdfef205cc5e1678e19e
|
refs/heads/main
| 2023-05-09T23:55:59.807218 | 2021-06-07T01:05:09 | 2021-06-07T01:05:09 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,010 |
py
|
import sys
import os
arg = int(sys.argv[1])
tauRef = [(i / 20) for i in range(100, 121)][arg] # 21 cases
import math
import planetengine
planetengine.set_global_anchor(os.path.basename(__file__)[:-3], '.')
from planetengine.systems import Arrhenius, Viscoplastic
from planetengine.initials import Sinusoidal
final = (planetengine.finals.Averages, {'tolerance': 1e-3, 'minlength': 50})
initial = Sinusoidal(freq = 1.)
for alpha in [10 ** (i / 2.) for i in range(8, 15)]:
system = Arrhenius(
f = 1.,
aspect = 1.,
res = min(64, 2 ** math.floor(math.log10(alpha))),
alpha = alpha,
observers = True,
temperatureField = initial,
innerMethod = 'lu',
courant = 1.,
)
system[:final]()
initial = system
system = Viscoplastic(
f = 1.,
aspect = 1.,
res = 64,
alpha = 10.**7.,
tauRef = tauRef,
observers = True,
temperatureField = initial,
innerMethod = 'lu',
courant = 1,
)
system[:final:100]()
|
[
"[email protected]"
] | |
809b05581a91d7f83e0e9b65055f4f625b740d4f
|
e84672db206696b8f48113a754d85e93f9d57859
|
/routers/about_us_router/schemas.py
|
2119773164f185aca9806a241343eebc3dd6d7e1
|
[] |
no_license
|
eliblurr/eav1
|
80a24a1f27bc7ee4d9444f87815b4a28bd4f8c51
|
31a4d44d80ce55ac0df1bffcb3ce4d2a8a906f93
|
refs/heads/main
| 2023-06-08T23:12:11.116518 | 2021-06-23T08:58:58 | 2021-06-23T08:58:58 | 311,332,506 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 597 |
py
|
from pydantic import BaseModel
from typing import Optional
import datetime
class AboutUsBase(BaseModel):
title: Optional[str]
metatitle: Optional[str]
description: str
index: int
status: Optional[bool]
class CreateAboutUs(AboutUsBase):
pass
class UpdateAboutUs(AboutUsBase):
title: Optional[str]
metatitle: Optional[str]
description: Optional[str]
index: Optional[int]
status: Optional[bool]
class AboutUs(AboutUsBase):
id: int
date_created: datetime.datetime
date_modified: datetime.datetime
class Config():
orm_mode = True
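# Hedged usage sketch (not part of the original module): validating router input
# with CreateAboutUs and serializing an ORM row with AboutUs. `Row` is a stand-in
# for the project's real SQLAlchemy model, which is not shown here.
def _example_schema_usage():
    payload = CreateAboutUs(title='Who we are', description='Our story', index=1)
    class Row:  # minimal object exposing the attributes AboutUs expects
        id = 1
        title = 'Who we are'
        metatitle = None
        description = 'Our story'
        index = 1
        status = True
        date_created = datetime.datetime(2021, 1, 1)
        date_modified = datetime.datetime(2021, 1, 2)
    serialized = AboutUs.from_orm(Row())  # possible because Config.orm_mode = True
    return payload, serialized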
|
[
"[email protected]"
] | |
e5d339075d6c49b20d922195e8001b0778f3b4c1
|
c80ec1805a7e6cb1bd3f4b3e383ef4f4cf164765
|
/gen/filters/rules/media/_hasnotematchingsubstringof.py
|
ac8d27c0922b99dc89a90d0f24532a4324f44713
|
[] |
no_license
|
balrok/gramps_addon
|
57c8e976c47ea3c1d1298d3fd4406c13909ac933
|
0c79561bed7ff42c88714edbc85197fa9235e188
|
refs/heads/master
| 2020-04-16T03:58:27.818732 | 2015-02-01T14:17:44 | 2015-02-01T14:17:44 | 30,111,898 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,783 |
py
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from .._hasnotesubstrbase import HasNoteSubstrBase
#-------------------------------------------------------------------------
# "Media having notes that contain a substring"
#-------------------------------------------------------------------------
class HasNoteMatchingSubstringOf(HasNoteSubstrBase):
"""Media having notes containing <substring>"""
name = _('Media objects having notes containing <substring>')
description = _("Matches media objects whose notes contain text "
"matching a substring")
|
[
"[email protected]"
] | |
a99300fc155bea9ad18198e891be1b2266256ab3
|
0a0536142014f759aa08974118133767c1eac6a8
|
/build/pal_msgs/pal_tablet_msgs/catkin_generated/pkg.develspace.context.pc.py
|
15f1eff9bef9f885ff57e8eb85ea040e8ac92198
|
[] |
no_license
|
EricSun787/SSJ_AGV
|
bb1851bf5102b6917888717fabf4f627f142e47c
|
01d387efa1b53bd1a909f30dc89b14c9364ffd72
|
refs/heads/master
| 2023-07-08T18:30:43.704457 | 2017-09-19T16:02:44 | 2017-09-19T16:02:44 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 456 |
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/ssj/ros_ws/devel/include".split(';') if "/home/ssj/ros_ws/devel/include" != "" else []
PROJECT_CATKIN_DEPENDS = "message_runtime;std_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "pal_tablet_msgs"
PROJECT_SPACE_DIR = "/home/ssj/ros_ws/devel"
PROJECT_VERSION = "0.11.3"
|
[
"[email protected]"
] | |
b5ca6c99afc6cf2abe20890d232f3b733644036b
|
ef54d37f8a3303013ca7469871a320d303957ed7
|
/robo4.2/fusion/tests/wpst_crm/developer/rbriggs/configure_dev_enclosure/efit_data_variables.py
|
47492636e4178e363877a3df17500428793df79a
|
[] |
no_license
|
richa92/Jenkin_Regression_Testing
|
d18badfcf16bda682dfe7bcbbd66f54a9a27a58d
|
24a74926170cbdfafa47e972644e2fe5b627d8ff
|
refs/heads/master
| 2020-07-12T10:01:59.099137 | 2019-08-27T12:14:53 | 2019-08-27T12:14:53 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 65,108 |
py
|
"""
!!!!!!!!!!!!!!!!!!!!!!
REPLACE the Encl1 placeholder with the real enclosure name for real hardware!!!!
!!!!!!!!!!!!!!!!!!!!!!
"""
def make_range_list(vrange):
rlist = []
    for x in range(vrange['start'], (vrange['end'] + 1)):  # range (not xrange) works on both Python 2 and 3
rlist.append(vrange['prefix'] + str(x) + vrange['suffix'])
return rlist
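# Worked example (added for clarity): make_range_list({'prefix': 'net_', 'suffix': '',
# 'start': 2, 'end': 4}) returns ['net_2', 'net_3', 'net_4']; extra keys in the range
# dicts below (name, type, networkUris, ...) are simply ignored by this helper.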
admin_credentials = {'userName': 'Administrator', 'password': 'Wpst@hpvse123#!'}
vcenter = {'server': '15.199.230.130', 'user': 'rbriggs', 'password': 'hpvse1'}
appliance = {'type': 'ApplianceNetworkConfiguration',
'applianceNetworks':
[{'device': 'eth0',
'macAddress': None,
'interfaceName': '15.199.x.x',
'activeNode': '1',
'unconfigure': False,
'ipv4Type': 'STATIC',
'ipv4Subnet': '255.255.240.0',
'ipv4Gateway': '15.199.224.1',
'ipv4NameServers': ['16.110.135.51', '16.110.135.52'],
'app1Ipv4Addr': '15.199.229.190',
'ipv6Type': 'UNCONFIGURE',
'hostname': 'efit-rb.usa.hp.com',
'confOneNode': True,
'domainName': 'usa.hp.com',
'aliasDisabled': True,
}],
}
timeandlocale = {'type': 'TimeAndLocale', 'dateTime': None, 'timezone': 'UTC', 'ntpServers': ['ntp.hp.net'], 'locale': 'en_US.UTF-8'}
ranges = [{'name': 'CI-FIT-01-MAC', 'type': 'Range', 'category': 'id-range-VMAC', 'rangeCategory': 'CUSTOM', 'startAddress': 'A2:11:11:00:00:00', 'endAddress': 'A2:11:11:0F:FF:FF', 'enabled': True},
{'name': 'CI-FIT-01-WWN', 'type': 'Range', 'category': 'id-range-VWWN', 'rangeCategory': 'CUSTOM', 'startAddress': '21:11:11:11:00:00:00:00', 'endAddress': '21:11:11:11:00:0F:FF:FF', 'enabled': True},
{'name': 'CI-FIT-01-SN', 'type': 'Range', 'category': 'id-range-VSN', 'rangeCategory': 'CUSTOM', 'startAddress': 'VCUBBB0000', 'endAddress': 'VCUBBB0ZZZ', 'enabled': True}]
users = [{'userName': 'Serveradmin', 'password': 'Serveradmin', 'fullName': 'Serveradmin', 'roles': ['Server administrator'], 'emailAddress': '[email protected]', 'officePhone': '970-555-0003', 'mobilePhone': '970-500-0003', 'type': 'UserAndRoles'},
{'userName': 'Networkadmin', 'password': 'Networkadmin', 'fullName': 'Networkadmin', 'roles': ['Network administrator'], 'emailAddress': '[email protected]', 'officePhone': '970-555-0003', 'mobilePhone': '970-500-0003', 'type': 'UserAndRoles'},
{'userName': 'Backupadmin', 'password': 'Backupadmin', 'fullName': 'Backupadmin', 'roles': ['Backup administrator'], 'emailAddress': '[email protected]', 'officePhone': '970-555-0003', 'mobilePhone': '970-500-0003', 'type': 'UserAndRoles'},
{'userName': 'Noprivledge', 'password': 'Noprivledge', 'fullName': 'Noprivledge', 'roles': ['Read only'], 'emailAddress': '[email protected]', 'officePhone': '970-555-0003', 'mobilePhone': '970-500-0003', 'type': 'UserAndRoles'}
]
licenses = [{'key': 'YCDE D9MA H9P9 8HUZ U7B5 HWW5 Y9JL KMPL MHND 7AJ9 DXAU 2CSM GHTG L762 LFH6 F4R4 KJVT D5KM EFVW DT5J 83HJ 8VC6 AK2P 3EW2 L9YE HUNJ TZZ7 MB5X 82Z5 WHEF GE4C LUE3 BKT8 WXDG NK6Y C4GA HZL4 XBE7 3VJ6 2MSU 4ZU9 9WGG CZU7 WE4X YN44 CH55 KZLG 2F4N A8RJ UKEG 3F9V JQY5 "423207356 HPOV-NFR2 HP_OneView_w/o_iLO_16_Seat_NFR H3TCJHCGAYAY"'},
{'key': 'QC3C A9MA H9PQ GHVZ U7B5 HWW5 Y9JL KMPL 2HVF 4FZ9 DXAU 2CSM GHTG L762 7JX5 V5FU KJVT D5KM EFVW DV5J 43LL PSS6 AK2P 3EW2 T9YE XUNJ TZZ7 MB5X 82Z5 WHEF GE4C LUE3 BKT8 WXDG NK6Y C4GA HZL4 XBE7 3VJ6 2MSU 4ZU9 9WGG CZU7 WE4X YN44 CH55 KZLG 2F4N A8RJ UKEG 3F9V JQY5 "423207566 HPOV-NFR2 HP_OneView_w/o_iLO_48_Seat_NFR 6H72JHCGY5AU"'}
]
ethernet_networks = [{'name': 'RDP',
'type': 'ethernet-networkV3',
'vlanId': 1001,
'purpose': 'General',
'smartLink': True,
'privateNetwork': False,
'connectionTemplateUri': None,
'ethernetNetworkType': 'Tagged'},
{'name': 'IC',
'type': 'ethernet-networkV3',
'vlanId': None,
'purpose': 'General',
'smartLink': True,
'privateNetwork': False,
'connectionTemplateUri': None,
'ethernetNetworkType': 'Untagged'},
{'name': 'Tunnel1',
'type': 'ethernet-networkV3',
'vlanId': None,
'purpose': 'General',
'smartLink': True,
'privateNetwork': False,
'connectionTemplateUri': None,
'ethernetNetworkType': 'Tunnel'},
{'name': 'Tunnel2',
'type': 'ethernet-networkV3',
'vlanId': None,
'purpose': 'General',
'smartLink': True,
'privateNetwork': False,
'connectionTemplateUri': None,
'ethernetNetworkType': 'Tunnel'},
]
ethernet_ranges = [{'prefix': 'net_', 'suffix': '', 'start': 2, 'end': 50, 'name': None, 'type': 'ethernet-networkV3',
'vlanId': None, 'purpose': 'General', 'smartLink': True, 'privateNetwork': False, 'connectionTemplateUri': None,
'ethernetNetworkType': 'Tagged'},
{'prefix': 'net_', 'suffix': '', 'start': 51, 'end': 102, 'name': None, 'type': 'ethernet-networkV3',
'vlanId': None, 'purpose': 'General', 'smartLink': True, 'privateNetwork': False, 'connectionTemplateUri': None,
'ethernetNetworkType': 'Tagged'}]
network_sets = [{'name': 'NS_23', 'type': 'network-set', 'networkUris': ['net_23'], 'nativeNetworkUri': None},
{'name': 'NS_24', 'type': 'network-set', 'networkUris': ['net_24'], 'nativeNetworkUri': None},
{'name': 'NS_46', 'type': 'network-set', 'networkUris': ['net_46'], 'nativeNetworkUri': None},
{'name': 'NS_47', 'type': 'network-set', 'networkUris': ['net_47'], 'nativeNetworkUri': None},
{'name': 'NS_96', 'type': 'network-set', 'networkUris': ['net_96'], 'nativeNetworkUri': None},
{'name': 'NS_97', 'type': 'network-set', 'networkUris': ['net_97'], 'nativeNetworkUri': None},
{'name': 'NS_98', 'type': 'network-set', 'networkUris': ['net_98'], 'nativeNetworkUri': None},
{'name': 'NS_99', 'type': 'network-set', 'networkUris': ['net_99'], 'nativeNetworkUri': None}]
network_set_ranges = [{'prefix': 'net_', 'suffix': '', 'start': 2, 'end': 22, 'name': 'VlanTrunk1', 'type': 'network-set', 'networkUris': None, 'nativeNetworkUri': None},
{'prefix': 'net_', 'suffix': '', 'start': 25, 'end': 45, 'name': 'VlanTrunk2', 'type': 'network-set', 'networkUris': None, 'nativeNetworkUri': 'net_25'},
{'prefix': 'net_', 'suffix': '', 'start': 50, 'end': 70, 'name': 'VlanTrunk3', 'type': 'network-set', 'networkUris': None, 'nativeNetworkUri': 'net_50'},
{'prefix': 'net_', 'suffix': '', 'start': 75, 'end': 95, 'name': 'VlanTrunk4', 'type': 'network-set', 'networkUris': None, 'nativeNetworkUri': 'net_75'}
]
fc_networks = [{'name': 'SAN-1-A', 'type': 'fc-networkV2', 'linkStabilityTime': 30, 'autoLoginRedistribution': True, 'connectionTemplateUri': None, 'managedSanUri': None, 'fabricType': 'FabricAttach'},
{'name': 'SAN-2-B', 'type': 'fc-networkV2', 'linkStabilityTime': 30, 'autoLoginRedistribution': True, 'connectionTemplateUri': None, 'managedSanUri': None, 'fabricType': 'FabricAttach'},
{'name': 'SAN-3-A', 'type': 'fc-networkV2', 'linkStabilityTime': 30, 'autoLoginRedistribution': True, 'connectionTemplateUri': None, 'managedSanUri': None, 'fabricType': 'FabricAttach'},
{'name': 'SAN-4-B', 'type': 'fc-networkV2', 'linkStabilityTime': 30, 'autoLoginRedistribution': True, 'connectionTemplateUri': None, 'managedSanUri': None, 'fabricType': 'FabricAttach'},
{'name': 'SAN-5-A', 'type': 'fc-networkV2', 'linkStabilityTime': 30, 'autoLoginRedistribution': True, 'connectionTemplateUri': None, 'managedSanUri': None, 'fabricType': 'FabricAttach'},
{'name': 'SAN-6-B', 'type': 'fc-networkV2', 'linkStabilityTime': 30, 'autoLoginRedistribution': True, 'connectionTemplateUri': None, 'managedSanUri': None, 'fabricType': 'FabricAttach'},
]
fcoe_networks = [{'name': 'fcoe-100', 'type': 'fcoe-network', 'vlanId': 100},
{'name': 'fcoe-101', 'type': 'fcoe-network', 'vlanId': 101},
]
fcoe_ranges = [{'prefix': 'fcoe-', 'suffix': 'a', 'start': 1001, 'end': 1032},
]
fcoe_ranges = [{'prefix': 'fcoe-', 'suffix': '', 'start': 1000, 'end': 1255},]
enc_groups = [{'name': 'FFF8-8FC20-8FC24',
'type': 'EnclosureGroupV200',
'enclosureTypeUri': '/rest/enclosure-types/c7000',
'stackingMode': 'Enclosure',
'interconnectBayMappingCount': 8,
'configurationScript': None,
'interconnectBayMappings':
[{'interconnectBay': 1, 'logicalInterconnectGroupUri': 'LIG:LIG-FFF8-8FC20-8FC24'},
{'interconnectBay': 2, 'logicalInterconnectGroupUri': 'LIG:LIG-FFF8-8FC20-8FC24'},
{'interconnectBay': 3, 'logicalInterconnectGroupUri': 'LIG:LIG-FFF8-8FC20-8FC24'},
{'interconnectBay': 4, 'logicalInterconnectGroupUri': 'LIG:LIG-FFF8-8FC20-8FC24'},
{'interconnectBay': 5, 'logicalInterconnectGroupUri': 'LIG:LIG-FFF8-8FC20-8FC24'},
{'interconnectBay': 6, 'logicalInterconnectGroupUri': 'LIG:LIG-FFF8-8FC20-8FC24'},
{'interconnectBay': 7, 'logicalInterconnectGroupUri': None},
{'interconnectBay': 8, 'logicalInterconnectGroupUri': None}]},
]
encs = [{'hostname': '172.18.1.11', 'username': 'dcs', 'password': 'dcs', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'force': False, 'licensingIntent': 'OneViewNoiLO'}]
uplink_sets = {'RDP': {'name': 'RDP',
'ethernetNetworkType': 'Tagged',
'networkType': 'Ethernet',
'networkUris': ['RDP'],
'nativeNetworkUri': None,
'mode': 'Auto',
'logicalPortConfigInfos': [{'bay': '1', 'port': 'X5', 'speed': 'Auto'}]},
'IC': {'name': 'IC',
'ethernetNetworkType': 'Untagged',
'networkType': 'Ethernet',
'networkUris': ['IC'],
'nativeNetworkUri': None,
'mode': 'Auto',
'logicalPortConfigInfos': [{'bay': '1', 'port': 'X4', 'speed': 'Auto'}]},
'Tunnel1': {'name': 'Tunnel1',
'ethernetNetworkType': 'Tunnel',
'networkType': 'Ethernet',
'networkUris': ['Tunnel1'],
'nativeNetworkUri': None,
'mode': 'Auto',
'lacpTimer': 'Long',
'logicalPortConfigInfos': [{'bay': '1', 'port': 'X3', 'speed': 'Auto'}]},
'Tunnel2': {'name': 'Tunnel2',
'ethernetNetworkType': 'Tunnel',
'networkType': 'Ethernet',
'networkUris': ['Tunnel2'],
'nativeNetworkUri': None,
'mode': 'Auto',
'lacpTimer': 'Long',
'logicalPortConfigInfos': [{'bay': '2', 'port': 'X3', 'speed': 'Auto'}]},
'SAN-1-A': {'name': 'SAN-1-A',
'ethernetNetworkType': 'NotApplicable',
'networkType': 'FibreChannel',
'networkUris': ['SAN-1-A'],
'nativeNetworkUri': None,
'mode': 'Auto',
'logicalPortConfigInfos': [{'bay': '1', 'port': 'X7', 'speed': 'Auto'}]},
'SAN-2-B': {'name': 'SAN-2-B',
'ethernetNetworkType': 'NotApplicable',
'networkType': 'FibreChannel',
'networkUris': ['SAN-2-B'],
'nativeNetworkUri': None,
'mode': 'Auto',
'logicalPortConfigInfos': [{'bay': '2', 'port': 'X7', 'speed': 'Auto'}]},
'SAN-3-A': {'name': 'SAN-3-A',
'ethernetNetworkType': 'NotApplicable',
'networkType': 'FibreChannel',
'networkUris': ['SAN-3-A'],
'nativeNetworkUri': None,
'mode': 'Auto',
'logicalPortConfigInfos': [{'bay': '3', 'port': '1', 'speed': 'Auto'},
{'bay': '3', 'port': '2', 'speed': 'Auto'}]},
'SAN-4-B': {'name': 'SAN-4-B',
'ethernetNetworkType': 'NotApplicable',
'networkType': 'FibreChannel',
'networkUris': ['SAN-4-B'],
'nativeNetworkUri': None,
'mode': 'Auto',
'logicalPortConfigInfos': [{'bay': '4', 'port': '1', 'speed': 'Auto'},
{'bay': '4', 'port': '2', 'speed': 'Auto'}]},
'SAN-5-A': {'name': 'SAN-5-A',
'ethernetNetworkType': 'NotApplicable',
'networkType': 'FibreChannel',
'networkUris': ['SAN-5-A'],
'nativeNetworkUri': None,
'mode': 'Auto',
'logicalPortConfigInfos': [{'bay': '5', 'port': '1', 'speed': 'Auto'},
{'bay': '5', 'port': '2', 'speed': 'Auto'}]},
'SAN-6-B': {'name': 'SAN-6-B',
'ethernetNetworkType': 'NotApplicable',
'networkType': 'FibreChannel',
'networkUris': ['SAN-6-B'],
'nativeNetworkUri': None,
'mode': 'Auto',
'logicalPortConfigInfos': [{'bay': '6', 'port': '1', 'speed': 'Auto'},
{'bay': '6', 'port': '2', 'speed': 'Auto'}]},
'BigPipe1': {'name': 'BigPipe1',
'ethernetNetworkType': 'Tagged',
'networkType': 'Ethernet',
'networkUris': make_range_list(ethernet_ranges[0]),
'nativeNetworkUri': None,
'mode': 'Auto',
'logicalPortConfigInfos': [{'bay': '1', 'port': 'X1', 'speed': 'Auto'},
{'bay': '1', 'port': 'X2', 'speed': 'Auto'}]},
'BigPipe2': {'name': 'BigPipe2',
'ethernetNetworkType': 'Tagged',
'networkType': 'Ethernet',
'networkUris': make_range_list(ethernet_ranges[1]),
'nativeNetworkUri': None,
'mode': 'Auto',
'logicalPortConfigInfos': [{'bay': '2', 'port': 'X1', 'speed': 'Auto'},
{'bay': '2', 'port': 'X2', 'speed': 'Auto'}]},
}
icmap = {"interconnectMapEntryTemplates": [{"logicalLocation": {"locationEntries": [{"relativeValue": 1, "type": "Bay"}, {"relativeValue": 1, "type": "Enclosure"}]}, "permittedInterconnectTypeUri": "HP VC FlexFabric-20/40 F8 Module", "enclosureIndex": 1},
{"logicalLocation": {"locationEntries": [{"relativeValue": 1, "type": "Bay"}, {"relativeValue": 1, "type": "Enclosure"}]}, "permittedInterconnectTypeUri": "HP VC FlexFabric-20/40 F8 Module", "enclosureIndex": 1},
{"logicalLocation": {"locationEntries": [{"relativeValue": 1, "type": "Bay"}, {"relativeValue": 1, "type": "Enclosure"}]}, "permittedInterconnectTypeUri": "HP VC 8Gb 20-Port FC Module", "enclosureIndex": 1},
{"logicalLocation": {"locationEntries": [{"relativeValue": 1, "type": "Bay"}, {"relativeValue": 1, "type": "Enclosure"}]}, "permittedInterconnectTypeUri": "HP VC 8Gb 20-Port FC Module", "enclosureIndex": 1},
{"logicalLocation": {"locationEntries": [{"relativeValue": 1, "type": "Bay"}, {"relativeValue": 1, "type": "Enclosure"}]}, "permittedInterconnectTypeUri": "HP VC 8Gb 24-Port FC Module", "enclosureIndex": 1},
{"logicalLocation": {"locationEntries": [{"relativeValue": 1, "type": "Bay"}, {"relativeValue": 1, "type": "Enclosure"}]}, "permittedInterconnectTypeUri": "HP VC 8Gb 24-Port FC Module", "enclosureIndex": 1}]
}
icmap = [{'bay': 1, 'enclosure': 1, 'type': 'HP VC FlexFabric-20/40 F8 Module', 'enclosureIndex': 1},
{'bay': 2, 'enclosure': 1, 'type': 'HP VC FlexFabric-20/40 F8 Module', 'enclosureIndex': 1},
{'bay': 3, 'enclosure': 1, 'type': 'HP VC 8Gb 20-Port FC Module', 'enclosureIndex': 1},
{'bay': 4, 'enclosure': 1, 'type': 'HP VC 8Gb 20-Port FC Module', 'enclosureIndex': 1},
{'bay': 5, 'enclosure': 1, 'type': 'HP VC 8Gb 24-Port FC Module', 'enclosureIndex': 1},
{'bay': 6, 'enclosure': 1, 'type': 'HP VC 8Gb 24-Port FC Module', 'enclosureIndex': 1},
]
# new - added enclosureIndex
icmap = [{'bay': 1, 'enclosure': 1, 'type': 'HP VC FlexFabric-20/40 F8 Module'},
{'bay': 2, 'enclosure': 1, 'type': 'HP VC FlexFabric-20/40 F8 Module'},
{'bay': 3, 'enclosure': 1, 'type': 'HP VC 8Gb 20-Port FC Module'},
{'bay': 4, 'enclosure': 1, 'type': 'HP VC 8Gb 20-Port FC Module'},
{'bay': 5, 'enclosure': 1, 'type': 'HP VC 8Gb 24-Port FC Module'},
{'bay': 6, 'enclosure': 1, 'type': 'HP VC 8Gb 24-Port FC Module'},
]
ligs = [{'name': 'LIG-FFF8-8FC20-8FC24',
'type': 'logical-interconnect-groupV3',
'enclosureType': 'C7000',
'interconnectMapTemplate': icmap,
#new
'enclosureIndexes': [1],
'interconnectBaySet': 1,
'redundancyType': 'HighlyAvailable',
'fcoeSettings': None,
#end new
'uplinkSets': [uplink_sets['RDP'].copy(),
uplink_sets['IC'].copy(),
uplink_sets['Tunnel1'].copy(),
uplink_sets['Tunnel2'].copy(),
uplink_sets['SAN-1-A'].copy(),
uplink_sets['SAN-2-B'].copy(),
uplink_sets['SAN-3-A'].copy(),
uplink_sets['SAN-4-B'].copy(),
uplink_sets['SAN-5-A'].copy(),
uplink_sets['SAN-6-B'].copy(),
uplink_sets['BigPipe1'].copy(),
uplink_sets['BigPipe2'].copy()],
'stackingMode': 'Enclosure',
'ethernetSettings': None,
'state': 'Active',
'telemetryConfiguration': None,
'snmpConfiguration': None},
]
telemetry = {'enableTelemetry': True, 'sampleInterval': 400, 'sampleCount': 20}
trapDestinations = [{'trapSeverities': ['Major'],
'enetTrapCategories': ['Other'],
'fcTrapCategories': ['Other'],
'vcmTrapCategories': ['Legacy'],
'trapFormat': 'SNMPv1',
'trapDestination': '192.168.99.99',
'communityString': 'public'}]
snmp = {'snmpAccess': ['192.168.1.0/24'],
'trapDestinations': trapDestinations}
enet = {'enableFastMacCacheFailover': False}
server_profiles = [{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 1',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay1-BL465cGen8', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_96', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 4, 'name': '4', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_96', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 5, 'name': '5', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk1', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 6, 'name': '6', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk1', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 7, 'name': '7', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_23', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 8, 'name': '8', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_23', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 9, 'name': '9', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-3-A', 'boot': {'priority': 'NotBootable'}},
{'id': 10, 'name': '10', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-4-B', 'boot': {'priority': 'NotBootable'}},
{'id': 11, 'name': '11', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-5-A', 'boot': {'priority': 'Primary', 'targets': [{'arrayWwpn': '21110002ac00364c', 'lun': '0'}]}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 12, 'name': '12', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-6-B', 'boot': {'priority': 'Secondary', 'targets': [{'arrayWwpn': '20110002ac00364c', 'lun': '0'}]}, 'mac': None, 'wwpn': '', 'wwnn': ''},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 2',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay2-BL465cGen8', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_97', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 4, 'name': '4', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_97', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 5, 'name': '5', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk2', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 6, 'name': '6', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk2', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 7, 'name': '7', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_24', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 8, 'name': '8', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_24', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 9, 'name': '9', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-3-A', 'boot': {'priority': 'NotBootable'}},
{'id': 10, 'name': '10', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-4-B', 'boot': {'priority': 'NotBootable'}},
{'id': 11, 'name': '11', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-5-A', 'boot': {'priority': 'NotBootable'}},
{'id': 12, 'name': '12', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-6-B', 'boot': {'priority': 'NotBootable'}},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 3',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay3-BL465cGen8', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_98', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 4, 'name': '4', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_98', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 5, 'name': '5', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk3', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 6, 'name': '6', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk3', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 7, 'name': '7', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_46', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 8, 'name': '8', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_46', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 9, 'name': '9', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-3-A', 'boot': {'priority': 'NotBootable'}},
{'id': 10, 'name': '10', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-4-B', 'boot': {'priority': 'NotBootable'}},
{'id': 11, 'name': '11', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-5-A', 'boot': {'priority': 'NotBootable'}},
{'id': 12, 'name': '12', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-6-B', 'boot': {'priority': 'NotBootable'}},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 4',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay4-BL465cGen8', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'FibreChannel', 'portId': 'Flb 1:1-b', 'requestedMbps': '2000', 'networkUri': 'FC:SAN-1-A', 'boot': {'priority': 'NotBootable'}},
{'id': 4, 'name': '4', 'functionType': 'FibreChannel', 'portId': 'Flb 1:2-b', 'requestedMbps': '2000', 'networkUri': 'FC:SAN-2-B', 'boot': {'priority': 'NotBootable'}},
{'id': 5, 'name': '5', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk4', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 6, 'name': '6', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk4', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 7, 'name': '7', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-d', 'requestedMbps': '2500', 'networkUri': 'ETH:net_99', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 8, 'name': '8', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-d', 'requestedMbps': '2500', 'networkUri': 'ETH:net_99', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 9, 'name': '9', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-3-A', 'boot': {'priority': 'NotBootable'}},
{'id': 10, 'name': '10', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-4-B', 'boot': {'priority': 'NotBootable'}},
{'id': 11, 'name': '11', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-5-A', 'boot': {'priority': 'NotBootable'}},
{'id': 12, 'name': '12', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-6-B', 'boot': {'priority': 'NotBootable'}},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 5',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay5-BL460cGen8', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_96', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 4, 'name': '4', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_96', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 5, 'name': '5', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk1', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 6, 'name': '6', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk1', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 7, 'name': '7', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_23', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 8, 'name': '8', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_23', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 9, 'name': '9', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-3-A', 'boot': {'priority': 'NotBootable'}},
{'id': 10, 'name': '10', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-4-B', 'boot': {'priority': 'NotBootable'}},
{'id': 11, 'name': '11', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-5-A', 'boot': {'priority': 'Primary', 'targets': [{'arrayWwpn': '21110002ac00364c', 'lun': '0'}]}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 12, 'name': '12', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-6-B', 'boot': {'priority': 'Secondary', 'targets': [{'arrayWwpn': '20110002ac00364c', 'lun': '0'}]}, 'mac': None, 'wwpn': '', 'wwnn': ''},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 6',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay6-BL460cGen8', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_97', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 4, 'name': '4', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_97', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 5, 'name': '5', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk2', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 6, 'name': '6', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk2', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 7, 'name': '7', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_24', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 8, 'name': '8', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_24', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 9, 'name': '9', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-3-A', 'boot': {'priority': 'NotBootable'}},
{'id': 10, 'name': '10', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-4-B', 'boot': {'priority': 'NotBootable'}},
{'id': 11, 'name': '11', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-5-A', 'boot': {'priority': 'NotBootable'}},
{'id': 12, 'name': '12', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-6-B', 'boot': {'priority': 'NotBootable'}},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 7',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay7-BL460cGen8', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_98', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 4, 'name': '4', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_98', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 5, 'name': '5', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk3', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 6, 'name': '6', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk3', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 7, 'name': '7', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_46', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 8, 'name': '8', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_46', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 9, 'name': '9', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-3-A', 'boot': {'priority': 'NotBootable'}},
{'id': 10, 'name': '10', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-4-B', 'boot': {'priority': 'NotBootable'}},
{'id': 11, 'name': '11', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-5-A', 'boot': {'priority': 'NotBootable'}},
{'id': 12, 'name': '12', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-6-B', 'boot': {'priority': 'NotBootable'}},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 8',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay8-BL460cGen8', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'FibreChannel', 'portId': 'Flb 1:1-b', 'requestedMbps': '2000', 'networkUri': 'FC:SAN-1-A', 'boot': {'priority': 'NotBootable'}},
{'id': 4, 'name': '4', 'functionType': 'FibreChannel', 'portId': 'Flb 1:2-b', 'requestedMbps': '2000', 'networkUri': 'FC:SAN-2-B', 'boot': {'priority': 'NotBootable'}},
{'id': 5, 'name': '5', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk4', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 6, 'name': '6', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk4', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 7, 'name': '7', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-d', 'requestedMbps': '2500', 'networkUri': 'ETH:net_99', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 8, 'name': '8', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-d', 'requestedMbps': '2500', 'networkUri': 'ETH:net_99', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 9, 'name': '9', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-3-A', 'boot': {'priority': 'NotBootable'}},
{'id': 10, 'name': '10', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-4-B', 'boot': {'priority': 'NotBootable'}},
{'id': 11, 'name': '11', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-5-A', 'boot': {'priority': 'NotBootable'}},
{'id': 12, 'name': '12', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-6-B', 'boot': {'priority': 'NotBootable'}},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 9',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay9-BL420cGen8', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_96', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 4, 'name': '4', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_96', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 5, 'name': '5', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk1', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 6, 'name': '6', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk1', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 7, 'name': '7', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_23', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 8, 'name': '8', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_23', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 9, 'name': '9', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-3-A', 'boot': {'priority': 'NotBootable'}},
{'id': 10, 'name': '10', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-4-B', 'boot': {'priority': 'NotBootable'}},
{'id': 11, 'name': '11', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-5-A', 'boot': {'priority': 'Primary', 'targets': [{'arrayWwpn': '21110002ac00364c', 'lun': '0'}]}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 12, 'name': '12', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-6-B', 'boot': {'priority': 'Secondary', 'targets': [{'arrayWwpn': '20110002ac00364c', 'lun': '0'}]}, 'mac': None, 'wwpn': '', 'wwnn': ''},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 10',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay10-BL420cGen8', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_97', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 4, 'name': '4', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_97', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 5, 'name': '5', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk2', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 6, 'name': '6', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk2', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 7, 'name': '7', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_24', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 8, 'name': '8', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_24', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 9, 'name': '9', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-3-A', 'boot': {'priority': 'NotBootable'}},
{'id': 10, 'name': '10', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-4-B', 'boot': {'priority': 'NotBootable'}},
{'id': 11, 'name': '11', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-5-A', 'boot': {'priority': 'NotBootable'}},
{'id': 12, 'name': '12', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-6-B', 'boot': {'priority': 'NotBootable'}},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 11',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay11-BL42065cGen8', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_98', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 4, 'name': '4', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-b', 'requestedMbps': '2500', 'networkUri': 'ETH:net_98', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 5, 'name': '5', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk3', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 6, 'name': '6', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk3', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 7, 'name': '7', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_46', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 8, 'name': '8', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-d', 'requestedMbps': '2500', 'networkUri': 'NS:NS_46', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 9, 'name': '9', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-3-A', 'boot': {'priority': 'NotBootable'}},
{'id': 10, 'name': '10', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-4-B', 'boot': {'priority': 'NotBootable'}},
{'id': 11, 'name': '11', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-5-A', 'boot': {'priority': 'NotBootable'}},
{'id': 12, 'name': '12', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-6-B', 'boot': {'priority': 'NotBootable'}},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 12',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay12-BL420cGen8', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'FibreChannel', 'portId': 'Flb 1:1-b', 'requestedMbps': '2000', 'networkUri': 'FC:SAN-1-A', 'boot': {'priority': 'NotBootable'}},
{'id': 4, 'name': '4', 'functionType': 'FibreChannel', 'portId': 'Flb 1:2-b', 'requestedMbps': '2000', 'networkUri': 'FC:SAN-2-B', 'boot': {'priority': 'NotBootable'}},
{'id': 5, 'name': '5', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk4', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 6, 'name': '6', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-c', 'requestedMbps': '2500', 'networkUri': 'NS:VlanTrunk4', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 7, 'name': '7', 'functionType': 'Ethernet', 'portId': 'Flb 1:1-d', 'requestedMbps': '2500', 'networkUri': 'ETH:net_99', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 8, 'name': '8', 'functionType': 'Ethernet', 'portId': 'Flb 1:2-d', 'requestedMbps': '2500', 'networkUri': 'ETH:net_99', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 9, 'name': '9', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-3-A', 'boot': {'priority': 'NotBootable'}},
{'id': 10, 'name': '10', 'functionType': 'FibreChannel', 'portId': 'Mezz 1:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-4-B', 'boot': {'priority': 'NotBootable'}},
{'id': 11, 'name': '11', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:1', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-5-A', 'boot': {'priority': 'NotBootable'}},
{'id': 12, 'name': '12', 'functionType': 'FibreChannel', 'portId': 'Mezz 2:2', 'requestedMbps': '8000', 'networkUri': 'FC:SAN-6-B', 'boot': {'priority': 'NotBootable'}},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 13',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay13-BL460cG7', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Lom 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Lom 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'Ethernet', 'portId': 'Lom 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:Tunnel1', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 4, 'name': '4', 'functionType': 'Ethernet', 'portId': 'Lom 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:Tunnel2', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 14',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay14-BL460cG7', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Lom 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Lom 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'Ethernet', 'portId': 'Lom 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:Tunnel1', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 4, 'name': '4', 'functionType': 'Ethernet', 'portId': 'Lom 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:Tunnel2', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 15',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay15-BL460cG7', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Lom 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Lom 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'Ethernet', 'portId': 'Lom 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:Tunnel1', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 4, 'name': '4', 'functionType': 'Ethernet', 'portId': 'Lom 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:Tunnel2', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
]},
{'type': 'ServerProfileV5', 'serverHardwareUri': 'Encl1, bay 16',
'serverHardwareTypeUri': '', 'enclosureUri': 'ENC:Encl1', 'enclosureGroupUri': 'EG:FFF8-8FC20-8FC24', 'serialNumberType': 'Virtual', 'macType': 'Virtual', 'wwnType': 'Virtual',
'name': 'Encl1_Bay16-BL460cG7', 'description': '', 'affinity': 'Bay',
'connections': [{'id': 1, 'name': '1', 'functionType': 'Ethernet', 'portId': 'Lom 1:1-a', 'requestedMbps': '2500', 'networkUri': 'ETH:RDP', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 2, 'name': '2', 'functionType': 'Ethernet', 'portId': 'Lom 1:2-a', 'requestedMbps': '2500', 'networkUri': 'ETH:IC', 'boot': {'priority': 'Primary'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 3, 'name': '3', 'functionType': 'Ethernet', 'portId': 'Lom 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:Tunnel1', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
{'id': 4, 'name': '4', 'functionType': 'Ethernet', 'portId': 'Lom 1:1-b', 'requestedMbps': '2500', 'networkUri': 'ETH:Tunnel2', 'boot': {'priority': 'NotBootable'}, 'mac': None, 'wwpn': '', 'wwnn': ''},
]},
]
true = True
false = False
rc = {'200': 200, '201': 201, '202': 202, '400': 400, '401': 401, '403': 403, '412': 412, '500': 500}
########################################
default_variables = {'admin_credentials': admin_credentials,
'appliance': appliance,
'encs': encs,
'enc_groups': enc_groups,
'ethernet_networks': ethernet_networks,
'ethernet_ranges': ethernet_ranges,
'fc_networks': fc_networks,
'fcoe_networks': fcoe_networks,
'fcoe_ranges': fcoe_ranges,
'licenses': licenses,
'ligs': ligs,
'network_sets': network_sets,
'network_set_ranges': network_set_ranges,
'ranges': ranges,
'rc': rc,
'server_profiles': server_profiles,
'uplink_sets': uplink_sets,
'users': users,
'timeandlocale': timeandlocale,
'true': true, 'false': false,
'vcenter': vcenter}
def get_variables():
variables = default_variables
return variables
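# Usage sketch (assumption: this module is consumed as a Robot Framework-style
# variable file through its get_variables() hook; the block below is only an
# illustrative demo and is not part of the original test data).
if __name__ == '__main__':
    demo_variables = get_variables()
    # Print each server profile name with the number of connections it defines.
    for profile in demo_variables['server_profiles']:
        print(profile['name'], len(profile['connections']))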
|
[
"[email protected]"
] | |
2fcb25aed79d349b7d2fd32d4022304eb3510056
|
26edc9df2bc9df95cd94e6cea78fa2ffb6c2ac44
|
/Pmw-2.0.0/Pmw/Pmw_2_0_0/demos/ConfigClass.py
|
2602ed7980043f9a429f626772cb4fc6a8601319
|
[] |
no_license
|
khw7096/opener
|
55c3c43ddfa62c9f2bdf23cccde1b9abbcb7ece9
|
4981e1c53e7fe55f9c4ba7c5f57ee9f8634f389d
|
refs/heads/master
| 2021-06-09T10:57:53.101948 | 2017-06-06T09:18:26 | 2017-06-06T09:18:26 | 35,756,994 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,372 |
py
|
title = 'Component python class configuration demonstration'
# Import Pmw from this directory tree.
import sys
sys.path[:0] = ['../../..']
import tkinter
import Pmw
class MyButton(tkinter.Button):
# This is just an ordinary button with special colors.
def __init__(self, master=None, cnf={}, **kw):
self.__toggle = 0
kw['background'] = 'green'
kw['activebackground'] = 'red'
tkinter.Button.__init__(*(self, master, cnf), **kw)
class Demo:
def __init__(self, parent):
# Create a title label:
label = tkinter.Label(parent,
text = 'EntryFields with label components of specified type:')
label.pack(fill='x', expand=1, padx=10, pady=5)
# Create and pack some EntryFields.
entries = []
entry = Pmw.EntryField(parent,
labelpos = 'w',
label_text = 'Label'
)
entry.pack(fill='x', expand=1, padx=10, pady=5)
entries.append(entry)
entry = Pmw.EntryField(parent,
labelpos = 'w',
label_pyclass = tkinter.Button,
label_text = 'Button'
)
entry.pack(fill='x', expand=1, padx=10, pady=5)
entries.append(entry)
entry = Pmw.EntryField(parent,
labelpos = 'w',
label_pyclass = MyButton,
label_text = 'Special button'
)
entry.pack(fill='x', expand=1, padx=10, pady=5)
entries.append(entry)
Pmw.alignlabels(entries)
# Create and pack a ButtonBox.
buttonBox = Pmw.ButtonBox(parent,
labelpos = 'nw',
label_text = 'ButtonBox:')
buttonBox.pack(fill = 'both', expand = 1, padx=10, pady=5)
# Add some buttons to the ButtonBox.
buttonBox.add('with a')
buttonBox.add('special', pyclass = MyButton)
buttonBox.add('button')
######################################################################
# Create demo in root window for testing.
if __name__ == '__main__':
root = tkinter.Tk()
Pmw.initialise(root)
root.title(title)
exitButton = tkinter.Button(root, text = 'Exit', command = root.destroy)
exitButton.pack(side = 'bottom')
widget = Demo(root)
root.mainloop()
|
[
"[email protected]"
] | |
0fad5ee29a5268ce97c8cf84b91a05bda392f678
|
63afbceb127443806e7ee989be74e54e04152e88
|
/app01/migrations/0009_auto_20171205_1005.py
|
73ded2547d58a26c5a03a167482d838ba69312d4
|
[] |
no_license
|
Jamin2018/Django_zhihu
|
002be8c2ff457f44798b881aaebb98f9611c165e
|
1f5adc08cfd66eb2cf1049cf9e09ad0bb66d5509
|
refs/heads/master
| 2021-08-23T13:52:58.887369 | 2017-12-05T04:26:42 | 2017-12-05T04:26:42 | 113,132,441 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,007 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-12-05 02:05
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('app01', '0008_auto_20171205_1003'),
]
operations = [
migrations.AlterField(
model_name='comment',
name='body',
field=models.TextField(default='1'),
preserve_default=False,
),
migrations.AlterField(
model_name='comment',
name='p',
field=models.ForeignKey(default='1', on_delete=django.db.models.deletion.CASCADE, to='app01.Post'),
preserve_default=False,
),
migrations.AlterField(
model_name='comment',
name='u',
field=models.ForeignKey(default='1', on_delete=django.db.models.deletion.CASCADE, to='app01.User'),
preserve_default=False,
),
]
|
[
"[email protected]"
] | |
a9f0f51216566c7f222ea78b7291aa59324f8376
|
6b9865a42a86c538cff987b3079da5b644137b37
|
/0x1B-island_perimeter/0-island_perimeter.py
|
340b08d724b063ab8518cf088b66084e17c4dce4
|
[] |
no_license
|
zahraaassaad/holbertonschool-python
|
099072b0350f594adf30b47eb18fcdce0375546d
|
83d7d185f14f44ea4481538ab1e04463a9b62739
|
refs/heads/master
| 2023-01-09T03:34:48.082352 | 2020-11-20T17:54:05 | 2020-11-20T17:54:05 | 291,666,802 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 551 |
py
|
#!/usr/bin/python3
"""
Module for to_graph.
"""
def island_perimeter(grid):
""" Returns the perimeter of the island described in grid."""
num = 0
neighbor = 0
for i in range(len(grid)):
for j in range(len(grid[0])):
if grid[i][j] == 1:
num = num + 1
if i > 0 and grid[i-1][j] == 1:
neighbor += 1
if j > 0 and grid[i][j-1] == 1:
neighbor += 1
return num * 4 - neighbor * 2
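# Quick sanity check with a hypothetical grid (illustrative, not part of the
# original exercise): a solid 2x2 block of land has perimeter 8.
if __name__ == "__main__":
    sample_grid = [[0, 1, 1, 0],
                   [0, 1, 1, 0],
                   [0, 0, 0, 0]]
    print(island_perimeter(sample_grid))  # expected: 8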
|
[
"[email protected]"
] | |
0616483d02c5aefc6595b766907293a1a338733f
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_231/ch131_2020_04_01_18_01_14_493339.py
|
eb145819744ce0d279dd15de473d87ced7a5bfef
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 514 |
py
|
import random
s=random.randint(2,20)
n1=int(input('Digite um numero:'))
n2=int(input('Digite um numero maior ou igual:'))
if s<n1:
    print('Soma menor')
elif s>n2:
    print('Soma maior')
else:
    print('Soma no meio')
d=10
print('Voce tem {} dinheiros'.format(d))
q=int(input('Quantos chutes voce quer comprar?'))
d=d-q
c=int(input('Qual é o seu chute?'))
while c!=s and q>0:
q= q-1
c= int(input('Errou tente novamente:'))
if s==c:
d= d+ 5*d
print('Você terminou o jogo com {} dinheiros'.format(d))
|
[
"[email protected]"
] | |
1eceb6d7ed6e20c50f6cd3cfa5ed3cb1af5c6b5f
|
5508368df9df1036755aeaa8574bdadcc25359f8
|
/AmadoWHApp/migrations/0020_auto_20180811_1629.py
|
1744319d9177284bc372d66e2ea8518549de2da6
|
[] |
no_license
|
HanSol1994/Amado
|
4627a6165009733059e8e87f545244f69d492b91
|
9fbc37250b9974bbf3a3c3a2571a748a300b2d29
|
refs/heads/master
| 2022-01-23T23:59:30.666584 | 2019-07-23T08:12:19 | 2019-07-23T08:12:19 | 198,373,740 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,745 |
py
|
# Generated by Django 2.0.5 on 2018-08-11 11:59
from django.db import migrations, models
import django_jalali.db.models
class Migration(migrations.Migration):
dependencies = [
('AmadoWHApp', '0019_auto_20180806_1249'),
]
operations = [
migrations.DeleteModel(
name='ShowHelpTicket',
),
migrations.AlterField(
model_name='amadofood',
name='price_change_date',
field=django_jalali.db.models.jDateField(default='1397-05-20', verbose_name='تاریخ تغییر قیمت'),
),
migrations.AlterField(
model_name='branchwarehouse',
name='date',
field=django_jalali.db.models.jDateField(default='1397-05-20', verbose_name='تاریخ موجودی'),
),
migrations.AlterField(
model_name='foodsale',
name='date',
field=django_jalali.db.models.jDateField(default='1397-05-20', verbose_name='تاریخ فروش'),
),
migrations.AlterField(
model_name='message',
name='message_date',
field=models.CharField(blank=True, default='1397-05-20 ساعت 16:29', max_length=32, null=True, verbose_name='تاریخ ارسال پیام'),
),
migrations.AlterField(
model_name='price',
name='cost',
field=models.IntegerField(verbose_name='قیمت(ریال)'),
),
migrations.AlterField(
model_name='price',
name='date',
field=django_jalali.db.models.jDateField(default='1397-05-20', verbose_name='تاریخ قیمت'),
),
migrations.AlterField(
model_name='product',
name='price_change_date',
field=django_jalali.db.models.jDateField(default='1397-05-20', verbose_name='تاریخ تغییر قیمت'),
),
migrations.AlterField(
model_name='product',
name='product_actual_price_1',
field=models.IntegerField(blank=True, null=True, verbose_name='قیمت تمام شده نوع ۱(ریال)'),
),
migrations.AlterField(
model_name='product',
name='product_actual_price_2',
field=models.IntegerField(blank=True, null=True, verbose_name='قیمت تمام شده نوع ۲(ریال)'),
),
migrations.AlterField(
model_name='request',
name='request_date',
field=django_jalali.db.models.jDateField(blank=True, default='1397-05-20', null=True, verbose_name='تاریخ درخواست'),
),
migrations.AlterField(
model_name='request',
name='request_time',
field=models.TimeField(blank=True, default='16:29:16', null=True, verbose_name='ساعت درخواست'),
),
migrations.AlterField(
model_name='requestproduct',
name='request_date',
field=django_jalali.db.models.jDateField(blank=True, default='1397-05-20', null=True, verbose_name='تاریخ ثبت'),
),
migrations.AlterField(
model_name='requestproduct',
name='request_time',
field=models.TimeField(blank=True, default='16:29:16', null=True, verbose_name='ساعت ثبت'),
),
migrations.AlterField(
model_name='requestproductvariance',
name='request_date',
field=django_jalali.db.models.jDateField(blank=True, default='1397-05-20', null=True, verbose_name='تاریخ ثبت مغایرت'),
),
migrations.AlterField(
model_name='requestproductvariance',
name='request_time',
field=models.TimeField(blank=True, default='16:29:16', null=True, verbose_name='ساعت ثبت مغایرت'),
),
migrations.AlterField(
model_name='shopdetail',
name='last_price',
field=models.IntegerField(verbose_name='آخرین قیمت(ریال)'),
),
migrations.AlterField(
model_name='shopdetail',
name='last_price_date',
field=django_jalali.db.models.jDateField(default='1397-05-20', verbose_name='تاریخ آخرین قیمت'),
),
migrations.AlterField(
model_name='shopdetail',
name='rc_date',
field=django_jalali.db.models.jDateField(default='1397-05-20', verbose_name='تاریخ دریافت'),
),
migrations.AlterField(
model_name='shoprequest',
name='from_date',
field=django_jalali.db.models.jDateField(default='1397-05-20', verbose_name='تاریخ خرید'),
),
]
|
[
"[email protected]"
] | |
23e930e42475382cf34d1a2f27f9c1fea496abce
|
7ab41799fd38489c93282f1beb3b20e7ef8ff165
|
/python/141.py
|
513b3798a89e31e94eb8c4208e90df739cbc0f2d
|
[] |
no_license
|
scturtle/leetcode-sol
|
86c4095df6b31a9fcad683f2d63669ce1691633c
|
e1a9ce5d9b8fe4bd11e50bd1d5ba1933de845db7
|
refs/heads/master
| 2020-04-23T00:01:37.016267 | 2015-11-21T04:15:27 | 2015-11-21T04:15:27 | 32,385,573 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 439 |
py
|
class Solution(object):
def hasCycle(self, head):
"""
:type head: ListNode
:rtype: bool
"""
slow = fast = head
while slow and fast:
fast = fast.next
if fast is slow:
return True
if fast:
fast = fast.next
if fast is slow:
return True
slow = slow.next
return False
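# Minimal self-check (the ListNode class is assumed here; the judge normally
# supplies it and it is not part of the original solution file).
if __name__ == '__main__':
    class ListNode(object):
        def __init__(self, x):
            self.val = x
            self.next = None
    a, b, c = ListNode(1), ListNode(2), ListNode(3)
    a.next, b.next, c.next = b, c, a  # 1 -> 2 -> 3 -> back to 1
    print(Solution().hasCycle(a))  # True
    c.next = None
    print(Solution().hasCycle(a))  # False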
|
[
"[email protected]"
] | |
2d666e7337eebaf1bf48c46e9692e00a29ebce20
|
7c7c22e1b9c2406cb417b3ca394878765d2cb9de
|
/app/models.py
|
c4514e30300feec142c3b1d73c508db24f84b49c
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
Carolwanzuu/m-blog
|
215312a91702c771cf56041623314607319f474f
|
baa4df0a5d43f216f82829bb5eeb8669b6317c4b
|
refs/heads/master
| 2023-06-05T09:58:53.344224 | 2021-06-30T08:40:08 | 2021-06-30T08:40:08 | 378,681,471 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,990 |
py
|
from . import db
from datetime import datetime
from werkzeug.security import generate_password_hash,check_password_hash
from flask_login import UserMixin
from . import login_manager
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
class Quote:
def __init__(self,id,quote,author):
self.id =id
self.quote = quote
self.author = author
# class User:
class User(UserMixin,db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key = True)
username = db.Column(db.String(255))
email = db.Column(db.String(255))
bio = db.Column(db.String(255))
profile_pic_path = db.Column(db.String(255))
pass_secure = db.Column(db.String(255))
blog = db.relationship('Blog', backref = 'users', lazy = 'dynamic')
comment = db.relationship('Comment', backref = 'users', lazy = 'dynamic')
def save_user(self):
db.session.add(self)
db.session.commit()
def delete_user(self):
db.session.delete(self)
db.session.commit()
@property
def password(self):
raise AttributeError('You cannot read the password attribute')
@password.setter
def password(self, password):
self.pass_secure = generate_password_hash(password)
def verify_password(self,password):
return check_password_hash(self.pass_secure,password)
def __repr__(self):
return f'User {self.username}'
class Blog(db.Model):
    __tablename__ = 'blog'
id = db.Column(db.Integer,primary_key = True)
title = db.Column(db.String(255))
blog_content = db.Column(db.String(255))
author = db.Column(db.String(255))
posted = db.Column(db.DateTime, default = datetime.utcnow)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
comments = db.relationship('Comment',backref = 'blog',lazy = 'dynamic')
def save_blog(self):
db.session.add(self)
db.session.commit()
def delete_blog(self):
db.session.delete(self)
db.session.commit()
@classmethod
def get_blog(cls,user_id):
blogs = Blog.query.filter_by(user_id=user_id).all()
        return blogs
def __repr__(self):
        return f'Blog {self.title}'
class Comment(db.Model):
__tablename__='comments'
id=db.Column(db.Integer,primary_key=True)
comment=db.Column(db.String)
posted=db.Column(db.DateTime,default=datetime.utcnow)
user_id=db.Column(db.Integer,db.ForeignKey("users.id"))
blog_id=db.Column(db.Integer,db.ForeignKey('blog.id'))
def save_comment(self):
db.session.add(self)
db.session.commit()
def delete_comment(self):
db.session.delete(self)
db.session.commit()
def __repr__(self):
        return f'Comment {self.comment}'
@classmethod
def get_comments(cls,id):
comment=Comment.query.filter_by(blog_id=id).all()
return comment
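# Minimal usage sketch (assumption: executed inside an application context with
# the database initialised; illustrative only, not part of the original module):
#     user = User(username='demo', email='[email protected]')
#     user.password = 'secret'          # hashed by the password setter above
#     user.verify_password('secret')    # True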
|
[
"[email protected]"
] | |
1e78f3e98a24c17a7a858bc9bbbf0f52e68fae73
|
1beb347ae7119e033f4180a82ce6113d37a32a9c
|
/matrix2.py
|
f047cc758f5b6e079baa65707e88c8a4927c72a8
|
[] |
no_license
|
Jamshid93/2DLists
|
b1f6dea889fcf48bad9929cd78c4ccf52d40f192
|
8e1f15307fb18c4d95ba2f14bae4775d15acdf02
|
refs/heads/master
| 2022-01-28T23:26:59.174770 | 2019-07-20T07:58:42 | 2019-07-20T07:58:42 | 197,885,153 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 70 |
py
|
matrix = [
[1,2,3],
[4,5,6],
[7,8,9]
]
print(matrix[1][1])
|
[
"[email protected]"
] | |
846264f0a27ea09c3be94e1151a676b918da4b47
|
d561fab22864cec1301393d38d627726671db0b2
|
/python/helpers/typeshed/stdlib/3/linecache.pyi
|
a77de887c7b3029234ce84f5159ce0aee0a01eb9
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
Vedenin/intellij-community
|
724dcd8b3e7c026936eed895cf964bb80574689a
|
74a89fa7083dedc6455a16e10cf779d191d79633
|
refs/heads/master
| 2021-01-25T00:47:43.514138 | 2017-03-27T15:48:36 | 2017-03-27T15:54:02 | 86,361,176 | 1 | 1 | null | 2017-03-27T16:54:23 | 2017-03-27T16:54:23 | null |
UTF-8
|
Python
| false | false | 200 |
pyi
|
from typing import Any
def getline(filename: str, lineno: int, module_globals: Any=...) -> str: pass
def clearcache() -> None: pass
def getlines(filename: str, module_globals: Any=...) -> None: pass
|
[
"[email protected]"
] | |
dd67cc4be7876eb5b448ca6cb7d1e9b1263d0eb0
|
b4e326e8cf257f3920767a258a0740b9c121d0cd
|
/general-practice/28_08_2019/p1.py
|
7280f4b0c5ff41b04c05fe9b85d3dfd595dfac55
|
[] |
no_license
|
luthraG/ds-algo-war
|
6646e7539dc20d1acd631edb2ed0a83384e4ea7e
|
05a0b1c6abe47eb5a403b0f61e315e6609bfa466
|
refs/heads/master
| 2020-07-03T23:39:21.849008 | 2019-09-18T09:43:57 | 2019-09-18T09:43:57 | 202,087,293 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 708 |
py
|
from timeit import default_timer as timer
if __name__ == '__main__':
test_cases = int(input('Enter test cases :: '))
for t in range(test_cases):
number = int(input('Enter number :: '))
start = timer()
# since we need multiples below
number -= 1
mux3 = number // 3
mux5 = number // 5
mux15 = number // 15
mux3 = 3 * ((mux3 * (mux3 + 1)) // 2)
mux5 = 5 * ((mux5 * (mux5 + 1)) // 2)
mux15 = 15 * ((mux15 * (mux15 + 1)) // 2)
sum = mux3 + mux5 - mux15
print('Sum of multiples of 3 and 5 below {} is {}'.format(number + 1, sum))
end = timer()
print('Time taken is {}'.format(end - start))
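        # Optional brute-force cross-check of the closed-form sums above
        # (illustrative sketch, not in the original script; fine for small inputs).
        brute = 0
        for k in range(number + 1):
            if k % 3 == 0 or k % 5 == 0:
                brute += k
        assert brute == sum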
|
[
"[email protected]"
] | |
a33b4f14f7a159a87a0c7bae1d44ca4f5cfbdda2
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/verbs/_babble.py
|
5ff3bdd5eb3760e6b698e33c67e8e28434a76e73
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 432 |
py
|
#calss header
class _BABBLE():
def __init__(self,):
self.name = "BABBLE"
self.definitions = [u'to talk or say something in a quick, confused, excited, or silly way: ', u'(of a stream) to make the low, continuous noise of water flowing over stones: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'verbs'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
|
[
"[email protected]"
] | |
1645d1441b6fb91bfd157539cff28ac587c5682f
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_386/ch140_2020_04_01_19_54_27_737379.py
|
9c722141f7b9ff19386a9a12b1ec1d2082a859de
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 323 |
py
|
notas=[1, 2, 4.2, 5.5, 8, 9]
def faixa_notas(notas):
    # count how many grades fall below 5, from 5 up to 7, and above 7
    nota_b = 0
    nota_m = 0
    nota_a = 0
    for i in notas:
        if i < 5:
            nota_b += 1
        elif i <= 7:
            nota_m += 1
        else:
            nota_a += 1
    return [nota_b, nota_m, nota_a]
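# Illustrative check with the sample list above: three grades below 5, one
# from 5 up to 7, and two above 7.
print(faixa_notas(notas))  # [3, 1, 2]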
|
[
"[email protected]"
] | |
64c83edfd993d2f54b775476fe706d0ed2d40465
|
57f5cdf321d4fc39217b2c3dcd5ce3750330d618
|
/pratica/listas/max_min_for.py
|
ee94f56573628e24ff949b26fdd1383cff11b48b
|
[
"MIT"
] |
permissive
|
chicolucio-python-learning/entrevistas-tecnicas
|
ba9575f680cb663d340840e71654432904f6b5af
|
1efec34e18fe0f726a1d3c390e2c0c8b07405360
|
refs/heads/main
| 2022-12-26T22:53:10.364365 | 2020-10-07T17:42:56 | 2020-10-07T17:42:56 | 302,114,910 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 926 |
py
|
import random
import pytest
def max_min(lst):
"""
Calculate the maximum and minimum of a list lst
Parameters
----------
lst : list
Returns
-------
tuple
(max, min)
"""
if not lst:
raise ValueError('Empty list')
max_value = min_value = lst[0]
for value in lst:
if value > max_value:
max_value = value
if value < min_value:
min_value = value
return max_value, min_value
def test_error():
lst = []
with pytest.raises(ValueError):
max_min(lst)
def test_single_element_list():
lst = [1]
assert max_min(lst) == (1, 1)
def test_ordered_list():
lst = list(range(0, 11))
assert max_min(lst) == (10, 0)
def test_random_list():
lst = list(range(100))
random.seed(42)
random.shuffle(lst)
assert lst[0] == 42
assert lst[-1] == 81
assert max_min(lst) == (99, 0)
|
[
"[email protected]"
] | |
cb13d4be78e90d678affb45cd65d51c3dff6b4e0
|
2ef5b78a1a750ee33d86f36bba176796163e3933
|
/demo23/test9.py
|
23576f92e307bfc134a62e86f24d243d61c4720d
|
[] |
no_license
|
LIZEJU/flask-demo
|
08f8419757dc4902239b89b3df9ea71ce918ad26
|
6ae201e3cc078b7a3fd18fb6d114b0b83f1b4b41
|
refs/heads/master
| 2020-09-27T05:58:09.742198 | 2020-01-30T11:04:41 | 2020-01-30T11:04:41 | 226,445,858 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 544 |
py
|
import matplotlib.pyplot as plt
import numpy as np
x = [np.random.randint(0,20) for i in range(10)]
print(x)
y = [np.random.randint(0,20) for i in range(10)]
# x and y are the ordered vertex coordinates of the closed region; the color
# argument sets the fill colour of that region
plt.fill(x,y,color='cornflowerblue')
# adjust the axis limits so the polygon sits nicely inside the view
plt.xlim(-1,16)
plt.ylim(-1,16)
# set the axis ticks
plt.xticks(np.arange(0,16,5))
plt.yticks(np.arange(0,16,5))
# save the generated figure
plt.savefig('8.png')
# show the plot
plt.show()
|
[
"[email protected]"
] | |
5d8086c8c19f7b022f0b49451639e6638afc9c7c
|
050fc5ca698dfd7612dee42aa980fc7b5eee40a2
|
/tests/plugin/data/sw_loguru/services/consumer.py
|
45e145e7877e9f8012feeac26a6b6b3138fdd3f0
|
[
"Apache-2.0"
] |
permissive
|
apache/skywalking-python
|
8ac6ce06630c519f9984a45e74c1fcc88cf5b9d6
|
1a360228c63cd246dd4c5dd8e1f09bdd5556ad7d
|
refs/heads/master
| 2023-09-05T02:45:56.225937 | 2023-08-28T22:19:24 | 2023-08-28T22:19:24 | 261,456,329 | 178 | 122 |
Apache-2.0
| 2023-08-28T22:19:26 | 2020-05-05T12:13:49 |
Python
|
UTF-8
|
Python
| false | false | 1,109 |
py
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import requests
if __name__ == '__main__':
from fastapi import FastAPI
import uvicorn
app = FastAPI()
@app.get('/users')
async def application():
res = requests.get('http://provider:9091/users', timeout=5)
return {'http': res.json()}
uvicorn.run(app, host='0.0.0.0', port=9090)
|
[
"[email protected]"
] | |
3a31aaa867eee42a88b2012888b4e89517880fcb
|
1fe0b680ce53bb3bb9078356ea2b25e572d9cfdc
|
/venv/lib/python2.7/site-packages/ansible/modules/network/cloudengine/ce_ip_interface.py
|
ccf7d493060dac208aa09b52941e8a89961d495e
|
[
"MIT"
] |
permissive
|
otus-devops-2019-02/devopscourses_infra
|
1929c4a9eace3fdb0eb118bf216f3385fc0cdb1c
|
e42e5deafce395af869084ede245fc6cff6d0b2c
|
refs/heads/master
| 2020-04-29T02:41:49.985889 | 2019-05-21T06:35:19 | 2019-05-21T06:35:19 | 175,780,457 | 0 | 1 |
MIT
| 2019-05-21T06:35:20 | 2019-03-15T08:35:54 |
HCL
|
UTF-8
|
Python
| false | false | 24,036 |
py
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_ip_interface
version_added: "2.4"
short_description: Manages L3 attributes for IPv4 and IPv6 interfaces on HUAWEI CloudEngine switches.
description:
- Manages Layer 3 attributes for IPv4 and IPv6 interfaces on HUAWEI CloudEngine switches.
author: QijunPan (@QijunPan)
notes:
- Interface must already be a L3 port when using this module.
- Logical interfaces (loopback, vlanif) must be created first.
- C(mask) must be inserted in decimal format (i.e. 24) for
both IPv6 and IPv4.
- A single interface can have multiple IPv6 configured.
options:
interface:
description:
- Full name of interface, i.e. 40GE1/0/22, vlanif10.
required: true
addr:
description:
- IPv4 or IPv6 Address.
mask:
description:
- Subnet mask for IPv4 or IPv6 Address in decimal format.
version:
description:
- IP address version.
default: v4
choices: ['v4','v6']
ipv4_type:
description:
- Specifies an address type.
The value is an enumerated type.
main, primary IP address.
sub, secondary IP address.
default: main
choices: ['main','sub']
state:
description:
- Specify desired state of the resource.
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- name: ip_interface module test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: Ensure ipv4 address is configured on 10GE1/0/22
ce_ip_interface:
interface: 10GE1/0/22
version: v4
state: present
addr: 20.20.20.20
mask: 24
provider: '{{ cli }}'
- name: Ensure ipv4 secondary address is configured on 10GE1/0/22
ce_ip_interface:
interface: 10GE1/0/22
version: v4
state: present
addr: 30.30.30.30
mask: 24
ipv4_type: sub
provider: '{{ cli }}'
- name: Ensure ipv6 is enabled on 10GE1/0/22
ce_ip_interface:
interface: 10GE1/0/22
version: v6
state: present
provider: '{{ cli }}'
- name: Ensure ipv6 address is configured on 10GE1/0/22
ce_ip_interface:
interface: 10GE1/0/22
version: v6
state: present
addr: 2001::db8:800:200c:cccb
mask: 64
provider: '{{ cli }}'
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"addr": "20.20.20.20", "interface": "10GE1/0/22", "mask": "24"}
existing:
description: k/v pairs of existing IP attributes on the interface
returned: always
type: dict
sample: {"ipv4": [{"ifIpAddr": "11.11.11.11", "subnetMask": "255.255.0.0", "addrType": "main"}],
"interface": "10GE1/0/22"}
end_state:
description: k/v pairs of IP attributes after module execution
returned: always
type: dict
sample: {"ipv4": [{"ifIpAddr": "20.20.20.20", "subnetMask": "255.255.255.0", "addrType": "main"}],
"interface": "10GE1/0/22"}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["interface 10GE1/0/22", "ip address 20.20.20.20 24"]
changed:
description: check to see if a change was made on the device
returned: always
type: bool
sample: true
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_nc_config, set_nc_config, ce_argument_spec
CE_NC_GET_INTF = """
<filter type="subtree">
<ifm xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<interfaces>
<interface>
<ifName>%s</ifName>
<isL2SwitchPort></isL2SwitchPort>
<ifmAm4>
</ifmAm4>
<ifmAm6>
</ifmAm6>
</interface>
</interfaces>
</ifm>
</filter>
"""
CE_NC_ADD_IPV4 = """
<config>
<ifm xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<interfaces>
<interface>
<ifName>%s</ifName>
<ifmAm4>
<am4CfgAddrs>
<am4CfgAddr operation="merge">
<ifIpAddr>%s</ifIpAddr>
<subnetMask>%s</subnetMask>
<addrType>%s</addrType>
</am4CfgAddr>
</am4CfgAddrs>
</ifmAm4>
</interface>
</interfaces>
</ifm>
</config>
"""
CE_NC_MERGE_IPV4 = """
<config>
<ifm xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<interfaces>
<interface>
<ifName>%s</ifName>
<ifmAm4>
<am4CfgAddrs>
<am4CfgAddr operation="delete">
<ifIpAddr>%s</ifIpAddr>
<subnetMask>%s</subnetMask>
<addrType>main</addrType>
</am4CfgAddr>
<am4CfgAddr operation="merge">
<ifIpAddr>%s</ifIpAddr>
<subnetMask>%s</subnetMask>
<addrType>main</addrType>
</am4CfgAddr>
</am4CfgAddrs>
</ifmAm4>
</interface>
</interfaces>
</ifm>
</config>
"""
CE_NC_DEL_IPV4 = """
<config>
<ifm xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<interfaces>
<interface>
<ifName>%s</ifName>
<ifmAm4>
<am4CfgAddrs>
<am4CfgAddr operation="delete">
<ifIpAddr>%s</ifIpAddr>
<subnetMask>%s</subnetMask>
<addrType>%s</addrType>
</am4CfgAddr>
</am4CfgAddrs>
</ifmAm4>
</interface>
</interfaces>
</ifm>
</config>
"""
CE_NC_ADD_IPV6 = """
<config>
<ifm xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<interfaces>
<interface>
<ifName>%s</ifName>
<ifmAm6>
<am6CfgAddrs>
<am6CfgAddr operation="merge">
<ifIp6Addr>%s</ifIp6Addr>
<addrPrefixLen>%s</addrPrefixLen>
<addrType6>global</addrType6>
</am6CfgAddr>
</am6CfgAddrs>
</ifmAm6>
</interface>
</interfaces>
</ifm>
</config>
"""
CE_NC_DEL_IPV6 = """
<config>
<ifm xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<interfaces>
<interface>
<ifName>%s</ifName>
<ifmAm6>
<am6CfgAddrs>
<am6CfgAddr operation="delete">
<ifIp6Addr>%s</ifIp6Addr>
<addrPrefixLen>%s</addrPrefixLen>
<addrType6>global</addrType6>
</am6CfgAddr>
</am6CfgAddrs>
</ifmAm6>
</interface>
</interfaces>
</ifm>
</config>
"""
CE_NC_MERGE_IPV6_ENABLE = """
<config>
<ifm xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<interfaces>
<interface>
<ifName>%s</ifName>
<ifmAm6 operation="merge">
<enableFlag>%s</enableFlag>
</ifmAm6>
</interface>
</interfaces>
</ifm>
</config>
"""
def get_interface_type(interface):
"""Gets the type of interface, such as 10GE, ETH-TRUNK, VLANIF..."""
if interface is None:
return None
iftype = None
if interface.upper().startswith('GE'):
iftype = 'ge'
elif interface.upper().startswith('10GE'):
iftype = '10ge'
elif interface.upper().startswith('25GE'):
iftype = '25ge'
elif interface.upper().startswith('4X10GE'):
iftype = '4x10ge'
elif interface.upper().startswith('40GE'):
iftype = '40ge'
elif interface.upper().startswith('100GE'):
iftype = '100ge'
elif interface.upper().startswith('VLANIF'):
iftype = 'vlanif'
elif interface.upper().startswith('LOOPBACK'):
iftype = 'loopback'
elif interface.upper().startswith('METH'):
iftype = 'meth'
elif interface.upper().startswith('ETH-TRUNK'):
iftype = 'eth-trunk'
elif interface.upper().startswith('VBDIF'):
iftype = 'vbdif'
elif interface.upper().startswith('NVE'):
iftype = 'nve'
elif interface.upper().startswith('TUNNEL'):
iftype = 'tunnel'
elif interface.upper().startswith('ETHERNET'):
iftype = 'ethernet'
elif interface.upper().startswith('FCOE-PORT'):
iftype = 'fcoe-port'
elif interface.upper().startswith('FABRIC-PORT'):
iftype = 'fabric-port'
elif interface.upper().startswith('STACK-PORT'):
iftype = 'stack-port'
elif interface.upper().startswith('NULL'):
iftype = 'null'
else:
return None
return iftype.lower()
def is_valid_v4addr(addr):
"""check is ipv4 addr is valid"""
if not addr:
return False
if addr.find('.') != -1:
addr_list = addr.split('.')
if len(addr_list) != 4:
return False
for each_num in addr_list:
if not each_num.isdigit():
return False
if int(each_num) > 255:
return False
return True
return False
class IpInterface(object):
"""
Manages L3 attributes for IPv4 and IPv6 interfaces.
"""
def __init__(self, argument_spec):
self.spec = argument_spec
self.module = None
self.__init_module__()
# module input info]
self.interface = self.module.params['interface']
self.addr = self.module.params['addr']
self.mask = self.module.params['mask']
self.version = self.module.params['version']
self.ipv4_type = self.module.params['ipv4_type']
self.state = self.module.params['state']
# state
self.changed = False
self.updates_cmd = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.end_state = dict()
# interface info
self.intf_info = dict()
self.intf_type = None
def __init_module__(self):
""" init module """
required_if = [("version", "v4", ("addr", "mask"))]
required_together = [("addr", "mask")]
self.module = AnsibleModule(
argument_spec=self.spec,
required_if=required_if,
required_together=required_together,
supports_check_mode=True
)
def netconf_set_config(self, xml_str, xml_name):
""" netconf set config """
rcv_xml = set_nc_config(self.module, xml_str)
if "<ok/>" not in rcv_xml:
self.module.fail_json(msg='Error: %s failed.' % xml_name)
def get_interface_dict(self, ifname):
""" get one interface attributes dict."""
intf_info = dict()
conf_str = CE_NC_GET_INTF % ifname
rcv_xml = get_nc_config(self.module, conf_str)
if "<data/>" in rcv_xml:
return intf_info
# get interface base info
intf = re.findall(
r'.*<ifName>(.*)</ifName>.*\s*'
r'<isL2SwitchPort>(.*)</isL2SwitchPort>.*', rcv_xml)
if intf:
intf_info = dict(ifName=intf[0][0],
isL2SwitchPort=intf[0][1])
# get interface ipv4 address info
ipv4_info = re.findall(
r'.*<ifIpAddr>(.*)</ifIpAddr>.*\s*<subnetMask>(.*)'
r'</subnetMask>.*\s*<addrType>(.*)</addrType>.*', rcv_xml)
intf_info["am4CfgAddr"] = list()
for info in ipv4_info:
intf_info["am4CfgAddr"].append(
dict(ifIpAddr=info[0], subnetMask=info[1], addrType=info[2]))
# get interface ipv6 address info
ipv6_info = re.findall(
r'.*<ifmAm6>.*\s*<enableFlag>(.*)</enableFlag>.*', rcv_xml)
if not ipv6_info:
self.module.fail_json(msg='Error: Fail to get interface %s IPv6 state.' % self.interface)
else:
intf_info["enableFlag"] = ipv6_info[0]
# get interface ipv6 enable info
ipv6_info = re.findall(
r'.*<ifIp6Addr>(.*)</ifIp6Addr>.*\s*<addrPrefixLen>(.*)'
r'</addrPrefixLen>.*\s*<addrType6>(.*)</addrType6>.*', rcv_xml)
intf_info["am6CfgAddr"] = list()
for info in ipv6_info:
intf_info["am6CfgAddr"].append(
dict(ifIp6Addr=info[0], addrPrefixLen=info[1], addrType6=info[2]))
return intf_info
def convert_len_to_mask(self, masklen):
"""convert mask length to ip address mask, i.e. 24 to 255.255.255.0"""
mask_int = ["0"] * 4
length = int(masklen)
if length > 32:
self.module.fail_json(msg='Error: IPv4 ipaddress mask length is invalid.')
if length < 8:
mask_int[0] = str(int((0xFF << (8 - length % 8)) & 0xFF))
if length >= 8:
mask_int[0] = '255'
mask_int[1] = str(int((0xFF << (16 - (length % 16))) & 0xFF))
if length >= 16:
mask_int[1] = '255'
mask_int[2] = str(int((0xFF << (24 - (length % 24))) & 0xFF))
if length >= 24:
mask_int[2] = '255'
mask_int[3] = str(int((0xFF << (32 - (length % 32))) & 0xFF))
if length == 32:
mask_int[3] = '255'
return '.'.join(mask_int)
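    # Worked example of the conversion above (illustrative note): a masklen of
    # "26" yields "255.255.255.192", i.e. the top 26 bits of the mask set.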
def is_ipv4_exist(self, addr, maskstr, ipv4_type):
""""Check IPv4 address exist"""
addrs = self.intf_info["am4CfgAddr"]
if not addrs:
return False
for address in addrs:
if address["ifIpAddr"] == addr:
return address["subnetMask"] == maskstr and address["addrType"] == ipv4_type
return False
def get_ipv4_main_addr(self):
"""get IPv4 main address"""
addrs = self.intf_info["am4CfgAddr"]
if not addrs:
return None
for address in addrs:
if address["addrType"] == "main":
return address
return None
def is_ipv6_exist(self, addr, masklen):
"""Check IPv6 address exist"""
addrs = self.intf_info["am6CfgAddr"]
if not addrs:
return False
for address in addrs:
if address["ifIp6Addr"] == addr.upper():
if address["addrPrefixLen"] == masklen and address["addrType6"] == "global":
return True
else:
self.module.fail_json(
msg="Error: Input IPv6 address or mask is invalid.")
return False
def set_ipv4_addr(self, ifname, addr, mask, ipv4_type):
"""Set interface IPv4 address"""
        if not addr or not mask or not ipv4_type:
return
maskstr = self.convert_len_to_mask(mask)
if self.state == "present":
if not self.is_ipv4_exist(addr, maskstr, ipv4_type):
# primary IP address
if ipv4_type == "main":
main_addr = self.get_ipv4_main_addr()
if not main_addr:
# no ipv4 main address in this interface
xml_str = CE_NC_ADD_IPV4 % (ifname, addr, maskstr, ipv4_type)
self.netconf_set_config(xml_str, "ADD_IPV4_ADDR")
else:
# remove old address and set new
xml_str = CE_NC_MERGE_IPV4 % (ifname, main_addr["ifIpAddr"],
main_addr["subnetMask"],
addr, maskstr)
self.netconf_set_config(xml_str, "MERGE_IPV4_ADDR")
# secondary IP address
else:
xml_str = CE_NC_ADD_IPV4 % (ifname, addr, maskstr, ipv4_type)
self.netconf_set_config(xml_str, "ADD_IPV4_ADDR")
self.updates_cmd.append("interface %s" % ifname)
if ipv4_type == "main":
self.updates_cmd.append("ip address %s %s" % (addr, maskstr))
else:
self.updates_cmd.append("ip address %s %s sub" % (addr, maskstr))
self.changed = True
else:
if self.is_ipv4_exist(addr, maskstr, ipv4_type):
xml_str = CE_NC_DEL_IPV4 % (ifname, addr, maskstr, ipv4_type)
self.netconf_set_config(xml_str, "DEL_IPV4_ADDR")
self.updates_cmd.append("interface %s" % ifname)
if ipv4_type == "main":
self.updates_cmd.append("undo ip address %s %s" % (addr, maskstr))
else:
self.updates_cmd.append("undo ip address %s %s sub" % (addr, maskstr))
self.changed = True
def set_ipv6_addr(self, ifname, addr, mask):
"""Set interface IPv6 address"""
if not addr or not mask:
return
if self.state == "present":
self.updates_cmd.append("interface %s" % ifname)
if self.intf_info["enableFlag"] == "false":
xml_str = CE_NC_MERGE_IPV6_ENABLE % (ifname, "true")
self.netconf_set_config(xml_str, "SET_IPV6_ENABLE")
self.updates_cmd.append("ipv6 enable")
self.changed = True
if not self.is_ipv6_exist(addr, mask):
xml_str = CE_NC_ADD_IPV6 % (ifname, addr, mask)
self.netconf_set_config(xml_str, "ADD_IPV6_ADDR")
self.updates_cmd.append("ipv6 address %s %s" % (addr, mask))
self.changed = True
if not self.changed:
self.updates_cmd.pop()
else:
if self.is_ipv6_exist(addr, mask):
xml_str = CE_NC_DEL_IPV6 % (ifname, addr, mask)
self.netconf_set_config(xml_str, "DEL_IPV6_ADDR")
self.updates_cmd.append("interface %s" % ifname)
self.updates_cmd.append(
"undo ipv6 address %s %s" % (addr, mask))
self.changed = True
def set_ipv6_enable(self, ifname):
"""Set interface IPv6 enable"""
if self.state == "present":
if self.intf_info["enableFlag"] == "false":
xml_str = CE_NC_MERGE_IPV6_ENABLE % (ifname, "true")
self.netconf_set_config(xml_str, "SET_IPV6_ENABLE")
self.updates_cmd.append("interface %s" % ifname)
self.updates_cmd.append("ipv6 enable")
self.changed = True
else:
if self.intf_info["enableFlag"] == "true":
xml_str = CE_NC_MERGE_IPV6_ENABLE % (ifname, "false")
self.netconf_set_config(xml_str, "SET_IPV6_DISABLE")
self.updates_cmd.append("interface %s" % ifname)
self.updates_cmd.append("undo ipv6 enable")
self.changed = True
def check_params(self):
"""Check all input params"""
# check interface type
if self.interface:
self.intf_type = get_interface_type(self.interface)
if not self.intf_type:
self.module.fail_json(
msg='Error: Interface name of %s '
                    'is invalid.' % self.interface)
# ipv4 addr and mask check
if self.version == "v4":
if not is_valid_v4addr(self.addr):
self.module.fail_json(
msg='Error: The %s is not a valid address.' % self.addr)
if not self.mask.isdigit():
self.module.fail_json(msg='Error: mask is invalid.')
if int(self.mask) > 32 or int(self.mask) < 1:
self.module.fail_json(
msg='Error: mask must be an integer between 1 and 32.')
# ipv6 mask check
if self.version == "v6":
if self.addr:
if not self.mask.isdigit():
self.module.fail_json(msg='Error: mask is invalid.')
if int(self.mask) > 128 or int(self.mask) < 1:
self.module.fail_json(
msg='Error: mask must be an integer between 1 and 128.')
# interface and layer3 check
self.intf_info = self.get_interface_dict(self.interface)
if not self.intf_info:
self.module.fail_json(msg='Error: interface %s does not exist.' % self.interface)
if self.intf_info["isL2SwitchPort"] == "true":
self.module.fail_json(msg='Error: interface %s is layer2.' % self.interface)
def get_proposed(self):
"""get proposed info"""
self.proposed["state"] = self.state
self.proposed["addr"] = self.addr
self.proposed["mask"] = self.mask
self.proposed["ipv4_type"] = self.ipv4_type
self.proposed["version"] = self.version
self.proposed["interface"] = self.interface
def get_existing(self):
"""get existing info"""
self.existing["interface"] = self.interface
self.existing["ipv4addr"] = self.intf_info["am4CfgAddr"]
self.existing["ipv6addr"] = self.intf_info["am6CfgAddr"]
self.existing["ipv6enalbe"] = self.intf_info["enableFlag"]
def get_end_state(self):
"""get end state info"""
intf_info = self.get_interface_dict(self.interface)
self.end_state["interface"] = self.interface
self.end_state["ipv4addr"] = intf_info["am4CfgAddr"]
self.end_state["ipv6addr"] = intf_info["am6CfgAddr"]
self.end_state["ipv6enalbe"] = intf_info["enableFlag"]
def work(self):
"""worker"""
self.check_params()
self.get_existing()
self.get_proposed()
# deal present or absent
if self.version == "v4":
self.set_ipv4_addr(self.interface, self.addr, self.mask, self.ipv4_type)
else:
if not self.addr and not self.mask:
self.set_ipv6_enable(self.interface)
else:
self.set_ipv6_addr(self.interface, self.addr, self.mask)
self.get_end_state()
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
if self.changed:
self.results['updates'] = self.updates_cmd
else:
self.results['updates'] = list()
self.module.exit_json(**self.results)
def main():
"""Module main"""
argument_spec = dict(
interface=dict(required=True),
addr=dict(required=False),
version=dict(required=False, choices=['v4', 'v6'],
default='v4'),
mask=dict(type='str', required=False),
ipv4_type=dict(required=False, choices=['main', 'sub'], default='main'),
state=dict(required=False, default='present',
choices=['present', 'absent'])
)
argument_spec.update(ce_argument_spec)
module = IpInterface(argument_spec)
module.work()
if __name__ == '__main__':
main()
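# A hedged usage sketch (illustrative only): a playbook task shaped after the
# argument_spec above. The module name "ce_ip_interface", the interface name and the
# address values are assumptions, since none of them appear in this part of the file.
#
#   - name: Set the primary IPv4 address of an interface
#     ce_ip_interface:
#       interface: 10GE1/0/22
#       version: v4
#       state: present
#       addr: 192.168.100.10
#       mask: 24
#       ipv4_type: main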
|
[
"[email protected]"
] | |
ab1acbf9736b4d8dcf92e279bca3053be513b34f
|
f795a505b4c92e7e12b2b905dcfe0a889c9c99a6
|
/BIOMD0000000497/model.py
|
43f603dce6f353e607e36970b7b15756e45ccb32
|
[
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] |
permissive
|
biomodels/BIOMD0000000497
|
a953ad543534bc2cc6f5197caeb96d17c5fb6931
|
ceb94fac6887cfbdc2ba5e33cd31909b102a64cc
|
refs/heads/master
| 2016-09-06T11:59:06.453896 | 2014-10-16T05:19:41 | 2014-10-16T05:19:41 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 427 |
py
|
import os
path = os.path.dirname(os.path.realpath(__file__))
sbmlFilePath = os.path.join(path, 'BIOMD0000000497.xml')
with open(sbmlFilePath,'r') as f:
sbmlString = f.read()
def module_exists(module_name):
try:
__import__(module_name)
except ImportError:
return False
else:
return True
if module_exists('libsbml'):
import libsbml
sbml = libsbml.readSBMLFromString(sbmlString)
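# A minimal usage sketch (commented out; assumes libsbml imported successfully above --
# the calls below follow the standard libsbml Python API and are not part of the original file):
#     model = sbml.getModel()
#     print(model.getId(), model.getNumSpecies(), model.getNumReactions())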
|
[
"[email protected]"
] | |
c86a379eb2e6c400bfa33f54e0069383be891482
|
854245317cb8031ea9b65347a7f3b78b4ea251b4
|
/amazon_scraper/scraper_app/__init__.py
|
cc9ac8b8bb2b4bd4b4dcc027af07e35d6411c589
|
[] |
no_license
|
tahirs95/Web-Scraping-Portfolio
|
d9bfe8fcc5effec96f5fe123aac6a6f8941c500f
|
973b9ba7f0d930f83cbce5ae64dea1a86b882fda
|
refs/heads/master
| 2023-02-27T15:00:40.164995 | 2021-09-22T15:53:48 | 2021-09-22T15:53:48 | 213,682,348 | 3 | 1 | null | 2023-02-16T04:17:55 | 2019-10-08T15:29:31 |
SCSS
|
UTF-8
|
Python
| false | false | 548 |
py
|
from flask import Flask, render_template, request
from .functions import process_data, scrape
app = Flask(__name__)
@app.route('/', methods=['GET','POST'])
def index():
data = {'None': 'None'}
data_available = False
if request.form:
asin = request.form['search']
if asin:
url = "https://amazon.com/dp/{}".format(asin)
data = scrape(url)
data = process_data(data)
data_available = True
return render_template('index.html', data_available=data_available, data=data)
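# A hedged run sketch (not part of the original package, which is presumably started with
# "flask run" or served by a WSGI server). If a dedicated entry-point script were added,
# it could look like this (the import path is an assumption):
#     from scraper_app import app
#     if __name__ == '__main__':
#         app.run(debug=True)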
|
[
"[email protected]"
] | |
51836f374510131b73000b64d570b39a3bc1aeeb
|
d233b312cd3f0c83a06c1cce830252a3664023ff
|
/catkin_ws/build/universal_robot/ur3_moveit_config/catkin_generated/pkg.installspace.context.pc.py
|
9ebc475624671b0e651c4ea0c32ffe85e6e6776c
|
[] |
no_license
|
leopauly/Observation-learning-Real-world-UR5
|
32691f989ed60d92eca82bea0f47b960a6d48afa
|
72e61d07582d05d40a1bde31c99ab5d9cf97f70d
|
refs/heads/main
| 2023-07-05T07:05:00.554268 | 2021-08-12T11:51:58 | 2021-08-12T11:51:58 | 395,289,055 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 381 |
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "ur3_moveit_config"
PROJECT_SPACE_DIR = "/home/wisdom/catkin_ws/install"
PROJECT_VERSION = "1.2.5"
|
[
"[email protected]"
] | |
4adbab1657f6fe87b55dcd77902ca289972684a1
|
a893537a71aa285071a68035c968ba6f5c0ca57d
|
/ch02/18/18.py
|
054f16fcb33111a2f4f27ccd17242175023354dd
|
[] |
no_license
|
sinchir0/2020_NLP100
|
0a1810b0c299c29fa1a811f68fa87be74f9b3cf9
|
772123da5b5ac4094c26fdce2e192637dc55190a
|
refs/heads/main
| 2023-07-18T04:03:04.123302 | 2021-09-08T22:54:44 | 2021-09-08T22:54:44 | 257,416,868 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,021 |
py
|
# 18. Sort each line by the numeric value in the third column, in descending order
# Sort the lines in reverse (descending) order of the value in the third column (note: reorder the lines without modifying their contents). Use the sort command to check the result (for this exercise the output does not have to match the command exactly).
def file_line_reader_generator(file_path):
"""ファイルの行を返すジェネレータ"""
with open(file_path, encoding="utf-8") as in_file:
for line in in_file:
yield line
if __name__ == "__main__":
    # Read the file lazily through the generator
popular_names = file_line_reader_generator("../10/popular-names.txt")
    # Sort numerically on the third column (plain string comparison would misorder counts of different digit lengths)
    popular_names = sorted(popular_names, key=lambda x: int(x.split("\t")[2]), reverse=True)
for name in popular_names:
print(name)
    # Equivalent UNIX command
    # sort -n -k 3 -r ../10/popular-names.txt
    # -n: compare the key as a number instead of as a string
    # -k: specify which field to sort on and how
    # -r: sort in reverse (descending) order
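    # A hedged verification sketch (assumes a Unix-like environment with sort on PATH;
    # not part of the original solution):
    #     import subprocess
    #     subprocess.run(["sort", "-n", "-k", "3", "-r", "../10/popular-names.txt"], check=True)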
|
[
"[email protected]"
] | |
b3bf86f703dabc845c1b7f0e94b45a8abf13cf86
|
61e32bb5ebf868965d3af68b5717672f7b4c51c2
|
/denominations.py
|
53ed3f12c39a6488ee1fa1c2e7fadf3e2ffa6ff8
|
[] |
no_license
|
billaanil3/PRACTICE
|
e4f885473cc10a17ab422da2f6d29aea576c7b94
|
9e0d6512bd8cbfc0fa4e0d1472ac23d1482bacf9
|
refs/heads/master
| 2020-11-26T18:25:30.318395 | 2019-12-20T02:22:45 | 2019-12-24T11:00:14 | 229,172,095 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 316 |
py
|
from collections import OrderedDict
notes = [2000, 500, 200, 100, 50, 20, 10, 5, 2, 1]
amount = int(input("Enter a number:"))
res_notes = OrderedDict()
for note in notes:
    # Use as many of the current denomination as possible, then carry the remainder on
    if amount // note > 0:
        res_notes[note] = amount // note
        amount = amount % note
print(res_notes)
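# A hedged example run (illustrative only): for an input of 2753 the loop above yields
# OrderedDict([(2000, 1), (500, 1), (200, 1), (50, 1), (2, 1), (1, 1)]).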
|
[
"[email protected]"
] | |
9d56ea4a1e627c0491afb179e8aa9e1ff000be66
|
c1dd2020da8c3e117aec41619aa56b22cc62b5be
|
/django1/src/blog/views.py
|
2718fc0cb6201b2e0106fd0e77d82a4aa6d846f8
|
[] |
no_license
|
rlaqhdwns/django1
|
039dadc5212704222f9bcab389c4eb9f27ede412
|
74180aa0ce2d9e5217ac8df7855482bd86d07ff1
|
refs/heads/master
| 2020-04-21T23:11:17.801454 | 2019-02-10T03:20:47 | 2019-02-10T03:20:47 | 169,938,177 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,153 |
py
|
from django.shortcuts import render
# Generic views: classes provided by Django that implement common view behaviour
# ListView: a view that handles a list of objects of a given model class
# DetailView: a view that handles a single object of a given model class
# FormView: a view that handles a given form class
from django.views.generic.detail import DetailView
from django.views.generic.edit import FormView
from django.views.generic.list import ListView
from blog.models import Post, PostFile, PostImage
from blog.forms import PostForm
from django.http.response import HttpResponseRedirect
from django.urls.base import reverse
from django.contrib.auth.mixins import LoginRequiredMixin
# Index: the main page that shows the list of posts
class Index(ListView):
    # Path to the HTML template this view uses
    template_name = 'blog/index.html'
    # Model class whose objects are listed
    model = Post
    # Key name used to pass the object list to the template
    context_object_name = 'list'
    # Maximum number of objects shown per page
    paginate_by = 5
# Detail: the page that shows a single post
class Detail(DetailView):
template_name = 'blog/detail.html'
model = Post
context_object_name = 'obj'
# Posting: the page for writing a new post
class Posting(LoginRequiredMixin, FormView):
template_name = 'blog/posting.html'
    # Form class this view works with
form_class = PostForm
context_object_name = 'f'
    # Override form_valid() to define what happens after is_valid() returns True
    def form_valid(self, form):
        # Parameter form: the PostForm object that passed is_valid()
        # Build a Post object from the PostForm; the author field is still empty,
        # so do not commit to the database yet
        p = form.save(commit=False)  # p: Post object
        # request.user: the login information of the requesting client (a User model object)
        p.author = self.request.user
        p.save()  # Now the Post object is saved to the database
        # Create and save PostFile / PostImage objects from the attachments and images the client sent
        # request.FILES: holds the files the client uploaded to the server
        # Create PostFile objects
        for f in self.request.FILES.getlist('files'):
            # f: file data
            pf = PostFile()  # Create a new PostFile model object
pf.file = f
pf.post = p
            pf.save()  # The new PostFile object is saved to the database
        # Create PostImage objects
        for i in self.request.FILES.getlist('images'):
            # i: image data
pi = PostImage()
pi.post = p
pi.image = i
pi.save()
        # Redirect to the page of the newly created post
        return HttpResponseRedirect(reverse('blog:detail', args=(p.id,)))
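# A hedged URLconf sketch (blog/urls.py is not part of this file; the route names and
# patterns below are assumptions inferred from reverse('blog:detail', ...) and the view
# classes above):
#     from django.urls import path
#     from . import views
#
#     app_name = 'blog'
#     urlpatterns = [
#         path('', views.Index.as_view(), name='index'),
#         path('detail/<int:pk>/', views.Detail.as_view(), name='detail'),
#         path('posting/', views.Posting.as_view(), name='posting'),
#     ]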
|
[
"user@DESKTOP-37GULAI"
] |
user@DESKTOP-37GULAI
|