blob_id (string, len 40) | directory_id (string, len 40) | path (string, len 3–616) | content_id (string, len 40) | detected_licenses (sequence, len 0–112) | license_type (string, 2 classes) | repo_name (string, len 5–115) | snapshot_id (string, len 40) | revision_id (string, len 40) | branch_name (string, 777 classes) | visit_date (timestamp[us], 2015-08-06 10:31:46 – 2023-09-06 10:44:38) | revision_date (timestamp[us], 1970-01-01 02:38:32 – 2037-05-03 13:00:00) | committer_date (timestamp[us], 1970-01-01 02:38:32 – 2023-09-06 01:08:06) | github_id (int64, 4.92k–681M, nullable) | star_events_count (int64, 0–209k) | fork_events_count (int64, 0–110k) | gha_license_id (string, 22 classes) | gha_event_created_at (timestamp[us], 2012-06-04 01:52:49 – 2023-09-14 21:59:50, nullable) | gha_created_at (timestamp[us], 2008-05-22 07:58:19 – 2023-08-21 12:35:19, nullable) | gha_language (string, 149 classes) | src_encoding (string, 26 classes) | language (string, 1 class) | is_vendor (bool) | is_generated (bool) | length_bytes (int64, 3–10.2M) | extension (string, 188 classes) | content (string, len 3–10.2M) | authors (sequence, len 1) | author_id (string, len 1–132) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
01987f0ad902f03e4f8143a9902372ec5a23fdac | 100802fd56febbe28e11d45802e0ad661a9b98c4 | /Community/migrations/0025_auto_20170917_1324.py | b00847fda82521674abe2ec4ce8c57358fab7431 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | ByrdOfAFeather/AlphaTrion | efc4cfcfa246adde5a0ce84eb5e295c0c61722f5 | 90b00b8f4e4c7fe3c495a5ded14b47c3210119ea | refs/heads/master | 2021-01-22T07:35:43.403968 | 2017-12-03T15:50:41 | 2017-12-03T15:50:41 | 102,306,857 | 0 | 2 | null | 2017-11-12T18:32:35 | 2017-09-04T01:39:45 | Python | UTF-8 | Python | false | false | 502 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-17 17:24
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('Community', '0024_merge_20170906_2028'),
]
operations = [
migrations.AlterModelOptions(
name='communitypacingratings',
options={'verbose_name': 'Community Pacing Ratings', 'verbose_name_plural': 'Community Pacing Ratings'},
),
]
| [
"[email protected]"
] | |
9cbd73869738bbc8369c6bfc681a592db0c22d7f | 013ce5b341600adafb4bcbb2fc8fc3401d546fae | /rqalpha/mod/rqalpha_mod_sys_inplustrader/inplus_data_source.py | 9e041586cdf7f43117ff15647063ededbe08a56f | [
"MIT"
] | permissive | tianhm/InplusTrader_Linux | 75db2a653e977ea366fc50b731b4dc70b374062d | 5f7eb17004da0b76ceafb93cb314de7a6009cd04 | refs/heads/master | 2020-04-13T19:57:50.590512 | 2017-08-05T11:39:25 | 2017-08-05T11:39:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,738 | py | # -*- coding: utf-8 -*-
import six
import os, sys
import pandas as pd
import numpy as np
import datetime
import pymongo
try:
    # For Python 2 compatibility
from functools import lru_cache
except Exception as e:
from fastcache import lru_cache
from rqalpha.environment import Environment
#from rqalpha.interface import AbstractDataSource
from rqalpha.data.base_data_source import BaseDataSource
from rqalpha.data.future_info_cn import CN_FUTURE_INFO
from rqalpha.utils.datetime_func import convert_date_to_int, convert_int_to_date
from rqalpha.model.instrument import Instrument
from rqalpha.data import risk_free_helper
from rqalpha.data.risk_free_helper import YIELD_CURVE_TENORS
from data_types import getType
class DataSource(BaseDataSource):
def __init__(self, path):
"""
        Data source interface. It is further wrapped by :class:`DataProxy` to provide an easier-to-use interface to upper layers.
        In extension modules, the default data source can be replaced by calling ``env.set_data_source``. See :class:`BaseDataSource` for reference.
"""
self._env = Environment.get_instance()
self.conn = pymongo.MongoClient(path, 27017)
# Day DB
self.stocks_days_db = self.conn.InplusTrader_Stocks_Day_Db
self.indexes_days_db = self.conn.InplusTrader_Indexes_Day_Db
self.futures_days_db = self.conn.InplusTrader_Futures_Day_Db
self.funds_days_db = self.conn.InplusTrader_Funds_Day_Db
# Minute DB
self.stocks_mins_db = self.conn.InplusTrader_Stocks_Min_Db
self.futures_mins_db = self.conn.InplusTrader_Futures_Min_Db
# Tick DB
self.stocks_tick_db = self.conn.InplusTrader_Stocks_Tick_Db
self.futures_tick_db = self.conn.InplusTrader_Futures_Tick_Db
self.instruments_db = self.conn.InplusTrader_Instruments_Db
self.adjusted_dividends_db = self.conn.InplusTrader_Adjusted_Dividents_Db
self.original_dividends_db = self.conn.InplusTrader_Original_Dividents_Db
self.trading_dates_db = self.conn.InplusTrader_Trading_Dates_Db
self.yield_curve_db = self.conn.InplusTrader_Yield_Curve_Db
self.st_stock_days_db = self.conn.InplusTrader_St_Stocks_Day_Db
self.suspend_days_db = self.conn.InplusTrader_Suspend_Day_Db
self._day_bars = [self.stocks_days_db, self.indexes_days_db, self.futures_days_db, self.funds_days_db]
self._min_bars = [self.stocks_mins_db, self.futures_mins_db]
self._tick_bars = [self.stocks_tick_db, self.futures_tick_db]
self._instruments = self.instruments_db
self._adjusted_dividends = self.adjusted_dividends_db
self._original_dividends = self.original_dividends_db
self._trading_dates = self.trading_dates_db
self._yield_curve = self.yield_curve_db
self._st_stock_days = self.st_stock_days_db
self._suspend_days = self.suspend_days_db
def get_dividend(self, order_book_id, adjusted=True):
"""
        Get dividend information for a stock/fund.
        :param str order_book_id: instrument id
        :param bool adjusted: whether the data is forward-adjusted
:return:
"""
def fetchData(adjusted):
if adjusted:
mongo_data = self._adjusted_dividends[order_book_id].find({}, {"_id":0})
else:
mongo_data = self._original_dividends[order_book_id].find({}, {"_id":0})
return mongo_data
result = pd.DataFrame({
'book_closure_date': pd.Index(pd.Timestamp(d['book_closure_date']) for d in fetchData(adjusted)),
'ex_dividend_date': pd.Index(pd.Timestamp(d['ex_dividend_date']) for d in fetchData(adjusted)),
'payable_date': pd.Index(pd.Timestamp(d['payable_date']) for d in fetchData(adjusted)),
'dividend_cash_before_tax': [d['dividend_cash_before_tax'] for d in fetchData(adjusted)],
'round_lot': [d['round_lot'] for d in fetchData(adjusted)]
}, index = pd.Index(pd.Timestamp(d['announcement_date']) for d in fetchData(adjusted)))
return result
def get_trading_minutes_for(self, order_book_id, trading_dt):
"""
        Get the trading sessions of an instrument for a given day (used for futures backtesting).
        :param instrument: instrument object
        :type instrument: :class:`~Instrument`
        :param datetime.datetime trading_dt: trading day. Note the rules for which trading day a futures night session belongs to.
:return: list[`datetime.datetime`]
"""
raise NotImplementedError
def get_trading_calendar(self):
"""
        Get the trading calendar.
:return: list[`pandas.Timestamp`]
"""
mongo_data = self._trading_dates["tradingDates"].find({}, {"_id":0})
result = pd.Index(pd.Timestamp(str(d["trading date"])) for d in mongo_data)
return result
def get_all_instruments(self):
"""
        Get all instruments.
:return: list[:class:`~Instrument`]
"""
mongo_data = self._instruments["instruments"].find({}, {"_id":0})
return [Instrument(i) for i in mongo_data]
def is_suspended(self, order_book_id, dt):
if isinstance(dt, (int, np.int64, np.uint64)):
if dt > 100000000:
dt //= 1000000
else:
dt = dt.year*10000 + dt.month*100 + dt.day
result =set(np.uint32(d["date"]) for d in self._suspend_days[order_book_id].find({}, {"_id":0}))
return dt in result
def is_st_stock(self, order_book_id, dt):
if isinstance(dt, (int, np.int64, np.uint64)):
if dt > 100000000:
dt //= 1000000
else:
dt = dt.year*10000 + dt.month*100 + dt.day
result = set(np.uint32(d["date"]) for d in self._st_stock_days[order_book_id].find({}, {"_id":0}))
return dt in result
INSTRUMENT_TYPE_MAP = {
        'CS': 0,       # stock
        'INDX': 1,     # index
        'Future': 2,   # futures
        'ETF': 3,      # ETF
        'LOF': 3,      # LOF
        'FenjiA': 3,   # structured fund A share
        'FenjiB': 3,   # structured fund B share
        'FenjiMu': 3,  # structured fund parent share
}
def _index_of(self, instrument):
return self.INSTRUMENT_TYPE_MAP[instrument.type]
@lru_cache(None)
def _all_day_bars_of(self, instrument):
i = self._index_of(instrument)
mongo_data = self._day_bars[i][instrument.order_book_id].find({}, {"_id": 0})
fields = mongo_data[0].keys()
fields.remove('date')
dtype = np.dtype(getType(i))
result = np.empty(shape=(mongo_data.count(),), dtype=dtype)
for f in fields:
bar_attr = []
mongo_data = self._day_bars[i][instrument.order_book_id].find({}, {"_id": 0})
for bar in mongo_data:
bar_attr.append(bar[f])
result[f] = np.array(bar_attr)
bar_attr = []
mongo_data = self._day_bars[i][instrument.order_book_id].find({}, {"_id": 0})
for bar in mongo_data:
bar_attr.append(np.array(bar['date']).astype(np.uint64) * 1000000)
result['datetime'] = np.array(bar_attr)
return result
@lru_cache(None)
def _filtered_day_bars(self, instrument):
bars = self._all_day_bars_of(instrument)
if bars is None:
return None
return bars[bars['volume'] > 0]
def get_bar(self, instrument, dt, frequency):
"""
        Get the bar data corresponding to dt.
        :param instrument: instrument object
        :type instrument: :class:`~Instrument`
        :param datetime.datetime dt: calendar_datetime
        :param str frequency: bar frequency, `1d` for daily bars, `1m` for minute bars
:return: `numpy.ndarray` | `dict`
"""
if frequency != '1d':
raise NotImplementedError
bars = self._all_day_bars_of(instrument)
if bars is None:
return
dt = convert_date_to_int(dt)
pos = bars['datetime'].searchsorted(dt)
if pos >= len(bars) or bars['datetime'][pos] != dt:
return None
return bars[pos]
def get_settle_price(self, instrument, date):
"""
        Get the settlement price of a futures instrument on `date`.
        :param instrument: instrument object
        :type instrument: :class:`~Instrument`
        :param datetime.date date: settlement date
:return: `str`
"""
bar = self.get_bar(instrument, date, '1d')
if bar is None:
return np.nan
return bar['settlement']
@staticmethod
def _are_fields_valid(fields, valid_fields):
if fields is None:
return True
if isinstance(fields, six.string_types):
return fields in valid_fields
for field in fields:
if field not in valid_fields:
return False
return True
def get_yield_curve(self, start_date, end_date, tenor=None):
"""
        Get treasury bond yields.
        :param pandas.Timestamp start_date: start date
        :param pandas.Timestamp end_date: end date
        :param str tenor: yield tenor
:return: pandas.DataFrame, [start_date, end_date]
"""
mongo_dates = self._yield_curve['dates'].find({}, {"_id":0}).sort('date', pymongo.ASCENDING)
_dates = np.array([np.uint32(d['date']) for d in mongo_dates])
d1 = start_date.year * 10000 + start_date.month * 100 + start_date.day
d2 = end_date.year * 10000 + end_date.month * 100 + end_date.day
s = _dates.searchsorted(d1)
e = _dates.searchsorted(d2, side='right')
if e == len(_dates):
e -= 1
if _dates[e] == d2:
            # include end_date
e += 1
if e < s:
return None
df = pd.DataFrame()
for d in YIELD_CURVE_TENORS.values():
mongo_data = self._yield_curve[d].find({}, {"_id":0}).sort('date', pymongo.ASCENDING)
df[d] = [k['data'] for k in mongo_data]
mongo_data = self._yield_curve['dates'].find({}, {"_id":0}).sort('date', pymongo.ASCENDING)
df.index = pd.Index(pd.Timestamp(str(d['date'])) for d in mongo_data)
df.rename(columns=lambda n: n[1:] + n[0], inplace=True)
if tenor is not None:
return df[tenor]
return df
def get_risk_free_rate(self, start_date, end_date):
mongo_dates = self._yield_curve['dates'].find({}, {"_id":0}).sort('date', pymongo.ASCENDING)
_dates = np.array([np.uint32(d['date']) for d in mongo_dates])
tenor = risk_free_helper.get_tenor_for(start_date, end_date)
tenor = tenor[-1] + tenor[:-1]
mongo_data = self._yield_curve[tenor].find({}, {"_id":0})
_table = np.array([d['data'] for d in mongo_data])
d = start_date.year * 10000 + start_date.month * 100 + start_date.day
pos = _dates.searchsorted(d)
if pos > 0 and (pos == len(_dates) or _dates[pos] != d):
pos -= 1
while pos >= 0 and np.isnan(_table[pos]):
# data is missing ...
pos -= 1
return _table[pos]
def current_snapshot(self, instrument, frequency, dt):
"""
        Get the current market snapshot. This can only be called during intraday trading and returns the snapshot of the current day as of the time of the call.
        The snapshot records the data accumulated from the day's open up to now, and can be thought of as a dynamic day bar.
        In the current minute-level backtest, the snapshot is accumulated from all minute bars of the day; in general, the snapshot obtained at the last minute bar should match the daily bar of that day.
        Note that in live simulation this function returns the snapshot at the moment of the call, so calls at different points within the same handle_bar may return different data.
        If the corresponding stock has had no trades so far on the day, the close, high, low and last prices in the snapshot will all be 0.
        :param instrument: instrument object
        :type instrument: :class:`~Instrument`
        :param str frequency: bar frequency, `1d` for daily bars, `1m` for minute bars
        :param datetime.datetime dt: time
:return: :class:`~Snapshot`
"""
raise NotImplementedError
def get_split(self, order_book_id):
"""
        Get split information.
        :param str order_book_id: instrument id
:return: `pandas.DataFrame`
"""
return None
def available_data_range(self, frequency):
"""
        The time range of data this data source can provide.
        :param str frequency: bar frequency, `1d` for daily bars, `1m` for minute bars
:return: (earliest, latest)
"""
if frequency == '1d':
mongo_data = self._day_bars[self.INSTRUMENT_TYPE_MAP['INDX']]['000001.XSHG'].find({}, {"_id":0}).sort('date', pymongo.ASCENDING)
mongo_data = list(mongo_data)
s, e = np.uint32(mongo_data[0]['date']), np.uint32(mongo_data[-1]['date'])
return convert_int_to_date(s).date(), convert_int_to_date(e).date()
if frequency == '1m':
raise NotImplementedError
def history_bars(self, instrument, bar_count, frequency, fields, dt, skip_suspended=True, include_now=False):
"""
        Get historical data.
        :param instrument: instrument object
        :type instrument: :class:`~Instrument`
        :param int bar_count: number of historical bars to fetch
        :param str frequency: bar frequency, `1d` for daily bars, `1m` for minute bars
        :param str fields: data fields to return
        =========================   ===================================================
        fields                      meaning
        =========================   ===================================================
        datetime                    timestamp
        open                        open price
        high                        highest price
        low                         lowest price
        close                       close price
        volume                      trading volume
        total_turnover              turnover
        datetime                    timestamp as int
        open_interest               open interest (futures only)
        basis_spread                basis spread (index futures only)
        settlement                  settlement price (futures daily bars only)
        prev_settlement             previous settlement price (futures daily bars only)
        =========================   ===================================================
        :param datetime.datetime dt: time
        :param bool skip_suspended: whether to skip suspended trading days
:return: `numpy.ndarray`
"""
if frequency != '1d':
raise NotImplementedError
if skip_suspended and instrument.type == 'CS':
bars = self._filtered_day_bars(instrument)
else:
bars = self._all_day_bars_of(instrument)
if bars is None or not self._are_fields_valid(fields, bars.dtype.names):
return None
dt = convert_date_to_int(dt)
i = bars['datetime'].searchsorted(dt, side='right')
left = i - bar_count if i >= bar_count else 0
if fields is None:
return bars[left:i]
else:
return bars[left:i][fields]
def get_future_info(self, instrument, hedge_type):
"""
        Get futures contract data such as commission and margin.
        :param str order_book_id: instrument id
        :param HEDGE_TYPE hedge_type: enum, account hedge type
:return: dict
"""
return CN_FUTURE_INFO[instrument.underlying_symbol][hedge_type.value]
def get_ticks(self, order_book_id, date):
raise NotImplementedError
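
# Minimal usage sketch, added for illustration (assumptions: a MongoDB instance
# populated by InplusTrader is reachable at the given host, and the rqalpha
# Environment has already been initialized by the framework):
#
#     ds = DataSource("127.0.0.1")
#     print(ds.available_data_range('1d'))
#     print(ds.get_trading_calendar()[:5])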
| [
"[email protected]"
] | |
8f4684429d545ecf7164585be6213989c7b40f50 | 15581a76b36eab6062e71d4e5641cdfaf768b697 | /Topics/Bit Manipulation/Hamming Distance.py | 0f20d3169bb1b5a3f103505b9745db2a91f036ec | [] | no_license | MarianDanaila/Competitive-Programming | dd61298cc02ca3556ebc3394e8d635b57f58b4d2 | 3c5a662e931a5aa1934fba74b249bce65a5d75e2 | refs/heads/master | 2023-05-25T20:03:18.468713 | 2023-05-16T21:45:08 | 2023-05-16T21:45:08 | 254,296,597 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,147 | py | # Approach 1: Just check every bit in both numbers and increment when they are different
class Solution:
def hammingDistance(self, x: int, y: int) -> int:
hamming_distance = 0
while x != 0 or y != 0:
if x % 2 != y % 2:
hamming_distance += 1
x = x >> 1
y = y >> 1
return hamming_distance
# Approach 2: Just XOR x and y and then count the number of '1' bits,
# because the XOR of two different bits is always 1
class Solution:
def hammingDistance(self, x: int, y: int) -> int:
hamming_distance = 0
new = x ^ y
while new > 0:
if new % 2 == 1:
hamming_distance += 1
new = new >> 1
return hamming_distance
# Approach 3: Again XOR x and y, but count the '1' bits using the
# n & (n-1) trick, which clears the lowest set '1' bit on each step
class Solution:
def hammingDistance(self, x: int, y: int) -> int:
hamming_distance = 0
new = x ^ y
while new > 0:
new = new & (new-1)
hamming_distance += 1
return hamming_distance
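
# A minimal sanity check, added for illustration (not part of the original file);
# it exercises the last Solution class defined above. 1 = 0b001 and 4 = 0b100
# differ in three bit positions, so the expected Hamming distance is 3.
if __name__ == "__main__":
    print(Solution().hammingDistance(1, 4))  # expected: 3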
| [
"[email protected]"
] | |
81043998a660fac47d3cc846d5dcfef5ca7aba85 | 1ef56dcfef70ee14df8956eedd171f74406594af | /cms/dal/content_dal.py | 6377504eafc1dd0140adba7c9c999684394a5e50 | [] | no_license | journeyends/webtest | 6c54ff19e01cd0cd99a34bcae55dd5701abf132f | 2a24c6d7c52aa627edfbba3dd5eb9ccc16abe9fb | refs/heads/master | 2020-04-17T07:38:52.873722 | 2019-03-21T09:36:50 | 2019-03-21T09:36:50 | 166,378,280 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 665 | py | from cms.entity.content_entity import ContentModel
class ContentDal:
def getListByCondition(self, channelId, categoryId):
search_dict = dict()
search_dict['is_on'] = 1
if channelId is not None and int(channelId) > 0:
search_dict['channel_id'] = int(channelId)
if categoryId is not None and int(categoryId) > 0:
search_dict['category_id'] = int(categoryId)
obj = ContentModel.objects.filter(**search_dict) \
.values('id', 'title', 'channel_id', 'category_id')
return obj
def getById(self, id):
obj = ContentModel.objects.filter(id=id).first()
return obj
| [
"[email protected]"
] | |
995de2292dda0406dc843356accff4f284d58da4 | 5ade44090b99ba19edd5cc0b07e4ebf1f8cc416e | /introduction.to.programming.with.turtle/5-1-1.45Tree.py | f06665fd7736a2cfefeee5d89c8d7619611ac0e3 | [
"MIT"
] | permissive | Mito445/programming_with_python | af2f201fc3f13cab88fdaa708ecda6da05ad1b46 | 1114eaa7432ea8581d880bcebd1813a9fb873cce | refs/heads/master | 2020-05-27T13:00:53.085344 | 2019-05-21T07:59:18 | 2019-05-21T07:59:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 311 | py | from easygraphics.turtle import *
def branch(len, level):
if level == 0:
return
fd(len)
lt(45)
branch(len / 2, level - 1)
rt(90)
branch(len / 2, level - 1)
lt(45)
bk(len)
create_world(800, 600)
set_speed(100)
pu()
bk(200)
pd()
branch(200, 5)
pause()
close_world()
| [
"[email protected]"
] | |
f063629eadd4d9ceba806ccfa8b53d59a11e2640 | 1a639d185f9c883b7bebf33c577c58b22ac93c7e | /tkinter_learn/54_bind_multiple.py | bfd735893e6af94287c8c1d035feacf70951cf2d | [] | no_license | gofr1/python-learning | bd09da5b5850b1533a88b858690ed4380b55d33e | 19343c985f368770dc01ce415506506d62a23285 | refs/heads/master | 2023-09-02T15:42:27.442735 | 2021-11-12T10:17:13 | 2021-11-12T10:17:13 | 237,828,887 | 0 | 0 | null | 2021-11-12T10:17:14 | 2020-02-02T20:03:42 | Python | UTF-8 | Python | false | false | 696 | py | from tkinter import Tk, mainloop
from tkinter import ttk
root = Tk()
label0 = ttk.Label(root, text='Label 0')
label1 = ttk.Label(root, text='Label 1')
label0.pack()
label1.pack()
label0.bind('<ButtonPress>', lambda e: print('<ButtonPress> Label'))
label0.bind('<1>', lambda e: print('<1> Label'))
root.bind('<1>', lambda e: print('<1> Root'))
# in that case when left button click is binded both on label and root window
# now when you click on label0 both events will occur:
# <1> Label
# <1> Root
# so lets unbind left-click event from label
label0.unbind('<1>')
# now we will have
# <ButtonPress> Label
# <1> Root
root.bind_all('<Escape>', lambda e: print('Escape!!!'))
root.mainloop() | [
"[email protected]"
] | |
142c6a78e2239ee290ca640c02952a948775537f | 1fc9a12c86be4e440f4f0d95c8b871c79df07545 | /ML Libariries/python_numpy/numpy_zeros.py | b2db37298bb56aad4e9478392ff9204d6e28f939 | [] | no_license | Rohit-Gupta-Web3/Articles | a56e7f1b36d6d5efd846eec2e1a4036716ac16eb | 0f584916e065059d4dd1e95e7de874a7830fdff4 | refs/heads/master | 2023-05-14T02:50:17.817951 | 2020-07-26T06:44:10 | 2020-07-26T06:44:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 49 | py | import numpy as np
a = np.zeros((3,3))
print(a)
| [
"[email protected]"
] | |
0fa4c393ffabac04abda009c1148efba8cede935 | 2ca120efab4f730df882e1f7e2d71a2dee61aa25 | /analytics/signals.py | a38f739d1fd97347dfdbe714ad434b309b46799b | [] | no_license | jach58/geolocator | c246bfa95f600e0618ea8606fd203ac5b01b21dc | f00ba47df8893aff87da9e2142fd94faa3ca3cbd | refs/heads/master | 2021-04-06T00:10:18.439145 | 2018-03-11T05:42:20 | 2018-03-11T05:42:20 | 124,726,848 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89 | py | from django.dispatch import Signal
user_logged_in = Signal(providing_args=['request']) | [
"[email protected]"
] | |
22c0cf9ad666b05c4b4de4efde05cbcbc5637ef7 | 32cb84dd41e4be24c065bb205f226f9b121a6db2 | /swimmingpool/apps.py | 858653df0fdd5d3f7907fc3a24965f943afd53a9 | [] | no_license | InformatykaNaStart/staszic-sio2 | b38fda84bd8908472edb2097774838ceed08fcfa | 60a127e687ef8216d2ba53f9f03cfaa201c59e26 | refs/heads/master | 2022-06-29T11:09:28.765166 | 2022-06-13T21:56:19 | 2022-06-13T21:56:19 | 115,637,960 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140 | py | from __future__ import unicode_literals
from django.apps import AppConfig
class SwimmingpoolConfig(AppConfig):
name = 'swimmingpool'
| [
"[email protected]"
] | |
4791c02757534b823afff98e8e1f831eef411ee8 | 0cad32e2abd77ba43150b6e6a5f3a056f5d0cb13 | /rec_test/spark_test2.py | 93e887aa7a7d7ee6e6a5cb6a88eb79beec3a727a | [] | no_license | luohuayong/spark | 73cf3720e588cf0c6a1d2f9f6c05387ac8d45295 | 1ff2180ac132405796daa8e5664ec4272f64a2f6 | refs/heads/master | 2020-04-06T06:53:58.201572 | 2016-08-24T11:21:24 | 2016-08-24T11:21:24 | 63,468,310 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 734 | py | from pyspark.mllib.recommendation import ALS,Rating
from pyspark import SparkContext
from pyspark.sql import SQLContext,Row
import os
os.environ['SPARK_CLASSPATH'] = "/home/leo/spark/lib/postgresql-9.3-1103.jdbc41.jar"
sc = SparkContext("local[2]","first spark app")
sqlContext = SQLContext(sc)
url = "jdbc:postgresql://localhost/sparktest?user=leo&password=123123"
data = sqlContext.load(source="jdbc",url=url,dbtable="public.user_rating")
print data.first()
ratings = data.map(lambda x:Rating(int(x[0]),int(x[1]),float(x[2])))
#print ratings.first()
model = ALS.train(ratings,50)
#features = model.userFeatures()
#print features.take(2)
predict = model.predict(2,2)
print predict
top = model.recommendProducts(2,10)
print top
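# Note on the MLlib calls above: model.predict(user, product) returns a single
# predicted rating as a float, and model.recommendProducts(user, num) returns
# the top-num Rating(user, product, rating) objects sorted by predicted rating.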
| [
"[email protected]"
] | |
2f45c288c1a0d41d6ff5a1c1d40c94d3c9c32698 | 2e86ab87cb86ae4ffed162bc3e1109da7b362915 | /supervisor/dbus/rauc.py | 0a29112edaff028d4afed4370813fd5ff60e637c | [
"Apache-2.0"
] | permissive | swipswaps/supervisor | c5cae4ecad4d9bc53191c3882f4337d3edc5219a | 2d294f68418fbbcdbad776cdd9c614ef952074c4 | refs/heads/main | 2023-01-24T13:18:51.463358 | 2020-11-30T17:00:12 | 2020-11-30T17:00:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,321 | py | """D-Bus interface for rauc."""
import logging
from typing import Optional
from ..exceptions import DBusError, DBusInterfaceError
from ..utils.gdbus import DBus
from .const import (
DBUS_ATTR_BOOT_SLOT,
DBUS_ATTR_COMPATIBLE,
DBUS_ATTR_LAST_ERROR,
DBUS_ATTR_OPERATION,
DBUS_ATTR_VARIANT,
DBUS_NAME_RAUC,
DBUS_NAME_RAUC_INSTALLER,
DBUS_NAME_RAUC_INSTALLER_COMPLETED,
DBUS_OBJECT_BASE,
RaucState,
)
from .interface import DBusInterface
from .utils import dbus_connected
_LOGGER: logging.Logger = logging.getLogger(__name__)
class Rauc(DBusInterface):
"""Handle D-Bus interface for rauc."""
def __init__(self):
"""Initialize Properties."""
self._operation: Optional[str] = None
self._last_error: Optional[str] = None
self._compatible: Optional[str] = None
self._variant: Optional[str] = None
self._boot_slot: Optional[str] = None
async def connect(self):
"""Connect to D-Bus."""
try:
self.dbus = await DBus.connect(DBUS_NAME_RAUC, DBUS_OBJECT_BASE)
except DBusError:
_LOGGER.warning("Can't connect to rauc")
except DBusInterfaceError:
_LOGGER.warning("Host has no rauc support. OTA updates have been disabled.")
@property
def operation(self) -> Optional[str]:
"""Return the current (global) operation."""
return self._operation
@property
def last_error(self) -> Optional[str]:
"""Return the last message of the last error that occurred."""
return self._last_error
@property
def compatible(self) -> Optional[str]:
"""Return the system compatible string."""
return self._compatible
@property
def variant(self) -> Optional[str]:
"""Return the system variant string."""
return self._variant
@property
def boot_slot(self) -> Optional[str]:
"""Return the used boot slot."""
return self._boot_slot
@dbus_connected
def install(self, raucb_file):
"""Install rauc bundle file.
Return a coroutine.
"""
return self.dbus.Installer.Install(raucb_file)
@dbus_connected
def get_slot_status(self):
"""Get slot status.
Return a coroutine.
"""
return self.dbus.Installer.GetSlotStatus()
@dbus_connected
def signal_completed(self):
"""Return a signal wrapper for completed signal.
Return a coroutine.
"""
return self.dbus.wait_signal(DBUS_NAME_RAUC_INSTALLER_COMPLETED)
@dbus_connected
def mark(self, state: RaucState, slot_identifier: str):
"""Get slot status.
Return a coroutine.
"""
return self.dbus.Installer.Mark(state, slot_identifier)
@dbus_connected
async def update(self):
"""Update Properties."""
data = await self.dbus.get_properties(DBUS_NAME_RAUC_INSTALLER)
if not data:
_LOGGER.warning("Can't get properties for rauc")
return
self._operation = data.get(DBUS_ATTR_OPERATION)
self._last_error = data.get(DBUS_ATTR_LAST_ERROR)
self._compatible = data.get(DBUS_ATTR_COMPATIBLE)
self._variant = data.get(DBUS_ATTR_VARIANT)
self._boot_slot = data.get(DBUS_ATTR_BOOT_SLOT)
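
# Minimal usage sketch, shown as an assumption rather than part of the module
# (it presumes an asyncio event loop and a host exposing the rauc D-Bus service):
#
#     rauc = Rauc()
#     await rauc.connect()
#     await rauc.update()
#     print(rauc.boot_slot, rauc.operation)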
| [
"[email protected]"
] | |
8882c1a68a42f5b8e201b5424ed071c14c1c054b | 255e19ddc1bcde0d3d4fe70e01cec9bb724979c9 | /dockerized-gists/6737282/snippet.py | 5c2d162fb0a8a82270d9ca2decf243da38c25c92 | [
"MIT"
] | permissive | gistable/gistable | 26c1e909928ec463026811f69b61619b62f14721 | 665d39a2bd82543d5196555f0801ef8fd4a3ee48 | refs/heads/master | 2023-02-17T21:33:55.558398 | 2023-02-11T18:20:10 | 2023-02-11T18:20:10 | 119,861,038 | 76 | 19 | null | 2020-07-26T03:14:55 | 2018-02-01T16:19:24 | Python | UTF-8 | Python | false | false | 638 | py | """
Installs your SSH key on other hosts. A fabfile for lazy people.
"""
from fabric.api import task, run, put, env, cd
# Use sh instead of bash.
env.shell = '/bin/sh -l -c'
@task
def add_ssh_key(identity='~/.ssh/id_rsa.pub'):
# Copy the key over.
REMOTE_PATH = '~/id.pub'
put(identity, REMOTE_PATH)
with cd('~'):
# Make sure the SSH directory is created.
run('mkdir -p .ssh')
        # And append to the authorized keys.
run('cat %(REMOTE_PATH)s >> ~/.ssh/authorized_keys' % locals())
        # Be thorough and leave no trace of this interaction!
run('rm %(REMOTE_PATH)s' % locals())
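
# Example invocation (hypothetical host and key path, for illustration only;
# uses the Fabric 1.x task syntax implied by the imports above):
#   fab -H deploy@example.com add_ssh_key:identity=~/.ssh/id_ed25519.pub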
| [
"[email protected]"
] | |
a13e095a11f32454452ddb7811aeced0983a69bb | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p3BR/R2/benchmark/startQiskit269.py | 0d004daf589a46c1043373aa682a4e2930b9c167 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,721 | py | # qubit number=3
# total number=54
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
# oracle.draw('mpl', filename=(kernel + '-oracle.png'))
return oracle
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the Bernstein-Vazirani circuit
zero = np.binary_repr(0, n)
b = f(zero)
# initial n + 1 bits
input_qubit = QuantumRegister(n+1, "qc")
classicals = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classicals)
# inverse last one (can be omitted if using O_f^\pm)
prog.x(input_qubit[n])
# circuit begin
prog.h(input_qubit[1]) # number=1
prog.h(input_qubit[2]) # number=38
prog.cz(input_qubit[0],input_qubit[2]) # number=39
prog.h(input_qubit[2]) # number=40
prog.h(input_qubit[2]) # number=51
prog.cz(input_qubit[0],input_qubit[2]) # number=52
prog.h(input_qubit[2]) # number=53
prog.h(input_qubit[2]) # number=42
prog.cz(input_qubit[0],input_qubit[2]) # number=43
prog.h(input_qubit[2]) # number=44
prog.h(input_qubit[2]) # number=48
prog.cz(input_qubit[0],input_qubit[2]) # number=49
prog.h(input_qubit[2]) # number=50
prog.x(input_qubit[2]) # number=46
prog.cx(input_qubit[0],input_qubit[2]) # number=47
prog.cx(input_qubit[0],input_qubit[2]) # number=37
prog.cx(input_qubit[0],input_qubit[2]) # number=33
prog.h(input_qubit[2]) # number=25
prog.cz(input_qubit[0],input_qubit[2]) # number=26
prog.h(input_qubit[2]) # number=27
prog.h(input_qubit[1]) # number=7
prog.cz(input_qubit[2],input_qubit[1]) # number=8
prog.rx(0.17592918860102857,input_qubit[2]) # number=34
prog.rx(-0.3989822670059037,input_qubit[1]) # number=30
prog.h(input_qubit[1]) # number=9
prog.h(input_qubit[1]) # number=18
prog.cz(input_qubit[2],input_qubit[1]) # number=19
prog.h(input_qubit[1]) # number=20
prog.y(input_qubit[1]) # number=14
prog.h(input_qubit[1]) # number=22
prog.cz(input_qubit[2],input_qubit[1]) # number=23
prog.h(input_qubit[1]) # number=24
prog.z(input_qubit[2]) # number=3
prog.z(input_qubit[1]) # number=41
prog.x(input_qubit[1]) # number=17
prog.y(input_qubit[2]) # number=5
prog.x(input_qubit[2]) # number=21
# apply H to get superposition
for i in range(n):
prog.h(input_qubit[i])
prog.h(input_qubit[n])
prog.barrier()
# apply oracle O_f
oracle = build_oracle(n, f)
prog.append(
oracle.to_gate(),
[input_qubit[i] for i in range(n)] + [input_qubit[n]])
# apply H back (QFT on Z_2^n)
for i in range(n):
prog.h(input_qubit[i])
prog.barrier()
# measure
return prog
def get_statevector(prog: QuantumCircuit) -> Any:
state_backend = Aer.get_backend('statevector_simulator')
statevec = execute(prog, state_backend).result()
quantum_state = statevec.get_statevector()
qubits = round(log2(len(quantum_state)))
quantum_state = {
"|" + np.binary_repr(i, qubits) + ">": quantum_state[i]
for i in range(2 ** qubits)
}
return quantum_state
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
# Q: which backend should we use?
# get state vector
quantum_state = get_statevector(prog)
# get simulate results
# provider = IBMQ.load_account()
# backend = provider.get_backend(backend_str)
# qobj = compile(prog, backend, shots)
# job = backend.run(qobj)
# job.result()
backend = Aer.get_backend(backend_str)
# transpile/schedule -> assemble -> backend.run
results = execute(prog, backend, shots=shots).result()
counts = results.get_counts()
a = Counter(counts).most_common(1)[0][0][::-1]
return {
"measurements": counts,
# "state": statevec,
"quantum_state": quantum_state,
"a": a,
"b": b
}
def bernstein_test_1(rep: str):
"""011 . x + 1"""
a = "011"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_2(rep: str):
"""000 . x + 0"""
a = "000"
b = "0"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_3(rep: str):
"""111 . x + 1"""
a = "111"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
if __name__ == "__main__":
n = 2
a = "11"
b = "1"
f = lambda rep: \
bitwise_xor(bitwise_dot(a, rep), b)
prog = build_circuit(n, f)
sample_shot =4000
writefile = open("../data/startQiskit269.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
backend = BasicAer.get_backend('qasm_simulator')
circuit1 = transpile(prog, FakeYorktown())
circuit1.h(qubit=2)
circuit1.x(qubit=3)
circuit1.measure_all()
info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
| [
"[email protected]"
] | |
c93c551d735e66aaea519dea5549a52276049c22 | cba54fcdf5333aec90a70a6495f962f1c34818e9 | /Data/ColorLUT/GenerateDefault.py | f2b8d3b2f8c34cee19657f30bf1e1ade794f1c65 | [
"WTFPL"
] | permissive | 2lost4u/RenderPipeline | f89fcb1b40e6fd5e5d001b311c8d6c8048a05c13 | 12131b115775f97927633d71832af65b99eebd09 | refs/heads/master | 2020-04-30T12:36:43.617264 | 2015-10-03T15:39:22 | 2015-10-03T15:39:22 | 44,781,091 | 1 | 0 | null | 2015-10-23T00:23:48 | 2015-10-23T00:23:47 | null | UTF-8 | Python | false | false | 340 | py |
from panda3d.core import PNMImage, Vec3
lutSize = 32
image = PNMImage(lutSize * lutSize, lutSize, 3, 2**16 - 1)
for r in xrange(lutSize):
for g in xrange(lutSize):
for b in xrange(lutSize):
image.setXel(r + b * lutSize, g, r / float(lutSize), g / float(lutSize), b / float(lutSize))
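# Layout note (inferred from the loops above): the 32x32x32 LUT is packed into a
# 1024x32 image; red varies along x within each 32-pixel tile, blue selects the
# tile, and green varies along y.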
image.write("Default.png") | [
"[email protected]"
] | |
b812bca292f39c58ce9c994d00b04ae5e4ff59e6 | 4a2aed648b3890e51732ac7d9ceaaeabfd985f99 | /Linked list/Link2Doubly..py | aca4d43528d5c3464374a148787f7ce4e5e42eb1 | [] | no_license | bhatnagaranshika02/Data-Structures-Python | f544adeebc8af3b313354699dddd85af59889f4c | 74de8a1c895b6ce18094d6c50beaee15a2715404 | refs/heads/master | 2023-03-13T06:47:20.247213 | 2021-03-01T13:58:53 | 2021-03-01T13:58:53 | 258,513,920 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,565 | py | class Node:
def __init__(self,data,prev=0,next=0):
self.data=data
self.prev=prev
self.next=next
class linked_list:
def __init__ (self):
self.start=None
def addfront(self,data):
new_node=Node(data)
new_node.prev=None
new_node.next=self.start
if self.start is not None:
self.start.prev=new_node
else:
self.start=new_node
    def deletefront(self):
        if self.start==None:
            print("List is empty")
        else:
            # advance the head and reset the new head's prev pointer
            self.start=self.start.next
            if self.start is not None:
                self.start.prev=None
    def delete(self,node):
        temp=self.start
        while temp:
            if temp.data==node.data:
                # relink neighbours, handling head and tail nodes safely
                if temp.prev:
                    temp.prev.next=temp.next
                else:
                    self.start=temp.next
                if temp.next:
                    temp.next.prev=temp.prev
                break
            else:
                temp=temp.next
def removedupli(self):
temp=self.start
l=[]
while temp:
if temp.data not in l:
l.append(temp.data)
temp=temp.next
else:
nxt=temp.next
self.delete(temp)
temp=nxt
    def addbefore(self,data,beforewhat):
        new_node=Node(data)
        if self.start==None:
            print("List is empty")
        else:
            temp=self.start
            while temp!=None:
                if temp.data!=beforewhat:
                    temp=temp.next
                else:
                    # splice new_node in front of temp, keeping prev links valid
                    before=temp.prev
                    new_node.next=temp
                    new_node.prev=before
                    temp.prev=new_node
                    if before:
                        before.next=new_node
                    else:
                        self.start=new_node
                    break
    def deletebefore(self,beforewhat):
        if self.start==None or self.start.data==beforewhat:
            print("Cant delete")
        else:
            temp=self.start
            while temp!=None:
                if temp.data==beforewhat:
                    if temp.prev==self.start:
                        # the node before temp is the head: temp becomes the head
                        self.start=temp
                        temp.prev=None
                    else:
                        prevs=temp.prev.prev
                        temp.prev=prevs
                        prevs.next=temp
                    break
                else:
                    temp=temp.next
    def addafter(self,data,addafter):
        new_node=Node(data)
        temp=self.start
        while temp!=None:
            if temp.data==addafter:
                temp2=temp.next
                temp.next=new_node
                new_node.next=temp2
                new_node.prev=temp
                # keep the successor's prev pointer consistent
                if temp2:
                    temp2.prev=new_node
                break
            else:
                temp=temp.next
    def deleteafter(self,afterwhat):
        if self.start==None:
            print("Cant delete")
        else:
            temp=self.start
            while temp.next!=None:
                if temp.data==afterwhat:
                    nexts=temp.next.next
                    temp.next=nexts
                    # keep the successor's prev pointer consistent
                    if nexts:
                        nexts.prev=temp
                    break
                else:
                    temp=temp.next
def addlast(self,data):
new_node=Node(data)
temp=self.start
while temp!=None:
if temp.next==None:
temp.next=new_node
new_node.prev=temp
new_node.next=None
break
else:
temp=temp.next
def traverse(self):
if self.start==None:
print("List is empty")
else:
temp=self.start
while temp!=None:
print(temp.data,end=' ')
temp=temp.next
print("\n")
def count(self):
if self.start==None:
print("List is empty")
else:
temp=self.start
count=0
while temp!=None:
count+=1
temp=temp.next
print("Total count is:",count)
obj=linked_list()
obj.count()
obj.addfront(6)
obj.addafter(7,6)
obj.addafter(8,7)
obj.addafter(9,8)
obj.traverse()
obj.addbefore(88,8)
obj.traverse()
obj.deleteafter(88)
obj.traverse()
obj.deletebefore(7)
obj.traverse()
obj.deleteafter(7)
obj.traverse()
obj.deletebefore(9)
obj.traverse()
obj.addafter(10,9)
obj.addafter(11,10)
obj.addafter(11,11)
obj.traverse()
obj.removedupli()
obj.traverse()
| [
"[email protected]"
] | |
52373f03597ca24ce183d53200144aa87abed7a4 | 9cda2257468d0ef2f7706d4d07099a7b6d897f02 | /letecode/121-240/121-144/139.py | 356b9d3a4011116e168da519c86020449cab5cc9 | [] | no_license | hshrimp/letecode_for_me | 4ba4031803687d7a309da9af4f003a328b48e53e | 6dc5b8968b6bef0186d3806e4aa35ee7b5d75ff2 | refs/heads/master | 2021-12-11T07:37:22.323713 | 2021-08-23T08:16:00 | 2021-08-23T08:16:00 | 204,437,378 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,962 | py | #!/usr/bin/env python
# encoding: utf-8
"""
@author: wushaohong
@time: 2020-05-27 11:25
"""
"""139. 单词拆分
给定一个非空字符串 s 和一个包含非空单词列表的字典 wordDict,判定 s 是否可以被空格拆分为一个或多个在字典中出现的单词。
说明:
拆分时可以重复使用字典中的单词。
你可以假设字典中没有重复的单词。
示例 1:
输入: s = "leetcode", wordDict = ["leet", "code"]
输出: true
解释: 返回 true 因为 "leetcode" 可以被拆分成 "leet code"。
示例 2:
输入: s = "applepenapple", wordDict = ["apple", "pen"]
输出: true
解释: 返回 true 因为 "applepenapple" 可以被拆分成 "apple pen apple"。
注意你可以重复使用字典中的单词。
示例 3:
输入: s = "catsandog", wordDict = ["cats", "dog", "sand", "and", "cat"]
输出: false"""
class Solution:
def wordBreak(self, s: str, wordDict) -> bool:
seq = [s]
while seq:
string = seq.pop()
if string in wordDict:
return True
length = len(string)
for i in range(length - 1, 0, -1):
if string[:i] in wordDict:
seq.append(string[i:])
return False
def wordBreak2(self, s: str, wordDict) -> bool:
n = len(s)
dp = [False] * (n + 1)
dp[0] = True
for i in range(n):
for j in range(i + 1, n + 1):
if dp[i] and s[i:j] in wordDict:
dp[j] = True
return dp[-1]
if __name__ == '__main__':
sol = Solution()
s = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
wordDict = ["a", "aa", "aaa", "aaaa", "aaaaa", "aaaaaa", "aaaaaaa", "aaaaaaaa", "aaaaaaaaa", "aaaaaaaaaa"]
s = "leetcode"
wordDict = ["leet", "code"]
print(sol.wordBreak2(s, wordDict))
| [
"[email protected]"
] | |
7caa93da7217e249804fdfd49de654c404d566ca | 4d9e7292ff108d54c43d16b5c964c916f0971d26 | /network/utils.py | 9f5aec88619ff006f402f79886f3ea7ce146c087 | [
"Apache-2.0",
"MIT"
] | permissive | jun-ge/captcha_trainer | b0504b5504138046e49d59c2f2acc5a2bb694b48 | 06df8201c9a91e550fd0ddc4df55964647b758be | refs/heads/master | 2020-12-03T23:31:06.756164 | 2020-01-01T02:14:40 | 2020-01-01T02:14:40 | 231,522,612 | 1 | 0 | Apache-2.0 | 2020-01-03T06:03:10 | 2020-01-03T06:03:10 | null | UTF-8 | Python | false | false | 9,976 | py | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
# Author: kerlomz <[email protected]>
import math
import tensorflow as tf
from tensorflow.python.keras.regularizers import l2, l1_l2, l1
from config import *
class NetworkUtils(object):
def __init__(self, mode: RunMode):
self.extra_train_ops = []
self.mode: RunMode = mode
self.training = self.mode == RunMode.Trains
@staticmethod
def msra_initializer(kl, dl):
""" MSRA weight initializer
(https://arxiv.org/pdf/1502.01852.pdf)
Keyword arguments:
kl -- kernel size
dl -- filter numbers
"""
stddev = math.sqrt(2. / (kl ** 2 * dl))
return tf.keras.initializers.TruncatedNormal(stddev=stddev)
def reshape_layer(self, input_tensor, loss_func, shape_list):
if loss_func == LossFunction.CTC:
output_tensor = tf.keras.layers.TimeDistributed(
layer=tf.keras.layers.Flatten(),
)(inputs=input_tensor, training=self.training)
elif loss_func == LossFunction.CrossEntropy:
output_tensor = tf.keras.layers.Reshape([shape_list[1], shape_list[2] * shape_list[3]])(input_tensor)
else:
raise exception("The current loss function is not supported.", ConfigException.LOSS_FUNC_NOT_SUPPORTED)
return output_tensor
def cnn_layer(self, index, inputs, filters, kernel_size, strides):
"""卷积-BN-激活函数-池化结构生成器"""
with tf.keras.backend.name_scope('unit-{}'.format(index + 1)):
x = tf.keras.layers.Conv2D(
filters=filters,
kernel_size=kernel_size,
strides=strides[0],
kernel_regularizer=l1(0.01),
kernel_initializer=self.msra_initializer(kernel_size, filters),
padding='same',
name='cnn-{}'.format(index + 1),
)(inputs)
x = tf.layers.BatchNormalization(
fused=True,
renorm_clipping={
'rmax': 3,
'rmin': 0.3333,
'dmax': 5
} if index == 0 else None,
epsilon=1.001e-5,
name='bn{}'.format(index + 1))(x, training=self.training)
x = tf.keras.layers.LeakyReLU(0.01)(x)
x = tf.keras.layers.MaxPooling2D(
pool_size=(2, 2),
strides=strides[1],
padding='same',
)(x)
return x
def dense_building_block(self, input_tensor, growth_rate, name, dropout_rate=None):
"""A building block for a dense block.
# Arguments
input_tensor: input tensor.
growth_rate: float, growth rate at dense layers.
name: string, block label.
# Returns
Output tensor for the block.
"""
# 1x1 Convolution (Bottleneck layer)
x = tf.layers.BatchNormalization(epsilon=1.001e-5, name=name + '_0_bn')(input_tensor, training=self.training)
x = tf.keras.layers.LeakyReLU(0.01, name=name + '_0_relu')(x)
x = tf.keras.layers.Conv2D(
filters=4 * growth_rate,
kernel_size=1,
use_bias=False,
padding='same',
name=name + '_1_conv')(x)
if dropout_rate:
x = tf.keras.layers.Dropout(dropout_rate)(x)
# 3x3 Convolution
x = tf.layers.BatchNormalization(epsilon=1.001e-5, name=name + '_1_bn')(x, training=self.training)
x = tf.keras.layers.LeakyReLU(0.01, name=name + '_1_relu')(x)
x = tf.keras.layers.Conv2D(
filters=growth_rate,
kernel_size=3,
padding='same',
use_bias=False,
name=name + '_2_conv')(x)
if dropout_rate:
x = tf.keras.layers.Dropout(dropout_rate)(x)
x = tf.keras.layers.Concatenate(name=name + '_concat')([input_tensor, x])
return x
def dense_block(self, input_tensor, blocks, name):
"""A dense block.
# Arguments
input_tensor: input tensor.
blocks: integer, the number of building blocks.
name: string, block label.
# Returns conv_block
output tensor for the block.
"""
for i in range(blocks):
input_tensor = self.dense_building_block(input_tensor, 32, name=name + '_block' + str(i + 1))
return input_tensor
def transition_block(self, input_tensor, reduction, name):
"""A transition block.
# Arguments
input_tensor: input tensor.
reduction: float, compression rate at transition layers.
name: string, block label.
# Returns
output tensor for the block.
"""
x = tf.layers.BatchNormalization(epsilon=1.001e-5, name=name + '_bn')(input_tensor, training=self.training)
x = tf.keras.layers.LeakyReLU(0.01)(x)
x = tf.keras.layers.Conv2D(
filters=int(tf.keras.backend.int_shape(x)[3] * reduction),
kernel_size=1,
use_bias=False,
padding='same',
name=name + '_conv')(x)
x = tf.keras.layers.AveragePooling2D(2, strides=2, name=name + '_pool', padding='same')(x)
return x
def residual_building_block(self, input_tensor, kernel_size, filters, stage, block, strides=(2, 2)):
"""A block that has a conv layer at shortcut.
# Arguments
input_tensor: input tensor
kernel_size: default 3, the kernel size of
middle conv layer at main path
filters: list of integers, the filters of 3 conv layer at main path
stage: integer, current stage label, used for generating layer names
block: 'a','b'..., current block label, used for generating layer names
strides: Strides for the first conv layer in the block.
# Returns
Output tensor for the block.
Note that from stage 3,
the first conv layer at main path is with strides=(2, 2)
And the shortcut should have strides=(2, 2) as well
"""
filters1, filters2, filters3 = filters
conv_name_base = 'res' + str(stage) + block + '_branch'
bn_name_base = 'bn' + str(stage) + block + '_branch'
x = tf.keras.layers.Conv2D(
filters=filters1,
kernel_size=(1, 1),
strides=strides,
kernel_initializer='he_normal',
padding='same',
name=conv_name_base + '2a')(input_tensor)
x = tf.layers.BatchNormalization(name=bn_name_base + '2a')(x, training=self.training)
x = tf.keras.layers.LeakyReLU(0.01)(x)
x = tf.keras.layers.Conv2D(
filters=filters2,
kernel_size=kernel_size,
padding='same',
kernel_initializer='he_normal',
name=conv_name_base + '2b')(x)
x = tf.layers.BatchNormalization(name=bn_name_base + '2b')(x, training=self.training)
x = tf.keras.layers.LeakyReLU(0.01)(x)
x = tf.keras.layers.Conv2D(
filters=filters3,
kernel_size=(1, 1),
kernel_initializer='he_normal',
padding='same',
name=conv_name_base + '2c')(x)
x = tf.layers.BatchNormalization(name=bn_name_base + '2c')(x, training=self.training)
shortcut = tf.keras.layers.Conv2D(
filters=filters3,
kernel_size=(1, 1),
strides=strides,
kernel_initializer='he_normal',
padding='same',
name=conv_name_base + '1')(input_tensor)
shortcut = tf.layers.BatchNormalization(name=bn_name_base + '1')(shortcut, training=self.training)
x = tf.keras.layers.add([x, shortcut])
x = tf.keras.layers.LeakyReLU(0.01)(x)
return x
def identity_block(self, input_tensor, kernel_size, filters, stage, block):
"""The identity block is the block that has no conv layer at shortcut.
# Arguments
input_tensor: input tensor
kernel_size: default 3, the kernel size of
middle conv layer at main path
filters: list of integers, the filters of 3 conv layer at main path
stage: integer, current stage label, used for generating layer names
block: 'a','b'..., current block label, used for generating layer names
# Returns
Output tensor for the block.
"""
filters1, filters2, filters3 = filters
bn_axis = 3
conv_name_base = 'res' + str(stage) + block + '_branch'
bn_name_base = 'bn' + str(stage) + block + '_branch'
x = tf.keras.layers.Conv2D(
filters=filters1,
kernel_size=(1, 1),
kernel_initializer='he_normal',
padding='same',
name=conv_name_base + '2a'
)(input_tensor)
x = tf.layers.BatchNormalization(
axis=bn_axis,
name=bn_name_base + '2a'
)(x, training=self.training)
x = tf.keras.layers.LeakyReLU(0.01)(x)
x = tf.keras.layers.Conv2D(
filters=filters2,
kernel_size=kernel_size,
padding='same',
kernel_initializer='he_normal',
name=conv_name_base + '2b'
)(x)
x = tf.layers.BatchNormalization(
axis=bn_axis, name=bn_name_base + '2b'
)(x, training=self.training)
x = tf.keras.layers.LeakyReLU(0.01)(x)
x = tf.keras.layers.Conv2D(
filters=filters3,
kernel_size=(1, 1),
padding='same',
kernel_initializer='he_normal',
name=conv_name_base + '2c')(x)
x = tf.layers.BatchNormalization(axis=bn_axis, name=bn_name_base + '2c')(x, training=self.training)
x = tf.keras.layers.add([x, input_tensor])
x = tf.keras.layers.LeakyReLU(0.01)(x)
return x
| [
"[email protected]"
] | |
b04bd6e4fc179a3156752e3180ad68f515dc2426 | d52cb4c2e880875944b14da0b8a9542235942ac8 | /geeksforgeeks/strings/13_remove_common_characters_and_concatenate.py | f25b216069ea2a31f0095869d2573c87de6a3fb4 | [] | no_license | saparia-data/data_structure | fbd61535b68f92143b2cb2679377c0f56f424670 | 2e8700cfdaeefe0093e5b4fb2704b1abcd300d02 | refs/heads/master | 2023-05-08T18:54:52.250941 | 2021-06-04T05:44:29 | 2021-06-04T05:44:29 | 296,071,146 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,919 | py | '''
Given two strings s1 and s2, modify string s1 such that all the common characters of s1 and s2 are removed
and the uncommon characters of s1 and s2 are concatenated.
Note: If no modification is possible print -1.
Input:
The first line consists of an integer T i.e number of test cases.
The first line of each test case consists of a string s1.The next line consists of a string s2.
Output:
Print the concatenated string.
User Task:
The task is to complete the function concatenatedString() which removes the common characters, concatenates, and returns the string.
Constraints:
1 <= T <= 200
1 <= |Length of Strings| <= 104
Example:
Input:
2
aacdb
gafd
abcs
cxzca
Output:
cbgf
bsxz
Explanation:
Testcase 1:The common characters of s1 and s2 are: a, d.The uncommon characters of s1 and s2 are: c, b, g and f.
Thus the modified string with uncommon characters concatenated is: cbgf.
Testcase 2: The common characters of s1 and s2 are: a,c . The uncommon characters of s1 and s2 are: b,s,x and z.
Thus the modified string with uncommon characters concatenated is: bsxz.
hints:
1)
The idea is to use an array of size 26 (or a hash) where key is character and value is number of strings in which character is present.
If a character is present in one string, then count is 1, else if character is present in both strings, count is 2.
2)
-Initialize result as empty string.
-Push all characters of 2nd string in map with count as 1.
-Traverse first string and append all those characters to result that are not present in map. Characters that are present in map, make count 2.
-Traverse second string and append all those characters to result whose count is 1.
'''
def concatenatedString(s,p):
res = ""
for i in s:
if(i in p):
continue
else:
res += i
for j in p:
if(j in s):
continue
else:
res += j
if(len(res)):
return res
return -1
#Another method
def concatenatedString1(s,p):
occurrence_s=[0 for i in range(256)]
occurrence_p=[0 for i in range(256)]
# storing the count of chars in s1
for i in range(len(s)):
occurrence_s[ord(s[i])]+=1
# storing the count of chars in p
for i in range(len(p)):
occurrence_p[ord(p[i])]+=1
concatenated_str=""
# Find characters of s1 that are not
# present in s2 and append to result
for i in range(len(s)):
if(occurrence_p[ord(s[i])]==0):
concatenated_str+=s[i]
# Find characters of s2 that are not
# present in s1.
for i in range(len(p)):
if(occurrence_s[ord(p[i])]==0):
concatenated_str+=p[i]
if(len(concatenated_str)):
return concatenated_str
return -1
s = "abcs"
p = "cxzca"
print(concatenatedString(s, p))
print(concatenatedString1(s, p)) | [
"[email protected]"
] | |
5c6d0e07034d72dd7133d62d6aa98fa6ad3f90d7 | 13d222bc3332378d433835914da26ed16b583c8b | /src/pemjh/challenge81/__init__.py | f9bee2a0acfcdc2b1206d732b54ae7657c08b554 | [] | no_license | mattjhussey/pemjh | c27a09bab09cd2ade31dc23fffac07374bea9366 | 2ebb0a525d2d1c0ee28e83fdc2638c2bec97ac99 | refs/heads/master | 2023-04-16T03:08:59.390698 | 2023-04-08T10:54:00 | 2023-04-08T10:54:00 | 204,912,926 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 67 | py | """ challenge81 """
from .main import main
__all__ = ['main']
| [
"[email protected]"
] | |
2706d20d8b42846ef0a92769ab0e1862cc9f67d6 | 7a11b6d4063685cb08b074ac8d08ab6e1d045ff5 | /src/11_list_filter_map.py | fe22c07e7451ee77e9be1a6eb40d958c54a0fd5e | [] | no_license | slavaider/python | 8a9f5769bd519e0e270c5814ef46ec5c653ab7c1 | f98896b8e9dd93fe7d2b4a495b67704ef5f08373 | refs/heads/master | 2023-03-02T15:12:56.218871 | 2021-02-07T16:20:08 | 2021-02-07T16:20:32 | 301,493,207 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 329 | py | def square(number):
return number * number
def is_a(age): return age > 18
if __name__ == '__main__':
list1 = [1, 2, 3, 4, 5]
list2 = list(map(square, list1))
print(list2)
l1 = [10, 12, 19, 20]
l2 = list(filter(lambda age: age > 18, l1))
print(l2)
l2 = [i for i in l1 if is_a(i)]
print(l2)
| [
"[email protected]"
] | |
230d7556304b6d1e9e84253b384f89a7032e7e7c | 04ea3d2a060151fc21179ca28373bfa516adcbfe | /deadly_corridor_scenario/data_preprocess.py | 80aad30a1cf5cd87fb70ff51165b4c1f57794052 | [] | no_license | zhouziqunzzq/vizdoom-naive-agent | 394bdea6f88272603a5e6bf8b012e719d6f24701 | 4406b280567eef169d448fa77266812094aea736 | refs/heads/main | 2023-03-04T19:21:13.129770 | 2021-02-06T11:27:21 | 2021-02-06T11:27:21 | 333,699,839 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,593 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @File : data_preprocess.py
# @Author: harry
# @Date : 1/27/21 7:05 PM
# @Desc : Data preprocessor of raw play data
import numpy as np
import glob
import os
import pickle
import matplotlib.pyplot as plt
import matplotlib.colors as colors
from constants import *
from typing import Any, List, Tuple, Optional
def load_raw_data(path: str) -> List[Tuple['np.array', List[float], float]]:
"""
Load multiple raw play data from path and merge them together.
:param path: the path containing multiple raw data pickles.
:return: merged list of raw data.
"""
if not os.path.exists(path):
raise RuntimeError("raw data path not exist")
history = list()
h_list = glob.glob(os.path.join(path, '*.pkl'))
for h in h_list:
with open(h, 'rb') as f:
history.extend(pickle.load(f))
return history
def preprocess_raw_data(history: List[Tuple['np.array', List[float], float]]) \
-> ('np.array', 'np.array'):
"""
Filtering, normalizing, and concatenating raw data into np arrays.
:param history: a list of raw data.
:return: images, labels.
"""
imgs = list()
labels = list()
for h in history:
img, label, _ = h
# determine label
l_int = 0
label = list(np.array(label, dtype=bool))
try:
l_int = ACTION_LIST.index(label)
except ValueError:
# for now we skip sample whose action is not in ACTION_LIST
continue
# skip non-action sample
# if l_int == 0:
# continue
# normalize img
img = img.astype(np.float)
img /= 255.0
imgs.append(img)
labels.append(l_int)
return np.stack(imgs, axis=0), np.array(labels, dtype=np.int)
def test_data_preprocess():
his = load_raw_data(RAW_DATA_PATH)
print('num of raw data samples: ', len(his))
# samp_i = np.random.randint(0, len(his))
# print(his[samp_i][0])
# print(his[samp_i][1])
# print(his[samp_i][2])
# print(his[samp_i][0].shape)
# im = plt.imshow(his[samp_i][0], cmap='gray')
# plt.show()
x_train, y_train = preprocess_raw_data(his)
assert x_train.shape[0] == y_train.shape[0]
print('x_train.shape: ', x_train.shape)
print('y_train.shape: ', y_train.shape)
samp_i = np.random.randint(0, x_train.shape[0])
print('label of the displayed example: ', y_train[samp_i])
im = plt.imshow(x_train[samp_i], cmap='gray')
plt.show()
if __name__ == '__main__':
test_data_preprocess()
| [
"[email protected]"
] | |
cd0710aad9c6efbdf3cb3e9dcc4e8904d93f7c7a | 0754e2e7aa1ffb90b54d563ce5a9317e41cfebf9 | /Algorithm/Programmers/예선문제_0912_1.py | bf343580cadc687be1aa13eba4c93677632489b6 | [] | no_license | ChaeMyungSeock/Study | 62dcf4b13696b1f483c816af576ea8883c57e531 | 6f726a6ecb43387e4a3b9d068a9c491b115c74c0 | refs/heads/master | 2023-01-24T20:59:52.053394 | 2020-12-07T14:54:34 | 2020-12-07T14:54:34 | 263,255,793 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,832 | py | import re
def solution(new_id):
new_id = new_id.lower()
new_id = re.sub('[^[a-z0-9-_.]','',new_id)
c = 0
while 1:
if len(new_id) >=2 and new_id[c]=='[':
new_id = new_id[:c] + new_id[c+1:]
c -=1
elif len(new_id) == 1 and new_id[c] == '[':
new_id = ""
if c == len(new_id)-1:
break
c +=1
print(new_id)
b = 0
while 1:
if len(new_id)>=1 and b>=1 and new_id[b]=='.':
if new_id[b-1] == '.':
new_id = new_id[:b] + new_id[b+1:]
b -=1
if b == len(new_id)-1:
break
b +=1
a=0
while 1:
if a == 0 and new_id[a]=='.':
if len(new_id)>=2:
new_id = new_id[1:]
a = -1
else:
new_id = ""
break
if new_id[0] != '.' :
break
a += 1
if len(new_id)>=2 and new_id[-1] == '.':
new_id = new_id[:-1]
elif len(new_id) == 1 and new_id[-1] == '.':
new_id = ""
if len(new_id) == 0:
new_id += "a"
elif len(new_id) >=16:
new_id = new_id[:15]
if new_id[-1] == '.':
new_id = new_id[:-1]
if len(new_id)<=2:
while 1:
new_id += new_id[-1]
if new_id[-1] == '.':
new_id = new_id[:-1]
if len(new_id) == 3:
break
return new_id
new_id ="=+[{]}:?,<>/-_.~!@#$%^&*()=+[{]}:?,<>/"
print(solution(new_id))
# new_id ="z-+.^."
# print(solution(new_id))
# new_id ="=.="
# print(solution(new_id))
# new_id ="123_.def"
# print(solution(new_id))
# new_id ="abcdefghijklmn.p"
# print(solution(new_id))
| [
"[email protected]"
] | |
24ea2a5d3090b4d31a336fddafb320974492ea58 | f13acd0d707ea9ab0d2f2f010717b35adcee142f | /ABC/abc151-abc200/abc154/a.py | 89362b6b10c2cd6de4bd06bbc15544c91d7f434a | [
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] | permissive | KATO-Hiro/AtCoder | 126b9fe89fa3a7cffcbd1c29d42394e7d02fa7c7 | bf43320bc1af606bfbd23c610b3432cddd1806b9 | refs/heads/master | 2023-08-18T20:06:42.876863 | 2023-08-17T23:45:21 | 2023-08-17T23:45:21 | 121,067,516 | 4 | 0 | CC0-1.0 | 2023-09-14T21:59:38 | 2018-02-11T00:32:45 | Python | UTF-8 | Python | false | false | 248 | py | # -*- coding: utf-8 -*-
def main():
s, t = input().split()
a, b = map(int, input().split())
u = input()
if s == u:
print(a - 1, b)
else:
print(a, b - 1)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
df0e112a5ee8e6aa4e73c974f8a5193f413f1d51 | 8c205a39fd82f7f5782c6debeb1715b7ba8ca049 | /te.py | 7d471d8219db933a3a6f55ae1003b9651f6f73a7 | [] | no_license | Omkar02/-Crawler- | 8b96e9475bd24290dc176ce829414798e31fb893 | 6a51007f0b016d6e89f6c9b4294d2d652ce5ba22 | refs/heads/master | 2023-01-04T10:57:22.457112 | 2020-10-27T04:51:55 | 2020-10-27T04:51:55 | 307,585,199 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 61 | py | "This is a Code test File"
def Add(a, b):
return a + b
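# Usage sketch: Add(2, 3) returns 5.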
| [
"[email protected]"
] | |
e523c17f61fc2ea477066fd4d887bc7191e705e5 | d78309688232cf5f411af4eff6b466c8cd30846b | /xutils/examples/tushare_live_feed.py | c9ef42c14431ca68089a602f29c006cfe727850f | [
"Apache-2.0"
] | permissive | huangzhangfeng/x-utils | 5f8c1148789cf3dbbe6dff149562e3fd50c17c7a | 291d92832ee0e0c89bc22e10ecf2f44445e0d300 | refs/heads/master | 2020-06-15T03:31:33.613290 | 2018-04-16T05:43:06 | 2018-04-16T05:43:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 520 | py | # -*- coding: utf-8 -*-
from xutils.bar_builder import (LiveFeed,
BarFrequency)
import tushare as ts
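# Sketch of the feed below: minute-frequency bars for the 'zh500' ticker are
# built by polling tushare's real-time quote API (ts.get_realtime_quotes).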
if __name__ == '__main__':
live_feed = LiveFeed(tickers=['zh500'],
frequency=BarFrequency.MINUTE,
live_quote_arg_func=ts.get_realtime_quotes)
live_feed.start()
while not live_feed.eof():
bars = live_feed.get_next_bar()
if bars is not None:
print(bars['zh500'].date_time, bars['zh500'].price)
| [
"[email protected]"
] | |
e5ff327c9bc201ab0a1fb5930b3270a892f257b5 | 6c5c6871fc5c37247b4059c956f3ac9d7cdeb714 | /.venv/lib/python3.7/site-packages/pip/_internal/network/session.py | 7020dafaf36134220619b496771053d4b203ccaa | [] | no_license | masa48326/homework_c | 6596f91849b86ab4655712e4dddf2805231d5df3 | 86c9b83476ee5b18c5edef473f483a4336428d3c | refs/heads/master | 2020-09-05T16:12:09.051967 | 2019-11-07T04:39:47 | 2019-11-07T04:39:47 | 220,152,904 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,868 | py | """PipSession and supporting code, containing all pip-specific
network request configuration and behavior.
"""
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
import email.utils
import json
import logging
import mimetypes
import os
import platform
import sys
import warnings
from pip import __version__
from pip._internal.network.auth import MultiDomainBasicAuth
from pip._internal.network.cache import SafeFileCache
# Import ssl from compat so the initial import occurs in only one place.
from pip._internal.utils.compat import HAS_TLS, ipaddress, ssl
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.glibc import libc_ver
from pip._internal.utils.misc import (
build_url_from_netloc,
get_installed_version,
parse_netloc,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.urls import url_to_path
from pip._vendor import requests, six, urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.models import Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.urllib3.exceptions import InsecureRequestWarning
if MYPY_CHECK_RUNNING:
from typing import (
Iterator, List, Optional, Tuple, Union,
)
from pip._internal.models.link import Link
SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]
logger = logging.getLogger(__name__)
# Ignore warning raised when using --trusted-host.
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
SECURE_ORIGINS = [
# protocol, hostname, port
# Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
("https", "*", "*"),
("*", "localhost", "*"),
("*", "127.0.0.0/8", "*"),
("*", "::1/128", "*"),
("file", "*", None),
# ssh is always secure.
("ssh", "*", "*"),
] # type: List[SecureOrigin]
# These are environment variables present when running under various
# CI systems. For each variable, some CI systems that use the variable
# are indicated. The collection was chosen so that for each of a number
# of popular systems, at least one of the environment variables is used.
# This list is used to provide some indication of and lower bound for
# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive.
# For more background, see: https://github.com/pypa/pip/issues/5499
CI_ENVIRONMENT_VARIABLES = (
# Azure Pipelines
'BUILD_BUILDID',
# Jenkins
'BUILD_ID',
# AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
'CI',
# Explicit environment variable.
'PIP_IS_CI',
)
def looks_like_ci():
# type: () -> bool
"""
Return whether it looks like pip is running under CI.
"""
# We don't use the method of checking for a tty (e.g. using isatty())
# because some CI systems mimic a tty (e.g. Travis CI). Thus that
# method doesn't provide definitive information in either direction.
return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES)
def user_agent():
"""
Return a string representing the user agent.
"""
data = {
"installer": {"name": "pip", "version": __version__},
"python": platform.python_version(),
"implementation": {
"name": platform.python_implementation(),
},
}
if data["implementation"]["name"] == 'CPython':
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'PyPy':
if sys.pypy_version_info.releaselevel == 'final':
pypy_version_info = sys.pypy_version_info[:3]
else:
pypy_version_info = sys.pypy_version_info
data["implementation"]["version"] = ".".join(
[str(x) for x in pypy_version_info]
)
elif data["implementation"]["name"] == 'Jython':
# Complete Guess
data["implementation"]["version"] = platform.python_version()
elif data["implementation"]["name"] == 'IronPython':
# Complete Guess
data["implementation"]["version"] = platform.python_version()
if sys.platform.startswith("linux"):
from pip._vendor import distro
distro_infos = dict(filter(
lambda x: x[1],
zip(["name", "version", "id"], distro.linux_distribution()),
))
libc = dict(filter(
lambda x: x[1],
zip(["lib", "version"], libc_ver()),
))
if libc:
distro_infos["libc"] = libc
if distro_infos:
data["distro"] = distro_infos
if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
if platform.system():
data.setdefault("system", {})["name"] = platform.system()
if platform.release():
data.setdefault("system", {})["release"] = platform.release()
if platform.machine():
data["cpu"] = platform.machine()
if HAS_TLS:
data["openssl_version"] = ssl.OPENSSL_VERSION
setuptools_version = get_installed_version("setuptools")
if setuptools_version is not None:
data["setuptools_version"] = setuptools_version
# Use None rather than False so as not to give the impression that
# pip knows it is not being run under CI. Rather, it is a null or
# inconclusive result. Also, we include some value rather than no
# value to make it easier to know that the check has been run.
data["ci"] = True if looks_like_ci() else None
user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
if user_data is not None:
data["user_data"] = user_data
return "{data[installer][name]}/{data[installer][version]} {json}".format(
data=data,
json=json.dumps(data, separators=(",", ":"), sort_keys=True),
)
class LocalFSAdapter(BaseAdapter):
def send(self, request, stream=None, timeout=None, verify=None, cert=None,
proxies=None):
pathname = url_to_path(request.url)
resp = Response()
resp.status_code = 200
resp.url = request.url
try:
stats = os.stat(pathname)
except OSError as exc:
resp.status_code = 404
resp.raw = exc
else:
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
resp.headers = CaseInsensitiveDict({
"Content-Type": content_type,
"Content-Length": stats.st_size,
"Last-Modified": modified,
})
resp.raw = open(pathname, "rb")
resp.close = resp.raw.close
return resp
def close(self):
pass
class InsecureHTTPAdapter(HTTPAdapter):
def cert_verify(self, conn, url, verify, cert):
conn.cert_reqs = 'CERT_NONE'
conn.ca_certs = None
class PipSession(requests.Session):
timeout = None # type: Optional[int]
def __init__(self, *args, **kwargs):
"""
:param trusted_hosts: Domains not to emit warnings for when not using
HTTPS.
"""
retries = kwargs.pop("retries", 0)
cache = kwargs.pop("cache", None)
trusted_hosts = kwargs.pop("trusted_hosts", []) # type: List[str]
index_urls = kwargs.pop("index_urls", None)
super(PipSession, self).__init__(*args, **kwargs)
# Namespace the attribute with "pip_" just in case to prevent
# possible conflicts with the base class.
self.pip_trusted_origins = [] # type: List[Tuple[str, Optional[int]]]
# Attach our User Agent to the request
self.headers["User-Agent"] = user_agent()
# Attach our Authentication handler to the session
self.auth = MultiDomainBasicAuth(index_urls=index_urls)
# Create our urllib3.Retry instance which will allow us to customize
# how we handle retries.
retries = urllib3.Retry(
# Set the total number of retries that a particular request can
# have.
total=retries,
# A 503 error from PyPI typically means that the Fastly -> Origin
# connection got interrupted in some way. A 503 error in general
# is typically considered a transient error so we'll go ahead and
# retry it.
# A 500 may indicate transient error in Amazon S3
# A 520 or 527 - may indicate transient error in CloudFlare
status_forcelist=[500, 503, 520, 527],
# Add a small amount of back off between failed requests in
# order to prevent hammering the service.
backoff_factor=0.25,
)
# Check to ensure that the directory containing our cache directory
# is owned by the user current executing pip. If it does not exist
# we will check the parent directory until we find one that does exist.
if cache and not check_path_owner(cache):
logger.warning(
"The directory '%s' or its parent directory is not owned by "
"the current user and the cache has been disabled. Please "
"check the permissions and owner of that directory. If "
"executing pip with sudo, you may want sudo's -H flag.",
cache,
)
cache = None
# We want to _only_ cache responses on securely fetched origins. We do
# this because we can't validate the response of an insecurely fetched
# origin, and we don't want someone to be able to poison the cache and
# require manual eviction from the cache to fix it.
if cache:
secure_adapter = CacheControlAdapter(
cache=SafeFileCache(cache),
max_retries=retries,
)
else:
secure_adapter = HTTPAdapter(max_retries=retries)
# Our Insecure HTTPAdapter disables HTTPS validation. It does not
# support caching (see above) so we'll use it for all http:// URLs as
# well as any https:// host that we've marked as ignoring TLS errors
# for.
insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
# Save this for later use in add_insecure_host().
self._insecure_adapter = insecure_adapter
self.mount("https://", secure_adapter)
self.mount("http://", insecure_adapter)
# Enable file:// urls
self.mount("file://", LocalFSAdapter())
for host in trusted_hosts:
self.add_trusted_host(host, suppress_logging=True)
def add_trusted_host(self, host, source=None, suppress_logging=False):
# type: (str, Optional[str], bool) -> None
"""
:param host: It is okay to provide a host that has previously been
added.
:param source: An optional source string, for logging where the host
string came from.
"""
if not suppress_logging:
msg = 'adding trusted host: {!r}'.format(host)
if source is not None:
msg += ' (from {})'.format(source)
logger.info(msg)
host_port = parse_netloc(host)
if host_port not in self.pip_trusted_origins:
self.pip_trusted_origins.append(host_port)
self.mount(build_url_from_netloc(host) + '/', self._insecure_adapter)
if not host_port[1]:
# Mount wildcard ports for the same host.
self.mount(
build_url_from_netloc(host) + ':',
self._insecure_adapter
)
def iter_secure_origins(self):
# type: () -> Iterator[SecureOrigin]
for secure_origin in SECURE_ORIGINS:
yield secure_origin
for host, port in self.pip_trusted_origins:
yield ('*', host, '*' if port is None else port)
def is_secure_origin(self, location):
# type: (Link) -> bool
# Determine if this url used a secure transport mechanism
parsed = urllib_parse.urlparse(str(location))
origin_protocol, origin_host, origin_port = (
parsed.scheme, parsed.hostname, parsed.port,
)
# The protocol to use to see if the protocol matches.
# Don't count the repository type as part of the protocol: in
# cases such as "git+ssh", only use "ssh". (I.e., Only verify against
# the last scheme.)
origin_protocol = origin_protocol.rsplit('+', 1)[-1]
# Determine if our origin is a secure origin by looking through our
# hardcoded list of secure origins, as well as any additional ones
# configured on this PackageFinder instance.
for secure_origin in self.iter_secure_origins():
secure_protocol, secure_host, secure_port = secure_origin
if origin_protocol != secure_protocol and secure_protocol != "*":
continue
try:
# We need to do this decode dance to ensure that we have a
# unicode object, even on Python 2.x.
addr = ipaddress.ip_address(
origin_host
if (
isinstance(origin_host, six.text_type) or
origin_host is None
)
else origin_host.decode("utf8")
)
network = ipaddress.ip_network(
secure_host
if isinstance(secure_host, six.text_type)
# setting secure_host to proper Union[bytes, str]
# creates problems in other places
else secure_host.decode("utf8") # type: ignore
)
except ValueError:
# We don't have both a valid address or a valid network, so
# we'll check this origin against hostnames.
if (
origin_host and
origin_host.lower() != secure_host.lower() and
secure_host != "*"
):
continue
else:
# We have a valid address and network, so see if the address
# is contained within the network.
if addr not in network:
continue
# Check to see if the port matches.
if (
origin_port != secure_port and
secure_port != "*" and
secure_port is not None
):
continue
# If we've gotten here, then this origin matches the current
# secure origin and we should return True
return True
# If we've gotten to this point, then the origin isn't secure and we
# will not accept it as a valid location to search. We will however
# log a warning that we are ignoring it.
logger.warning(
"The repository located at %s is not a trusted or secure host and "
"is being ignored. If this repository is available via HTTPS we "
"recommend you use HTTPS instead, otherwise you may silence "
"this warning and allow it anyway with '--trusted-host %s'.",
origin_host,
origin_host,
)
return False
def request(self, method, url, *args, **kwargs):
# Allow setting a default timeout on a session
kwargs.setdefault("timeout", self.timeout)
# Dispatch the actual request
return super(PipSession, self).request(method, url, *args, **kwargs)
| [
"[email protected]"
] | |
61a35ed86f7dcce431d0db4afc8fc306fcacbf12 | c430b5b5b45f2fe94f7110fd56922bcf55c61269 | /ixia/hlapi/4.98.122.39/library/common/ixiangpf/python/ixiangpf_commands/emulation_lacp_info.py | 81a80b9fc2ad01c12e3b5d986294a671fe9f9876 | [] | no_license | rayjiang2013/RF | 08189671398095d864d41ea5a3af1958e8eb6252 | 936d32629061c4685d8e18b5cf9f001255514ec1 | refs/heads/master | 2016-08-09T11:38:51.990559 | 2016-01-06T20:20:02 | 2016-01-06T20:20:02 | 47,857,352 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,096 | py | # -*- coding: utf-8 -*-
import sys
from ixiaerror import IxiaError
from ixiangpf import IxiaNgpf
from ixiautil import PartialClass, make_hltapi_fail
class IxiaNgpf(PartialClass, IxiaNgpf):
def emulation_lacp_info(self, mode, **kwargs):
r'''
#Procedure Header
Name:
emulation_lacp_info
Description:
            Retrieves information about the LACP protocol.
            The following operations are supported:
            aggregate_stats, learned_info, clear_stats, configuration
Synopsis:
emulation_lacp_info
-mode CHOICES aggregate_stats
CHOICES global_learned_info
CHOICES per_port
CHOICES per_device_group
CHOICES per_lag_statistics
CHOICES clear_stats
CHOICES configuration
[-session_type CHOICES lacp staticLag
DEFAULT lacp]
[-handle ANY]
[-port_handle REGEXP ^[0-9]+/[0-9]+/[0-9]+$]
Arguments:
-mode
-session_type
                The LACP session type to be emulated. CHOICES: lacp staticLag.
-handle
-port_handle
Return Values:
$::SUCCESS | $::FAILURE
key:status value:$::SUCCESS | $::FAILURE
On status of failure, gives detailed information.
key:log value:On status of failure, gives detailed information.
key:Aggregate stats: value:
key:<port_handle>.aggregate.port_name value:
key:<port_handle>.aggregate.sessions_up value:
key:<port_handle>.aggregate.sessions_flap value:
key:<port_handle>.aggregate.sessions_not_started value:
key:<port_handle>.aggregate.sessions_down value:
key:<port_handle>.aggregate.link_state value:
key:<port_handle>.aggregate.lag_id value:
key:<port_handle>.aggregate.total_lag_member_ports value:
key:<port_handle>.aggregate.lag_member_ports_up value:
key:<port_handle>.aggregate.lacpdu_tx value:
key:<port_handle>.aggregate.lacpdu_rx value:
key:<port_handle>.aggregate.lacpu_malformed_rx value:
key:<port_handle>.aggregate.marker_pdu_tx value:
key:<port_handle>.aggregate.marker_pdu_rx value:
key:<port_handle>.aggregate.marker_res_pdu_tx value:
key:<port_handle>.aggregate.marker_res_pdu_rx value:
key:<port_handle>.aggregate.marker_res_timeout_count value:
key:<port_handle>.aggregate.lacpdu_tx_rate_violation_count value:
key:<port_handle>.aggregate.marker_pdu_tx_rate_violation_count value:
key:<port_handle>.aggregate.lag_id value:
key:lag_id value:
key:actor_system_id value:
key:actor_system_priority value:
key:actor_port_number value:
key:administrative_key value:
key:actor_operationalkey value:
key:actor_lacp_activity value:
key:actor_lacp_activity value:
key:actor_lacpdu_timeout value:
key:actor_aggregration_enabled value:
key:actor_synchronized_flag value:
key:actor_synchronized_flag value:
key:actor_collecting_flag value:
key:actor_defaulted_flag value:
key:actor_expired_flag value:
key:link_aggregration_status value:
key:partner_system_id value:
key:partner_system_priority value:
key:partner_port_number value:
key:partner_port_priority value:
key:partner_operational_key value:
key:partner_lacp_activity value:
key:partner_lacpdu_timeout value:
key:partner_aggregration value:
key:partner_synchronized_flag value:
key:partner_collecting_flag value:
key:partner_distributing_flag value:
key:partner_defaulted_flag value:
key:partner_expired_flag value:
key:collectors_max_delay value:
key:other_lag_member_count value:
key:details value:
Examples:
Sample Input:
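                (illustrative sketch only; argument values depend on the
                actual test-bed topology)
                result = ixiangpf.emulation_lacp_info(
                    mode        = 'aggregate_stats',
                    port_handle = '1/1/1')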
Sample Output:
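                result is a keyed list such as {'status': '1', ...} on
                success (illustrative; the available keys are listed under
                Return Values above)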
Notes:
See Also:
'''
hlpy_args = locals().copy()
hlpy_args.update(kwargs)
del hlpy_args['self']
del hlpy_args['kwargs']
not_implemented_params = []
mandatory_params = []
file_params = []
try:
return self.__execute_command(
'emulation_lacp_info',
not_implemented_params, mandatory_params, file_params,
hlpy_args
)
except (IxiaError, ):
e = sys.exc_info()[1]
return make_hltapi_fail(e.message)
| [
"[email protected]"
] | |
bfd39877647de6747a51adede1939f0ff10c6d7c | 871e1b0295c0fbbfca8191236d674866cf62ff01 | /TrainEffNetB5_5FP_crossentropyloss_Block5_2FC_nonCrop_3.py | a4f1aba22a0059253ac6987c0239cd90c9ad026d | [] | no_license | Peckkie/USAI_ABnormal_Screening | ce31a813e9303a7d43def912ab731cc633268cb7 | 82cd63ac9ab72fbe68eae254c15c7bf7ef906022 | refs/heads/master | 2023-02-16T13:32:33.678500 | 2021-01-07T02:36:35 | 2021-01-07T02:36:35 | 277,981,638 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,871 | py | import PIL
from keras import models
from keras import layers
from tensorflow.keras import optimizers
import os
import glob
import shutil
import sys
import numpy as np
from skimage.io import imread
import matplotlib.pyplot as plt
from tensorflow.keras import callbacks
import pandas as pd
os.environ["CUDA_VISIBLE_DEVICES"]="1"
from PIL import Image, ImageFile
ImageFile.LOAD_TRUNCATED_IMAGES = True
batch_size = 16
epochs = 200
#Train
dataframe = pd.read_csv('/home/yupaporn/codes/USAI/traindf_fold1_3.csv')
base_dir = '/media/tohn/SSD/Images/Image1'
os.chdir(base_dir)
train_dir = os.path.join(base_dir, 'train')
#validation
valframe = pd.read_csv( '/home/yupaporn/codes/USAI/validationdf_fold1_3.csv')
validation_dir = os.path.join(base_dir, 'validation')
#load model
import efficientnet.tfkeras
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.models import load_model
model_dir = '/media/tohn/SSD/ModelTrainByImages/R1_1/models/B5_R1_5FP_relu_2FC_nonCrop_3.h5'
model = load_model(model_dir)
height = width = model.input_shape[1]
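# Training images get random rotation/shift/shear/zoom and brightness jitter;
# validation images are only rescaled (test_datagen below).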
train_datagen = ImageDataGenerator(
rescale=1./255,
rotation_range=30,
width_shift_range=0.2,
height_shift_range=0.2,
brightness_range=[0.5,1.5],
shear_range=0.4,
zoom_range=0.2,
horizontal_flip=False,
fill_mode='nearest')
test_datagen = ImageDataGenerator(rescale=1./255)
train_generator = train_datagen.flow_from_dataframe(
dataframe = dataframe,
directory = train_dir,
x_col = 'Path Full',
y_col = 'Views',
target_size = (height, width),
batch_size=batch_size,
color_mode= 'rgb',
class_mode='categorical')
test_generator = test_datagen.flow_from_dataframe(
dataframe = valframe,
directory = validation_dir,
x_col = 'Path Full',
y_col = 'Views',
target_size = (height, width),
batch_size=batch_size,
color_mode= 'rgb',
class_mode='categorical')
os.chdir('/media/tohn/SSD/ModelTrainByImages/R2_1')
root_logdir = '/media/tohn/SSD/ModelTrainByImages/R2_1/my_logs_block52_5FP_1FC_nonCrop_3'
def get_run_logdir():
import time
run_id = time.strftime("run_%Y_%m_%d_%H_%M_%S")
return os.path.join(root_logdir,run_id)
# os.makedirs("./models_6", exist_ok=True)
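# avoid_error skips batches whose images fail to load (e.g. truncated files)
# instead of aborting training; note the bare except also hides other errors.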
def avoid_error(gen):
while True:
try:
data, labels = next(gen)
yield data, labels
except:
pass
# Unfreeze the top of the network (from block5a onward) for fine-tuning
model.trainable = True
set_trainable = False
for layer in model.layers:
if layer.name == 'block5a_se_excite':
set_trainable = True
if set_trainable:
layer.trainable = True
else:
layer.trainable = False
print('This is the number of trainable weight tensors '
      'after unfreezing from block5a:', len(model.trainable_weights))
model.compile(loss='categorical_crossentropy',
optimizer=optimizers.RMSprop(lr=2e-5),
metrics=['acc'])
run_logdir = get_run_logdir()
tensorboard_cb = callbacks.TensorBoard(run_logdir)
#early_stop_cb = callbacks.EarlyStopping(monitor='val_acc', patience=66, mode= 'max')
history = model.fit_generator(
avoid_error(train_generator),
steps_per_epoch= len(dataframe)//batch_size,
epochs=epochs,
validation_data=avoid_error(test_generator),
validation_steps= len(valframe) //batch_size,
callbacks = [tensorboard_cb])
model.save('./models/B5_R2b5_5FP_relu_2FC_nonCrop_3.h5')
| [
"[email protected]"
] | |
51c6159b731d8e3312ebfcff8878205082045ed9 | f921ba30c773da9772293f69aa88c87b23929cc6 | /src/main.py | 80b5bb6dcf36d3ae1797299e1f62a20284465be6 | [
"MIT"
] | permissive | R-Mielamud/Telegram_BooksDelivery | 56349673b0bdb87204c35d4bce2cdb01d6d18722 | 0745e60a4541f38fba8ac378185aff558ec95147 | refs/heads/master | 2023-03-01T03:26:42.038174 | 2021-02-11T17:52:52 | 2021-02-11T17:52:52 | 326,045,333 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,056 | py | from telebot import TeleBot as Bot
from helpers.conversation import ConversationsStorage, Conversation
from helpers.messaging import parse_manifest, send_until_question
from api import UsersAPI, OrdersAPI, RequisitesAPI, BillsAPI
from constants import BOT_TOKEN
bot = Bot(BOT_TOKEN)
welcome, manifest = parse_manifest()
conversations = ConversationsStorage()
users = UsersAPI()
orders = OrdersAPI()
requisites = RequisitesAPI()
bills = BillsAPI()
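# Conversation flow: /start (re)registers the Telegram user and clears any
# stored phone number; every other message advances the manifest-driven
# conversation until an action ("order", "requisites" or "bill") completes,
# at which point the matching API record is created.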
@bot.message_handler(commands=["start"])
def on_start(command):
uid = command.from_user.id
user = users.get_by_messenger_id(uid)
if not user:
users.create(messenger_id=uid, messenger="Telegram")
elif user.phone:
users.partial_update(user.id, phone=None)
bot.send_message(command.chat.id, welcome)
@bot.message_handler(content_types=["text"])
def on_message(message):
uid = message.from_user.id
prev_answer = message.text
user = users.get_by_messenger_id(uid)
if not user:
user = users.create(messenger_id=uid, phone=prev_answer, messenger="Telegram")
prev_answer = None
elif not user.phone:
users.partial_update(user.id, phone=prev_answer)
prev_answer = None
send = lambda text: bot.send_message(message.chat.id, text)
if not conversations.exists(uid):
conversations.add(uid, manifest, default_answers=user.convers_answers_data)
conversation = conversations.get(uid)
conversation, question = send_until_question(send, conversation, prev_answer)
if conversation.answers.stopped:
users.partial_update(user.id, convers_answers_data={})
conversation, _ = send_until_question(send, Conversation(manifest, default_answers={}), None)
elif not question:
update_data = {"convers_answers_data": {}}
action = conversation.answers.get("action")
if action == "order":
orders.create(
books=conversation.answers.get("books"),
user=user.id
)
elif action == "requisites":
result = requisites.create(
delivery_name=conversation.answers.get("delivery_name"),
delivery_phone=conversation.answers.get("delivery_phone"),
delivery_address=conversation.answers.get("delivery_address"),
post_service=conversation.answers.get("post_service")
)
update_data["requisites"] = result.id
elif action == "bill":
bills.create(
amount=conversation.answers.get("amount"),
comment=conversation.answers.get("comment"),
user=user.id
)
conversation, _ = send_until_question(send, Conversation(manifest, default_answers={}), None)
users.partial_update(user.id, **update_data)
elif not question.skip:
users.partial_update(user.id, convers_answers_data=conversation.answers.data)
conversations.set(uid, conversation)
if __name__ == "__main__":
print("Bot started!")
bot.polling()
| [
"[email protected]"
] | |
ffc449b45823b68adcab9b5cbbc47f8bb26e9c3e | b9f0399cf7ea0a66fb76900f0c2ceac2d4859d34 | /app/models.py | 25dafd0a7d71dca36697937b7a3967d96a57e0a2 | [] | no_license | huangtaosdt/QA-website-zsb | eea0fcd6a2415cf5c61f01f6692d39a544ed900a | 518470a3b37d6561797a38de42fe0c81d27c6ceb | refs/heads/master | 2021-09-20T15:19:44.559747 | 2018-08-11T03:53:17 | 2018-08-11T03:53:17 | 100,498,996 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 13,814 | py | from . import db, login_manager
from flask_login import UserMixin, AnonymousUserMixin
from werkzeug.security import generate_password_hash, check_password_hash
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from flask import current_app, request, url_for
from datetime import datetime
import hashlib
from markdown import markdown
import bleach
from app.exceptions import ValidationError
class Follow(db.Model):
__tablename__ = 'follows'
follower_id = db.Column(db.Integer, db.ForeignKey('users.id'), primary_key=True)
followed_id = db.Column(db.Integer, db.ForeignKey('users.id'), primary_key=True)
timestamp = db.Column(db.DateTime, default=datetime.utcnow)
class Role(db.Model):
    __tablename__ = 'roles'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    default = db.Column(db.Boolean, default=False, index=True)
    permissions = db.Column(db.Integer)
    users = db.relationship('User', backref='role', lazy='dynamic')
@staticmethod
def insert_roles():
roles = {
            # Express each role as the union (bitwise OR) of its permissions
'User': (Permission.FOLLOW | Permission.COMMENT | Permission.WRITE_ARTICLES, True),
'Moderator': (
Permission.FOLLOW | Permission.COMMENT | Permission.WRITE_ARTICLES | Permission.MODERATE_COMMENTS,
False),
'Administrator': (0xff, False)
}
for r in roles:
role = Role.query.filter_by(name=r).first()
if role is None:
role = Role(name=r)
role.permissions = roles.get(r)[0]
role.default = roles.get(r)[1]
print('role.permissions:', role.permissions)
print('role.default:', role.default)
db.session.add(role)
db.session.commit()
def __repr__(self):
return '<Role %r>' % self.name
class User(UserMixin, db.Model):
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(64), unique=True, index=True)
    password_hash = db.Column(db.String(128))
    email = db.Column(db.String(64), unique=True, index=True)
    # The ForeignKey must reference the table name we defined ourselves, i.e. __tablename__
    role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))
posts = db.relationship('Post', backref='author', lazy='dynamic')
comments = db.relationship('Comment', backref='author', lazy='dynamic')
    # User information fields, used for self-introduction
name = db.Column(db.String(64))
location = db.Column(db.String(64))
about_me = db.Column(db.Text())
member_since = db.Column(db.DateTime(), default=datetime.utcnow)
last_seen = db.Column(db.DateTime(), default=datetime.utcnow)
confirmed = db.Column(db.Boolean, default=False)
    # Add an avatar-hash cache field: default avatar
avatar_hash = db.Column(db.String(32))
    # User-defined (custom) avatar
avatar = db.Column(db.String(128), default=None)
followed = db.relationship('Follow', foreign_keys=[Follow.follower_id],
backref=db.backref('follower', lazy='joined'),
lazy='dynamic',
cascade='all,delete-orphan')
followers = db.relationship('Follow', foreign_keys=[Follow.followed_id],
backref=db.backref('followed', lazy='joined'),
lazy='dynamic',
cascade='all,delete-orphan')
def __init__(self, **kwargs):
super(User, self).__init__(**kwargs)
if self.role is None:
if self.email == current_app.config['FLASKY_ADMIN']:
self.role = Role.query.filter_by(permissions=0xff).first()
if self.role is None:
self.role = Role.query.filter_by(default=True).first()
if self.email is not None and self.avatar_hash is None:
self.avatar_hash = hashlib.md5(self.email.encode('utf-8')).hexdigest()
@property
def followed_posts(self):
return Post.query.join(Follow, Follow.followed_id == Post.author_id).filter(Follow.follower_id == self.id)
@property
def password(self):
raise AttributeError('password is not a readable attribute')
@password.setter
def password(self,password):
self.password_hash=generate_password_hash(password)
def verify_password(self,password):
return check_password_hash(self.password_hash, password)
    # Generate a secure token and confirm the user account
def generate_confirmation_token(self, expiration=3600):
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'confirm': self.id})
def confirm(self, token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return False
if data.get('confirm') != self.id:
return False
self.confirmed = True
db.session.add(self)
return True
def generate_auth_token(self, expiration):
s = Serializer(current_app.config['SECRET_KEY'], expires_in=expiration)
return s.dumps({'id': self.id}).decode('ascii')
@staticmethod
def verify_auth_token(token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return None
return User.query.get(data['id'])
# Following: generate reset-token and reset password
def generate_reset_token(self, expiration=3600):
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'reset': self.id})
def reset_password(self, token, new_password):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return False
if data.get('reset') != self.id:
return False
self.password = new_password
db.session.add(self)
db.session.commit()
return True
def generate_email_change_token(self, new_email, expiration=3600):
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'change_email': self.id, 'new_email': new_email})
def change_email(self, token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return False
if data.get('change_email') != self.id:
return False
new_email = data.get('new_email')
if new_email is None:
return False
if self.query.filter_by(email=new_email).first() is not None:
return False
self.email = new_email
self.avatar_hash = hashlib.md5(self.email.encode('utf-8')).hexdigest()
db.session.add(self)
return True
    # Check user permissions
def can(self, permissions):
return self.role is not None and (self.role.permissions & permissions) == permissions
def is_administrator(self):
return self.can(Permission.ADMINISTER)
def ping(self):
self.last_seen = datetime.utcnow()
db.session.add(self)
    # Build the Gravatar URL to generate the avatar
def gravatar(self, size=100, default='identicon', rating='g'):
if request.is_secure:
url = 'https://secure.gravatar.com/avatar'
else:
url = 'https://www.gravatar.com/avatar'
hash = self.avatar_hash or hashlib.md5(self.email.encode('utf-8')).hexdigest()
return '{url}/{hash}?s={size}&d={default}&r={rating}'.format(
url=url, hash=hash, size=size, default=default, rating=rating)
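    # Example of a generated URL (illustrative):
    #   https://www.gravatar.com/avatar/<md5-of-email>?s=100&d=identicon&r=g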
def __repr__(self):
return '<User %r>' % self.username
@staticmethod
def generate_fake(count=100):
from sqlalchemy.exc import IntegrityError
from random import seed
import forgery_py
seed()
for i in range(count):
u = User(email=forgery_py.internet.email_address(), username=forgery_py.internet.user_name(True),
password=forgery_py.lorem_ipsum.word(), confirmed=True, name=forgery_py.name.full_name(),
location=forgery_py.address.city(), about_me=forgery_py.lorem_ipsum.sentence(),
member_since=forgery_py.date.date(True))
db.session.add(u)
try:
db.session.commit()
except IntegrityError:
db.session.rollback()
@staticmethod
def add_self_follows():
for user in User.query.all():
if not user.is_following(user):
user.follow(user)
db.session.add(user)
db.session.commit()
def follow(self, user):
if not self.is_following(user):
f = Follow(follower=self, followed=user)
db.session.add(f)
def unfollow(self, user):
f = self.followed.filter_by(followed_id=user.id).first()
if f:
db.session.delete(f)
def is_following(self, user):
return self.followed.filter_by(followed_id=user.id).first() is not None
def is_followed_by(self, user):
return self.followers.filter_by(follower_id=user.id).first() is not None
def to_json(self):
json_user = {
'url': url_for('api.get_user', id=self.id, _external=True),
'username': self.username,
'member_since': self.member_since,
'last_seen': self.last_seen,
'posts': url_for('api.get_user_posts', id=self.id, _external=True),
'followed_posts': url_for('api.get_user_followed_posts',
id=self.id, _external=True),
'post_count': self.posts.count()
}
return json_user
class AnonymousUser(AnonymousUserMixin):
def can(self, permission):
return False
def is_administrator(self):
return False
login_manager.anonymous_user = AnonymousUser
class Permission:
FOLLOW = 0x01
COMMENT = 0x02
WRITE_ARTICLES = 0x04
MODERATE_COMMENTS = 0x08
ADMINISTER = 0x80
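    # Permissions combine as bit flags; e.g. can(Permission.FOLLOW |
    # Permission.COMMENT) is True only when both bits are set in the role's
    # permissions integer (see User.can() above).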
# Post (article) model
class Post(db.Model):
__tablename__ = 'posts'
id = db.Column(db.Integer, primary_key=True)
body = db.Column(db.Text)
timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
author_id = db.Column(db.Integer, db.ForeignKey('users.id'))
body_html = db.Column(db.Text)
comments = db.relationship('Comment', backref='post', lazy='dynamic')
@staticmethod
def on_changed_body(target, value, oldvalue, initiator):
allowed_tags = ['a', 'abbr', 'acronym', 'b', 'blockquote', 'code',
'em', 'i', 'li', 'ol', 'pre', 'strong', 'ul',
'h1', 'h2', 'h3', 'p']
target.body_html = bleach.linkify(
bleach.clean(markdown(value, output_format='html'), tags=allowed_tags, strip=True))
@staticmethod
def generate_fake(count=100):
from random import seed, randint
import forgery_py
seed()
user_count = User.query.count()
for i in range(count):
u = User.query.offset(randint(0, user_count - 1)).first()
p = Post(body=forgery_py.lorem_ipsum.sentences(randint(1, 3)),
timestamp=forgery_py.date.date(True),
author=u)
db.session.add(p)
db.session.commit()
def to_json(self):
json_post = {
'url': url_for('api.get_post', id=self.id, _external=True),
'body': self.body,
'body_html': self.body_html,
'timestamp': self.timestamp,
'author': url_for('api.get_user', id=self.author_id, _external=True),
'comments': url_for('api.get_post_comments', id=self.id, _external=True),
'comment_count': self.comments.count()
}
return json_post
@staticmethod
def from_json(json_post):
body = json_post.get('body')
if body is None or body == '':
raise ValidationError('post does not have a body')
return Post(body=body)
class Comment(db.Model):
__tablename__ = 'comments'
id = db.Column(db.Integer, primary_key=True)
body = db.Column(db.Text)
body_html = db.Column(db.Text)
timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
disabled = db.Column(db.Boolean)
author_id = db.Column(db.Integer, db.ForeignKey('users.id'))
post_id = db.Column(db.Integer, db.ForeignKey('posts.id'))
@staticmethod
def on_changed_body(target, value, oldvalue, initiator):
allowed_tags = ['a', 'abbr', 'acronym', 'b', 'code', 'em', 'i', 'strong']
target.body_html = bleach.linkify(
bleach.clean(markdown(value, output_format='html'), tags=allowed_tags, strip=True))
def to_json(self):
json_comment = {
'url': url_for('api.get_comment', id=self.id, _external=True),
'post': url_for('api.get_post', id=self.post_id, _external=True),
'body': self.body,
'body_html': self.body_html,
'timestamp': self.timestamp,
'author': url_for('api.get_user', id=self.author_id,
_external=True),
}
return json_comment
@staticmethod
def from_json(json_comment):
body = json_comment.get('body')
if body is None or body == '':
raise ValidationError('comment does not have a body')
return Comment(body=body)
db.event.listen(Post.body, 'set', Post.on_changed_body)
db.event.listen(Comment.body, 'set', Comment.on_changed_body)
# Callback function that loads a user for Flask-Login
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
| [
"[email protected]"
] | |
47ba284a792b5f983cbaf547bb02cc9069dde73b | 4195cfc592b8c6ee42c5ea7b7d7d08b9899dd7c0 | /spektral/datasets/qm7.py | 0b226669c663f48b60d05bc6e3885431fd6d6681 | [
"MIT"
] | permissive | mbrukman/spektral | 4f5cc708a2996469ebbf2b6133acca42c6a869bc | d720de476d04a8d9ed23570336eddfedb97dd7de | refs/heads/master | 2023-02-03T09:16:32.579795 | 2020-12-15T18:00:24 | 2020-12-15T18:00:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,404 | py | import os.path as osp
import numpy as np
import scipy.sparse as sp
from scipy.io import loadmat
from tensorflow.keras.utils import get_file
from spektral.data import Dataset, Graph
class QM7(Dataset):
"""
The QM7b dataset of molecules from the paper:
> [MoleculeNet: A Benchmark for Molecular Machine Learning](https://arxiv.org/abs/1703.00564)<br>
> Zhenqin Wu et al.
The dataset has no node features.
Edges and edge features are obtained from the Coulomb matrices of the
molecules.
Each graph has a 14-dimensional label for regression.
"""
url = 'http://deepchem.io.s3-website-us-west-1.amazonaws.com/datasets/qm7b.mat'
def __init__(self, **kwargs):
super().__init__(**kwargs)
def download(self):
get_file('qm7b.mat', self.url, extract=True, cache_dir=self.path,
cache_subdir=self.path)
def read(self):
print('Loading QM7 dataset.')
mat_file = osp.join(self.path, 'qm7b.mat')
data = loadmat(mat_file)
coulomb_matrices = data['X']
labels = data['T']
output = []
for i in range(len(coulomb_matrices)):
row, col, data = sp.find(coulomb_matrices[i])
a = sp.csr_matrix((np.ones_like(data), (row, col)))
e = data[:, None]
y = labels[i]
output.append(Graph(a=a, e=e, y=y))
return output
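# Minimal usage sketch (illustrative; assumes a working spektral install):
#   dataset = QM7()      # downloads and parses qm7b.mat on first use
#   graph = dataset[0]   # a Graph with adjacency a, edge features e, label y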
| [
"[email protected]"
] | |
eb68771d4e777087643a92bffb715b1eae059a48 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/dev/cv/image_classification/MSPN_ID0960_for_PyTorch/dataset/MPII/mpii.py | bc97e12ddafae1e8b4752d26c074ea0a1695bb97 | [
"BSD-3-Clause",
"GPL-1.0-or-later",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 8,487 | py | #
# BSD 3-Clause License
#
# Copyright (c) 2017 xxxx
# All rights reserved.
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ============================================================================
#
"""
@author: Wenbo Li
@contact: [email protected]
"""
import cv2
import json
import numpy as np
import os
from scipy.io import loadmat
from collections import OrderedDict
from dataset.JointsDataset import JointsDataset
class MPIIDataset(JointsDataset):
def __init__(self, DATASET, stage, transform=None):
super().__init__(DATASET, stage, transform)
#self.cur_dir = os.path.split(os.path.realpath(__file__))[0]
self.cur_dir = '/npu/traindata/ID0960_CarPeting_Pytorch_MSPN/MPII'
self.train_gt_file = 'train.json'
self.train_gt_path = os.path.join(self.cur_dir, 'gt_json',
self.train_gt_file)
self.val_gt_file = 'valid.json'
self.val_gt_path = os.path.join(self.cur_dir, 'gt_json',
self.val_gt_file)
self.val_gt_mat = os.path.join(self.cur_dir, 'gt_json', 'valid.mat')
self.test_det_file = 'test.json'
self.test_det_path = os.path.join(self.cur_dir, 'det_json',
self.test_det_file)
self.data = self._get_data()
self.data_num = len(self.data)
def _get_data(self):
data = list()
if self.stage == 'train':
mpii = json.load(open(self.train_gt_path))
elif self.stage == 'val':
mpii = json.load(open(self.val_gt_path))
else:
mpii = json.load(open(self.test_det_path))
for d in mpii:
img_name = d['image']
img_id = img_name.split('.')[0]
img_path = os.path.join(self.cur_dir, 'images', img_name)
center = np.array(d['center'], dtype=np.float32)
scale = np.array([d['scale'], d['scale']], dtype=np.float32)
if center[0] != -1:
center[1] = center[1] + 15 * scale[1]
center -= 1
if self.stage == 'test':
joints = np.zeros((self.keypoint_num, 3), dtype=np.float32)
else:
joints = np.array(d['joints'], dtype=np.float32)
joints -= 1
joints_vis = np.array(d['joints_vis'], dtype=np.float32)
joints_vis = joints_vis.reshape(-1, 1) * 2
joints = np.concatenate((joints, joints_vis), axis=1)
data.append(dict(center=center,
img_id=img_id,
img_path=img_path,
img_name=img_name,
joints=joints,
scale=scale))
return data
    # Evaluation follows the MSRA high-resolution-net reference implementation.
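    # PCKh: a keypoint counts as correct when its error is within
    # threshold * (head-box size * SC_BIAS); PCKh@0.5 uses threshold 0.5.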
def evaluate(self, preds):
preds = preds[:, :, 0:2] + 1.0
SC_BIAS = 0.6
threshold = 0.5
gt_file = os.path.join(self.val_gt_mat)
gt_dict = loadmat(gt_file)
dataset_joints = gt_dict['dataset_joints']
jnt_missing = gt_dict['jnt_missing']
pos_gt_src = gt_dict['pos_gt_src']
headboxes_src = gt_dict['headboxes_src']
pos_pred_src = np.transpose(preds, [1, 2, 0])
head = np.where(dataset_joints == 'head')[1][0]
lsho = np.where(dataset_joints == 'lsho')[1][0]
lelb = np.where(dataset_joints == 'lelb')[1][0]
lwri = np.where(dataset_joints == 'lwri')[1][0]
lhip = np.where(dataset_joints == 'lhip')[1][0]
lkne = np.where(dataset_joints == 'lkne')[1][0]
lank = np.where(dataset_joints == 'lank')[1][0]
rsho = np.where(dataset_joints == 'rsho')[1][0]
relb = np.where(dataset_joints == 'relb')[1][0]
rwri = np.where(dataset_joints == 'rwri')[1][0]
rkne = np.where(dataset_joints == 'rkne')[1][0]
rank = np.where(dataset_joints == 'rank')[1][0]
rhip = np.where(dataset_joints == 'rhip')[1][0]
jnt_visible = 1 - jnt_missing
uv_error = pos_pred_src - pos_gt_src
uv_err = np.linalg.norm(uv_error, axis=1)
headsizes = headboxes_src[1, :, :] - headboxes_src[0, :, :]
headsizes = np.linalg.norm(headsizes, axis=0)
headsizes *= SC_BIAS
scale = np.multiply(headsizes, np.ones((len(uv_err), 1)))
scaled_uv_err = np.divide(uv_err, scale)
scaled_uv_err = np.multiply(scaled_uv_err, jnt_visible)
jnt_count = np.sum(jnt_visible, axis=1)
less_than_threshold = np.multiply((scaled_uv_err <= threshold),
jnt_visible)
PCKh = np.divide(100.*np.sum(less_than_threshold, axis=1), jnt_count)
rng = np.arange(0, 0.5+0.01, 0.01)
pckAll = np.zeros((len(rng), 16))
for r in range(len(rng)):
threshold = rng[r]
less_than_threshold = np.multiply(scaled_uv_err <= threshold,
jnt_visible)
pckAll[r, :] = np.divide(100.*np.sum(less_than_threshold, axis=1),
jnt_count)
PCKh = np.ma.array(PCKh, mask=False)
PCKh.mask[6:8] = True
jnt_count = np.ma.array(jnt_count, mask=False)
jnt_count.mask[6:8] = True
jnt_ratio = jnt_count / np.sum(jnt_count).astype(np.float64)
name_value = [
('Head', PCKh[head]),
('Shoulder', 0.5 * (PCKh[lsho] + PCKh[rsho])),
('Elbow', 0.5 * (PCKh[lelb] + PCKh[relb])),
('Wrist', 0.5 * (PCKh[lwri] + PCKh[rwri])),
('Hip', 0.5 * (PCKh[lhip] + PCKh[rhip])),
('Knee', 0.5 * (PCKh[lkne] + PCKh[rkne])),
('Ankle', 0.5 * (PCKh[lank] + PCKh[rank])),
('Mean', np.sum(PCKh * jnt_ratio)),
('[email protected]', np.sum(pckAll[11, :] * jnt_ratio))
]
name_value = OrderedDict(name_value)
print(name_value)
def visualize(self, img, joints, score=None):
pairs = [[0, 1], [1, 2], [2, 6], [3, 4], [3, 6], [4, 5], [6, 7],
[7, 8], [8, 9], [8, 12], [8, 13], [10, 11], [11, 12],
[13, 14], [14, 15]]
color = np.random.randint(0, 256, (self.keypoint_num, 3)).tolist()
for i in range(self.keypoint_num):
if joints[i, 0] > 0 and joints[i, 1] > 0:
cv2.circle(img, tuple(joints[i, :2]), 2, tuple(color[i]), 2)
if score:
cv2.putText(img, score, (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 1.2,
(128, 255, 0), 2)
def draw_line(img, p1, p2):
c = (0, 0, 255)
if p1[0] > 0 and p1[1] > 0 and p2[0] > 0 and p2[1] > 0:
cv2.line(img, tuple(p1), tuple(p2), c, 2)
for pair in pairs:
draw_line(img, joints[pair[0] - 1], joints[pair[1] - 1])
return img
if __name__ == '__main__':
from dataset.attribute import load_dataset
dataset = load_dataset('MPII')
mpii = MPIIDataset(dataset, 'val')
print(mpii.data_num)
| [
"[email protected]"
] | |
3ec8a31d0882655804f8b5f2cb27daca6abfd5e7 | 2e8f0de7a1526ef511927783235edc93f7c90036 | /communicare/core/migrations/0043_event_external_subscriptions.py | f59f64eaec3ff4408f9560c7e05cd7fcea47da6d | [] | no_license | ConTTudOweb/CommunicareProject | 3d663578dfdeb455bc49419b3d103daec69c8fab | 211a1124c8c4549c609832ad71069a55c714a430 | refs/heads/master | 2022-12-21T12:59:35.424560 | 2021-05-10T22:16:15 | 2021-05-10T22:16:15 | 163,891,380 | 0 | 1 | null | 2022-12-08T07:43:22 | 2019-01-02T21:27:42 | HTML | UTF-8 | Python | false | false | 439 | py | # Generated by Django 2.1.8 on 2020-03-02 17:32
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0042_auto_20191104_0945'),
]
operations = [
migrations.AddField(
model_name='event',
name='external_subscriptions',
field=models.URLField(blank=True, null=True, verbose_name='inscrição externa'),
),
]
| [
"[email protected]"
] | |
2ac85d35e477f5d3e3fe61212bc2ef57463c03a6 | 637ab3853b560485d1b3c3ecbb469ff48114f3fb | /RepublicHyundai/taxapp/admin.py | 66f846da14758850dcc5f7d4b76283e1f2e96d62 | [] | no_license | vishalrathodgithub/republic_hyundai | 19b7af4f84cec5cec037f2ed3a77ec5dfd4be8b9 | cb1241f7c696a5b067a6b1ad1ce27dd371b41e2e | refs/heads/master | 2020-11-24T13:09:21.491228 | 2019-12-15T09:29:16 | 2019-12-15T09:29:16 | 228,159,070 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 297 | py | from django.contrib import admin
from taxapp.models import *
# Register your models here.
class TaxMasterAdmin(admin.ModelAdmin):
    list_display = ['tax_product_category', 'tax_hsn', 'tax_sgst', 'tax_cgst', 'tax_igst']
admin.site.register(TaxMaster, TaxMasterAdmin)
admin.site.register(FinancialYear)
| [
"[email protected]"
] | |
8dc37cc18c0c38e4156e6ad424ef221774f15007 | 5a394c53a7099bc871401e32cf3fc782546f9f7d | /.history/lab1/Graph_20210130222041.py | a941a151506209424208f45209345b15f8b3979d | [] | no_license | ajaygc95/advPy | fe32d67ee7910a1421d759c4f07e183cb7ba295b | 87d38a24ef02bcfe0f050840179c6206a61384bd | refs/heads/master | 2023-03-27T10:10:25.668371 | 2021-03-23T08:28:44 | 2021-03-23T08:28:44 | 334,614,292 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 436 | py | from matplotlib import pyplot as plt
import numpy as np
import sklearn as sk
class Graph:
def __init__(self) -> None:
self.plot = plt
def plotGraph(self, x, y):
plt.plot(x,y)
plt.xlabel('Year')
plt.ylabel('Change in Temperature')
plt.title('Temperature change by year')
        plt.legend(['Change'])
plt.show()
def plotlinear(self,x,y):
plt.plot(x,y)
        plt.show()
"[email protected]"
] | |
e8c4bb6b0a28096de4b86e45d42f7d4cf7c240ff | e837db39c9609830ab8e77dac2077ea30cadc5b3 | /core/migrations/0003_auto_20190915_1905.py | 368d065348ec67a18de5829c24f80f2c257f1185 | [] | no_license | windundschnee/accountneu | 9c8ff1507f725a5179604be2640d76b5302a0299 | da9066840a312a95bc628556c94738010787a01f | refs/heads/master | 2022-12-10T06:00:42.449898 | 2019-10-25T18:29:23 | 2019-10-25T18:29:23 | 211,513,631 | 0 | 0 | null | 2022-12-08T05:22:15 | 2019-09-28T14:34:00 | Python | UTF-8 | Python | false | false | 868 | py | # Generated by Django 2.2 on 2019-09-15 17:05
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0002_auto_20190915_1758'),
]
operations = [
migrations.AddField(
model_name='allgeingaben',
name='schneelast',
field=models.DecimalField(decimal_places=2, default=2, max_digits=5, validators=[django.core.validators.MinValueValidator(0)]),
),
migrations.AddField(
model_name='allgeingaben',
name='schneelast_benutzerdefiniert',
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name='allgeingaben',
name='schneelastzone',
field=models.CharField(default='2', max_length=10),
),
]
| [
"[email protected]"
] | |
9c1dec43e89521c2bace3fda5c4a36ee10c09131 | 320280bfce76713436b76ffc3125ccf37e65a324 | /AnalyzeMiniPlusSubstructure/test/ttbar/ttbar_82.py | 439df1a9e7ad24d35f3968ac9303d4353832a0cf | [] | no_license | skhalil/MiniValidation | 75ea5c0d7cde17bf99c7d31501f8384560ee7b99 | 1a7fb8377e29172483ea6d3c7b3e427ff87e7e37 | refs/heads/master | 2016-09-05T10:31:38.562365 | 2015-01-29T05:30:32 | 2015-01-29T05:30:32 | 29,898,162 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,860 | py | import FWCore.ParameterSet.Config as cms
###############################################
useMiniAOD = True
# AOD
pfcandidates = 'particleFlow'
chsstring = 'pfNoPileUpJME'
genjetparticles = 'genParticles'
importantgenparticles = 'genParticles'
tracks = 'generalTracks'
vertices = 'offlinePrimaryVertices'
mergedvertices = 'inclusiveMergedVertices'
mergedvertices2 = ''
primaryvertices = 'offlinePrimaryVertices'
#miniAOD
if useMiniAOD:
pfcandidates = 'packedPFCandidates'
genjetparticles = 'packedGenParticles'
importantgenparticles = 'prunedGenParticles'
tracks = 'unpackedTracksAndVertices'
vertices = 'unpackedTracksAndVertices'
mergedvertices = 'unpackedTracksAndVertices'
mergedvertices2 = 'secondary'
primaryvertices = 'offlineSlimmedPrimaryVertices'
print 'useMiniAOD = '+str(useMiniAOD)
print ' pfcandidates = '+pfcandidates
print ' genjetparticles = '+genjetparticles
print ' importantgenparticles = '+importantgenparticles
print ' tracks = '+tracks
print ' vertices = '+vertices
print ' mergedvertices = '+mergedvertices
print ' mergedvertices2 = '+mergedvertices2
print ' primaryvertices = '+primaryvertices
###############################################
# SETUP
process = cms.Process("USER")
process.load("FWCore.MessageService.MessageLogger_cfi")
process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(False) , allowUnscheduled = cms.untracked.bool(True) )
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
process.MessageLogger.cerr.FwkReport.reportEvery = 1000
process.MessageLogger.cerr.FwkJob.limit=1
process.MessageLogger.cerr.ERROR = cms.untracked.PSet( limit = cms.untracked.int32(0) )
###############################################
# SOURCE
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring(
'root://cmsxrootd-site.fnal.gov//store/mc/Phys14DR/TTJets_MSDecaysCKM_central_Tune4C_13TeV-madgraph-tauola/MINIAODSIM/PU20bx25_PHYS14_25_V1-v1/00000/4881873E-BE76-E411-BDE9-0025901D4764.root'
)
)
###############################################
# ANA
process.demo = cms.EDAnalyzer("AnalyzeMiniPlusSubstructure",
vertices = cms.InputTag("offlineSlimmedPrimaryVertices"),
muons = cms.InputTag("slimmedMuons"),
electrons = cms.InputTag("slimmedElectrons"),
taus = cms.InputTag("slimmedTaus"),
photons = cms.InputTag("slimmedPhotons"),
jets = cms.InputTag("slimmedJets"),
fatjets = cms.InputTag("slimmedJetsAK8"),
mets = cms.InputTag("slimmedMETs"),
pfCands = cms.InputTag("packedPFCandidates"),
packed = cms.InputTag("packedGenParticles"),
pruned = cms.InputTag("prunedGenParticles"),
bits = cms.InputTag("TriggerResults","","HLT"),
prescales = cms.InputTag("patTrigger")
)
process.TFileService = cms.Service("TFileService",
fileName = cms.string("ttbar82.root"),
closeFileFast = cms.untracked.bool(True)
)
###############################################
# RECO AND GEN SETUP
process.load('PhysicsTools.PatAlgos.producersLayer1.patCandidates_cff')
process.load('Configuration.EventContent.EventContent_cff')
process.load('Configuration.StandardSequences.Geometry_cff')
process.load('Configuration.StandardSequences.MagneticField_38T_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
process.GlobalTag.globaltag ='PHYS14_25_V2'
#'START70_V6::All'
#'START70_V6::All'
process.load('RecoJets.Configuration.RecoPFJets_cff')
process.load('RecoJets.Configuration.RecoGenJets_cff')
#process.fixedGridRhoFastjetAll.pfCandidatesTag = pfcandidates
process.fixedGridRhoFastjetAll.pfCandidatesTag = 'packedPFCandidates'
process.fixedGridRhoAll.pfCandidatesTag = 'packedPFCandidates'
# process.fixedGridRhoAll.pfCandidatesTag = .InputTag("packedPFCandidates")
# process.fixedGridRhoFastjetAll = fixedGridRhoFastjetAll.clone( pfCandidatesTag = cms.InputTag("packedPFCandidates"))
# process.fixedGridRhoAll = fixedGridRhoAll.clone( pfCandidatesTag = cms.InputTag("packedPFCandidates"))
from RecoJets.JetProducers.SubJetParameters_cfi import SubJetParameters
from RecoJets.JetProducers.PFJetParameters_cfi import *
from RecoJets.JetProducers.CaloJetParameters_cfi import *
from RecoJets.JetProducers.AnomalousCellParameters_cfi import *
from RecoJets.JetProducers.CATopJetParameters_cfi import *
from RecoJets.JetProducers.GenJetParameters_cfi import *
from RecoJets.JetProducers.caTopTaggers_cff import *
###############################################
process.content = cms.EDAnalyzer("EventContentAnalyzer")
process.p = cms.Path(
#process.fixedGridRhoFastjetAll
process.demo
)
| [
"[email protected]"
] | |
ef32a3e70644ccee481a6bb2836324e2d3e9e4bf | b7f45072d056b80ed49e6bcde91877d8576e970d | /SQL/inc/py/all-names.py | b16948d631042a84c093b9e28cfe892992ea0f3f | [] | no_license | jrminter/tips | 128a18ee55655a13085c174d532c77bcea412754 | f48f8b202f8bf9e36cb6d487a23208371c79718e | refs/heads/master | 2022-06-14T08:46:28.972743 | 2022-05-30T19:29:28 | 2022-05-30T19:29:28 | 11,463,325 | 5 | 8 | null | 2019-12-18T16:24:02 | 2013-07-17T00:16:43 | Jupyter Notebook | UTF-8 | Python | false | false | 294 | py | from sqlite3 import dbapi2 as sqlite
connection = sqlite.connect("experiments.db")
cursor = connection.cursor()
cursor.execute("SELECT FirstName, Lastname FROM Person ORDER BY LastName;")
results = cursor.fetchall();
for r in results:
print(r[0], r[1])
cursor.close();
connection.close();
| [
"[email protected]"
] | |
b3068976266831d415316817e2e3f7e468c472bc | e85b47c61947b13afd2d8855acff6c3fd0acd3e5 | /accounts/models.py | 2e83c57c385b45f7da7d1acf0d87954f0e2f8239 | [] | no_license | Zhoroev/django_exam5 | e2c2c5165d1dc5eeff87708de47267ec35135034 | af5646b16732f8dc5bf0068ce34467cb29d70541 | refs/heads/main | 2023-06-30T16:49:25.077947 | 2021-08-07T17:33:19 | 2021-08-07T17:33:19 | 393,749,728 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 476 | py | from django.db import models
from django.contrib.auth.models import AbstractUser
class User(AbstractUser):
class TypeUserChoice(models.TextChoices):
ADMIN = 'admin'
SPECIALIST = 'specialist'
PERSONAL_CABINET = 'personal_cabinet'
type_user = models.CharField(max_length=120,
choices=TypeUserChoice.choices,
default=TypeUserChoice.PERSONAL_CABINET)
| [
"[email protected]"
] | |
a4b2ee1468ea48c5f1588fd61509edd4dad20960 | e36c5a91306f8d8cf487368d3a1dfae4c03da3c0 | /build/yujin_ocs/yocs_ar_pair_approach/catkin_generated/pkg.develspace.context.pc.py | c4742daf75020b6b00db90c7f39c4b5afa7d64d6 | [] | no_license | DocDouze/RobMob | 84ae5b96a16028586c9da2008f7c7772bdaa1334 | 6a2e7505eb2207d61b1c354cfd255075b1efbc73 | refs/heads/master | 2020-04-11T07:24:28.958201 | 2018-12-17T11:56:54 | 2018-12-17T11:56:54 | 161,607,677 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 390 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "yocs_ar_pair_approach"
PROJECT_SPACE_DIR = "/home/aubailly/Bureau/RobMob/devel"
PROJECT_VERSION = "0.12.1"
| [
"[email protected]"
] | |
c20262a0f63933cce6ec99765eb0929fd4f684d8 | ed21823488a1cca51009793efa0b124e40d224a4 | /neurobioseg/161207_multiple_training_segmentations/p161207_03_compute_paths.py | c8d1a821dce33c10b72ca127676a412318bedebd | [] | no_license | jhennies/py_devel | 4a41e13ec8cd9b834c3d5acf64becc0fa8ffc479 | 9fc860be95ae91064a40f25e26d4024fbae6eb1f | refs/heads/master | 2021-01-16T23:25:56.716283 | 2017-03-10T17:49:55 | 2017-03-10T17:49:55 | 45,381,183 | 1 | 0 | null | 2017-03-10T17:49:56 | 2015-11-02T08:21:35 | Python | UTF-8 | Python | false | false | 3,955 | py |
import os
import inspect
from hdf5_image_processing import Hdf5ImageProcessing as IP, Hdf5ImageProcessingLib as ipl
from hdf5_processing import RecursiveDict as rdict
from shutil import copy, copyfile
import numpy as np
import matplotlib.pyplot as plt
import processing_libip as libip
import sys
from yaml_parameters import YamlParams
__author__ = 'jhennies'
def load_images(filepath, skeys=None, recursive_search=False, logger=None):
if logger is not None:
logger.logging('Loading data from \n{}', filepath)
else:
print 'Loading data from \n{}'.format(filepath)
data = ipl()
data.data_from_file(
filepath=filepath,
skeys=skeys,
recursive_search=recursive_search,
nodata=True
)
return data
def compute_paths(yparams):
params = yparams.get_params()
thisparams = rdict(params['compute_paths'])
data = ipl()
for sourcekey, source in thisparams['sources'].iteritems():
# Load the necessary images
# 1. Determine the settings for fetching the data
        # Fetch the default setting, then let a per-source setting override it;
        # separate try/excepts so a missing default no longer masks the override.
        recursive_search = False
        try:
            recursive_search = thisparams['skwargs', 'default', 'recursive_search']
        except KeyError:
            pass
        try:
            recursive_search = thisparams['skwargs', sourcekey, 'recursive_search']
        except KeyError:
            pass
if len(source) > 2:
skeys = source[2]
else:
skeys = None
# 2. Load the data
yparams.logging('skeys = {}', skeys)
yparams.logging('recursive_search = {}', recursive_search)
data[sourcekey] = load_images(
params[source[0]] + params[source[1]], skeys=skeys, recursive_search=recursive_search,
logger=yparams
)
data['contacts'].reduce_from_leafs(iterate=True)
data['disttransf'].reduce_from_leafs(iterate=True)
# Set targetfile
targetfile = params[thisparams['target'][0]] \
+ params[thisparams['target'][1]]
yparams.logging('\nInitial datastructure: \n\n{}', data.datastructure2string(maxdepth=3))
for d, k, v, kl in data['segmentation'].data_iterator(yield_short_kl=True, leaves_only=True):
yparams.logging('===============================\nWorking on image: {}', kl + [k])
# # TODO: Implement copy full logger
# data[kl].set_logger(data.get_logger())
# prepare the dict for the path computation
indata = ipl()
indata['segmentation'] = np.array(data['segmentation'][kl][k])
indata['contacts'] = np.array(data['contacts'][kl][k])
indata['groundtruth'] = np.array(data['groundtruth'][kl][params['gtruthname']])
indata['disttransf'] = np.array(data['disttransf'][kl][k])
yparams.logging('Input datastructure: \n\n{}', indata.datastructure2string())
# Compute the paths sorted into their respective class
paths = ipl()
paths[kl + [k]] = libip.compute_paths_with_class(
indata, 'segmentation', 'contacts', 'disttransf', 'groundtruth',
thisparams,
ignore=thisparams['ignorelabels'],
max_end_count=thisparams['max_end_count'],
max_end_count_seed=thisparams['max_end_count_seed'],
debug=params['debug']
)
# Write the result to file
paths.write(filepath=targetfile)
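# Hypothetical YAML layout for the keys read above (reconstructed for
# illustration only -- the real parameters_ref.yml may differ):
#
# compute_paths:
#     sources:
#         segmentation: [datafolder, segmfile]
#     skwargs:
#         default: {recursive_search: False}
#     target: [resultfolder, pathsfile]
#     ignorelabels: []
#     max_end_count: 5
#     max_end_count_seed: 0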
def run_compute_paths(yamlfile, logging=True):
yparams = YamlParams(filename=yamlfile)
params = yparams.get_params()
# Logger stuff
yparams.set_indent(1)
yparams.startlogger(
filename=params['resultfolder'] + 'compute_paths.log',
type='w', name='ComputePaths'
)
try:
compute_paths(yparams)
yparams.logging('')
yparams.stoplogger()
    except Exception:
yparams.errout('Unexpected error')
if __name__ == '__main__':
yamlfile = os.path.dirname(os.path.abspath(__file__)) + '/parameters_ref.yml'
run_compute_paths(yamlfile, logging=False) | [
"[email protected]"
] | |
0d4208c30ec0aab0cdd5a4405a79fedd5cf74c17 | ffe555768c86e03e8528e1d77a3b763ef8decea1 | /pygcn/smtest.py | 91378e0c2848cc9a91f55cfdda709108851373b0 | [] | no_license | LittleQili/Toy-GCN | f4ba7dfa94468b44ef4687262d625678eea25007 | 2eb4ed0453b6acdff543aed7e965c80d424abc50 | refs/heads/main | 2023-03-25T17:41:04.459508 | 2020-12-27T07:40:02 | 2020-12-27T07:40:02 | 324,107,780 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,170 | py | import time
import argparse
import numpy as np
import torch
import torch.nn.functional as F
import torch.optim as optim
import torch.utils as tutil
# from utils import accuracy
from smmodel import GCN
from smdata import load_test_data
# Training settings
parser = argparse.ArgumentParser()
parser.add_argument('--no-cuda', action='store_true', default=False,
help='Disables CUDA training.')
parser.add_argument('--seed', type=int, default=42, help='Random seed.')
args = parser.parse_args()
args.cuda = not args.no_cuda and torch.cuda.is_available()
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if args.cuda:
torch.cuda.manual_seed(args.seed)
ids, adj_smiles, feature_smiles, allinput = load_test_data()
model = torch.load('weight/yijiaGCN1.pt')
model.eval()
if args.cuda:
model.cuda()
feature_smiles = feature_smiles.cuda()
adj_smiles = adj_smiles.cuda()
finalact = torch.nn.Sigmoid()
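# Note (added comment): the loaded GCN presumably returns raw logits, so the
# sigmoid maps each score to a probability in [0, 1] before it is written out.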
f = open('output_518030910146_1.txt','w')
f.write('Chemical,Label\n')
output = finalact(model(adj_smiles,feature_smiles))
for i in range(adj_smiles.shape[0]):
tmpf = output[i].item()
    f.write(ids[i] + ',%f\n' % tmpf)
f.close() | [
"[email protected]"
] | |
67e65a797c551dbb3fbd1cc8e37359c580ca3a81 | 46225b4cd6234b3aeb31b0e88f85df44fddd3d53 | /common_crawlers/common_crawlers/spiders/job_bole3.py | ed2a90fc2506289e2de9890670e8270d0ed127db | [] | no_license | git-wsf/crawler_project | cdbd8aaa0e4f232ffb07cdc5d4db90fc858e1d40 | 665945214036da1d312d16de83b13430d2e529c6 | refs/heads/master | 2020-07-27T15:14:16.152390 | 2018-10-30T13:46:34 | 2018-10-30T13:46:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,267 | py | # -*- coding: utf-8 -*-
import scrapy
from scrapy.http import Request
from common_crawlers.utils.common import get_md5
from common_crawlers.items import JobBoleItem, CustomItemLoader
from urllib import parse
class JobBole3Spider(scrapy.Spider):
name = 'job_bole3'
allowed_domains = ['jobbole.com']
start_urls = ['http://blog.jobbole.com/all-posts/page/166/']
def parse(self, response):
all_links = response.xpath('//div[@id="archive"]/div/div[@class="post-thumb"]/a')
if all_links:
for each_link in all_links:
each_url = each_link.xpath('@href')
img_url = each_link.xpath('img/@src')
if img_url:
thumbnail_url = img_url.extract()[0]
else:
thumbnail_url = ""
yield Request(parse.urljoin(response.url, each_url.extract()[0]),
callback=self.parse_detail, meta={'thumbnail_url': thumbnail_url})
# next_page = response.xpath('//a[@class="next page-numbers"]/@href').extract_first()
        # self.logger.info('Next page link: {}'.format(next_page))
# if next_page:
# yield Request(next_page, callback=self.parse)
def parse_detail(self, response):
"""
使用xpath方法
获取文章页面的标题、发布时间、内容、点赞数、评论数、文章标签等
"""
        self.logger.info('Crawling url: {0}'.format(response.url))
l = CustomItemLoader(item=JobBoleItem(), response=response)
l.add_xpath('title', '//div[@class="entry-header"]/h1/text()')
l.add_value('thumbnail_url', response.meta['thumbnail_url'])
l.add_value('article_url', response.url)
l.add_value('article_url_id', get_md5(response.url))
l.add_xpath('create_time', '//p[@class="entry-meta-hide-on-mobile"]/text()')
# l.add_xpath('content', '//div[@class="entry"]')
l.add_xpath('like_num', '//h10[contains(@id,"votetotal")]/text()')
l.add_xpath('comment_num', '//a[@href="#article-comment"]/span/text()')
l.add_xpath('tags', '//p[@class="entry-meta-hide-on-mobile"]/a[not(contains(text(),"评论"))]/text()')
return l.load_item()
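# Illustrative (not in the original file): the spider runs via scrapy's
# standard CLI from the project root, e.g.
#   scrapy crawl job_bole3 -o posts.json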
| [
"[email protected]"
] | |
7d0ebc005536be7a6d2ce1733d115bea7b53644b | 0d0263ab9a24fc1bea785bcd913a8a113c571444 | /03.02.object_detection_predict/SSD_detect.py | 4df5bb9df70b4346ead58a4d2a3f46116c1d060b | [] | no_license | chenbobaoge/BeginnerDL | 3b4f393802c230a7a9ed60828af96573fb340282 | e038334880ee521fcf2d92953b530a5518346c35 | refs/heads/master | 2021-04-28T17:15:48.345072 | 2018-01-20T06:41:36 | 2018-01-20T06:41:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,693 | py | from keras.preprocessing import image
from scipy.misc import imread
from keras.applications.imagenet_utils import preprocess_input
import numpy as np
from keras.models import load_model
from keras_layers.SSD import PriorBox,Normalize, BBoxUtility
import pickle
import matplotlib.pyplot as plt
from keras_config.SSDConfig import SSDConfig
cfg = SSDConfig()
custom_objects = {
'PriorBox' : PriorBox,
'Normalize' : Normalize,
'BBoxUtility' : BBoxUtility
}
model = load_model('../weights/ssd.h5', custom_objects=custom_objects)
files = ('fish-bike.jpg', 'cat.jpg', 'boys.jpg', 'car_cat.jpg', 'car_cat2.jpg')
inputs = []
images = []
for f in files:
f = "./pics/{}".format(f)
    img = image.load_img(f, target_size=(300, 300))
img = image.img_to_array(img)
images.append(imread(f))
inputs.append(img.copy())
inputs = preprocess_input(np.array(inputs))
preds = model.predict(inputs, batch_size=1, verbose=1)
# preds.shape (5, 7308, 33)
priors = pickle.load(open('./SSD300/prior_boxes_ssd300.pkl', 'rb'))
bbox_util = BBoxUtility(cfg.NUM_CLASS, priors)
results = bbox_util.detection_out(preds)
# type(results): list, len(results): 5, len(result[0]): 200, results[0].shape: (200, 6)
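# Added note: each row of results[i] follows the layout produced by
# BBoxUtility.detection_out -- [label, confidence, xmin, ymin, xmax, ymax] --
# with box coordinates normalized to [0, 1], hence the scaling by img.shape below.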
for i, img in enumerate(images):
# Parse the outputs.
det_label = results[i][:, 0]
det_conf = results[i][:, 1]
det_xmin = results[i][:, 2]
det_ymin = results[i][:, 3]
det_xmax = results[i][:, 4]
det_ymax = results[i][:, 5]
    # Get detections with confidence higher than 0.5.
    top_indices = [j for j, conf in enumerate(det_conf) if conf >= 0.5]
top_conf = det_conf[top_indices]
top_label_indices = det_label[top_indices].tolist()
top_xmin = det_xmin[top_indices]
top_ymin = det_ymin[top_indices]
top_xmax = det_xmax[top_indices]
top_ymax = det_ymax[top_indices]
colors = plt.cm.hsv(np.linspace(0, 1, 21)).tolist()
plt.imshow(img / 255.)
currentAxis = plt.gca()
    for j in range(top_conf.shape[0]):  # j avoids shadowing the image index i
        xmin = int(round(top_xmin[j] * img.shape[1]))
        ymin = int(round(top_ymin[j] * img.shape[0]))
        xmax = int(round(top_xmax[j] * img.shape[1]))
        ymax = int(round(top_ymax[j] * img.shape[0]))
        score = top_conf[j]
        label = int(top_label_indices[j])
        label_name = cfg.VOC_CLASSES[label - 1]
        display_txt = '{:0.2f}, {}'.format(score, label_name)
        coords = (xmin, ymin), xmax-xmin+1, ymax-ymin+1
        color = colors[label]
        currentAxis.add_patch(plt.Rectangle(*coords, fill=False, edgecolor=color, linewidth=2))
        currentAxis.text(xmin, ymin, display_txt, bbox={'facecolor':color, 'alpha':0.5})
    plt.show() | [
"[email protected]"
] | |
85e4240770dd910e3b1334465f3b688842fb6c4e | 8fa9174a6136c3f27c9090c2528e8d69835656a2 | /app/mutant/tests/test_mutant_viewsets.py | dace4ccaf1ab5573dfd3bb347b3d654029504e2f | [] | no_license | hugoseabra/xmen-magneto-ambition | 61336a8ce0eec7425f747c548dba87cb1bf8e88e | 36caec2ae9102fc31ec502fe23d080b4625325b4 | refs/heads/master | 2023-07-08T05:57:36.007412 | 2021-08-18T00:36:47 | 2021-08-18T00:36:47 | 397,329,902 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,929 | py | from django.test import TestCase
from django.urls import reverse
class MutantEndpointTests(TestCase):
def _get_url(self):
return reverse('mutant:mutant-list')
def test_validation_of_how_many_items(self):
"""Tests error when less than 6 sequences is sent """
data = {
'dna': [
"ATGCGA",
"CAGTGC",
"TTATGT",
"AGAAGG",
"CCCCTA"
]
}
response = self.client.post(self._get_url(), data=data)
self.assertContains(response, 'DNA is not valid', status_code=400)
data = {
'dna': [
"ATGCGA",
"CABTGC", # <-- WRONG char
"TTATGT",
"AGAAGG",
"CCCCTA",
"TCACTG",
]
}
response = self.client.post(self._get_url(), data=data)
self.assertContains(
response,
'You must provide correct amino acid values with 6 digits:'
' A, C, G, T',
status_code=400
)
def test_post_mutant_check(self):
"""Tests whether a sequence of amino acids of a DNA is mutant """
data = {
'dna': [
"TTATTT",
"CAGTGC",
"TTATTT",
"TTATTT",
"GCGTCA",
"TTATTT",
]
}
response = self.client.post(self._get_url(), data=data)
self.assertContains(response, 'DNA is not mutant', status_code=403)
data = {
'dna': [
"ATGCGA",
"CAGTGC",
"TTATGT",
"AGAAGG",
"CCCCTA",
"TCACTG"
]
}
response = self.client.post(self._get_url(), data=data)
self.assertContains(response, 'DNA is mutant', status_code=200)
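# Illustrative (not part of the original suite): the tests run under Django's
# standard test runner, e.g.
#   python manage.py test mutant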
| [
"[email protected]"
] | |
dcd2341b9a1ca25f9d958de48d6f57195b81b110 | 114372880a520f30f2d07b1b13a146f96454fd2e | /backend/channel_plugin/channel_plugin/contrib/sites/migrations/0003_set_site_domain_and_name.py | ff7bbcef9c5f692431794448bd76e7cd3743344e | [
"MIT"
] | permissive | wenotch/zc_plugin_channels | 0ddb92064e6013a3f65d6ee65c526e0e605df25a | 4e88aa4a3a1a140848c22a0fd90c6486560c3deb | refs/heads/main | 2023-07-19T04:24:40.479183 | 2021-09-03T14:31:28 | 2021-09-03T14:31:28 | 401,643,447 | 1 | 0 | null | 2021-08-31T09:20:54 | 2021-08-31T09:20:53 | null | UTF-8 | Python | false | false | 1,012 | py | """
To understand why this file is here, please read:
http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django
"""
from django.conf import settings
from django.db import migrations
def update_site_forward(apps, schema_editor):
"""Set site domain and name."""
Site = apps.get_model("sites", "Site")
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
"domain": "example.com",
"name": "Channel Plugin",
},
)
def update_site_backward(apps, schema_editor):
"""Revert site domain and name to default."""
Site = apps.get_model("sites", "Site")
Site.objects.update_or_create(
id=settings.SITE_ID, defaults={"domain": "example.com", "name": "example.com"}
)
class Migration(migrations.Migration):
dependencies = [("sites", "0002_alter_domain_unique")]
operations = [migrations.RunPython(update_site_forward, update_site_backward)]
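# Illustrative (not part of the original migration): it is applied with the
# standard Django command, e.g.
#   python manage.py migrate sites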
| [
"[email protected]"
] | |
e237d04b87dc293db59d1d7946558444d2c591d0 | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /know_first_place_about_other_year/think_fact.py | 5ccb9491f21faeaab2c0396088561a505e242764 | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219 | py |
#! /usr/bin/env python
def world(str_arg):
part_or_different_eye(str_arg)
print('early_work')
def part_or_different_eye(str_arg):
print(str_arg)
if __name__ == '__main__':
world('new_place_and_way')
| [
"[email protected]"
] | |
bcc158707990ef3fd6461688eaaac914048eb6f5 | 783bbebfafd546b310fe3425e463ce064d88aabc | /msgraph-cli-extensions/v1_0/sites_v1_0/azext_sites_v1_0/vendored_sdks/sites/_sites.py | ea7966ba52e08d879263aedaf0d50f500c3d5838 | [
"MIT"
] | permissive | thewahome/msgraph-cli | 0d45bcc487bfa038023dee0b43127c3f59f6e12d | 80ce5f9ff1ce2105af473b28576aedcfa76e0d6a | refs/heads/main | 2023-06-02T05:29:13.317801 | 2021-05-28T04:58:55 | 2021-05-28T04:58:55 | 371,593,690 | 0 | 0 | NOASSERTION | 2021-05-28T05:48:41 | 2021-05-28T05:48:41 | null | UTF-8 | Python | false | false | 40,750 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.mgmt.core import ARMPipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Optional
from azure.core.credentials import TokenCredential
from ._configuration import SitesConfiguration
from .operations import GroupsOperations
from .operations import SitesSiteOperations
from .operations import SitesOperations
from .operations import SitesContentTypesOperations
from .operations import SitesListsOperations
from .operations import SitesListsContentTypesOperations
from .operations import SitesListsItemsOperations
from .operations import SitesListsItemsVersionsOperations
from .operations import SitesOnenoteNotebooksOperations
from .operations import SitesOnenoteNotebooksSectionGroupsParentNotebookOperations
from .operations import SitesOnenoteNotebooksSectionGroupsSectionsOperations
from .operations import SitesOnenoteNotebooksSectionGroupsSectionsPagesOperations
from .operations import SitesOnenoteNotebooksSectionGroupsSectionsPagesParentNotebookOperations
from .operations import SitesOnenoteNotebooksSectionGroupsSectionsPagesParentSectionOperations
from .operations import SitesOnenoteNotebooksSectionGroupsSectionsParentNotebookOperations
from .operations import SitesOnenoteNotebooksSectionsOperations
from .operations import SitesOnenoteNotebooksSectionsPagesOperations
from .operations import SitesOnenoteNotebooksSectionsPagesParentNotebookOperations
from .operations import SitesOnenoteNotebooksSectionsPagesParentSectionOperations
from .operations import SitesOnenoteNotebooksSectionsParentNotebookOperations
from .operations import SitesOnenoteNotebooksSectionsParentSectionGroupParentNotebookOperations
from .operations import SitesOnenoteNotebooksSectionsParentSectionGroupSectionsOperations
from .operations import SitesOnenotePagesOperations
from .operations import SitesOnenotePagesParentNotebookOperations
from .operations import SitesOnenotePagesParentNotebookSectionGroupsParentNotebookOperations
from .operations import SitesOnenotePagesParentNotebookSectionGroupsSectionsOperations
from .operations import SitesOnenotePagesParentNotebookSectionGroupsSectionsPagesOperations
from .operations import SitesOnenotePagesParentNotebookSectionGroupsSectionsParentNotebookOperations
from .operations import SitesOnenotePagesParentNotebookSectionsOperations
from .operations import SitesOnenotePagesParentNotebookSectionsPagesOperations
from .operations import SitesOnenotePagesParentNotebookSectionsParentNotebookOperations
from .operations import SitesOnenotePagesParentNotebookSectionsParentSectionGroupParentNotebookOperations
from .operations import SitesOnenotePagesParentNotebookSectionsParentSectionGroupSectionsOperations
from .operations import SitesOnenotePagesParentSectionOperations
from .operations import SitesOnenotePagesParentSectionPagesOperations
from .operations import SitesOnenotePagesParentSectionParentNotebookOperations
from .operations import SitesOnenotePagesParentSectionParentNotebookSectionGroupsParentNotebookOperations
from .operations import SitesOnenotePagesParentSectionParentNotebookSectionGroupsSectionsOperations
from .operations import SitesOnenotePagesParentSectionParentNotebookSectionsOperations
from .operations import SitesOnenotePagesParentSectionParentSectionGroupParentNotebookOperations
from .operations import SitesOnenotePagesParentSectionParentSectionGroupParentNotebookSectionsOperations
from .operations import SitesOnenotePagesParentSectionParentSectionGroupSectionsOperations
from .operations import SitesOnenoteSectionGroupsParentNotebookOperations
from .operations import SitesOnenoteSectionGroupsParentNotebookSectionsOperations
from .operations import SitesOnenoteSectionGroupsParentNotebookSectionsPagesOperations
from .operations import SitesOnenoteSectionGroupsParentNotebookSectionsPagesParentNotebookOperations
from .operations import SitesOnenoteSectionGroupsParentNotebookSectionsPagesParentSectionOperations
from .operations import SitesOnenoteSectionGroupsParentNotebookSectionsParentNotebookOperations
from .operations import SitesOnenoteSectionGroupsSectionsOperations
from .operations import SitesOnenoteSectionGroupsSectionsPagesOperations
from .operations import SitesOnenoteSectionGroupsSectionsPagesParentNotebookOperations
from .operations import SitesOnenoteSectionGroupsSectionsPagesParentNotebookSectionsOperations
from .operations import SitesOnenoteSectionGroupsSectionsPagesParentSectionOperations
from .operations import SitesOnenoteSectionGroupsSectionsParentNotebookOperations
from .operations import SitesOnenoteSectionGroupsSectionsParentNotebookSectionsOperations
from .operations import SitesOnenoteSectionsOperations
from .operations import SitesOnenoteSectionsPagesOperations
from .operations import SitesOnenoteSectionsPagesParentNotebookOperations
from .operations import SitesOnenoteSectionsPagesParentNotebookSectionGroupsParentNotebookOperations
from .operations import SitesOnenoteSectionsPagesParentNotebookSectionGroupsSectionsOperations
from .operations import SitesOnenoteSectionsPagesParentNotebookSectionsOperations
from .operations import SitesOnenoteSectionsPagesParentSectionOperations
from .operations import SitesOnenoteSectionsParentNotebookOperations
from .operations import SitesOnenoteSectionsParentNotebookSectionGroupsParentNotebookOperations
from .operations import SitesOnenoteSectionsParentNotebookSectionGroupsSectionsOperations
from .operations import SitesOnenoteSectionsParentNotebookSectionsOperations
from .operations import SitesOnenoteSectionsParentSectionGroupParentNotebookOperations
from .operations import SitesOnenoteSectionsParentSectionGroupParentNotebookSectionsOperations
from .operations import SitesOnenoteSectionsParentSectionGroupSectionsOperations
from .operations import UsersOperations
from . import models
class Sites(object):
"""Sites.
:ivar groups: GroupsOperations operations
:vartype groups: sites.operations.GroupsOperations
:ivar sites_site: SitesSiteOperations operations
:vartype sites_site: sites.operations.SitesSiteOperations
:ivar sites: SitesOperations operations
:vartype sites: sites.operations.SitesOperations
:ivar sites_content_types: SitesContentTypesOperations operations
:vartype sites_content_types: sites.operations.SitesContentTypesOperations
:ivar sites_lists: SitesListsOperations operations
:vartype sites_lists: sites.operations.SitesListsOperations
:ivar sites_lists_content_types: SitesListsContentTypesOperations operations
:vartype sites_lists_content_types: sites.operations.SitesListsContentTypesOperations
:ivar sites_lists_items: SitesListsItemsOperations operations
:vartype sites_lists_items: sites.operations.SitesListsItemsOperations
:ivar sites_lists_items_versions: SitesListsItemsVersionsOperations operations
:vartype sites_lists_items_versions: sites.operations.SitesListsItemsVersionsOperations
:ivar sites_onenote_notebooks: SitesOnenoteNotebooksOperations operations
:vartype sites_onenote_notebooks: sites.operations.SitesOnenoteNotebooksOperations
:ivar sites_onenote_notebooks_section_groups_parent_notebook: SitesOnenoteNotebooksSectionGroupsParentNotebookOperations operations
:vartype sites_onenote_notebooks_section_groups_parent_notebook: sites.operations.SitesOnenoteNotebooksSectionGroupsParentNotebookOperations
:ivar sites_onenote_notebooks_section_groups_sections: SitesOnenoteNotebooksSectionGroupsSectionsOperations operations
:vartype sites_onenote_notebooks_section_groups_sections: sites.operations.SitesOnenoteNotebooksSectionGroupsSectionsOperations
:ivar sites_onenote_notebooks_section_groups_sections_pages: SitesOnenoteNotebooksSectionGroupsSectionsPagesOperations operations
:vartype sites_onenote_notebooks_section_groups_sections_pages: sites.operations.SitesOnenoteNotebooksSectionGroupsSectionsPagesOperations
:ivar sites_onenote_notebooks_section_groups_sections_pages_parent_notebook: SitesOnenoteNotebooksSectionGroupsSectionsPagesParentNotebookOperations operations
:vartype sites_onenote_notebooks_section_groups_sections_pages_parent_notebook: sites.operations.SitesOnenoteNotebooksSectionGroupsSectionsPagesParentNotebookOperations
:ivar sites_onenote_notebooks_section_groups_sections_pages_parent_section: SitesOnenoteNotebooksSectionGroupsSectionsPagesParentSectionOperations operations
:vartype sites_onenote_notebooks_section_groups_sections_pages_parent_section: sites.operations.SitesOnenoteNotebooksSectionGroupsSectionsPagesParentSectionOperations
:ivar sites_onenote_notebooks_section_groups_sections_parent_notebook: SitesOnenoteNotebooksSectionGroupsSectionsParentNotebookOperations operations
:vartype sites_onenote_notebooks_section_groups_sections_parent_notebook: sites.operations.SitesOnenoteNotebooksSectionGroupsSectionsParentNotebookOperations
:ivar sites_onenote_notebooks_sections: SitesOnenoteNotebooksSectionsOperations operations
:vartype sites_onenote_notebooks_sections: sites.operations.SitesOnenoteNotebooksSectionsOperations
:ivar sites_onenote_notebooks_sections_pages: SitesOnenoteNotebooksSectionsPagesOperations operations
:vartype sites_onenote_notebooks_sections_pages: sites.operations.SitesOnenoteNotebooksSectionsPagesOperations
:ivar sites_onenote_notebooks_sections_pages_parent_notebook: SitesOnenoteNotebooksSectionsPagesParentNotebookOperations operations
:vartype sites_onenote_notebooks_sections_pages_parent_notebook: sites.operations.SitesOnenoteNotebooksSectionsPagesParentNotebookOperations
:ivar sites_onenote_notebooks_sections_pages_parent_section: SitesOnenoteNotebooksSectionsPagesParentSectionOperations operations
:vartype sites_onenote_notebooks_sections_pages_parent_section: sites.operations.SitesOnenoteNotebooksSectionsPagesParentSectionOperations
:ivar sites_onenote_notebooks_sections_parent_notebook: SitesOnenoteNotebooksSectionsParentNotebookOperations operations
:vartype sites_onenote_notebooks_sections_parent_notebook: sites.operations.SitesOnenoteNotebooksSectionsParentNotebookOperations
:ivar sites_onenote_notebooks_sections_parent_section_group_parent_notebook: SitesOnenoteNotebooksSectionsParentSectionGroupParentNotebookOperations operations
:vartype sites_onenote_notebooks_sections_parent_section_group_parent_notebook: sites.operations.SitesOnenoteNotebooksSectionsParentSectionGroupParentNotebookOperations
:ivar sites_onenote_notebooks_sections_parent_section_group_sections: SitesOnenoteNotebooksSectionsParentSectionGroupSectionsOperations operations
:vartype sites_onenote_notebooks_sections_parent_section_group_sections: sites.operations.SitesOnenoteNotebooksSectionsParentSectionGroupSectionsOperations
:ivar sites_onenote_pages: SitesOnenotePagesOperations operations
:vartype sites_onenote_pages: sites.operations.SitesOnenotePagesOperations
:ivar sites_onenote_pages_parent_notebook: SitesOnenotePagesParentNotebookOperations operations
:vartype sites_onenote_pages_parent_notebook: sites.operations.SitesOnenotePagesParentNotebookOperations
:ivar sites_onenote_pages_parent_notebook_section_groups_parent_notebook: SitesOnenotePagesParentNotebookSectionGroupsParentNotebookOperations operations
:vartype sites_onenote_pages_parent_notebook_section_groups_parent_notebook: sites.operations.SitesOnenotePagesParentNotebookSectionGroupsParentNotebookOperations
:ivar sites_onenote_pages_parent_notebook_section_groups_sections: SitesOnenotePagesParentNotebookSectionGroupsSectionsOperations operations
:vartype sites_onenote_pages_parent_notebook_section_groups_sections: sites.operations.SitesOnenotePagesParentNotebookSectionGroupsSectionsOperations
:ivar sites_onenote_pages_parent_notebook_section_groups_sections_pages: SitesOnenotePagesParentNotebookSectionGroupsSectionsPagesOperations operations
:vartype sites_onenote_pages_parent_notebook_section_groups_sections_pages: sites.operations.SitesOnenotePagesParentNotebookSectionGroupsSectionsPagesOperations
:ivar sites_onenote_pages_parent_notebook_section_groups_sections_parent_notebook: SitesOnenotePagesParentNotebookSectionGroupsSectionsParentNotebookOperations operations
:vartype sites_onenote_pages_parent_notebook_section_groups_sections_parent_notebook: sites.operations.SitesOnenotePagesParentNotebookSectionGroupsSectionsParentNotebookOperations
:ivar sites_onenote_pages_parent_notebook_sections: SitesOnenotePagesParentNotebookSectionsOperations operations
:vartype sites_onenote_pages_parent_notebook_sections: sites.operations.SitesOnenotePagesParentNotebookSectionsOperations
:ivar sites_onenote_pages_parent_notebook_sections_pages: SitesOnenotePagesParentNotebookSectionsPagesOperations operations
:vartype sites_onenote_pages_parent_notebook_sections_pages: sites.operations.SitesOnenotePagesParentNotebookSectionsPagesOperations
:ivar sites_onenote_pages_parent_notebook_sections_parent_notebook: SitesOnenotePagesParentNotebookSectionsParentNotebookOperations operations
:vartype sites_onenote_pages_parent_notebook_sections_parent_notebook: sites.operations.SitesOnenotePagesParentNotebookSectionsParentNotebookOperations
:ivar sites_onenote_pages_parent_notebook_sections_parent_section_group_parent_notebook: SitesOnenotePagesParentNotebookSectionsParentSectionGroupParentNotebookOperations operations
:vartype sites_onenote_pages_parent_notebook_sections_parent_section_group_parent_notebook: sites.operations.SitesOnenotePagesParentNotebookSectionsParentSectionGroupParentNotebookOperations
:ivar sites_onenote_pages_parent_notebook_sections_parent_section_group_sections: SitesOnenotePagesParentNotebookSectionsParentSectionGroupSectionsOperations operations
:vartype sites_onenote_pages_parent_notebook_sections_parent_section_group_sections: sites.operations.SitesOnenotePagesParentNotebookSectionsParentSectionGroupSectionsOperations
:ivar sites_onenote_pages_parent_section: SitesOnenotePagesParentSectionOperations operations
:vartype sites_onenote_pages_parent_section: sites.operations.SitesOnenotePagesParentSectionOperations
:ivar sites_onenote_pages_parent_section_pages: SitesOnenotePagesParentSectionPagesOperations operations
:vartype sites_onenote_pages_parent_section_pages: sites.operations.SitesOnenotePagesParentSectionPagesOperations
:ivar sites_onenote_pages_parent_section_parent_notebook: SitesOnenotePagesParentSectionParentNotebookOperations operations
:vartype sites_onenote_pages_parent_section_parent_notebook: sites.operations.SitesOnenotePagesParentSectionParentNotebookOperations
:ivar sites_onenote_pages_parent_section_parent_notebook_section_groups_parent_notebook: SitesOnenotePagesParentSectionParentNotebookSectionGroupsParentNotebookOperations operations
:vartype sites_onenote_pages_parent_section_parent_notebook_section_groups_parent_notebook: sites.operations.SitesOnenotePagesParentSectionParentNotebookSectionGroupsParentNotebookOperations
:ivar sites_onenote_pages_parent_section_parent_notebook_section_groups_sections: SitesOnenotePagesParentSectionParentNotebookSectionGroupsSectionsOperations operations
:vartype sites_onenote_pages_parent_section_parent_notebook_section_groups_sections: sites.operations.SitesOnenotePagesParentSectionParentNotebookSectionGroupsSectionsOperations
:ivar sites_onenote_pages_parent_section_parent_notebook_sections: SitesOnenotePagesParentSectionParentNotebookSectionsOperations operations
:vartype sites_onenote_pages_parent_section_parent_notebook_sections: sites.operations.SitesOnenotePagesParentSectionParentNotebookSectionsOperations
:ivar sites_onenote_pages_parent_section_parent_section_group_parent_notebook: SitesOnenotePagesParentSectionParentSectionGroupParentNotebookOperations operations
:vartype sites_onenote_pages_parent_section_parent_section_group_parent_notebook: sites.operations.SitesOnenotePagesParentSectionParentSectionGroupParentNotebookOperations
:ivar sites_onenote_pages_parent_section_parent_section_group_parent_notebook_sections: SitesOnenotePagesParentSectionParentSectionGroupParentNotebookSectionsOperations operations
:vartype sites_onenote_pages_parent_section_parent_section_group_parent_notebook_sections: sites.operations.SitesOnenotePagesParentSectionParentSectionGroupParentNotebookSectionsOperations
:ivar sites_onenote_pages_parent_section_parent_section_group_sections: SitesOnenotePagesParentSectionParentSectionGroupSectionsOperations operations
:vartype sites_onenote_pages_parent_section_parent_section_group_sections: sites.operations.SitesOnenotePagesParentSectionParentSectionGroupSectionsOperations
:ivar sites_onenote_section_groups_parent_notebook: SitesOnenoteSectionGroupsParentNotebookOperations operations
:vartype sites_onenote_section_groups_parent_notebook: sites.operations.SitesOnenoteSectionGroupsParentNotebookOperations
:ivar sites_onenote_section_groups_parent_notebook_sections: SitesOnenoteSectionGroupsParentNotebookSectionsOperations operations
:vartype sites_onenote_section_groups_parent_notebook_sections: sites.operations.SitesOnenoteSectionGroupsParentNotebookSectionsOperations
:ivar sites_onenote_section_groups_parent_notebook_sections_pages: SitesOnenoteSectionGroupsParentNotebookSectionsPagesOperations operations
:vartype sites_onenote_section_groups_parent_notebook_sections_pages: sites.operations.SitesOnenoteSectionGroupsParentNotebookSectionsPagesOperations
:ivar sites_onenote_section_groups_parent_notebook_sections_pages_parent_notebook: SitesOnenoteSectionGroupsParentNotebookSectionsPagesParentNotebookOperations operations
:vartype sites_onenote_section_groups_parent_notebook_sections_pages_parent_notebook: sites.operations.SitesOnenoteSectionGroupsParentNotebookSectionsPagesParentNotebookOperations
:ivar sites_onenote_section_groups_parent_notebook_sections_pages_parent_section: SitesOnenoteSectionGroupsParentNotebookSectionsPagesParentSectionOperations operations
:vartype sites_onenote_section_groups_parent_notebook_sections_pages_parent_section: sites.operations.SitesOnenoteSectionGroupsParentNotebookSectionsPagesParentSectionOperations
:ivar sites_onenote_section_groups_parent_notebook_sections_parent_notebook: SitesOnenoteSectionGroupsParentNotebookSectionsParentNotebookOperations operations
:vartype sites_onenote_section_groups_parent_notebook_sections_parent_notebook: sites.operations.SitesOnenoteSectionGroupsParentNotebookSectionsParentNotebookOperations
:ivar sites_onenote_section_groups_sections: SitesOnenoteSectionGroupsSectionsOperations operations
:vartype sites_onenote_section_groups_sections: sites.operations.SitesOnenoteSectionGroupsSectionsOperations
:ivar sites_onenote_section_groups_sections_pages: SitesOnenoteSectionGroupsSectionsPagesOperations operations
:vartype sites_onenote_section_groups_sections_pages: sites.operations.SitesOnenoteSectionGroupsSectionsPagesOperations
:ivar sites_onenote_section_groups_sections_pages_parent_notebook: SitesOnenoteSectionGroupsSectionsPagesParentNotebookOperations operations
:vartype sites_onenote_section_groups_sections_pages_parent_notebook: sites.operations.SitesOnenoteSectionGroupsSectionsPagesParentNotebookOperations
:ivar sites_onenote_section_groups_sections_pages_parent_notebook_sections: SitesOnenoteSectionGroupsSectionsPagesParentNotebookSectionsOperations operations
:vartype sites_onenote_section_groups_sections_pages_parent_notebook_sections: sites.operations.SitesOnenoteSectionGroupsSectionsPagesParentNotebookSectionsOperations
:ivar sites_onenote_section_groups_sections_pages_parent_section: SitesOnenoteSectionGroupsSectionsPagesParentSectionOperations operations
:vartype sites_onenote_section_groups_sections_pages_parent_section: sites.operations.SitesOnenoteSectionGroupsSectionsPagesParentSectionOperations
:ivar sites_onenote_section_groups_sections_parent_notebook: SitesOnenoteSectionGroupsSectionsParentNotebookOperations operations
:vartype sites_onenote_section_groups_sections_parent_notebook: sites.operations.SitesOnenoteSectionGroupsSectionsParentNotebookOperations
:ivar sites_onenote_section_groups_sections_parent_notebook_sections: SitesOnenoteSectionGroupsSectionsParentNotebookSectionsOperations operations
:vartype sites_onenote_section_groups_sections_parent_notebook_sections: sites.operations.SitesOnenoteSectionGroupsSectionsParentNotebookSectionsOperations
:ivar sites_onenote_sections: SitesOnenoteSectionsOperations operations
:vartype sites_onenote_sections: sites.operations.SitesOnenoteSectionsOperations
:ivar sites_onenote_sections_pages: SitesOnenoteSectionsPagesOperations operations
:vartype sites_onenote_sections_pages: sites.operations.SitesOnenoteSectionsPagesOperations
:ivar sites_onenote_sections_pages_parent_notebook: SitesOnenoteSectionsPagesParentNotebookOperations operations
:vartype sites_onenote_sections_pages_parent_notebook: sites.operations.SitesOnenoteSectionsPagesParentNotebookOperations
:ivar sites_onenote_sections_pages_parent_notebook_section_groups_parent_notebook: SitesOnenoteSectionsPagesParentNotebookSectionGroupsParentNotebookOperations operations
:vartype sites_onenote_sections_pages_parent_notebook_section_groups_parent_notebook: sites.operations.SitesOnenoteSectionsPagesParentNotebookSectionGroupsParentNotebookOperations
:ivar sites_onenote_sections_pages_parent_notebook_section_groups_sections: SitesOnenoteSectionsPagesParentNotebookSectionGroupsSectionsOperations operations
:vartype sites_onenote_sections_pages_parent_notebook_section_groups_sections: sites.operations.SitesOnenoteSectionsPagesParentNotebookSectionGroupsSectionsOperations
:ivar sites_onenote_sections_pages_parent_notebook_sections: SitesOnenoteSectionsPagesParentNotebookSectionsOperations operations
:vartype sites_onenote_sections_pages_parent_notebook_sections: sites.operations.SitesOnenoteSectionsPagesParentNotebookSectionsOperations
:ivar sites_onenote_sections_pages_parent_section: SitesOnenoteSectionsPagesParentSectionOperations operations
:vartype sites_onenote_sections_pages_parent_section: sites.operations.SitesOnenoteSectionsPagesParentSectionOperations
:ivar sites_onenote_sections_parent_notebook: SitesOnenoteSectionsParentNotebookOperations operations
:vartype sites_onenote_sections_parent_notebook: sites.operations.SitesOnenoteSectionsParentNotebookOperations
:ivar sites_onenote_sections_parent_notebook_section_groups_parent_notebook: SitesOnenoteSectionsParentNotebookSectionGroupsParentNotebookOperations operations
:vartype sites_onenote_sections_parent_notebook_section_groups_parent_notebook: sites.operations.SitesOnenoteSectionsParentNotebookSectionGroupsParentNotebookOperations
:ivar sites_onenote_sections_parent_notebook_section_groups_sections: SitesOnenoteSectionsParentNotebookSectionGroupsSectionsOperations operations
:vartype sites_onenote_sections_parent_notebook_section_groups_sections: sites.operations.SitesOnenoteSectionsParentNotebookSectionGroupsSectionsOperations
:ivar sites_onenote_sections_parent_notebook_sections: SitesOnenoteSectionsParentNotebookSectionsOperations operations
:vartype sites_onenote_sections_parent_notebook_sections: sites.operations.SitesOnenoteSectionsParentNotebookSectionsOperations
:ivar sites_onenote_sections_parent_section_group_parent_notebook: SitesOnenoteSectionsParentSectionGroupParentNotebookOperations operations
:vartype sites_onenote_sections_parent_section_group_parent_notebook: sites.operations.SitesOnenoteSectionsParentSectionGroupParentNotebookOperations
:ivar sites_onenote_sections_parent_section_group_parent_notebook_sections: SitesOnenoteSectionsParentSectionGroupParentNotebookSectionsOperations operations
:vartype sites_onenote_sections_parent_section_group_parent_notebook_sections: sites.operations.SitesOnenoteSectionsParentSectionGroupParentNotebookSectionsOperations
:ivar sites_onenote_sections_parent_section_group_sections: SitesOnenoteSectionsParentSectionGroupSectionsOperations operations
:vartype sites_onenote_sections_parent_section_group_sections: sites.operations.SitesOnenoteSectionsParentSectionGroupSectionsOperations
:ivar users: UsersOperations operations
:vartype users: sites.operations.UsersOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param top: Show only the first n items.
:type top: int
:param skip: Skip the first n items.
:type skip: int
:param search: Search items by search phrases.
:type search: str
:param filter: Filter items by property values.
:type filter: str
:param count: Include count of items.
:type count: bool
:param str base_url: Service URL
"""
def __init__(
self,
credential, # type: "TokenCredential"
top=None, # type: Optional[int]
skip=None, # type: Optional[int]
search=None, # type: Optional[str]
filter=None, # type: Optional[str]
count=None, # type: Optional[bool]
base_url=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> None
if not base_url:
base_url = 'https://graph.microsoft.com/v1.0'
self._config = SitesConfiguration(credential, top, skip, search, filter, count, **kwargs)
self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.groups = GroupsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_site = SitesSiteOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites = SitesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_content_types = SitesContentTypesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_lists = SitesListsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_lists_content_types = SitesListsContentTypesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_lists_items = SitesListsItemsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_lists_items_versions = SitesListsItemsVersionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_notebooks = SitesOnenoteNotebooksOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_notebooks_section_groups_parent_notebook = SitesOnenoteNotebooksSectionGroupsParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_notebooks_section_groups_sections = SitesOnenoteNotebooksSectionGroupsSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_notebooks_section_groups_sections_pages = SitesOnenoteNotebooksSectionGroupsSectionsPagesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_notebooks_section_groups_sections_pages_parent_notebook = SitesOnenoteNotebooksSectionGroupsSectionsPagesParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_notebooks_section_groups_sections_pages_parent_section = SitesOnenoteNotebooksSectionGroupsSectionsPagesParentSectionOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_notebooks_section_groups_sections_parent_notebook = SitesOnenoteNotebooksSectionGroupsSectionsParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_notebooks_sections = SitesOnenoteNotebooksSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_notebooks_sections_pages = SitesOnenoteNotebooksSectionsPagesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_notebooks_sections_pages_parent_notebook = SitesOnenoteNotebooksSectionsPagesParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_notebooks_sections_pages_parent_section = SitesOnenoteNotebooksSectionsPagesParentSectionOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_notebooks_sections_parent_notebook = SitesOnenoteNotebooksSectionsParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_notebooks_sections_parent_section_group_parent_notebook = SitesOnenoteNotebooksSectionsParentSectionGroupParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_notebooks_sections_parent_section_group_sections = SitesOnenoteNotebooksSectionsParentSectionGroupSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages = SitesOnenotePagesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_notebook = SitesOnenotePagesParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_notebook_section_groups_parent_notebook = SitesOnenotePagesParentNotebookSectionGroupsParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_notebook_section_groups_sections = SitesOnenotePagesParentNotebookSectionGroupsSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_notebook_section_groups_sections_pages = SitesOnenotePagesParentNotebookSectionGroupsSectionsPagesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_notebook_section_groups_sections_parent_notebook = SitesOnenotePagesParentNotebookSectionGroupsSectionsParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_notebook_sections = SitesOnenotePagesParentNotebookSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_notebook_sections_pages = SitesOnenotePagesParentNotebookSectionsPagesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_notebook_sections_parent_notebook = SitesOnenotePagesParentNotebookSectionsParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_notebook_sections_parent_section_group_parent_notebook = SitesOnenotePagesParentNotebookSectionsParentSectionGroupParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_notebook_sections_parent_section_group_sections = SitesOnenotePagesParentNotebookSectionsParentSectionGroupSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_section = SitesOnenotePagesParentSectionOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_section_pages = SitesOnenotePagesParentSectionPagesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_section_parent_notebook = SitesOnenotePagesParentSectionParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_section_parent_notebook_section_groups_parent_notebook = SitesOnenotePagesParentSectionParentNotebookSectionGroupsParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_section_parent_notebook_section_groups_sections = SitesOnenotePagesParentSectionParentNotebookSectionGroupsSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_section_parent_notebook_sections = SitesOnenotePagesParentSectionParentNotebookSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_section_parent_section_group_parent_notebook = SitesOnenotePagesParentSectionParentSectionGroupParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_section_parent_section_group_parent_notebook_sections = SitesOnenotePagesParentSectionParentSectionGroupParentNotebookSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_pages_parent_section_parent_section_group_sections = SitesOnenotePagesParentSectionParentSectionGroupSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_section_groups_parent_notebook = SitesOnenoteSectionGroupsParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_section_groups_parent_notebook_sections = SitesOnenoteSectionGroupsParentNotebookSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_section_groups_parent_notebook_sections_pages = SitesOnenoteSectionGroupsParentNotebookSectionsPagesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_section_groups_parent_notebook_sections_pages_parent_notebook = SitesOnenoteSectionGroupsParentNotebookSectionsPagesParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_section_groups_parent_notebook_sections_pages_parent_section = SitesOnenoteSectionGroupsParentNotebookSectionsPagesParentSectionOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_section_groups_parent_notebook_sections_parent_notebook = SitesOnenoteSectionGroupsParentNotebookSectionsParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_section_groups_sections = SitesOnenoteSectionGroupsSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_section_groups_sections_pages = SitesOnenoteSectionGroupsSectionsPagesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_section_groups_sections_pages_parent_notebook = SitesOnenoteSectionGroupsSectionsPagesParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_section_groups_sections_pages_parent_notebook_sections = SitesOnenoteSectionGroupsSectionsPagesParentNotebookSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_section_groups_sections_pages_parent_section = SitesOnenoteSectionGroupsSectionsPagesParentSectionOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_section_groups_sections_parent_notebook = SitesOnenoteSectionGroupsSectionsParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_section_groups_sections_parent_notebook_sections = SitesOnenoteSectionGroupsSectionsParentNotebookSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_sections = SitesOnenoteSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_sections_pages = SitesOnenoteSectionsPagesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_sections_pages_parent_notebook = SitesOnenoteSectionsPagesParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_sections_pages_parent_notebook_section_groups_parent_notebook = SitesOnenoteSectionsPagesParentNotebookSectionGroupsParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_sections_pages_parent_notebook_section_groups_sections = SitesOnenoteSectionsPagesParentNotebookSectionGroupsSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_sections_pages_parent_notebook_sections = SitesOnenoteSectionsPagesParentNotebookSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_sections_pages_parent_section = SitesOnenoteSectionsPagesParentSectionOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_sections_parent_notebook = SitesOnenoteSectionsParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_sections_parent_notebook_section_groups_parent_notebook = SitesOnenoteSectionsParentNotebookSectionGroupsParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_sections_parent_notebook_section_groups_sections = SitesOnenoteSectionsParentNotebookSectionGroupsSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_sections_parent_notebook_sections = SitesOnenoteSectionsParentNotebookSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_sections_parent_section_group_parent_notebook = SitesOnenoteSectionsParentSectionGroupParentNotebookOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_sections_parent_section_group_parent_notebook_sections = SitesOnenoteSectionsParentSectionGroupParentNotebookSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.sites_onenote_sections_parent_section_group_sections = SitesOnenoteSectionsParentSectionGroupSectionsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.users = UsersOperations(
self._client, self._config, self._serialize, self._deserialize)
def close(self):
# type: () -> None
self._client.close()
def __enter__(self):
# type: () -> Sites
self._client.__enter__()
return self
def __exit__(self, *exc_details):
# type: (Any) -> None
self._client.__exit__(*exc_details)
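# Minimal usage sketch (illustrative only; not from the generated client).
# Any azure.core TokenCredential works -- InteractiveBrowserCredential from
# azure-identity is just an assumed example:
#   from azure.identity import InteractiveBrowserCredential
#   client = Sites(credential=InteractiveBrowserCredential(), top=5)
#   try:
#       sites = client.sites_site  # entry point for site-level operations
#   finally:
#       client.close()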
| [
"[email protected]"
] | |
5640887551d1f6accdd6e95361bd131529b00b45 | 3899dd3debab668ef0c4b91c12127e714bdf3d6d | /venv/Lib/site-packages/tensorflow/_api/v1/keras/initializers/__init__.py | aeaecd1674940e94d41121da513763e784bc99ac | [] | no_license | SphericalPotatoInVacuum/CNNDDDD | b2f79521581a15d522d8bb52f81b731a3c6a4db4 | 03c5c0e7cb922f53f31025b7dd78287a19392824 | refs/heads/master | 2020-04-21T16:10:25.909319 | 2019-02-08T06:04:42 | 2019-02-08T06:04:42 | 169,691,960 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,855 | py | # This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Keras initializer serialization / deserialization.
"""
from __future__ import print_function
from tensorflow.python import Constant
from tensorflow.python import Constant as constant
from tensorflow.python import GlorotNormal as glorot_normal
from tensorflow.python import GlorotUniform as glorot_uniform
from tensorflow.python import Identity
from tensorflow.python import Identity as identity
from tensorflow.python import Initializer
from tensorflow.python import Ones
from tensorflow.python import Ones as ones
from tensorflow.python import Orthogonal
from tensorflow.python import Orthogonal as orthogonal
from tensorflow.python import VarianceScaling
from tensorflow.python import Zeros
from tensorflow.python import Zeros as zeros
from tensorflow.python import he_normal
from tensorflow.python import he_uniform
from tensorflow.python import lecun_normal
from tensorflow.python import lecun_uniform
from tensorflow.python.keras.initializers import RandomNormal
from tensorflow.python.keras.initializers import RandomNormal as normal
from tensorflow.python.keras.initializers import RandomNormal as random_normal
from tensorflow.python.keras.initializers import RandomUniform
from tensorflow.python.keras.initializers import RandomUniform as random_uniform
from tensorflow.python.keras.initializers import RandomUniform as uniform
from tensorflow.python.keras.initializers import TruncatedNormal
from tensorflow.python.keras.initializers import TruncatedNormal as truncated_normal
from tensorflow.python.keras.initializers import deserialize
from tensorflow.python.keras.initializers import get
from tensorflow.python.keras.initializers import serialize
del print_function
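# Illustrative usage (not part of this generated module): the exported names
# plug straight into layer constructors, e.g.
#   tf.keras.layers.Dense(10, kernel_initializer='he_normal')
# or, equivalently, kernel_initializer=he_normal().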
| [
"[email protected]"
] | |
9663660be1f81d43d647bd908fc286cfa0639e9c | 74912c10f66e90195bf87fd71e9a78fa09f017ec | /execroot/syntaxnet/bazel-out/local-opt/bin/syntaxnet/text_formats_test.runfiles/org_tensorflow/tensorflow/contrib/imperative/imperative_mode.py | 51f63712b82033879294775ce5d8396e1d8f3fbb | [] | no_license | koorukuroo/821bda42e7dedbfae9d936785dd2d125- | 1f0b8f496da8380c6e811ed294dc39a357a5a8b8 | 237fcc152ff436f32b2b5a3752a4181d279b3a57 | refs/heads/master | 2020-03-17T03:39:31.972750 | 2018-05-13T14:35:24 | 2018-05-13T14:35:24 | 133,244,956 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 136 | py | /root/.cache/bazel/_bazel_root/821bda42e7dedbfae9d936785dd2d125/external/org_tensorflow/tensorflow/contrib/imperative/imperative_mode.py | [
"k"
] | k |
4bbc91ad36755f133c5b473d3e59491f7d00cb10 | 8329282a8fda056d705c1af6dbcd0de1ed7ca25e | /.history/textutiles/textutiles/views_20210522204110.py | 615e455ae34300f081f062b014e9c40135cf0c9b | [] | no_license | ritikalohia/Django-beginners- | c069b16867407ef883bb00c6faf4f601921c118a | 829e28ab25201853de5c71a10ceff30496afea52 | refs/heads/main | 2023-05-04T03:34:29.082656 | 2021-05-22T17:38:21 | 2021-05-22T17:38:21 | 369,869,599 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,455 | py | #created
from django.http import HttpResponse
from django.shortcuts import render
def index(request):
#params = {'name' : 'Ritika', 'place' : 'Mars'}
return render(request, 'index.html')
#return HttpResponse("Home")
def contact(request):
    return render(request, 'contact.html')  # template name needs its file extension (assumed 'contact.html')
def analyze(request):
    # get the text submitted in the GET query string
    djtext = request.GET.get('text', 'default')
#check checkbox values
removepunc = request.GET.get('removepunc', 'off')
fullcaps = request.GET.get('fullcaps', 'off')
newlineremover = request.GET.get('newlineremover', 'off')
    spaceremover = request.GET.get('spaceremover', 'off')  # trailing comma removed: it turned the value into a tuple, so the "on" check below could never match
charcount = request.GET.get('charcount', 'off')
if removepunc == "on":
#analyzed = djtext
punctuations = '''!()-[]{};:'"\,<>./?@#$%^&*_'''
analyzed = ""
for char in djtext:
if char not in punctuations:
analyzed = analyzed + char
params ={'purpose':'removed punctuations', 'analyzed_text': analyzed}
#analyze the text
return render(request, 'analyze.html', params)
elif(fullcaps == "on"):
analyzed =""
for char in djtext:
analyzed = analyzed + char.upper()
params ={'purpose':'changed to UPPERCASE', 'analyzed_text': analyzed}
#analyze the text
return render(request, 'analyze.html', params)
elif(newlineremover== "on"):
analyzed =""
for char in djtext:
if char != '\n':
analyzed = analyzed + char
params ={'purpose':'Removed new lines', 'analyzed_text': analyzed}
#analyze the text
return render(request, 'analyze.html', params)
elif(spaceremover== "on"):
analyzed =""
for index, char in enumerate(djtext):
if not djtext[index] == " " and djtext[index+1]==" ":
analyzed = analyzed + char
params ={'purpose':'extra space removed', 'analyzed_text': analyzed}
#analyze the text
return render(request, 'analyze.html', params)
elif(charcount== "on"):
a=0
for char in djtext:
a = a + 1
        params = {'purpose': 'character count', 'analyzed_text': a}
#analyze the text
return render(request, 'analyze.html', params)
else:
return HttpResponse("Error")
# def capfirst(request):
# return HttpResponse("capitalize first") | [
"[email protected]"
] | |
e0d37014e4b7a9b35335efd96f0808e2c0f5f6ac | 48c07006c95631ccba60bf08942a0ad5f458b453 | /backend/wallet/admin.py | fd30a9aae90dd41aa5ec089a0797784423b75c71 | [] | no_license | crowdbotics-apps/irmflorida-21805 | 80359102e4cb3f949cbc4261f3345338d4ca5e31 | 6cca6ab134dc423a0dfe6c5180a3310471f44392 | refs/heads/master | 2022-12-31T22:17:17.827396 | 2020-10-22T13:09:20 | 2020-10-22T13:09:20 | 306,148,773 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 382 | py | from django.contrib import admin
from .models import (
CustomerWallet,
PaymentMethod,
PaymentTransaction,
TaskerPaymentAccount,
TaskerWallet,
)
admin.site.register(CustomerWallet)
admin.site.register(PaymentTransaction)
admin.site.register(PaymentMethod)
admin.site.register(TaskerWallet)
admin.site.register(TaskerPaymentAccount)
# Register your models here.
| [
"[email protected]"
] | |
e1a146982bf9b581fb39ab7883f2b8bc9180d00d | 287e70a3b2382d1b4c3dd4bc3454ec66a7af1ac5 | /tsuanlo/thongke.py | c8719e935443b954dc81c57ceb57dc9dc94cc2cb | [] | no_license | twgo/su5pio2 | 9b7c070bf3483312628d3c338949703147189a8a | 24a567604aec090fdbce11335bbfef56c71f6888 | refs/heads/master | 2020-03-11T14:19:23.606168 | 2018-10-16T12:22:26 | 2018-10-16T12:22:26 | 130,050,110 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,321 | py | from 臺灣言語工具.解析整理.拆文分析器 import 拆文分析器
from builtins import set
import json
from 臺灣言語工具.音標系統.台語 import 新白話字
from 臺灣言語工具.音標系統.閩南語.臺灣閩南語羅馬字拼音 import 臺灣閩南語羅馬字拼音通行韻母表
def su():
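    # Tally syllable and word statistics over the corpus read by tsuliau(),
    # write the totals to tsonghong.json, and dump three vocabulary lists.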
imchiat = 0
su = 0
tsuanpooji = set()
tsuanpoojitiau = set()
tsuanpoosu = set()
    for liong, 句物件 in enumerate(tsuliau(), 1):  # start at 1 so liong ends up as the record total
imchiat += len(句物件.篩出字物件())
su += len(句物件.網出詞物件())
for 詞物件 in 句物件.網出詞物件():
if not 詞物件.敢是標點符號():
tshingkhi = True
for 字物件 in 詞物件.篩出字物件():
tailo = 新白話字(字物件.型)
if (
tailo.音標 is not None and
tailo.韻 in 臺灣閩南語羅馬字拼音通行韻母表
):
tsuanpooji.add(字物件.看分詞().strip('0123456789'))
tsuanpoojitiau.add(字物件.看分詞())
else:
tshingkhi = False
if tshingkhi:
tsuanpoosu.add(詞物件.看分詞().lstrip('01'))
with open('tsonghong.json', 'w') as tong:
json.dump(
{
'總音節數(無算標點)': imchiat,
'總詞數(無算標點)': su,
'資料總數(詞、句、段)': liong,
'詞種類(無算標點)': len(tsuanpoosu),
'音節加調種類': len(tsuanpoojitiau),
'音節無調種類': len(tsuanpooji),
},
tong, ensure_ascii=False, sort_keys=True, indent=2
)
with open('tsuanpoojitiau.txt', 'w') as tong:
print('\n'.join(sorted(tsuanpoojitiau)), file=tong)
with open('tsuanpooji.txt', 'w') as tong:
print('\n'.join(sorted(tsuanpooji)), file=tong)
with open('tsuanpoosu.txt', 'w') as tong:
print('\n'.join(sorted(tsuanpoosu)), file=tong)
def tsuliau():
with open('tsuanpooku.txt') as tong:
for tsua in tong.read().split('\n'):
yield 拆文分析器.建立句物件(tsua)
su()
| [
"[email protected]"
] | |
50602a2a258c39fa894476cd9a9cbbc0de1e795a | e27509d78f9bc9746c535b4ed7adec6370741af6 | /bwdesignworld/magazineisuue/views.py | 732a1d7f95c369a29873e8f7c6a683ee1d397957 | [] | no_license | bopopescu/bw | deae2323039d70b32a35043361f8562b09bea630 | 1ddb78a280388548ef7dd201a361db710e271791 | refs/heads/master | 2022-11-22T18:29:05.581667 | 2019-10-01T03:30:48 | 2019-10-01T03:30:48 | 282,542,998 | 0 | 0 | null | 2020-07-25T23:41:43 | 2020-07-25T23:41:42 | null | UTF-8 | Python | false | false | 2,009 | py | # -*- coding: utf-8 -*-
from django.shortcuts import render
from django.http import HttpResponse
import datetime
from django.utils import timezone
from django.http import HttpResponseRedirect
import json
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger,InvalidPage
from django.conf import settings
# Create your views here.
from magazineisuue.models import Magazine
from bwdesignworld.utils import sidebar_data, category_jump_list, closeDbConnection
def magazineissue_listing(request, year):
meta_title = 'BW defence Magazine – '+str(year)+' Issues'
meta_description = 'BW defence Magazine is one of the most popular and respected News and Updates for defence in India. Here is a list of issues released in '+str(year)
meta_keyword = 'Web Exclusive, News and Updates for defence in India, News and Updates for defence in India'
og_title = 'BW defence Magazine – '+str(year)+' Issues'
og_url = '/magazine -issue'
og_image = settings.AWS_S3_BASE_URL + settings.BUCKET_PATH +'static_bwhr/images/BW-defence-logo.jpg'
category_jumlist = category_jump_list()
magazine_allyear = Magazine.objects.raw("SELECT magazine_id , YEAR(publish_date_m) as years FROM magazine GROUP BY YEAR(publish_date_m) ORDER BY publish_date_m DESC ")
if request.method == 'GET':
        if year != '':
            # parameterized query: never interpolate request data into raw SQL
            magazine_listing = Magazine.objects.raw(
                "SELECT * FROM magazine WHERE YEAR(publish_date_m) = %s "
                "ORDER BY publish_date_m DESC", [year])
return render(request, 'magazineissue/magazineissue_listing.html', {
'meta_title': meta_title,
'meta_description': meta_description,
'meta_keyword': meta_keyword,
'og_title':og_title,
'og_url':og_url,
'og_image':og_image,
'magazine_allyear':magazine_allyear,
'magazine_listing':magazine_listing,
'category_jumlist':category_jumlist,
'year':year
})
| [
"[email protected]"
] | |
84d900bd249dc56e022df49fe1adc11d8e94cc33 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03106/s203963496.py | 3326c7676c3537cc9fbbf30aabd9f17c4dbc711a | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 172 | py | A, B, K = map(int, input().split())
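# Task (inferred from the code): collect the common divisors of A and B in
# increasing order, then print the K-th largest one.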
I = []
if A < B:
C = A
else:
C = B
for i in range(1, C+1):
    if A % i == 0 and B % i == 0:
I.append(i)
print(I[-K])
| [
"[email protected]"
] | |
9172b2337654eb31b67c8624ae7c796d8f3c43bf | 144deb9795e28783fde8a930ed396000a2755551 | /research_toolbox/tb_remote.py | a675fb9e6b812163fc014f81930026c1305af049 | [
"MIT"
] | permissive | negrinho/research_toolbox | 10786754afbffa22085d002e1b549be77e90d1d1 | c99aac302ba427269c07ccf25369eb4d552cac95 | refs/heads/master | 2021-08-02T20:06:25.403862 | 2021-07-31T11:41:04 | 2021-07-31T11:41:04 | 121,714,230 | 20 | 6 | null | null | null | null | UTF-8 | Python | false | false | 18,316 | py | ### running remotely
import subprocess
import paramiko
import getpass
import inspect
import uuid
import urllib.request, urllib.parse, urllib.error
import research_toolbox.tb_utils as tb_ut
import research_toolbox.tb_resources as tb_rs
import research_toolbox.tb_logging as tb_lg
import research_toolbox.tb_filesystem as tb_fs
def get_password():
return getpass.getpass()
def run_on_server(bash_command,
servername,
username=None,
password=None,
folderpath=None,
wait_for_output=True,
prompt_for_password=False):
"""SSH into a machine and runs a bash command there. Can specify a folder
to change directory into after logging in.
"""
if password == None and prompt_for_password:
password = getpass.getpass()
if not wait_for_output:
bash_command = "nohup %s &" % bash_command
if folderpath != None:
bash_command = "cd %s && %s" % (folderpath, bash_command)
sess = paramiko.SSHClient()
sess.load_system_host_keys()
#ssh.set_missing_host_key_policy(paramiko.WarningPolicy())
sess.set_missing_host_key_policy(paramiko.AutoAddPolicy())
sess.connect(servername, username=username, password=password)
_, stdout, stderr = sess.exec_command(bash_command)
# depending if waiting for output or not.
if wait_for_output:
stdout_lines = stdout.readlines()
stderr_lines = stderr.readlines()
else:
stdout_lines = None
stderr_lines = None
sess.close()
return stdout_lines, stderr_lines
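# Hypothetical usage sketch for run_on_server (host and paths are illustrative):
#   out, err = run_on_server("ls -la", "server.example.org", username="me",
#                            folderpath="~/experiments", prompt_for_password=True)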
# tools for handling the lithium server, which has no scheduler.
def get_lithium_nodes():
return {
'gtx970': ["dual-970-0-%d" % i for i in range(0, 13)],
'gtx980': ["quad-980-0-%d" % i for i in [0, 1, 2]],
'k40': ["quad-k40-0-0", "dual-k40-0-1", "dual-k40-0-2"],
'titan': ["quad-titan-0-0"]
}
def get_lithium_resource_availability(servername,
username,
password=None,
abort_if_any_node_unavailable=True,
nodes_to_query=None):
# prompting for password if asked about. (because lithium needs password)
if password == None:
password = getpass.getpass()
# query all nodes if None
if nodes_to_query == None:
nodes_to_query = tb_ut.flatten(get_lithium_nodes())
# script to define the functions to get the available resources.
cmd_lines = ['import psutil', 'import subprocess', 'import numpy as np', '']
fns = [
tb_rs.convert_between_byte_units, tb_rs.cpus_total, tb_rs.memory_total,
tb_rs.gpus_total, tb_rs.cpus_free, tb_rs.memory_free, tb_rs.gpus_free,
tb_rs.gpus_free_ids
]
for fn in fns:
cmd_lines += [
line.rstrip('\n') for line in inspect.getsourcelines(fn)[0]
]
cmd_lines.append('')
cmd_lines += [
'print \'%d;%.2f;%d;%d;%.2f;%d;%s\' % ('
'cpus_total(), memory_total(), gpus_total(), '
'cpus_free(), memory_free(), gpus_free(), '
'\' \'.join(map(str, gpus_free_ids())))'
]
py_cmd = "\n".join(cmd_lines)
ks = [
'cpus_total', 'mem_mbs_total', 'gpus_total', 'cpus_free',
'mem_mbs_free', 'gpus_free', 'free_gpu_ids'
]
# run the command to query the information.
write_script_cmd = 'echo \"%s\" > avail_resources.py' % py_cmd
run_on_server(write_script_cmd, servername, username, password)
# get it for each of the models
node_to_resources = {}
for host in nodes_to_query:
cmd = 'ssh -T %s python avail_resources.py' % host
stdout_lines, stderr_lines = run_on_server(cmd, servername, username,
password)
# print stdout_lines, stderr_lines
# if it did not fail.
if len(stdout_lines) == 1:
# extract the actual values from the command line.
str_vs = stdout_lines[0].strip().split(';')
assert len(str_vs) == 7
# print str_vs
vs = [
fn(str_vs[i])
for (i, fn) in enumerate([int, float, int] * 2 + [str])
]
vs[-1] = [int(x) for x in vs[-1].split(' ') if x != '']
d = tb_ut.create_dict(ks, vs)
node_to_resources[host] = d
else:
assert not abort_if_any_node_unavailable
node_to_resources[host] = None
delete_script_cmd = 'rm avail_resources.py'
run_on_server(delete_script_cmd, servername, username, password)
return node_to_resources
# TODO: add functionality to check if the visible gpus are busy or not
# and maybe terminate upon that event.
# running on one of the compute nodes.
# NOTE: this function has minimum error checking.
def run_on_lithium_node(bash_command,
node,
servername,
username,
password=None,
visible_gpu_ids=None,
folderpath=None,
wait_for_output=True,
run_on_head_node=False):
# check that node exists.
assert node in tb_ut.flatten(get_lithium_nodes())
# prompting for password if asked about. (because lithium needs password)
if password == None:
password = getpass.getpass()
# if no visilbe gpu are specified, it creates a list with nothing there.
if visible_gpu_ids is None:
visible_gpu_ids = []
# creating the command to run remotely.
gpu_cmd = 'export CUDA_VISIBLE_DEVICES=%s' % ",".join(
map(str, visible_gpu_ids))
if not run_on_head_node:
cmd = "ssh -T %s \'%s && %s\'" % (node, gpu_cmd, bash_command)
else:
# NOTE: perhaps repetition could be improved here. also, probably head
# node does not have gpus.
cmd = "%s && %s" % (gpu_cmd, bash_command)
return run_on_server(
cmd,
**tb_ut.subset_dict_via_selection(locals(), [
'servername', 'username', 'password', 'folderpath',
'wait_for_output'
]))
# TODO: add functionality to run on all lithium node.
# NOTE: this may require adding some information to the server.
# NOTE: if any of the command waits for output, it will mean that
# it is going to wait until completion of that command until doing the other one.
# NOTE: as lithium does not have a scheduler, resource management has to be done
# manually. This one has to prompt twice.
# NOTE: the time budget right now does not do anything.
# TODO: run on head node should not be true for these, because I
# do all the headnode information here.
class LithiumRunner:
def __init__(self,
servername,
username,
password=None,
only_run_if_can_run_all=True):
self.servername = servername
self.username = username
self.password = password if password is not None else get_password()
self.jobs = []
def register(self,
bash_command,
num_cpus=1,
num_gpus=0,
mem_budget=8.0,
time_budget=60.0,
mem_units='gigabytes',
time_units='minutes',
folderpath=None,
wait_for_output=True,
require_gpu_types=None,
require_nodes=None,
run_on_head_node=False):
# NOTE: this is not implemented for now.
assert not run_on_head_node
# should not specify both.
assert require_gpu_types is None or require_nodes is None
self.jobs.append(
tb_ut.subset_dict_via_selection(locals(), [
'bash_command', 'num_cpus', 'num_gpus', 'mem_budget',
'time_budget', 'mem_units', 'time_units', 'folderpath',
'wait_for_output', 'require_gpu_types', 'require_nodes',
'run_on_head_node'
]))
def run(self, run_only_if_enough_resources_for_all=True):
args = tb_ut.subset_dict_via_selection(
vars(self), ['servername', 'username', 'password'])
args['abort_if_any_node_unavailable'] = False
# get the resource availability and filter out unavailable nodes.
d = get_lithium_resource_availability(**args)
d = {k: v for (k, v) in d.items() if v is not None}
g = get_lithium_nodes()
# assignments to each of the registered jobs
run_cfgs = []
for x in self.jobs:
if x['require_nodes'] is not None:
req_nodes = x['require_nodes']
else:
req_nodes = list(d.keys())
# based on the gpu type restriction.
if x['require_gpu_types'] is not None:
req_gpu_nodes = tb_ut.flatten(
tb_ut.subset_dict_via_selection(g, x['require_gpu_types']))
else:
# NOTE: only consider the nodes that are available anyway.
req_gpu_nodes = list(d.keys())
# potentially available nodes to place this job.
nodes = list(set(req_nodes).intersection(req_gpu_nodes))
assert len(nodes) > 0
# greedy assigned to a node.
assigned = False
for n in nodes:
r = d[n]
# if there are enough resources on the node, assign it to the
# job.
if ((r['cpus_free'] >= x['num_cpus']) and
(r['gpus_free'] >= x['num_gpus']) and
(r['mem_mbs_free'] >= tb_rs.convert_between_byte_units(
x['mem_budget'],
src_units=x['mem_units'],
dst_units='megabytes'))):
# record information about where to run the job.
run_cfgs.append({
'node':
n,
'visible_gpu_ids':
r['free_gpu_ids'][:x['num_gpus']]
})
# deduct the allocated resources from the available resources
# for that node.
r['cpus_free'] -= x['num_cpus']
r['gpus_free'] -= x['num_gpus']
r['mem_mbs_free'] -= tb_rs.convert_between_byte_units(
x['mem_budget'],
src_units=x['mem_units'],
dst_units='megabytes')
r['free_gpu_ids'] = r['free_gpu_ids'][x['num_gpus']:]
                    assigned = True
                    break
# if not assigned, terminate without doing anything.
if not assigned:
run_cfgs.append(None)
if run_only_if_enough_resources_for_all:
print(("Insufficient resources to satisfy"
" (cpus=%d, gpus=%d, mem=%0.3f%s)" %
(x['num_cpus'], x['num_gpus'], x['mem_budget'],
x['mem_units'])))
return None
# running the jobs that have a valid config.
remaining_jobs = []
outs = []
for x, c in zip(self.jobs, run_cfgs):
if c is None:
remaining_jobs.append(x)
else:
out = run_on_lithium_node(**tb_ut.merge_dicts([
tb_ut.subset_dict_via_selection(
vars(self), ['servername', 'username', 'password']),
tb_ut.subset_dict_via_selection(x, [
'bash_command', 'folderpath', 'wait_for_output',
'run_on_head_node'
]),
tb_ut.subset_dict_via_selection(c,
['node', 'visible_gpu_ids'])
]))
outs.append(out)
self.jobs = remaining_jobs
return outs
# NOTE: there may exist problems due to race conditions; can be solved later.
def run_on_matrix(bash_command,
servername,
username,
password=None,
num_cpus=1,
num_gpus=0,
mem_budget=8.0,
time_budget=60.0,
mem_units='gigabytes',
time_units='minutes',
folderpath=None,
wait_for_output=True,
require_gpu_type=None,
run_on_head_node=False,
jobname=None):
assert (not run_on_head_node) or num_gpus == 0
assert require_gpu_type is None ### NOT IMPLEMENTED YET.
# prompts for password if it has not been provided
if password == None:
password = getpass.getpass()
script_cmd = "\n".join(['#!/bin/bash', bash_command])
script_name = "run_%s.sh" % uuid.uuid4()
# either do the call using sbatch, or run directly on the head node.
if not run_on_head_node:
cmd_parts = [
'srun' if wait_for_output else 'sbatch',
'--cpus-per-task=%d' % num_cpus,
'--gres=gpu:%d' % num_gpus,
'--mem=%d' % tb_rs.convert_between_byte_units(
mem_budget, src_units=mem_units, dst_units='megabytes'),
'--time=%d' % tb_lg.convert_between_time_units(
time_budget, time_units, dst_units='minutes')
]
if jobname is not None:
cmd_parts += ['--job-name=%s' % jobname]
cmd_parts += [script_name]
run_script_cmd = ' '.join(cmd_parts)
else:
run_script_cmd = './' + script_name
# actual command to run remotely
remote_cmd = " && ".join([
"echo \'%s\' > %s" % (script_cmd, script_name),
"chmod +x %s" % script_name, run_script_cmd,
"rm %s" % script_name
])
return run_on_server(
remote_cmd,
**tb_ut.subset_dict_via_selection(locals(), [
'servername', 'username', 'password', 'folderpath',
'wait_for_output'
]))
def rsync_options(recursive=True,
preserve_source_metadata=True,
only_transfer_newer_files=False,
delete_files_on_destination_notexisting_on_source=False,
delete_files_on_destination_nottransfered_from_source=False,
dry_run_ie_do_not_transfer=False,
verbose=True):
opts = []
if recursive:
opts += ['--recursive']
if preserve_source_metadata:
opts += ['--archive']
if only_transfer_newer_files:
opts += ['--update']
if delete_files_on_destination_notexisting_on_source:
opts += ['--delete']
if delete_files_on_destination_nottransfered_from_source:
opts += ['--delete-excluded']
if dry_run_ie_do_not_transfer:
opts += ['--dry-run']
if verbose:
opts += ['--verbose']
return opts
# NOTE: that because this is rsync, you have to be careful about
# the last /. perhaps add a condition that makes this easier to handle.
def sync_local_folder_from_local(src_folderpath,
dst_folderpath,
only_transfer_newer_files=True):
cmd = ['rsync', '--verbose', '--archive']
# whether to only transfer newer files or not.
if only_transfer_newer_files:
cmd += ['--update']
# add the backslash if it does not exist. for working correctly with rsync.
if src_folderpath[-1] != '/':
src_folderpath = src_folderpath + '/'
if dst_folderpath[-1] != '/':
dst_folderpath = dst_folderpath + '/'
cmd += [src_folderpath, dst_folderpath]
out = subprocess.check_output(cmd)
return out
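# Hypothetical usage sketch (paths are illustrative): the function appends the
# trailing '/' itself, so both calls behave identically under rsync:
#   sync_local_folder_from_local('data/src', 'data/dst')
#   sync_local_folder_from_local('data/src/', 'data/dst/')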
# NOTE: will always prompt for password due to being the simplest approach.
# if a password is necessary, it will prompt for it.
# dst_folderpath should already be created.
def sync_remote_folder_from_local(src_folderpath,
dst_folderpath,
servername,
username=None,
only_transfer_newer_files=True):
cmd = ['rsync', '--verbose', '--archive']
# whether to only transfer newer files or not.
if only_transfer_newer_files:
cmd += ['--update']
# remote path to the folder that we want to syncronize.
if src_folderpath[-1] != '/':
src_folderpath = src_folderpath + '/'
if dst_folderpath[-1] != '/':
dst_folderpath = dst_folderpath + '/'
dst_folderpath = "%s:%s" % (servername, dst_folderpath)
if username is not None:
dst_folderpath = "%s@%s" % (username, dst_folderpath)
cmd += [src_folderpath, dst_folderpath]
out = subprocess.check_output(cmd)
return out
def sync_local_folder_from_remote(src_folderpath,
dst_folderpath,
servername,
username=None,
only_transfer_newer_files=True):
cmd = ['rsync', '--verbose', '--archive']
# whether to only transfer newer files or not.
if only_transfer_newer_files:
cmd += ['--update']
# remote path to the folder that we want to syncronize.
if src_folderpath[-1] != '/':
src_folderpath = src_folderpath + '/'
if dst_folderpath[-1] != '/':
dst_folderpath = dst_folderpath + '/'
src_folderpath = "%s:%s" % (servername, src_folderpath)
if username is not None:
src_folderpath = "%s@%s" % (username, src_folderpath)
cmd += [src_folderpath, dst_folderpath]
out = subprocess.check_output(cmd)
return out
def download_file(urlpath, folderpath, filename=None,
abort_if_file_exists=True):
if filename is None:
filename = urlpath.split('/')[-1]
filepath = tb_fs.join_paths([folderpath, filename])
assert tb_fs.folder_exists(folderpath)
    # abort when the file already exists and the caller asked not to overwrite it
    assert not (tb_fs.file_exists(filepath) and abort_if_file_exists)
    # URLopener is deprecated in Python 3; urlretrieve performs the same download
    urllib.request.urlretrieve(urlpath, filepath) | [
"[email protected]"
] | |
b81d16acd782e6d7a33df7203ed023976899d3df | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/built-in/nlp/MT5_ID4146_for_PyTorch/transformers/src/transformers/models/bertweet/__init__.py | 1cc8f1fbfd9965d221219b89ed919785328e0409 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 1,128 | py | # flake8: noqa
# There's no way to ignore "F401 '...' imported but unused" warnings in this
# module, but to preserve other warnings. So, don't check this module at all.
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from ...file_utils import _LazyModule
_import_structure = {
"tokenization_bertweet": ["BertweetTokenizer"],
}
if TYPE_CHECKING:
from .tokenization_bertweet import BertweetTokenizer
else:
import sys
sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
| [
"[email protected]"
] | |
d2be32d5af389e04d2e6dae67d4efe62f076a23e | d7fafa497d5724a6a53d6790556a0056493be80e | /srl_model/tests/data/dataset_readers/dataset_utils/ontonotes_test.py | fe01419bdd4f4cc5c321accbdd74f81465447bd0 | [
"Apache-2.0"
] | permissive | rz-zhang/Bert_Attempt | 472a9e3d7e3874f63710f850357b884354bb2d46 | 26936a5a63ff22c8ab118480caf515cdc395e37f | refs/heads/master | 2021-10-08T09:25:04.297734 | 2018-12-10T14:59:13 | 2018-12-10T14:59:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,631 | py | # pylint: disable=no-self-use,invalid-name
from nltk import Tree
from srl_model.data.dataset_readers.dataset_utils import Ontonotes
from srl_model.common.testing import AllenNlpTestCase
class TestOntonotes(AllenNlpTestCase):
def test_dataset_iterator(self):
reader = Ontonotes()
annotated_sentences = list(reader.dataset_iterator(self.FIXTURES_ROOT / 'conll_2012' / 'subdomain'))
annotation = annotated_sentences[0]
assert annotation.document_id == "test/test/01/test_001"
assert annotation.sentence_id == 0
assert annotation.words == ['Mali', 'government', 'officials', 'say', 'the', 'woman',
"'s", 'confession', 'was', 'forced', '.']
assert annotation.pos_tags == ['NNP', 'NN', 'NNS', 'VBP', 'DT',
'NN', 'POS', 'NN', 'VBD', 'JJ', '.']
assert annotation.word_senses == [None, None, 1, 1, None, 2, None, None, 1, None, None]
assert annotation.predicate_framenet_ids == [None, None, None, '01', None,
None, None, None, '01', None, None]
assert annotation.srl_frames == [("say", ['B-ARG0', 'I-ARG0', 'I-ARG0', 'B-V', 'B-ARG1',
'I-ARG1', 'I-ARG1', 'I-ARG1', 'I-ARG1', 'I-ARG1', 'O']),
("was", ['O', 'O', 'O', 'O', 'B-ARG1', 'I-ARG1', 'I-ARG1',
'I-ARG1', 'B-V', 'B-ARG2', 'O'])]
assert annotation.named_entities == ['B-GPE', 'O', 'O', 'O', 'O', 'O',
'O', 'O', 'O', 'O', 'O']
assert annotation.predicate_lemmas == [None, None, 'official', 'say', None,
'man', None, None, 'be', None, None]
assert annotation.speakers == [None, None, None, None, None, None,
None, None, None, None, None]
assert annotation.parse_tree == Tree.fromstring("(TOP(S(NP(NML (NNP Mali) (NN government) )"
" (NNS officials) )(VP (VBP say) (SBAR(S(NP(NP"
" (DT the) (NN woman) (POS 's) ) (NN "
"confession) )(VP (VBD was) (ADJP (JJ "
"forced) ))))) (. .) ))")
assert annotation.coref_spans == {(1, (4, 6)), (3, (4, 7))}
annotation = annotated_sentences[1]
assert annotation.document_id == "test/test/02/test_002"
assert annotation.sentence_id == 0
assert annotation.words == ['The', 'prosecution', 'rested', 'its', 'case', 'last', 'month',
'after', 'four', 'months', 'of', 'hearings', '.']
assert annotation.pos_tags == ['DT', 'NN', 'VBD', 'PRP$', 'NN', 'JJ', 'NN',
'IN', 'CD', 'NNS', 'IN', 'NNS', '.']
assert annotation.word_senses == [None, 2, 5, None, 2, None, None,
None, None, 1, None, 1, None]
assert annotation.predicate_framenet_ids == [None, None, '01', None, None, None,
None, None, None, None, None, '01', None]
assert annotation.srl_frames == [('rested', ['B-ARG0', 'I-ARG0', 'B-V', 'B-ARG1',
'I-ARG1', 'B-ARGM-TMP', 'I-ARGM-TMP',
'B-ARGM-TMP', 'I-ARGM-TMP', 'I-ARGM-TMP',
'I-ARGM-TMP', 'I-ARGM-TMP', 'O']),
('hearings', ['O', 'O', 'O', 'O', 'O', 'O', 'O', 'O',
'O', 'O', 'O', 'B-V', 'O'])]
assert annotation.named_entities == ['O', 'O', 'O', 'O', 'O', 'B-DATE', 'I-DATE',
'O', 'B-DATE', 'I-DATE', 'O', 'O', 'O']
assert annotation.predicate_lemmas == [None, 'prosecution', 'rest', None, 'case',
None, None, None, None, 'month', None, 'hearing', None]
assert annotation.speakers == [None, None, None, None, None, None,
None, None, None, None, None, None, None]
assert annotation.parse_tree == Tree.fromstring("(TOP(S(NP (DT The) (NN prosecution) )(VP "
"(VBD rested) (NP (PRP$ its) (NN case) )"
"(NP (JJ last) (NN month) )(PP (IN after) "
"(NP(NP (CD four) (NNS months) )(PP (IN"
" of) (NP (NNS hearings) ))))) (. .) ))")
assert annotation.coref_spans == {(2, (0, 1)), (2, (3, 3))}
# Check we can handle sentences without verbs.
annotation = annotated_sentences[2]
assert annotation.document_id == 'test/test/03/test_003'
assert annotation.sentence_id == 0
assert annotation.words == ['Denise', 'Dillon', 'Headline', 'News', '.']
assert annotation.pos_tags == ['NNP', 'NNP', 'NNP', 'NNP', '.']
assert annotation.word_senses == [None, None, None, None, None]
assert annotation.predicate_framenet_ids == [None, None, None, None, None]
assert annotation.srl_frames == []
assert annotation.named_entities == ['B-PERSON', 'I-PERSON',
'B-WORK_OF_ART', 'I-WORK_OF_ART', 'O']
assert annotation.predicate_lemmas == [None, None, None, None, None]
assert annotation.speakers == [None, None, None, None, None]
assert annotation.parse_tree == Tree.fromstring("(TOP(FRAG(NP (NNP Denise) "
" (NNP Dillon) )(NP (NNP Headline) "
"(NNP News) ) (. .) ))")
assert annotation.coref_spans == {(2, (0, 1))}
# Check we can handle sentences with 2 identical verbs.
annotation = annotated_sentences[3]
assert annotation.document_id == 'test/test/04/test_004'
assert annotation.sentence_id == 0
assert annotation.words == ['and', 'that', 'wildness', 'is', 'still', 'in', 'him', ',',
'as', 'it', 'is', 'with', 'all', 'children', '.']
assert annotation.pos_tags == ['CC', 'DT', 'NN', 'VBZ', 'RB', 'IN', 'PRP', ',',
'IN', 'PRP', 'VBZ', 'IN', 'DT', 'NNS', '.']
assert annotation.word_senses == [None, None, None, 4.0, None, None, None, None,
None, None, 5.0, None, None, None, None]
assert annotation.predicate_framenet_ids == [None, None, None, '01', None, None,
None, None, None, None, '01', None, None, None, None]
assert annotation.srl_frames == [('is', ['B-ARGM-DIS', 'B-ARG1', 'I-ARG1',
'B-V', 'B-ARGM-TMP', 'B-ARG2', 'I-ARG2',
'O', 'B-ARGM-ADV', 'I-ARGM-ADV', 'I-ARGM-ADV',
'I-ARGM-ADV', 'I-ARGM-ADV', 'I-ARGM-ADV', 'O']),
('is', ['O', 'O', 'O', 'O', 'O', 'O', 'O', 'O', 'O',
'B-ARG1', 'B-V', 'B-ARG2', 'I-ARG2', 'I-ARG2', 'O'])]
assert annotation.named_entities == ['O', 'O', 'O', 'O', 'O', 'O', 'O', 'O',
'O', 'O', 'O', 'O', 'O', 'O', 'O']
assert annotation.predicate_lemmas == [None, None, None, 'be', None, None, None,
None, None, None, 'be', None, None, None, None]
assert annotation.speakers == ['_Avalon_', '_Avalon_', '_Avalon_', '_Avalon_', '_Avalon_',
'_Avalon_', '_Avalon_', '_Avalon_', '_Avalon_', '_Avalon_',
'_Avalon_', '_Avalon_', '_Avalon_', '_Avalon_', '_Avalon_']
assert annotation.parse_tree == Tree.fromstring("(TOP (S (CC and) (NP (DT that) (NN wildness)) "
"(VP (VBZ is) (ADVP (RB still)) (PP (IN in) (NP "
"(PRP him))) (, ,) (SBAR (IN as) (S (NP (PRP it)) "
"(VP (VBZ is) (PP (IN with) (NP (DT all) (NNS "
"children))))))) (. .)))")
assert annotation.coref_spans == {(14, (6, 6))}
def test_dataset_path_iterator(self):
reader = Ontonotes()
files = list(reader.dataset_path_iterator(self.FIXTURES_ROOT / 'conll_2012'))
expected_paths = [str(self.FIXTURES_ROOT / 'conll_2012' / 'subdomain' / 'example.gold_conll'),
str(self.FIXTURES_ROOT / 'conll_2012' / 'subdomain2' / 'example.gold_conll')]
assert len(files) == len(expected_paths)
assert set(files) == set(expected_paths)
def test_ontonotes_can_read_conll_file_with_multiple_documents(self):
reader = Ontonotes()
file_path = self.FIXTURES_ROOT / 'coref' / 'coref.gold_conll'
documents = list(reader.dataset_document_iterator(file_path))
assert len(documents) == 2
| [
"[email protected]"
] | |
031beb90b4f484126de2a0aa9d25895b63eba864 | 716abd9e5ba4b72b72cc5f724a6cc0a6ad4390d1 | /8-Working with Python Modules/35-Introduction-to-Python-Modules.py | d82b80c881be39c2f7329e85993ccf5267b6067f | [] | no_license | devopstasks/PythonScripting | ac45edd72dc134ec3539b962f02dfc866f365ecf | 48bc37733ae6b3be4e2d64909ffe0962b6908518 | refs/heads/master | 2023-03-29T11:18:01.329452 | 2021-04-07T03:25:20 | 2021-04-07T03:25:20 | 350,388,744 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,505 | py | '''
======================================================
What is a module?
=> A module is a file containing Python definitions and statements. That is,
   a module contains Python functions, classes and variables.
What is the use of module?
=> Reusability
ex: If script name is mymodule.py then module name is mymodule
Types of Python modules:
=> Default modules
=> Third-party modules
Note: Import either default or third party modules before using them.
======================================================
=======================================================
=> List all functions and variables of a module using the dir() function
=> Getting help of a particular module
=> from script: print(help(math))
from python command line: help(math)
import math
dir(math)
help(math)
=> install a third-party module
pip install <module-name>
pip install xlrd
pip install xlwt
import xlrd
import xlwt
dir(xlrd)
help(xlrd)
dir(xlwt)
help(xlwt)
======================================================
=======================================================
Method-1
========
import math
print(math.pi)
print(math.pow(3,2))
Method-2
========
import math as m
print(m.pi)
print(m.pow(3,2))
Method-3
========
from math import *
print(pi)
print(pow(3,2))
Method-4
========
from math import pi,pow
print(pi)
print(pow(3,2))
=========
import platform
import math
import sys
import os
import subprocess
or
import platform,math,sys,os,subprocess
======================================================
'''
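# Minimal runnable sketch of the notes above (assumes a sibling file
# mymodule.py that defines greet() and version):
#   import mymodule               # Method-1: plain import
#   print(mymodule.version)
#   from mymodule import greet    # Method-4: import specific names
#   greet()
#   print(dir(mymodule))          # list the names the module defines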
| [
"[email protected]"
] | |
09fd87d07d10fa12b78b49a16a161ae0c05646ac | 3b7a07ad2d78e8526ad8ae0767530d98aaff9f3e | /backend/core/settings/dev.py | 5e31a25b6b87f5c7ce7ba9c4302df39b89fc9375 | [] | no_license | ImmortalViolet/one-oms | ba2281acdd63f35eb907651e5aae240c97c16e8b | 9b89e2773511cb4f9fe37c4cde79e9e3e47464fe | refs/heads/master | 2022-04-12T17:38:59.052337 | 2020-02-28T04:29:34 | 2020-02-28T04:29:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 269 | py | # -*- coding: utf-8 -*-
# author: timor
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# db
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, '../core.db'),
}
} | [
"[email protected]"
] | |
6ee9e09c43f596747e6f734706f13995d405a1f5 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_overruns.py | b104b69597f068db56d3556bd4709eb128fbc11a | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 245 | py |
from xai.brain.wordbase.verbs._overrun import _OVERRUN
# class header
class _OVERRUNS(_OVERRUN):
    def __init__(self):
_OVERRUN.__init__(self)
self.name = "OVERRUNS"
self.specie = 'verbs'
self.basic = "overrun"
self.jsondata = {}
| [
"[email protected]"
] | |
c51305ba5e4d56ee36a2a1cdb829f4a76038d5d3 | 59166105545cdd87626d15bf42e60a9ee1ef2413 | /test/test_human_gene_location.py | ed2c5bf611870f00b36c70bc931cf84d6aa20bd0 | [] | no_license | mosoriob/dbpedia_api_client | 8c594fc115ce75235315e890d55fbf6bd555fa85 | 8d6f0d04a3a30a82ce0e9277e4c9ce00ecd0c0cc | refs/heads/master | 2022-11-20T01:42:33.481024 | 2020-05-12T23:22:54 | 2020-05-12T23:22:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,901 | py | # coding: utf-8
"""
DBpedia
This is the API of the DBpedia Ontology # noqa: E501
The version of the OpenAPI document: v0.0.1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import dbpedia
from dbpedia.models.human_gene_location import HumanGeneLocation # noqa: E501
from dbpedia.rest import ApiException
class TestHumanGeneLocation(unittest.TestCase):
"""HumanGeneLocation unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test HumanGeneLocation
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = dbpedia.models.human_gene_location.HumanGeneLocation() # noqa: E501
if include_optional :
return HumanGeneLocation(
gene_location_end = [
56
],
gene_location_start = [
56
],
genome_db = [
'0'
],
description = [
'0'
],
on_chromosome = [
56
],
id = '0',
label = [
'0'
],
type = [
'0'
]
)
else :
return HumanGeneLocation(
)
def testHumanGeneLocation(self):
"""Test HumanGeneLocation"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
f81a63c4262b48497bf3b90fc7acc5c04f3469e4 | 54f57cf661423b4cb4375ec86557a934ac94a497 | /proj/run.py | c4f36b79ea5076151cfdfb10f7a6911a3fd8a76e | [
"MIT"
] | permissive | 0xangelo/deep-rl | dd87520120e70a5d2325fcd25cdf004ce67f1453 | 9f0c1aafe71852c8973bf1ab732114a3cdbe23ad | refs/heads/master | 2022-02-24T00:21:47.248660 | 2019-06-22T20:51:44 | 2019-06-22T20:51:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,549 | py | """
The MIT License
Copyright (c) 2018 OpenAI (http://openai.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
Adapted from OpenAI's Spinning Up: https://github.com/openai/spinningup
"""
import sys
import os
import os.path as osp
import subprocess
import proj.algorithms
from random import randrange
from proj.utils.exp_grid import ExperimentGrid
from proj.common.models import *
# Command line args that will go to ExperimentGrid.run, and must possess unique
# values (therefore must be treated separately).
RUN_KEYS = ["log_dir", "format_strs", "datestamp"]
def friendly_err(err_msg):
# add whitespace to error message to make it more readable
return "\n\n" + err_msg + "\n\n"
def parse_and_execute_grid_search(cmd, args):
algo = eval("proj.algorithms." + cmd)
# Before all else, check to see if any of the flags is 'help'.
valid_help = ["--help", "-h", "help"]
if any([arg in valid_help for arg in args]):
print("\n\nShowing docstring for spinup." + cmd + ":\n")
print(algo.__doc__)
sys.exit()
def process(arg):
# Process an arg by eval-ing it, so users can specify more
# than just strings at the command line (eg allows for
# users to give functions as args).
try:
return eval(arg)
except NameError:
return arg
# Make first pass through args to build base arg_dict. Anything
# with a '--' in front of it is an argument flag and everything after,
# until the next flag, is a possible value.
arg_dict = dict()
for i, arg in enumerate(args):
assert i > 0 or "--" in arg, friendly_err("You didn't specify a first flag.")
if "--" in arg:
arg_key = arg.lstrip("-")
arg_dict[arg_key] = []
else:
arg_dict[arg_key].append(process(arg))
# Make second pass through, to catch flags that have no vals.
# Assume such flags indicate that a boolean parameter should have
# value True.
for _, v in arg_dict.items():
if len(v) == 0:
v.append(True)
# Final pass: check for the special args that go to the 'run' command
# for an experiment grid, separate them from the arg dict, and make sure
# that they have unique values. The special args are given by RUN_KEYS.
run_kwargs = dict()
for k in RUN_KEYS:
if k in arg_dict:
val = arg_dict[k]
assert len(val) == 1, friendly_err(
"You can only provide one value for %s." % k
)
run_kwargs[k] = val[0]
del arg_dict[k]
# Determine experiment name. If not given by user, will be determined
# by the algorithm name.
if "exp_name" in arg_dict:
assert len(arg_dict["exp_name"]) == 1, friendly_err(
"You can only provide one value for exp_name."
)
exp_name = arg_dict["exp_name"][0]
del arg_dict["exp_name"]
else:
exp_name = "cmd_" + cmd
# Construct and execute the experiment grid.
eg = ExperimentGrid(name=exp_name)
for k, v in arg_dict.items():
eg.add(k, v)
eg.run(algo, **run_kwargs)
if __name__ == "__main__":
cmd = sys.argv[1]
valid_algos = [
"vanilla",
"natural",
"trpo",
"a2c",
"ppo",
"acktr",
"a2c_kfac",
"ddpg",
"td3",
"sac",
]
valid_utils = ["viskit/frontend", "plot", "sim_policy", "record_policy"]
valid_cmds = valid_algos + valid_utils
assert (
cmd in valid_cmds
), "Select an algorithm or utility which is implemented in proj."
if cmd in valid_algos:
args = sys.argv[2:]
parse_and_execute_grid_search(cmd, args)
elif cmd in valid_utils:
# Execute the correct utility file.
runfile = osp.join(osp.abspath(osp.dirname(__file__)), "utils", cmd + ".py")
args = [sys.executable if sys.executable else "python", runfile] + sys.argv[2:]
subprocess.check_call(args, env=os.environ)
| [
"[email protected]"
] | |
d5c530b751dafafc0517165d510d97d4829e610b | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02860/s022291938.py | d07ef112fa467f50552ffd2851692df5d30addee | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 149 | py | N = int(input())
S = str(input())
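# Task (inferred from the code): print "Yes" when S consists of some string
# written twice, i.e. its first half equals its second half.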
flag = False
half = (N+1) // 2
if S[:half] == S[half:N]:
flag = True
if flag:
print("Yes")
else:
print("No") | [
"[email protected]"
] | |
f1d0157b2ab380f994197bcf15eb45900e508175 | db9ff8accaa4d8d4a96d3f9122c0fdc5e83ea2a5 | /test/test_inventory_location_full.py | 45ffb25097659f31398e3148e7d153ea0d03eafd | [] | no_license | agtt/ebay-openapi-inventory | 4754cdc8b6765acdb34f6b8f89b017ccbc6b1d2b | d990c26f16e811431892ac6401c73c4599c2d414 | refs/heads/master | 2023-06-17T10:53:43.204075 | 2021-07-14T18:32:38 | 2021-07-14T18:32:38 | 386,039,734 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,295 | py | """
Inventory API
The Inventory API is used to create and manage inventory, and then to publish and manage this inventory on an eBay marketplace. There are also methods in this API that will convert eligible, active eBay listings into the Inventory API model. # noqa: E501
The version of the OpenAPI document: 1.13.0
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import openapi_client
from openapi_client.model.location_details import LocationDetails
from openapi_client.model.operating_hours import OperatingHours
from openapi_client.model.special_hours import SpecialHours
globals()['LocationDetails'] = LocationDetails
globals()['OperatingHours'] = OperatingHours
globals()['SpecialHours'] = SpecialHours
from openapi_client.model.inventory_location_full import InventoryLocationFull
class TestInventoryLocationFull(unittest.TestCase):
"""InventoryLocationFull unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testInventoryLocationFull(self):
"""Test InventoryLocationFull"""
# FIXME: construct object with mandatory attributes with example values
# model = InventoryLocationFull() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
b2f480134ffe6ada98a4401e94a2dc14f3d4157a | 41a20700b5bb351d20562ac23ec4db06bc96f0d7 | /src/plum/loss_functions/__init__.py | 92abc2bd7b2fad62b77b73fb46fd67551b4aeae5 | [] | no_license | kedz/noiseylg | ee0c54634767e8d3789b4ffb93727988c29c6979 | 17266e1a41e33aecb95dc1c3aca68f6bccee86d5 | refs/heads/master | 2020-07-30T11:22:08.351759 | 2019-10-30T21:33:11 | 2019-10-30T21:33:11 | 210,212,253 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 51 | py | from .class_cross_entropy import ClassCrossEntropy
| [
"[email protected]"
] | |
aeeeb349ebfd16e04ca2e0194f5292de498ea198 | bb465b891e5bd14b2be2c6ae008417106cc1d9bb | /pycozmo/audiokinetic/soundbanksinfo.py | bdebf47c9b23d44c2f5aa0606371bf4afe2ea978 | [
"Apache-2.0",
"MIT"
] | permissive | ReidsTech/pycozmo | 4393b157e7a8fba13bc3452226ec45cac5c5b80b | 1b6dcd9b869a3784f1d8b02e820bb033f95fd13a | refs/heads/master | 2023-02-10T22:45:43.789691 | 2020-11-12T14:39:52 | 2020-11-12T14:39:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,733 | py | """
AudioKinetic WWise SoundbanksInfo.xml representation and reading.
See assets/cozmo_resources/sound/SoundbanksInfo.xml
"""
from typing import Dict, Any, Union, TextIO
import xml.etree.ElementTree as et # noqa
from . import exception
__all__ = [
"EventInfo",
"FileInfo",
"SoundBankInfo",
"load_soundbanksinfo",
]
class EventInfo:
""" Event representation in SoundbanksInfo.xml . """
__slots__ = [
"soundbank_id",
"id",
"name",
"object_path",
]
def __init__(self, soundbank_id: int, event_id: int, name: str, object_path: str):
self.soundbank_id = int(soundbank_id)
self.id = int(event_id)
self.name = str(name)
self.object_path = str(object_path)
class FileInfo:
""" File representation in SoundbanksInfo.xml . """
__slots__ = [
"soundbank_id",
"id",
"name",
"path",
"embedded",
"prefetch_size",
]
def __init__(self, soundbank_id: int, file_id: int, name: str, path: str, embedded: bool, prefetch_size: int):
self.soundbank_id = int(soundbank_id)
self.id = int(file_id)
self.name = str(name)
self.path = str(path)
self.embedded = bool(embedded)
self.prefetch_size = int(prefetch_size)
def __eq__(self, other: "FileInfo") -> bool:
res = True
res = res and self.soundbank_id == other.soundbank_id
res = res and self.id == other.id
res = res and self.name == other.name
# There are many files that are both embedded and streamed.
# res = res and self.embedded == other.embedded
# res = res and self.prefetch_size == other.prefetch_size
return res
class SoundBankInfo:
""" SoundBank representation in SoundbanksInfo.xml . """
__slots__ = [
"id",
"name",
"path",
"language",
"object_path",
]
def __init__(self, soundbank_id: int, name: str, path: str, language: str, object_path: str):
self.id = int(soundbank_id)
self.name = str(name)
self.path = str(path)
self.language = str(language)
self.object_path = str(object_path)
def load_soundbanksinfo(fspec: Union[str, TextIO]) -> Dict[int, Any]:
""" Load SoundbanksInfo.xml and return a dictionary of parsed Info objects. """
try:
tree = et.parse(fspec)
except et.ParseError as e:
raise exception.AudioKineticFormatError("Failed to parse SoundbanksInfo file.") from e
root = tree.getroot()
# Load StreamedFiles.
streamed_files = {}
for file in root.findall("./StreamedFiles/File"):
file_id = int(file.get("Id"))
assert file_id not in streamed_files
streamed_files[file_id] = {
"id": file_id,
"language": file.get("Language"),
"name": file.find("ShortName").text,
"path": file.find("Path").text,
}
# Load SoundBanks
objects = {}
for soundbank_node in root.findall("./SoundBanks/SoundBank"):
# Create SoundBankInfo object.
soundbank_id = int(soundbank_node.get("Id"))
language = soundbank_node.get("Language")
soundbank = SoundBankInfo(
soundbank_id,
soundbank_node.find("ShortName").text,
soundbank_node.find("Path").text,
language,
soundbank_node.find("ObjectPath").text)
assert soundbank_id not in objects
objects[soundbank_id] = soundbank
# Create EventInfo objects.
events = soundbank_node.findall("./IncludedEvents/Event")
for event_node in events:
event_id = int(event_node.get("Id"))
event = EventInfo(
soundbank_id,
event_id,
event_node.get("Name"),
event_node.get("ObjectPath"))
assert event_id not in objects
objects[event_id] = event
# Create FileInfo objects for streamed files.
files = soundbank_node.findall("./ReferencedStreamedFiles/File")
for file_node in files:
file_id = int(file_node.get("Id"))
streamed_file = streamed_files[file_id]
# The file and SoundBank languages may differ.
# assert streamed_file["language"] == language
file = FileInfo(
soundbank_id,
file_id,
streamed_file["name"],
streamed_file["path"],
False,
-1)
assert file_id not in objects
objects[file_id] = file
# Create FileInfo objects for embedded files.
files = soundbank_node.findall("./IncludedMemoryFiles/File")
for file_node in files:
file_id = int(file_node.get("Id"))
# The file and SoundBank languages may differ.
# assert file_node.get("Language") == language
prefetch_size_node = file_node.find("PrefetchSize")
prefetch_size = int(prefetch_size_node.text) if prefetch_size_node is not None else -1
file = FileInfo(
soundbank_id,
file_id,
file_node.find("ShortName").text,
file_node.find("Path").text,
True,
prefetch_size)
# assert file_id not in objects
if file_id in objects:
# Many files exist externally and as a "prefetched" embedded file that is truncated.
assert file == objects[file_id]
if not file.embedded:
objects[file_id] = file
else:
objects[file_id] = file
return objects
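# Hypothetical usage sketch (path is illustrative):
#   objects = load_soundbanksinfo("cozmo_resources/sound/SoundbanksInfo.xml")
#   events = [o for o in objects.values() if isinstance(o, EventInfo)]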
| [
"[email protected]"
] | |
0ed43fd97bc1aaa0a8dd69685412961f634b5081 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/twosum_20200716164442.py | d7b038af3c1ba2d0728e86c4a678d19d439a0ad0 | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 606 | py | def twoSum(nums,target):
# loop through the array
    # add pairs of nums, checking whether the sum equals the target
    # when a pair matches the target, collect the two values
index = []
for i in range(len(nums)-1):
for j in range(i+1,len(nums)):
if nums[i]+ nums[j] == target:
index.append(nums[i])
index.append(nums[j])
print(index)
def two(nums,S):
sums = []
check = {}
for i in range(len(nums)):
minus = S - nums[i]
        # the history snapshot is cut off here; a plausible completion of the
        # dictionary-based approach is:
        if str(minus) not in check:
            check[str(nums[i])] = i
        else:
            return [check[str(minus)], i]
twoSum([2, 7, 11, 15],9) | [
"[email protected]"
] | |
250a4f07e7a707025c6c2dca69887c9dd8db0074 | 8048d04a51cd8b14b9de52c1ab4c0b670a8d8cc9 | /blog/models.py | bda80cb88c8d0a1f1c39f18ba8f6672b578e0796 | [] | no_license | earvingemenez/myblog | 2520bb08278272cdb1d3916b07c1b5167d6f5245 | 18b560efc0c0e20ecca0d0383b727785d713f776 | refs/heads/master | 2021-01-19T22:43:21.359989 | 2017-04-20T10:14:37 | 2017-04-20T10:14:37 | 88,848,933 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 299 | py | from __future__ import unicode_literals
from django.db import models
class Blog(models.Model):
title = models.CharField(max_length=250)
content = models.TextField()
date_created = models.DateTimeField(auto_now_add=True)
def __str__(self):
return "{}".format(self.title) | [
"[email protected]"
] | |
05a2da3536237a06814e065da6da7f82fc80af50 | 654f400751dfb180a937e0f18f6b722119a5b4f1 | /zend_django/apps.py | d51e43ee375699520963436ed7f099816a53f9e8 | [] | no_license | imagilex/tereapps | fae8bcb18ad4276f09a6ef6887d0c685c7a5522a | 51d4da8dab1d184cb7dcfe144ac8d2405a179028 | refs/heads/main | 2023-07-04T12:19:32.943411 | 2021-08-10T19:41:00 | 2021-08-10T19:41:00 | 343,847,920 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | from django.apps import AppConfig
class ZendDjangoConfig(AppConfig):
name = 'zend_django'
| [
"[email protected]"
] | |
f168470e59ea4a92c5b6627fba06f7eef60ecdb4 | 09cead98874a64d55b9e5c84b369d3523c890442 | /sj200116_python2/py200423/quiz6_leon.py | e7bc492a754e0f420cf6470c899a5891e1959202 | [] | no_license | edu-athensoft/stem1401python_student | f12b404d749286036a090e941c0268381ce558f8 | baad017d4cef2994855b008a756758d7b5e119ec | refs/heads/master | 2021-08-29T15:01:45.875136 | 2021-08-24T23:03:51 | 2021-08-24T23:03:51 | 210,029,080 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 56 | py | print(1, 2, 3, 4)
print(5, 6, 7, 8)
print(9, 10, 11, 12) | [
"[email protected]"
] | |
71261c496f40cbff4d353ee2197fd2cd73018ab6 | df8b2ec756d5b3a45e04729df0fd7f6f27c5d5e5 | /backend/aliah_27163/urls.py | cc188fd335d41bd8368c51da5890b3964c42faf2 | [] | no_license | crowdbotics-apps/aliah-27163 | 06e0918b09861b9ff79ff0e00c569748f6985a43 | 6f32e337a0cf8d4fb247c45ae71b86740ff4f5f1 | refs/heads/master | 2023-05-04T10:57:07.929233 | 2021-05-21T15:52:18 | 2021-05-21T15:52:18 | 369,581,994 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,193 | py | """aliah_27163 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("modules/", include("modules.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
]
admin.site.site_header = "Aliah"
admin.site.site_title = "Aliah Admin Portal"
admin.site.index_title = "Aliah Admin"
# swagger
api_info = openapi.Info(
title="Aliah API",
default_version="v1",
description="API documentation for Aliah App",
)
schema_view = get_schema_view(
api_info,
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
urlpatterns += [path("", TemplateView.as_view(template_name='index.html'))]
urlpatterns += [re_path(r"^(?:.*)/?$",
TemplateView.as_view(template_name='index.html'))]
| [
"[email protected]"
] | |
31583fc5b7d6736e1fd571f3c2c2951d935d9380 | 1825283527f5a479204708feeaf55f4ab6d1290b | /leetcode/python/248/original/248.strobogrammatic-number-iii.py | f15a21644c1adb868cb39614cde884e402e9deb3 | [] | no_license | frankieliu/problems | b82c61d3328ffcc1da2cbc95712563355f5d44b5 | 911c6622448a4be041834bcab25051dd0f9209b2 | refs/heads/master | 2023-01-06T14:41:58.044871 | 2019-11-24T03:47:22 | 2019-11-24T03:47:22 | 115,065,956 | 1 | 0 | null | 2023-01-04T07:25:52 | 2017-12-22T02:06:57 | HTML | UTF-8 | Python | false | false | 837 | py | #
# @lc app=leetcode id=248 lang=python3
#
# [248] Strobogrammatic Number III
#
# https://leetcode.com/problems/strobogrammatic-number-iii/description/
#
# algorithms
# Hard (35.88%)
# Total Accepted: 18.6K
# Total Submissions: 51.7K
# Testcase Example: '"50"\n"100"'
#
# A strobogrammatic number is a number that looks the same when rotated 180
# degrees (looked at upside down).
#
# Write a function to count the total strobogrammatic numbers that exist in the
# range of low <= num <= high.
#
# Example:
#
#
# Input: low = "50", high = "100"
# Output: 3
# Explanation: 69, 88, and 96 are three strobogrammatic numbers.
#
# Note:
# Because the range might be a large number, the low and high numbers are
# represented as string.
#
#
class Solution:
def strobogrammaticInRange(self, low: str, high: str) -> int:
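        # The original stub had no body; the following is a hedged sketch, not
        # the author's solution: generate every strobogrammatic number whose
        # length lies between len(low) and len(high), then count those that
        # fall inside [low, high].
        pairs = [("0", "0"), ("1", "1"), ("6", "9"), ("8", "8"), ("9", "6")]
        def build(n, total):
            if n == 0:
                return [""]
            if n == 1:
                return ["0", "1", "8"]
            result = []
            for inner in build(n - 2, total):
                for a, b in pairs:
                    if n == total and a == "0":
                        continue  # no leading zero on the outermost layer
                    result.append(a + inner + b)
            return result
        count = 0
        for length in range(len(low), len(high) + 1):
            for s in build(length, length):
                if int(low) <= int(s) <= int(high):
                    count += 1
        return count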
| [
"[email protected]"
] | |
a508f7fff35fe197202d0930f5a4a128153dafe8 | 5d1c43bb4881039f198eedcee2ceb101b406e0a0 | /Django/myvenv/Scripts/rst2pseudoxml.py | 14b4d31f246fab38616c63e1ebada18b21f34d47 | [] | no_license | MunSeoHee/Likelion_Gachon_2020 | 46155b1686a245a59c5664f7726ac754b7079e4b | e0e48845fdb0e4aa2365e7c47e29880a27f0f261 | refs/heads/master | 2021-04-10T09:51:06.618980 | 2020-12-07T10:06:43 | 2020-12-07T10:06:43 | 248,927,668 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 665 | py | #!c:\users\munseohee\desktop\likelion_gachon_2020\django\myvenv\scripts\python.exe
# $Id: rst2pseudoxml.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: David Goodger <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing pseudo-XML.
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
description = ('Generates pseudo-XML from standalone reStructuredText '
'sources (for testing purposes). ' + default_description)
publish_cmdline(description=description)
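# Typical invocation (illustrative): the publisher front end reads the
# source and destination paths from the command line, e.g.
#   python rst2pseudoxml.py input.rst output.txt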
# ===== /PyTorch/built-in/cv/detection/SSD_for_PyTorch/configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py (repo: Ascend/ModelZoo-PyTorch, licenses: Apache-2.0, GPL-1.0-or-later, BSD-2-Clause, MIT, BSD-3-Clause, LicenseRef-scancode-generic-cla, LicenseRef-scancode-unknown-license-reference) =====
# Copyright 2022 Huawei Technologies Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
_base_ = './faster_rcnn_r50_fpn_1x_coco.py'
model = dict(
roi_head=dict(
bbox_head=dict(
reg_decoded_bbox=True,
loss_bbox=dict(type='BoundedIoULoss', loss_weight=10.0))))
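# A minimal sketch of how a config like this is consumed (the loader call
# is the usual mmcv one; the file path below is illustrative):
#   from mmcv import Config
#   cfg = Config.fromfile('configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py')
#   print(cfg.model.roi_head.bbox_head.loss_bbox)
#   # -> {'type': 'BoundedIoULoss', 'loss_weight': 10.0}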
# ===== /LeetCode/math/326.3的幂.py (repo: mrmenand/Py_transaction, license: no_license) =====
# 326. Power of Three
class Solution:
def isPowerOfThree(self, n: int) -> bool:
ret = 1
while ret < n:
ret *= 3
return ret == n
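# Sanity checks for the loop above (hypothetical driver, not part of the
# original solution file):
if __name__ == '__main__':
    s = Solution()
    assert s.isPowerOfThree(27) is True   # 3 ** 3
    assert s.isPowerOfThree(45) is False
    assert s.isPowerOfThree(1) is True    # 3 ** 0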
# ===== /07b - Iteraties-WhileLus/Kassa.py (repo: astilleman/Informatica5, license: no_license) =====
# processing
totale_prijs = 0
prijs = float(input('Prijs van het product: '))
while prijs > 0:
totale_prijs += prijs
prijs = float(input('Prijs van het product: '))
mes = 'De totale prijs is € {:.2f}'.format(totale_prijs)  # "The total price is € ..."
# output
print(mes)
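# Example session (assumed input; a price of 0 or less ends the loop):
#   Prijs van het product: 1.50
#   Prijs van het product: 2.25
#   Prijs van het product: 0
#   De totale prijs is € 3.75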
# ===== /playa/common/__init__.py (repo: isabella232/playa, license: Apache-2.0) =====
"""
playa.common
~~~~~~~~~~~~
:copyright: (c) 2011 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
# ===== /grabbit/_version.py (repo: duncanmmacleod/grabbit, license: MIT) =====
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.18 (https://github.com/warner/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
git_date = "$Format:%ci$"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = ""
cfg.parentdir_prefix = ""
cfg.versionfile_source = "grabbit/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Decorator to mark a method as the handler for a particular VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip()
if sys.version_info[0] >= 3:
stdout = stdout.decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
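# Illustrative check of the rendering above (hypothetical pieces dict):
#   pieces = {"closest-tag": "0.2.0", "distance": 3,
#             "short": "abc1234", "dirty": True}
#   render_pep440(pieces)  -> "0.2.0+3.gabc1234.dirty"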
def render_pep440_pre(pieces):
"""TAG[.post.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
    Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}
# ===== /sim_ws_new/build/f110-fall2018-skeletons/labs/wall_following/catkin_generated/pkg.installspace.context.pc.py (repo: kkamons/Capstone_RacecarJ, license: no_license) =====
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "wall_following"
PROJECT_SPACE_DIR = "/home/nvidia/sandbox/sim_ws/install"
PROJECT_VERSION = "0.0.0"
# ===== /streamlit/venv/venv/lib/python3.7/site-packages/plotly/graph_objs/contourcarpet/contours/_labelfont.py (repo: edimaudo/Python-projects, license: no_license) =====
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Labelfont(_BaseTraceHierarchyType):
# class properties
# --------------------
_parent_path_str = "contourcarpet.contours"
_path_str = "contourcarpet.contours.labelfont"
_valid_props = {"color", "family", "size"}
# color
# -----
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, rebeccapurple, saddlebrown, salmon,
sandybrown, seagreen, seashell, sienna, silver,
skyblue, slateblue, slategray, slategrey, snow,
springgreen, steelblue, tan, teal, thistle, tomato,
turquoise, violet, wheat, white, whitesmoke,
yellow, yellowgreen
Returns
-------
str
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
# family
# ------
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser will only be able to apply a font if
it is available on the system which it operates. Provide
multiple font families, separated by commas, to indicate the
preference in which to apply fonts if they aren't available on
the system. The Chart Studio Cloud (at https://chart-
studio.plotly.com or on-premise) generates images on a server,
where only a select number of fonts are installed and
supported. These include "Arial", "Balto", "Courier New",
"Droid Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
The 'family' property is a string and must be specified as:
- A non-empty string
Returns
-------
str
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
# size
# ----
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
Returns
-------
int|float
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
color
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
size
"""
def __init__(self, arg=None, color=None, family=None, size=None, **kwargs):
"""
Construct a new Labelfont object
Sets the font used for labeling the contour levels. The default
color comes from the lines, if shown. The default family and
size come from `layout.font`.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of :class:`plotly.graph_objs.contourcarpet.
contours.Labelfont`
color
family
HTML font family - the typeface that will be applied by
the web browser. The web browser will only be able to
apply a font if it is available on the system which it
operates. Provide multiple font families, separated by
commas, to indicate the preference in which to apply
fonts if they aren't available on the system. The Chart
Studio Cloud (at https://chart-studio.plotly.com or on-
premise) generates images on a server, where only a
select number of fonts are installed and supported.
These include "Arial", "Balto", "Courier New", "Droid
Sans",, "Droid Serif", "Droid Sans Mono", "Gravitas
One", "Old Standard TT", "Open Sans", "Overpass", "PT
Sans Narrow", "Raleway", "Times New Roman".
size
Returns
-------
Labelfont
"""
super(Labelfont, self).__init__("labelfont")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.contourcarpet.contours.Labelfont
constructor must be a dict or
an instance of :class:`plotly.graph_objs.contourcarpet.contours.Labelfont`"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
# Populate data dict with properties
# ----------------------------------
_v = arg.pop("color", None)
_v = color if color is not None else _v
if _v is not None:
self["color"] = _v
_v = arg.pop("family", None)
_v = family if family is not None else _v
if _v is not None:
self["family"] = _v
_v = arg.pop("size", None)
_v = size if size is not None else _v
if _v is not None:
self["size"] = _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
# ===== /tests/test_extension.py (repo: SolarisYan/jupyterlab_autoversion, license: Apache-2.0) =====
# for Coverage
from mock import patch, MagicMock
from jupyterlab_autoversion.extension import load_jupyter_server_extension
class TestExtension:
def setup(self):
pass
# setup() before each test method
def teardown(self):
pass
# teardown() after each test method
@classmethod
def setup_class(cls):
pass
# setup_class() before any methods in this class
@classmethod
def teardown_class(cls):
pass
# teardown_class() after any methods in this class
def test_load_jupyter_server_extension(self):
m = MagicMock()
m.web_app.settings = {}
m.web_app.settings['base_url'] = '/test'
load_jupyter_server_extension(m)
# ===== /specialelaven.py (repo: subbuinti/python_practice, license: no_license) =====
number = int(input())
is_multiple_of_11 = ((number % 11) == 0)
is_one_more_than_multiple_of_11 = ((number % 11) == 1)
if is_multiple_of_11 or is_one_more_than_multiple_of_11:
print("Special Eleven")
else:
print("Normal Number") | [
"[email protected]"
] | |
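# Worked examples of the rule above:
#   22 -> 22 % 11 == 0 -> "Special Eleven"
#   23 -> 23 % 11 == 1 -> "Special Eleven"
#   25 -> 25 % 11 == 3 -> "Normal Number"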
# ===== /src/Codeforces/manthan_codefest18/B.py (repo: bvsbrk/Algos, license: no_license) =====
if __name__ == '__main__':
n, s = [int(__) for __ in input().strip().split()]
arr = [int(__) for __ in input().strip().split()]
arr.sort()
mid = n // 2
ans = 0
if s == arr[mid]:
print(ans)
elif s > arr[mid]:
while mid < n and arr[mid] < s:
ans += s - arr[mid]
mid += 1
print(ans)
else:
while mid >= 0 and arr[mid] > s:
ans += arr[mid] - s
mid -= 1
        print(ans)
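# This appears to be Codeforces 1037B "Reach Median": after sorting, only
# the median and the elements on the wrong side of s need to move.
# Example trace (assumed input): n=3, s=8, arr=[5, 1, 11] -> sorted
# [1, 5, 11], median 5 < 8, raise it to 8 for cost 3; 11 >= 8 already,
# so the answer printed is 3.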
# ===== /projects/migrations/0006_alter_portfolio_main_image.py (repo: furkalokbu/PortfolioTime, license: no_license) =====
# Generated by Django 3.2.2 on 2021-05-13 18:23
from django.db import migrations, models
import djlime.utils
class Migration(migrations.Migration):
dependencies = [
("projects", "0005_portfolio_main_image"),
]
operations = [
migrations.AlterField(
model_name="portfolio",
name="main_image",
field=models.ImageField(
blank=True,
help_text="recommended size 1000x665",
null=True,
upload_to=djlime.utils.get_file_path,
verbose_name="main image",
),
),
]
# ===== /pyCode/python3.6网络爬虫实战/16-使用Selenium模拟浏览器抓取淘宝商品美食信息/16-使用Selenium模拟浏览器抓取淘宝商品美食信息.py (repo: 13661892653/workspace, license: no_license) =====
#coding=utf-8
#Version:Python-3.6.0
from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from pyquery import PyQuery
import pymongo
import re
from config import *
browser = webdriver.Chrome()
client=pymongo.MongoClient(MONGO_URL)
db=client[MONGO_DB]
def search():
try:
browser.get('https://www.taobao.com/')
input = WebDriverWait(browser, 10).until(
EC.presence_of_element_located((By.CSS_SELECTOR, "#q")))
submit=WebDriverWait(browser,10).until(EC.element_to_be_clickable((By.CSS_SELECTOR,'#J_TSearchForm > div.search-button > button')))
        input.send_keys('婴儿用品')  # search keyword: "baby products"
submit.click()
total=WebDriverWait(browser, 10).until(EC.presence_of_element_located((By.CSS_SELECTOR,'#mainsrp-pager > div > div > div > div.total')))
get_products()
return total.text
except TimeoutException:
        print('Timed out, retrying the search...')
return search()
def next_page(page_number):
try:
input = WebDriverWait(browser, 10).until(
EC.presence_of_element_located((By.CSS_SELECTOR, "#mainsrp-pager > div > div > div > div.form > input")))
submit = WebDriverWait(browser, 10).until(
EC.element_to_be_clickable((By.CSS_SELECTOR, '#mainsrp-pager > div > div > div > div.form > span.btn.J_Submit')))
input.clear()
input.send_keys(page_number)
submit.click()
WebDriverWait(browser, 10).until(
EC.text_to_be_present_in_element((By.CSS_SELECTOR, '#mainsrp-pager > div > div > div > ul > li.item.active > span'),str(page_number))
)
get_products()
except TimeoutException:
        print('Timed out, retrying this page...')
next_page(page_number)
def get_products():
print("正在获取详情信息...")
WebDriverWait(browser, 10).until(
EC.presence_of_element_located((By.CSS_SELECTOR,'#mainsrp-itemlist .items .item'))
)
html=browser.page_source
doc=PyQuery(html)
items=doc('#mainsrp-itemlist .items .item').items()
for item in items:
product={
            'main_image': item.find('.pic .img').attr('src'),
            'price': item.find('.price').text().replace('\n', ' '),
            'deal_count': item.find('.deal-cnt').text()[0:-3],
            'title': item.find('.title').text().replace('\n', ' '),
            'shop_name': item.find('.shop').text(),
            'shop_location': item.find('.location').text()
}
print(product)
save_to_mongo(product)
def save_to_mongo(result):
if result:
if db[MONGO_TABLE].insert(result):
            print('Inserted into MongoDB successfully')
return True
return False
def main():
total=search()
total=int(re.compile('(\d+)').search(total).group(1))
for i in range(2,total+1):
next_page(i)
if __name__=='__main__':
    main()
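# `from config import *` above assumes a local config.py; an illustrative
# shape (names taken from their uses in this script, values hypothetical):
#   MONGO_URL = 'localhost'
#   MONGO_DB = 'taobao'
#   MONGO_TABLE = 'product'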
# ===== /1094.Car-Pooling.py (repo: mickey0524/leetcode, license: MIT) =====
# https://leetcode.com/problems/car-pooling/
# Easy (56.22%)
# Total Accepted: 3,367
# Total Submissions: 5,989
class Solution(object):
def carPooling(self, trips, capacity):
"""
:type trips: List[List[int]]
:type capacity: int
:rtype: bool
"""
trips.sort(key=lambda x: x[1])
q = []
for t in trips:
capacity -= t[0]
q += t,
tmp = []
for q_t in q:
if q_t[2] <= t[1]:
capacity += q_t[0]
else:
tmp += q_t,
if capacity < 0:
return False
q = tmp
return True
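# Illustrative checks (hypothetical driver): sort by pickup point, then
# release the passengers whose drop-off is at or before the current pickup.
if __name__ == '__main__':
    assert Solution().carPooling([[2, 1, 5], [3, 3, 7]], 4) is False
    assert Solution().carPooling([[2, 1, 5], [3, 3, 7]], 5) is True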
# ===== /src/spaceone/statistics/service/schedule_service.py (repo: choonho/statistics, license: Apache-2.0) =====
import logging
import copy
from spaceone.core.service import *
from spaceone.statistics.error import *
from spaceone.statistics.manager.resource_manager import ResourceManager
from spaceone.statistics.manager.schedule_manager import ScheduleManager
_LOGGER = logging.getLogger(__name__)
@authentication_handler
@authorization_handler
@event_handler
class ScheduleService(BaseService):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.resource_mgr: ResourceManager = self.locator.get_manager('ResourceManager')
self.schedule_mgr: ScheduleManager = self.locator.get_manager('ScheduleManager')
@transaction
@check_required(['topic', 'options', 'schedule', 'domain_id'])
def add(self, params):
"""Add schedule for statistics
Args:
params (dict): {
'topic': 'str',
'options': 'dict',
'schedule': 'dict',
'tags': 'dict',
'domain_id': 'str'
}
Returns:
schedule_vo
"""
domain_id = params['domain_id']
options = copy.deepcopy(params['options'])
schedule = params['schedule']
self._check_schedule(schedule)
self._verify_query_option(options, domain_id)
return self.schedule_mgr.add_schedule(params)
@transaction
@check_required(['schedule_id', 'domain_id'])
def update(self, params):
"""Update schedule
Args:
params (dict): {
'schedule_id': 'str',
'schedule': 'dict',
'tags': 'dict',
'domain_id': 'str'
}
Returns:
schedule_vo
"""
schedule = params.get('schedule')
self._check_schedule(schedule)
return self.schedule_mgr.update_schedule(params)
@transaction
@check_required(['schedule_id', 'domain_id'])
def enable(self, params):
"""Enable schedule
Args:
params (dict): {
'schedule_id': 'str',
'domain_id': 'str'
}
Returns:
schedule_vo
"""
domain_id = params['domain_id']
schedule_id = params['schedule_id']
schedule_vo = self.schedule_mgr.get_schedule(schedule_id, domain_id)
return self.schedule_mgr.update_schedule_by_vo({'state': 'ENABLED'}, schedule_vo)
@transaction
@check_required(['schedule_id', 'domain_id'])
def disable(self, params):
"""Disable schedule
Args:
params (dict): {
'schedule_id': 'str',
'domain_id': 'str'
}
Returns:
schedule_vo
"""
domain_id = params['domain_id']
schedule_id = params['schedule_id']
schedule_vo = self.schedule_mgr.get_schedule(schedule_id, domain_id)
return self.schedule_mgr.update_schedule_by_vo({'state': 'DISABLED'}, schedule_vo)
@transaction
@check_required(['schedule_id', 'domain_id'])
def delete(self, params):
"""Delete schedule
Args:
params (dict): {
'schedule_id': 'str',
'domain_id': 'str'
}
Returns:
None
"""
self.schedule_mgr.delete_schedule(params['schedule_id'], params['domain_id'])
@transaction
@check_required(['schedule_id', 'domain_id'])
def get(self, params):
"""Get schedule
Args:
params (dict): {
'schedule_id': 'str',
'domain_id': 'str',
'only': 'list'
}
Returns:
schedule_vo
"""
return self.schedule_mgr.get_schedule(params['schedule_id'], params['domain_id'], params.get('only'))
@transaction
@check_required(['domain_id'])
@append_query_filter(['schedule_id', 'topic', 'state', 'data_source_id', 'resource_type', 'domain_id'])
@append_keyword_filter(['schedule_id', 'topic', 'resource_type'])
def list(self, params):
""" List schedules
Args:
params (dict): {
'schedule_id': 'str',
'topic': 'str',
'state': 'str',
'data_source_id': 'str',
'resource_type': 'str',
'domain_id': 'str',
'query': 'dict (spaceone.api.core.v1.Query)'
}
Returns:
schedule_vos (object)
total_count
"""
query = params.get('query', {})
return self.schedule_mgr.list_schedules(query)
@transaction
@check_required(['query', 'domain_id'])
@append_query_filter(['domain_id'])
def stat(self, params):
"""
Args:
params (dict): {
'domain_id': 'str',
'query': 'dict (spaceone.api.core.v1.StatisticsQuery)'
}
Returns:
values (list) : 'list of statistics data'
"""
query = params.get('query', {})
return self.schedule_mgr.stat_schedules(query)
@transaction
@append_query_filter([])
def list_domains(self, params):
""" This is used by Scheduler
Returns:
results (list)
total_count (int)
"""
mgr = self.locator.get_manager('ScheduleManager')
query = params.get('query', {})
result = mgr.list_domains(query)
return result
@staticmethod
def _check_schedule(schedule):
if schedule and len(schedule) > 1:
raise ERROR_SCHEDULE_OPTION()
@staticmethod
def _check_query_option(options):
if 'resource_type' not in options:
raise ERROR_REQUIRED_PARAMETER(key='option.resource_type')
if 'query' not in options:
raise ERROR_REQUIRED_PARAMETER(key='option.query')
def _verify_query_option(self, options, domain_id):
self._check_query_option(options)
resource_type = options['resource_type']
query = options['query']
distinct = query.get('distinct')
join = options.get('join', [])
formulas = options.get('formulas', [])
sort = query.get('sort')
page = query.get('page', {})
limit = query.get('limit')
has_join_or_formula = len(join) > 0 or len(formulas) > 0
if distinct:
if has_join_or_formula:
raise ERROR_STATISTICS_DISTINCT()
else:
if has_join_or_formula:
query['sort'] = None
query['page'] = None
query['limit'] = None
response = self.resource_mgr.stat(resource_type, query, domain_id)
if has_join_or_formula:
results = response.get('results', [])
self.resource_mgr.join_and_execute_formula(results, resource_type, query, join,
formulas, sort, page, limit, domain_id)
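# Illustrative shape of the `options` dict consumed by add() and
# _verify_query_option() (keys are the ones read above; values are
# hypothetical):
#   options = {
#       'resource_type': 'inventory.Server',
#       'query': {'distinct': None, 'sort': None, 'page': {}, 'limit': None},
#       'join': [],
#       'formulas': []
#   }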
# ===== /pysnmp-with-texts/SRAP-WCM-MIB.py (repo: agustinhenze/mibs.snmplabs.com, licenses: Apache-2.0, LicenseRef-scancode-warranty-disclaimer, LicenseRef-scancode-proprietary-license, LicenseRef-scancode-unknown-license-reference) =====
#
# PySNMP MIB module SRAP-WCM-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/SRAP-WCM-MIB
# Produced by pysmi-0.3.4 at Wed May 1 15:10:33 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
a3Com, = mibBuilder.importSymbols("A3Com-products-MIB", "a3Com")
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
iso, IpAddress, Counter64, Integer32, ModuleIdentity, Gauge32, Unsigned32, ObjectIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, NotificationType, TimeTicks, MibIdentifier, Counter32 = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "IpAddress", "Counter64", "Integer32", "ModuleIdentity", "Gauge32", "Unsigned32", "ObjectIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "NotificationType", "TimeTicks", "MibIdentifier", "Counter32")
DisplayString, TextualConvention, MacAddress = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "MacAddress")
srAPModules, srAPWCM = mibBuilder.importSymbols("SRAP-GLOBAL-REG", "srAPModules", "srAPWCM")
srAPWCMMIBModule = ModuleIdentity((1, 3, 6, 1, 4, 1, 43, 35, 1, 1, 5))
if mibBuilder.loadTexts: srAPWCMMIBModule.setLastUpdated('9905260000Z')
if mibBuilder.loadTexts: srAPWCMMIBModule.setOrganization('3Com')
if mibBuilder.loadTexts: srAPWCMMIBModule.setContactInfo(' Name: Yuli Hou Mail Stop: 4240 Address: 5400 Bayfront Plaza Zip: CA 95052 City: Santa Clara, Country: USA Phone: +001-408-326-1191 Fax: +001-408-326-6555 e-mail: [email protected]')
if mibBuilder.loadTexts: srAPWCMMIBModule.setDescription('The 3COM Wireless LAN Products Web Configuration Management MIB Module.')
wcmMIB = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1))
wcmConf = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1))
apConfigMgmtGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 1))
apPerformMgmtGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 2))
apFaultMgmtGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 3))
apSecurityMgmtGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 4))
apEventGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 5))
wcmObjs = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2))
apConfigMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1))
apManufactureInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 1))
apSystemConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 2))
apSNMPInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 3))
apRFConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4))
apPerformMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2))
apRFStatistics = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1))
apEtherItfStat = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 2))
apRFItfStat = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 3))
apTrafficMatrix = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 4))
apFaultMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 3))
apSecurityMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4))
wcmEvents = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 3))
wcmEventsV2 = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 3, 0))
apModelnumber = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apModelnumber.setStatus('current')
if mibBuilder.loadTexts: apModelnumber.setDescription('The Model Number of the ACCESS Point device.')
apSerialnumber = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apSerialnumber.setStatus('current')
if mibBuilder.loadTexts: apSerialnumber.setDescription('The Serial Number of the ACCESS Point system.')
apMyMacAddr = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 1, 3), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apMyMacAddr.setStatus('current')
if mibBuilder.loadTexts: apMyMacAddr.setDescription('The MAC address of the ACCESS Point.')
apFirmwareVer = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apFirmwareVer.setStatus('current')
if mibBuilder.loadTexts: apFirmwareVer.setDescription('The current version of the ACCESS Point firmware.')
apWebUiVer = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apWebUiVer.setStatus('current')
if mibBuilder.loadTexts: apWebUiVer.setDescription('The current version of the ACCESS Point WEB Configration System.')
apMfgDate = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 1, 6), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apMfgDate.setStatus('current')
if mibBuilder.loadTexts: apMfgDate.setDescription('The Date of this ACCESS Point was manufactured.')
apProductName = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 1, 7), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apProductName.setStatus('current')
if mibBuilder.loadTexts: apProductName.setDescription('The Product Name of this ACCESS Point.')
apMfgName = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 1, 8), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apMfgName.setStatus('current')
if mibBuilder.loadTexts: apMfgName.setDescription('The Manufacture Name of this ACCESS Point.')
apHardwareRev = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apHardwareRev.setStatus('current')
if mibBuilder.loadTexts: apHardwareRev.setDescription('The Hardware Revision of this ACCESS Point.')
apDeviceName = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 2, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 256))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apDeviceName.setStatus('current')
if mibBuilder.loadTexts: apDeviceName.setDescription('An ASCII string to identify the Device Name of this ACCESS Point (up to 255 characters).')
apDeviceLoc = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 2, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 256))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apDeviceLoc.setStatus('current')
if mibBuilder.loadTexts: apDeviceLoc.setDescription('An ASCII string to identify the Location of this ACCESS Point (up to 255 characters).')
apHelpFilePath = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 2, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 256))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apHelpFilePath.setStatus('current')
if mibBuilder.loadTexts: apHelpFilePath.setDescription('An ASCII string to identify the Location of this ACCESS Point (up to 255 characters).')
apMyIPAddr = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 2, 4), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apMyIPAddr.setStatus('current')
if mibBuilder.loadTexts: apMyIPAddr.setDescription('The IP address for the Access Point. It can be set when apStaticIPAddrEnable is set to is enabled.')
apSubnetMask = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 2, 5), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apSubnetMask.setStatus('current')
if mibBuilder.loadTexts: apSubnetMask.setDescription('The subnet mask for the Access Point. It can be set when apStaticIPAddrEnable is set to enabled.')
apGatewayIPAddr = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 2, 6), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apGatewayIPAddr.setStatus('current')
if mibBuilder.loadTexts: apGatewayIPAddr.setDescription('The IP address of the gateway for the Access Point.')
apAdditionalGatewaysTableLength = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 2, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apAdditionalGatewaysTableLength.setStatus('current')
if mibBuilder.loadTexts: apAdditionalGatewaysTableLength.setDescription('The maximum number of entries in the Additional Gateways Table.')
apAdditionalGatewaysTable = MibTable((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 2, 8), )
if mibBuilder.loadTexts: apAdditionalGatewaysTable.setStatus('current')
if mibBuilder.loadTexts: apAdditionalGatewaysTable.setDescription('A table of information for additional Gateways. The current number of entries is 4(that mean aditional gateway is 3).')
apAdditionalGatewaysEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 2, 8, 1), ).setIndexNames((0, "SRAP-WCM-MIB", "additionalGatewaysIndex"))
if mibBuilder.loadTexts: apAdditionalGatewaysEntry.setStatus('current')
if mibBuilder.loadTexts: apAdditionalGatewaysEntry.setDescription('An entry in the table of information for additional gateways for this ACCESS Point. A row in this table cannot be created or deleted by SNMP operations on columns of the table.')
additionalGatewaysIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 2, 8, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 3)))
if mibBuilder.loadTexts: additionalGatewaysIndex.setStatus('current')
if mibBuilder.loadTexts: additionalGatewaysIndex.setDescription('Specific Gateway information Table index. Range (1..3).')
apAdditionalGatewaysIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 2, 8, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apAdditionalGatewaysIPAddr.setStatus('current')
if mibBuilder.loadTexts: apAdditionalGatewaysIPAddr.setDescription('The IP address of the Gateway used for this Access Point.')
apDHCPEnable = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 2, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apDHCPEnable.setStatus('current')
if mibBuilder.loadTexts: apDHCPEnable.setDescription('Enables/Disables Wireless DHCP mode for the Access Point. Default is disabled(2).')
apStaticIPAddrEnable = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 2, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apStaticIPAddrEnable.setStatus('current')
if mibBuilder.loadTexts: apStaticIPAddrEnable.setDescription("Enables/Disables Static IP of ACCESS Point . Default is disabled(2). Before issuing this command, set 'apMyIPAddr', 'apSubnetMask', and 'apGatewayIPAddr' to the proper values.")
apSNMPRequests = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 3, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apSNMPRequests.setStatus('current')
if mibBuilder.loadTexts: apSNMPRequests.setDescription('Indicates the number of SNMP requests to this SNMP Agent.')
apSNMPTraps = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 3, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apSNMPTraps.setStatus('current')
if mibBuilder.loadTexts: apSNMPTraps.setDescription('Indicates the number of Traps that were generated by this SNMP Agent.')
apAllTrapsEnable = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 3, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apAllTrapsEnable.setStatus('current')
if mibBuilder.loadTexts: apAllTrapsEnable.setDescription('Enables/Disables all SNMP traps. Default is enabled(1).')
apColdBootTrapEnable = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 3, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("disabled", 1), ("trapHost1", 2), ("trapHost2", 3), ("allTrapHosts", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apColdBootTrapEnable.setStatus('current')
if mibBuilder.loadTexts: apColdBootTrapEnable.setDescription('Enables/Disables ACCESS Point Cold Boot trap. Default is allTrapHosts(4). This is a generic-trap and the trap code is 0. When not disabled the trap could be directed to TrapHost1, TrapHost2 or all TrapHosts. The apAllTrapsEnable mib variable should be enabled for this trap to be issued.')
apAuthenFailureTrapEnable = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 3, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("disabled", 1), ("trapHost1", 2), ("trapHost2", 3), ("allTrapHosts", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apAuthenFailureTrapEnable.setStatus('current')
if mibBuilder.loadTexts: apAuthenFailureTrapEnable.setDescription('Enables/Disables authentication failure trap. Default is allTrapHosts(4). This is a generic-trap and the trap code is 4. When not disabled the trap could be directed to TrapHost1, TrapHost2 or all TrapHosts. The apAllTrapsEnable mib variable should be enabled for this trap to be issued.')
apRFTrapEnable = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 3, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("disabled", 1), ("trapHost1", 2), ("trapHost2", 3), ("allTrapHosts", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apRFTrapEnable.setStatus('current')
if mibBuilder.loadTexts: apRFTrapEnable.setDescription('Enables/Disables ACCESS Point RF driver startup trap. Default is allTrapHosts(4). This is an a3com enterprise-specific trap. When not disabled the trap could be directed to TrapHost1, TrapHost2 or all TrapHosts. The apAllTrapsEnable mib variable should be enabled for this trap to be issued.')
apWarmBootTrapEnable = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 3, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("disabled", 1), ("trapHost1", 2), ("trapHost2", 3), ("allTrapHosts", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apWarmBootTrapEnable.setStatus('current')
if mibBuilder.loadTexts: apWarmBootTrapEnable.setDescription('Enables/Disables Warm Boot trap. Default is allTrapHosts(4). This is a generic-trap and the trap code is 0. When not disabled the trap could be directed to TrapHost1, TrapHost2 or all TrapHosts. The apAllTrapsEnable mib variable should be enabled for this trap to be issued.')
apWCMaxAssocTrapEnable = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 3, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("disabled", 1), ("trapHost1", 2), ("trapHost2", 3), ("allTrapHosts", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apWCMaxAssocTrapEnable.setStatus('current')
if mibBuilder.loadTexts: apWCMaxAssocTrapEnable.setDescription('Enables/Disables apWCMaxAssoc Trap. Default is allTrapHosts(4). When not disabled the trap could be directed to TrapHost1, TrapHost2 or all TrapHosts. The apAllTrapsEnable mib variable should be enabled for this trap to be issued.')
apThresholdAssociatedEnable = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 3, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("disabled", 1), ("trapHost1", 2), ("trapHost2", 3), ("allTrapHosts", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apThresholdAssociatedEnable.setStatus('current')
if mibBuilder.loadTexts: apThresholdAssociatedEnable.setDescription('Enables/Disables apThresholdAssociated Trap. Default is allTrapHosts(4). When not disabled the trap could be directed to TrapHost1, TrapHost2 or all TrapHosts. The apAllTrapsEnable mib variable should be enabled for this trap to be issued.')
dslSecurityViolationEnable = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 3, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("disabled", 1), ("trapHost1", 2), ("trapHost2", 3), ("allTrapHosts", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dslSecurityViolationEnable.setStatus('current')
if mibBuilder.loadTexts: dslSecurityViolationEnable.setDescription('Enables/Disables dslSecurityViolation Trap. Default is allTrapHosts(4). When not disabled the trap could be directed to TrapHost1, TrapHost2 or all TrapHosts. The apAllTrapsEnable MIB variable should be enabled for this trap to be issued.')
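# Radio (RF) configuration scalars: service area, country, WEP algorithm, channel, power and data rates.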
apWlanServiceArea = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apWlanServiceArea.setStatus('current')
if mibBuilder.loadTexts: apWlanServiceArea.setDescription('The WLAN Service Area of the ACCESS Point.')
apCountryName = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apCountryName.setStatus('current')
if mibBuilder.loadTexts: apCountryName.setDescription('The country for which the ACCESS Point radio is configured. It is set at the factory and may not be altered.')
apWEPAlgorithm = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12))).clone(namedValues=NamedValues(("openSystem", 1), ("wep40bShareKey", 2), ("wep128bShareKey", 3), ("wep128bpasswordsessionKey", 4), ("wepEAPMD5Authenticationwithnoencryption", 5), ("wepEAPMD5Authenticationwith40bsharedkey", 6), ("wepEAPMD5Authenticationwith128bsharedkey", 7), ("wepEAPMD5Authenticationwith128bpasswordsessionkey", 8), ("wepEAPTLSAuthenticationwith40bsessionkey", 9), ("wepEAPTLSAuthenticationwith128bsessionkey", 10), ("wepSerialAuthenticationwith40bsessionkey", 11), ("wepSerialAuthenticationwith128bsessionkey", 12)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apWEPAlgorithm.setStatus('current')
if mibBuilder.loadTexts: apWEPAlgorithm.setDescription('This attribute is a set of all the shared key WEP algorithms supported by the STAs. The following are the default values and the associated algorithm. Value = 1: open system Value = 2: 40 bit Key Value = 3: 128 bit Key IMPORTANT NOTE: The 128 bit WEP key encryption is currently restricted by U.S. laws.')
apShortPreamble = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apShortPreamble.setStatus('current')
if mibBuilder.loadTexts: apShortPreamble.setDescription('This attribute is set to enable short preamble for compatibility within the 802.11 community. Setting it to enabled shall result in performance enhancement. Default is disabled.')
apMaxWCTrigger = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apMaxWCTrigger.setStatus('current')
if mibBuilder.loadTexts: apMaxWCTrigger.setDescription('Indicates the trigger point for sending apWCMaxAssocTrap trap. Range (1..255). The default value is 255.')
apRadioAutoSense = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apRadioAutoSense.setStatus('current')
if mibBuilder.loadTexts: apRadioAutoSense.setDescription('This attribute is set to enable Radio Auto Sense. Default is enabled.')
apRadioChannel = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 14))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apRadioChannel.setStatus('current')
if mibBuilder.loadTexts: apRadioChannel.setDescription('This attribute is the Radio Channel. It can be written only when apRadioAutoSense is disabled.')
apHighPerformance = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apHighPerformance.setStatus('current')
if mibBuilder.loadTexts: apHighPerformance.setDescription('This attribute is set to enable Radio High Performance. Default is disabled.')
apTransmitPower = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("high", 1), ("medium", 2), ("low", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apTransmitPower.setStatus('current')
if mibBuilder.loadTexts: apTransmitPower.setDescription('This attribute sets the transmit power, from 1 to 100 microwatts.')
apRadioAntenna = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabledoneantenna", 1), ("bothantennaon", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apRadioAntenna.setStatus('current')
if mibBuilder.loadTexts: apRadioAntenna.setDescription('This attribute sets the selection of the radio antenna.')
thresholdAssociated = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 256))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: thresholdAssociated.setStatus('current')
if mibBuilder.loadTexts: thresholdAssociated.setDescription('This attribute sets the threshold of associated Wireless Clients. The default is 200.')
apDataRateMgmt = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("auto", 1), ("manual", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apDataRateMgmt.setStatus('current')
if mibBuilder.loadTexts: apDataRateMgmt.setDescription('This attribute automatically or manually sets the best data rate.')
apDataRate5 = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("optional", 1), ("required", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apDataRate5.setStatus('current')
if mibBuilder.loadTexts: apDataRate5.setDescription('This attribute sets the data rate to 5.5 Mb/sec.')
apDataRate11 = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 1, 4, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("optional", 1), ("required", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apDataRate11.setStatus('current')
if mibBuilder.loadTexts: apDataRate11.setDescription('This attribute sets the data rate to 11.0 Mb/sec.')
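# RF interface frame statistics (transmit/receive counters).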
rfTxUcastFrms = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfTxUcastFrms.setStatus('current')
if mibBuilder.loadTexts: rfTxUcastFrms.setDescription(' The number of Unicast frames successfully transmitted.')
rfRxUcastFrms = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfRxUcastFrms.setStatus('current')
if mibBuilder.loadTexts: rfRxUcastFrms.setDescription(' The number of Unicast frames successfully received.')
rfTxBcastFrms = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfTxBcastFrms.setStatus('current')
if mibBuilder.loadTexts: rfTxBcastFrms.setDescription(' The number of Broadcast frames transmitted.')
rfRxBcastFrms = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfRxBcastFrms.setStatus('current')
if mibBuilder.loadTexts: rfRxBcastFrms.setDescription(' The number of Broadcast frames received.')
rfTxMcastFrms = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfTxMcastFrms.setStatus('current')
if mibBuilder.loadTexts: rfTxMcastFrms.setDescription(' The number of Multicast frames transmitted.')
rfRxMcastFrms = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfRxMcastFrms.setStatus('current')
if mibBuilder.loadTexts: rfRxMcastFrms.setDescription(' The number of Multicast frames received.')
rfTxEncryptFrms = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfTxEncryptFrms.setStatus('current')
if mibBuilder.loadTexts: rfTxEncryptFrms.setDescription(' The number of Encrypted frames transmitted.')
rfRxEncryptFrms = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfRxEncryptFrms.setStatus('current')
if mibBuilder.loadTexts: rfRxEncryptFrms.setDescription(' The number of Encrypted frames received.')
rfTxFragFrms = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfTxFragFrms.setStatus('current')
if mibBuilder.loadTexts: rfTxFragFrms.setDescription(' The number of frame fragments transmitted.')
rfRxFragFrms = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfRxFragFrms.setStatus('current')
if mibBuilder.loadTexts: rfRxFragFrms.setDescription(' The number of frame fragments received.')
rfTxBeacons = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfTxBeacons.setStatus('current')
if mibBuilder.loadTexts: rfTxBeacons.setDescription(' The number of Beacon frames transmitted.')
rfRxBeacons = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfRxBeacons.setStatus('current')
if mibBuilder.loadTexts: rfRxBeacons.setDescription(' The number of Beacon frames received.')
rfTxACK = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfTxACK.setStatus('current')
if mibBuilder.loadTexts: rfTxACK.setDescription('The number of ACK frames transmitted in response to successfully received frames.')
rfRxACK = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfRxACK.setStatus('current')
if mibBuilder.loadTexts: rfRxACK.setDescription(' The number of frames transmitted that had their corresponding ACK frames successfully received.')
rfTxRTS = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfTxRTS.setStatus('current')
if mibBuilder.loadTexts: rfTxRTS.setDescription(' The number of RTS frames that were successfully transmitted.')
rfRxRTS = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfRxRTS.setStatus('current')
if mibBuilder.loadTexts: rfRxRTS.setDescription(' The number of RTS frames received.')
rfCTSFailures = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfCTSFailures.setStatus('current')
if mibBuilder.loadTexts: rfCTSFailures.setDescription(' The number of frames for which no CTS frame was received in response to an RTS frame being sent.')
rfRxCTS = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfRxCTS.setStatus('current')
if mibBuilder.loadTexts: rfRxCTS.setDescription(' The number of CTS frames received in response to an RTS.')
rfTxACKFailures = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfTxACKFailures.setStatus('current')
if mibBuilder.loadTexts: rfTxACKFailures.setDescription(' The number of frames transmitted that did not have their corresponding ACK packets successfully received.')
rfTxRetrys = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfTxRetrys.setStatus('current')
if mibBuilder.loadTexts: rfTxRetrys.setDescription(' The number of frames that were retransmitted.')
rfFCSErrors = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfFCSErrors.setStatus('current')
if mibBuilder.loadTexts: rfFCSErrors.setDescription(' The number of frames received with checksum error.')
rfRxDuplicateFrms = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfRxDuplicateFrms.setStatus('current')
if mibBuilder.loadTexts: rfRxDuplicateFrms.setDescription(' The number of duplicate frames received.')
rfWEPUndecryptFrms = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfWEPUndecryptFrms.setStatus('current')
if mibBuilder.loadTexts: rfWEPUndecryptFrms.setDescription(" The number of frames received with the WEP sub-field of the Frame Control field set to one and the WEPOn value for the key mapped to the TA's MAC address indicates that the frame should not have been encrypted or that frame is discarded due to the receiving STA not implementing the privacy option.")
rfWEPICVErrors = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfWEPICVErrors.setStatus('current')
if mibBuilder.loadTexts: rfWEPICVErrors.setDescription(' The number of frames received with the WEP sub-field of the Frame Control field set to one and the value of the ICV as received in the frame does not match the ICV value that is calculated for the contents of the received frame.')
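# Ethernet interface packet, byte and rate statistics.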
apEtherPacketsIns = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 2, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apEtherPacketsIns.setStatus('current')
if mibBuilder.loadTexts: apEtherPacketsIns.setDescription('Indicates the number of good packets received on the Ethernet interface.')
apEtherPacketsOuts = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 2, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apEtherPacketsOuts.setStatus('current')
if mibBuilder.loadTexts: apEtherPacketsOuts.setDescription('Indicates the number of packets successfully transmitted on the Ethernet interface.')
apEtherOctetsIns = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 2, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apEtherOctetsIns.setStatus('current')
if mibBuilder.loadTexts: apEtherOctetsIns.setDescription('Indicates the number of good bytes received on the Ethernet interface.')
apEtherOctetsOuts = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 2, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apEtherOctetsOuts.setStatus('current')
if mibBuilder.loadTexts: apEtherOctetsOuts.setDescription('Indicates the number of bytes successfully transmitted on the Ethernet interface.')
apEtherPktsInPerSec = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 2, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apEtherPktsInPerSec.setStatus('current')
if mibBuilder.loadTexts: apEtherPktsInPerSec.setDescription('Indicates the number of good packets per second received on the Ethernet Interface.')
apEtherPktsOutPerSec = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 2, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apEtherPktsOutPerSec.setStatus('current')
if mibBuilder.loadTexts: apEtherPktsOutPerSec.setDescription('Indicates the number of packets per second successfully transmitted on the Ethernet interface.')
apEtherOctInPerSec = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 2, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apEtherOctInPerSec.setStatus('current')
if mibBuilder.loadTexts: apEtherOctInPerSec.setDescription('Indicates the number of good bytes per second received on the Ethernet Interface.')
apEtherOctOutPerSec = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 2, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apEtherOctOutPerSec.setStatus('current')
if mibBuilder.loadTexts: apEtherOctOutPerSec.setDescription('Indicates the number of bytes per second successfully transmitted on the Ethernet interface.')
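# RF interface packet, byte and rate statistics.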
apRFFrmsIns = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 3, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apRFFrmsIns.setStatus('current')
if mibBuilder.loadTexts: apRFFrmsIns.setDescription('Indicates the number of good packets received on the RF interface.')
apRFFrmsOuts = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 3, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apRFFrmsOuts.setStatus('current')
if mibBuilder.loadTexts: apRFFrmsOuts.setDescription('Indicates the number of packets successfully transmitted on the RF interface.')
apRFOctetsIns = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 3, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apRFOctetsIns.setStatus('current')
if mibBuilder.loadTexts: apRFOctetsIns.setDescription('Indicates the number of good bytes received on the RF interface.')
apRFOctetsOuts = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 3, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apRFOctetsOuts.setStatus('current')
if mibBuilder.loadTexts: apRFOctetsOuts.setDescription('Indicates the number of bytes successfully transmitted on the RF interface.')
apRFFrmsInPerSec = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 3, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apRFFrmsInPerSec.setStatus('current')
if mibBuilder.loadTexts: apRFFrmsInPerSec.setDescription('Indicates the number of good packets per second received on the RF Interface.')
apRFFrmsOutPerSec = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 3, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apRFFrmsOutPerSec.setStatus('current')
if mibBuilder.loadTexts: apRFFrmsOutPerSec.setDescription('Indicates the number of packets per second successfully transmitted on the RF interface.')
apRFOctInPerSec = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 3, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apRFOctInPerSec.setStatus('current')
if mibBuilder.loadTexts: apRFOctInPerSec.setDescription('Indicates the number of good bytes per second received on the RF Interface.')
apRFOctOutPerSec = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 3, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apRFOctOutPerSec.setStatus('current')
if mibBuilder.loadTexts: apRFOctOutPerSec.setDescription('Indicates the number of bytes per second successfully transmitted on the RF interface.')
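# Traffic matrix: packet counts bridged between the Ethernet and RF interfaces.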
apEnetsNPktsToEnets = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 4, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apEnetsNPktsToEnets.setStatus('current')
if mibBuilder.loadTexts: apEnetsNPktsToEnets.setDescription('Indicates the number of packets sent from the Ethernet Interface to the Ethernet Interface. Must be 0.')
apRFNPktsToEnets = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 4, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apRFNPktsToEnets.setStatus('current')
if mibBuilder.loadTexts: apRFNPktsToEnets.setDescription('Indicates the number of packets sent from the RF Interface to the Ethernet Interface.')
apEnetsNPktsToRF = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 4, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apEnetsNPktsToRF.setStatus('current')
if mibBuilder.loadTexts: apEnetsNPktsToRF.setDescription('Indicates the number of packets sent from the Ethernet Interface to the RF Interface.')
apRFNPktsToRF = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 4, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apRFNPktsToRF.setStatus('current')
if mibBuilder.loadTexts: apRFNPktsToRF.setDescription('Indicates the number of packets sent from the RF Interface to the RF Interface. Must be 0.')
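# Per-frequency radio statistics table, indexed by rfDSFrequency.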
apDSPerFreqStatTable = MibTable((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 5), )
if mibBuilder.loadTexts: apDSPerFreqStatTable.setStatus('current')
if mibBuilder.loadTexts: apDSPerFreqStatTable.setDescription('A table of Radio Per Frequency statistics, which is based on the historical frequency records used by this Access Point. The current number of entries is 78.')
apDSPerFreqStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 5, 1), ).setIndexNames((0, "SRAP-WCM-MIB", "rfDSFrequency"))
if mibBuilder.loadTexts: apDSPerFreqStatEntry.setStatus('current')
if mibBuilder.loadTexts: apDSPerFreqStatEntry.setDescription('An entry in the table of Radio per frequency statistics. A row in this table cannot be created or deleted by SNMP operations on columns of the table.')
rfDSFrequency = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 5, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 14))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfDSFrequency.setStatus('current')
if mibBuilder.loadTexts: rfDSFrequency.setDescription('Radio Frequency channel number.')
rfDSPerFqPktsSents = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 5, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfDSPerFqPktsSents.setStatus('current')
if mibBuilder.loadTexts: rfDSPerFqPktsSents.setDescription('Counter for the packets sent per frequency.')
rfDSPerFqPktsRcvds = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 5, 1, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfDSPerFqPktsRcvds.setStatus('current')
if mibBuilder.loadTexts: rfDSPerFqPktsRcvds.setDescription('Counter for the packets received per frequency.')
rfDSPerFqRetries = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 2, 5, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rfDSPerFqRetries.setStatus('current')
if mibBuilder.loadTexts: rfDSPerFqRetries.setDescription('Counter for the retries per frequency.')
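# Fault management: configuration backup/restore over TFTP and syslog destinations.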
apUpdateTemplateStatus = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 3, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("downloadSuccessful", 1), ("downloadUnknownError", 2), ("downloadTimeout", 3), ("badFileName", 4), ("badMacAddrForRestoreFile", 5), ("incorrectFileImage", 6), ("tftpFileNotFound", 7), ("tftpAccessViolation", 8), ("tftpDiskfullOrAllocationExceeded", 9), ("tftpIllegalTFTPoperation", 10), ("tftpUnknownTransferID", 11), ("tftpFileAlreadyExists", 12), ("tftpNoSuchUser", 13), ("fileDownloadOrUpload", 14)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: apUpdateTemplateStatus.setStatus('current')
if mibBuilder.loadTexts: apUpdateTemplateStatus.setDescription('Reports the Firmware/HTML file download and update status.')
apBackupFileName = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 3, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 256))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apBackupFileName.setStatus('current')
if mibBuilder.loadTexts: apBackupFileName.setDescription('The name of the file to be backed up to the TFTP Server. (Including path name and total up to 255 characters.)')
apTemplateOperation = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 3, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("startBackup", 1), ("startTemplate", 2), ("startRestore", 3), ("uploadingData", 4), ("downloadingData", 5), ("notDownloading", 6), ("idle", 7)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apTemplateOperation.setStatus('current')
if mibBuilder.loadTexts: apTemplateOperation.setDescription("startBackup(1) - Backup the AP configuration through TFTP server. The AP backup configuration, named as apBackupFileName, will be saved in the default location of TFTP server. Before issuing this command, set 'apBackupFileName', and 'apTFTPServer' to the proper values. startTemplate(2) - This allows the network management platform to restore the template configuration of an AP, from one of its previous backups. It is expected that common parameters of the AP in the backup configuration file, named as apRestoreFileName, will be restored to the AP. Before issuing this command, set 'apRestoreFileName', and 'apTFTPServer' to the proper values. startRestore(3) - This allows the network management platform to restore the full configuration of an AP, from one of its previous backups. It is expected that all of the data in the backup configuration file, named as apRestoreFileName, will be restored to the AP. Before issuing this command, set 'apRestoreFileName', and 'apTFTPServer' to the proper values.")
apRestoreFileName = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 3, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 256))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apRestoreFileName.setStatus('current')
if mibBuilder.loadTexts: apRestoreFileName.setDescription('The name of the file to be restored to the AP. (Including path name and total up to 255 characters.)')
apSyslogDstIpAddressTableLength = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 3, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apSyslogDstIpAddressTableLength.setStatus('current')
if mibBuilder.loadTexts: apSyslogDstIpAddressTableLength.setDescription('The maximum number of entries in the Syslog Destination Ip Address Table.')
apSyslogDstIpAddressTable = MibTable((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 3, 6), )
if mibBuilder.loadTexts: apSyslogDstIpAddressTable.setStatus('current')
if mibBuilder.loadTexts: apSyslogDstIpAddressTable.setDescription('A table of Syslog Destination Ip Address. The current number of entries is 2.')
apSyslogDstIpAddressEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 3, 6, 1), ).setIndexNames((0, "SRAP-WCM-MIB", "apSyslogDstIpAddressIndex"))
if mibBuilder.loadTexts: apSyslogDstIpAddressEntry.setStatus('current')
if mibBuilder.loadTexts: apSyslogDstIpAddressEntry.setDescription('An entry in the table of Syslog Destination Ip Address for this ACCESS Point. A row in this table cannot be created or deleted by SNMP operations on columns of the table.')
apSyslogDstIpAddressIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 3, 6, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2)))
if mibBuilder.loadTexts: apSyslogDstIpAddressIndex.setStatus('current')
if mibBuilder.loadTexts: apSyslogDstIpAddressIndex.setDescription('Specific Syslog Destination Ip Address Table index. Range (1..2).')
apSyslogDstIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 3, 6, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apSyslogDstIpAddress.setStatus('current')
if mibBuilder.loadTexts: apSyslogDstIpAddress.setDescription('The IP address of Syslog Destination used for this Access Point.')
apSyslogEnable = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 3, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apSyslogEnable.setStatus('current')
if mibBuilder.loadTexts: apSyslogEnable.setDescription('Enables/Disables syslog for the Access Point. Default is disabled(2).')
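# Security management: ACL violations, EAP/RADIUS servers, TFTP tuning and encryption keys.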
apACLViolations = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apACLViolations.setStatus('current')
if mibBuilder.loadTexts: apACLViolations.setDescription('Indicates the number of Access Control violations that have occurred.')
apEAPSupport = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("disabled", 1), ("eap", 2), ("serialauth", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apEAPSupport.setStatus('current')
if mibBuilder.loadTexts: apEAPSupport.setDescription('Indicates whether AP support for the Extensible Authentication Protocol (EAP) is enabled or disabled. Default is enabled.')
apAuthServerSupportNumber = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apAuthServerSupportNumber.setStatus('current')
if mibBuilder.loadTexts: apAuthServerSupportNumber.setDescription('The maximum number of entries in the Authenticator Server Table.')
apKeyServerSupportNumber = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apKeyServerSupportNumber.setStatus('current')
if mibBuilder.loadTexts: apKeyServerSupportNumber.setDescription('The maximum number of entries in the Key Server Table.')
apAccountServerSupportNumber = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: apAccountServerSupportNumber.setStatus('current')
if mibBuilder.loadTexts: apAccountServerSupportNumber.setDescription('The maximum number of entries in the Accounting Server Table.')
apAuthServerTable = MibTable((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 6), )
if mibBuilder.loadTexts: apAuthServerTable.setStatus('current')
if mibBuilder.loadTexts: apAuthServerTable.setDescription('A table of information for Authenticator Servers. The currently supported number of entries is 2.')
apAuthServerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 6, 1), ).setIndexNames((0, "SRAP-WCM-MIB", "apAuthServerIndex"))
if mibBuilder.loadTexts: apAuthServerEntry.setStatus('current')
if mibBuilder.loadTexts: apAuthServerEntry.setDescription('An entry in the table of information for Authenticator Servers for this Access Point.')
apAuthServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 6, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: apAuthServerIndex.setStatus('current')
if mibBuilder.loadTexts: apAuthServerIndex.setDescription('A number uniquely identifying each Authenticator server.')
apAuthServerIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 6, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apAuthServerIPAddr.setStatus('current')
if mibBuilder.loadTexts: apAuthServerIPAddr.setDescription('The IP address of the Authenticator server referred to in this table entry.')
apAuthServerPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 6, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apAuthServerPortNumber.setStatus('current')
if mibBuilder.loadTexts: apAuthServerPortNumber.setDescription('The UDP port the client is using to send requests to this Authenticator server.')
apAuthSharedSecret = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 6, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 20))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apAuthSharedSecret.setStatus('mandatory')
if mibBuilder.loadTexts: apAuthSharedSecret.setDescription('This value indicates the shared key used by the authentication server.')
apAuthServerRetransmit = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 6, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 30))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apAuthServerRetransmit.setStatus('current')
if mibBuilder.loadTexts: apAuthServerRetransmit.setDescription('The number of times the access point tries to authenticate logon access.')
apAuthServerTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 6, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apAuthServerTimeout.setStatus('current')
if mibBuilder.loadTexts: apAuthServerTimeout.setDescription('The number of seconds the access point waits for a reply from the RADIUS server before it resends the request.')
apKeyServerTable = MibTable((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 7), )
if mibBuilder.loadTexts: apKeyServerTable.setStatus('current')
if mibBuilder.loadTexts: apKeyServerTable.setDescription('A table of information for Key Servers. The currently supported number of entries is 2.')
apKeyServerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 7, 1), ).setIndexNames((0, "SRAP-WCM-MIB", "apKeyServerIndex"))
if mibBuilder.loadTexts: apKeyServerEntry.setStatus('current')
if mibBuilder.loadTexts: apKeyServerEntry.setDescription('An entry in the table of information for Key Servers for this Access Point.')
apKeyServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 7, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: apKeyServerIndex.setStatus('current')
if mibBuilder.loadTexts: apKeyServerIndex.setDescription('A number uniquely identifying each Key server.')
apKeyServerIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 7, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apKeyServerIPAddr.setStatus('current')
if mibBuilder.loadTexts: apKeyServerIPAddr.setDescription('The IP address of the Key server referred to in this table entry.')
apKeyServerPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 7, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apKeyServerPortNumber.setStatus('current')
if mibBuilder.loadTexts: apKeyServerPortNumber.setDescription('The UDP port the client is using to send requests to this server.')
apAccountServerTable = MibTable((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 8), )
if mibBuilder.loadTexts: apAccountServerTable.setStatus('current')
if mibBuilder.loadTexts: apAccountServerTable.setDescription('A table of information for Account Servers. The currently supported number of entries is 1.')
apAccountServerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 8, 1), ).setIndexNames((0, "SRAP-WCM-MIB", "apAccountServerIndex"))
if mibBuilder.loadTexts: apAccountServerEntry.setStatus('current')
if mibBuilder.loadTexts: apAccountServerEntry.setDescription('An entry in the table of information for Account Servers for this Access Point.')
apAccountServerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 8, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2147483647)))
if mibBuilder.loadTexts: apAccountServerIndex.setStatus('current')
if mibBuilder.loadTexts: apAccountServerIndex.setDescription('A number uniquely identifying each Account server.')
apAccountServerIPAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 8, 1, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apAccountServerIPAddr.setStatus('current')
if mibBuilder.loadTexts: apAccountServerIPAddr.setDescription('The IP address of the Account server referred to in this table entry.')
apAccountServerPortNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 8, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apAccountServerPortNumber.setStatus('current')
if mibBuilder.loadTexts: apAccountServerPortNumber.setDescription('The UDP port the client is using to send requests to this server.')
apAccountServerSharedSecret = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 8, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 20))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apAccountServerSharedSecret.setStatus('mandatory')
if mibBuilder.loadTexts: apAccountServerSharedSecret.setDescription('This value indicates the shared key used by the accounting server.')
apAccountServerRetransmit = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 8, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 30))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apAccountServerRetransmit.setStatus('current')
if mibBuilder.loadTexts: apAccountServerRetransmit.setDescription('The number of times the access point retries accounting requests for logon access.')
apAccountServerTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 8, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apAccountServerTimeout.setStatus('current')
if mibBuilder.loadTexts: apAccountServerTimeout.setDescription('The number of seconds the access point waits for a reply from the RADIUS accounting server before it resends the request.')
apFastReKeying = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apFastReKeying.setStatus('current')
if mibBuilder.loadTexts: apFastReKeying.setDescription('Indicates whether AP support for fast re-keying is enabled or disabled. Default is disabled.')
apTFTPServerPortNumber = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apTFTPServerPortNumber.setStatus('current')
if mibBuilder.loadTexts: apTFTPServerPortNumber.setDescription('The TFTP Server port.')
apTFTPRetries = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apTFTPRetries.setStatus('current')
if mibBuilder.loadTexts: apTFTPRetries.setDescription('The maximum number of retries of the TFTP client in SRAPEE.')
apTFTPTimeOutInterval = MibScalar((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apTFTPTimeOutInterval.setStatus('current')
if mibBuilder.loadTexts: apTFTPTimeOutInterval.setDescription('The maximum timeout interval of the TFTP client in SRAPEE.')
apEncryptionTable = MibTable((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 13), )
if mibBuilder.loadTexts: apEncryptionTable.setStatus('current')
if mibBuilder.loadTexts: apEncryptionTable.setDescription('Conceptual table for the Encryption setting of the device.')
apEncryptionEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 13, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: apEncryptionEntry.setStatus('current')
if mibBuilder.loadTexts: apEncryptionEntry.setDescription('An entry (conceptual row) in the Encryption Table. ifIndex - Each IEEE 802.11 interface is represented by an ifEntry. Interface tables in this MIB module are indexed by ifIndex.')
apEncryptionMode = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 13, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("none", 1), ("wep64b", 2), ("wep128b", 3), ("wep152b", 4), ("tkipWPAPSK", 5), ("tkipWPAEnterprise", 6), ("aesWPAPSK", 7), ("aesWPAEnterprise", 8)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apEncryptionMode.setStatus('current')
if mibBuilder.loadTexts: apEncryptionMode.setDescription('The encryption mode that each IEEE 802.11 Interface(apEncryptionMode.ifIndex) is using. Value: 1 = No Encryption 2 = 64 bit WEP 3 = 128 bit WEP 4 = 152 bit WEP 5 = TKIP using WPA-PSK 6 = TKIP using WPA-Enterprise 7 = AES using WPA-PSK 8 = AES using WPA-Enterprise')
apWPAPSKPassPhrase = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 13, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(8, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apWPAPSKPassPhrase.setStatus('current')
if mibBuilder.loadTexts: apWPAPSKPassPhrase.setDescription('A WPA Pre-Shared Key Pass Phrase.')
apWEPDefaultKeyID = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 13, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apWEPDefaultKeyID.setStatus('current')
if mibBuilder.loadTexts: apWEPDefaultKeyID.setDescription('The WEPDefaultKey N currently being used to encrypt data.')
apWEPDefaultKey1 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 13, 1, 4), OctetString().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(5, 5), ValueSizeConstraint(13, 13), ValueSizeConstraint(16, 16), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apWEPDefaultKey1.setStatus('current')
if mibBuilder.loadTexts: apWEPDefaultKey1.setDescription('A WEP default secret key 1 value.')
apWEPDefaultKey2 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 13, 1, 5), OctetString().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(5, 5), ValueSizeConstraint(13, 13), ValueSizeConstraint(16, 16), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apWEPDefaultKey2.setStatus('current')
if mibBuilder.loadTexts: apWEPDefaultKey2.setDescription('A WEP default secret key 2 value.')
apWEPDefaultKey3 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 13, 1, 6), OctetString().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(5, 5), ValueSizeConstraint(13, 13), ValueSizeConstraint(16, 16), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apWEPDefaultKey3.setStatus('current')
if mibBuilder.loadTexts: apWEPDefaultKey3.setDescription('A WEP default secret key 3 value.')
apWEPDefaultKey4 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 2, 4, 13, 1, 7), OctetString().subtype(subtypeSpec=ConstraintsUnion(ValueSizeConstraint(5, 5), ValueSizeConstraint(13, 13), ValueSizeConstraint(16, 16), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: apWEPDefaultKey4.setStatus('current')
if mibBuilder.loadTexts: apWEPDefaultKey4.setDescription('A WEP default secret key 4 value.')
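# Notification (trap) definitions.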
radioTransmitStop = NotificationType((1, 3, 6, 1, 4, 1, 43, 0, 95)).setObjects(("IF-MIB", "ifIndex"), ("SRAP-WCM-MIB", "apMyMacAddr"), ("SRAP-WCM-MIB", "apRadioChannel"))
if mibBuilder.loadTexts: radioTransmitStop.setStatus('current')
if mibBuilder.loadTexts: radioTransmitStop.setDescription('The radio has stopped transmitting due to a fault. There may be a jammed channel. The user should change the AP to a different channel or check if there is an RF hardware issue.')
radioReceiveStop = NotificationType((1, 3, 6, 1, 4, 1, 43, 0, 96)).setObjects(("IF-MIB", "ifIndex"), ("SRAP-WCM-MIB", "apMyMacAddr"), ("SRAP-WCM-MIB", "apRadioChannel"))
if mibBuilder.loadTexts: radioReceiveStop.setStatus('current')
if mibBuilder.loadTexts: radioReceiveStop.setDescription('The radio has stopped receiving due to a fault. There may be a jammed channel. The user should change the AP to a different channel or check if there is an RF hardware issue.')
exceedMaxAssociated = NotificationType((1, 3, 6, 1, 4, 1, 43, 0, 97)).setObjects(("IF-MIB", "ifIndex"), ("SRAP-WCM-MIB", "apMyMacAddr"))
if mibBuilder.loadTexts: exceedMaxAssociated.setStatus('current')
if mibBuilder.loadTexts: exceedMaxAssociated.setDescription('Exceeded the maximum of 256 associated clients. No more associations of wireless clients are allowed. The user can disassociate idle wireless clients.')
exceedtThresholdAssociated = NotificationType((1, 3, 6, 1, 4, 1, 43, 0, 98)).setObjects(("IF-MIB", "ifIndex"), ("SRAP-WCM-MIB", "apMyMacAddr"))
if mibBuilder.loadTexts: exceedtThresholdAssociated.setStatus('current')
if mibBuilder.loadTexts: exceedtThresholdAssociated.setDescription('The number of associated Wireless Clients has reached the threshold of the AP. Please check the MIB object thresholdAssociated; the default is 200. The number of wireless clients is more than the value of thresholdAssociated. This will affect the performance of the AP.')
dslSecurityViolation = NotificationType((1, 3, 6, 1, 4, 1, 43, 0, 99)).setObjects(("IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dslSecurityViolation.setStatus('current')
if mibBuilder.loadTexts: dslSecurityViolation.setDescription('For a Wireless Client, the user name or password is not correct in the Dynamic Security Link database of the AP. The user can check with the AP admin whether the account exists in the Dynamic Security Link database.')
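# Conformance information: object and notification groups.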
apManufactureInfoGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 1, 1)).setObjects(("SRAP-WCM-MIB", "apModelnumber"), ("SRAP-WCM-MIB", "apSerialnumber"), ("SRAP-WCM-MIB", "apMyMacAddr"), ("SRAP-WCM-MIB", "apFirmwareVer"), ("SRAP-WCM-MIB", "apMfgDate"), ("SRAP-WCM-MIB", "apWebUiVer"), ("SRAP-WCM-MIB", "apProductName"), ("SRAP-WCM-MIB", "apMfgName"), ("SRAP-WCM-MIB", "apHardwareRev"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
apManufactureInfoGroup = apManufactureInfoGroup.setStatus('current')
if mibBuilder.loadTexts: apManufactureInfoGroup.setDescription('The apManufactureInfo group')
apSystemConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 1, 2)).setObjects(("SRAP-WCM-MIB", "apDeviceName"), ("SRAP-WCM-MIB", "apDeviceLoc"), ("SRAP-WCM-MIB", "apHelpFilePath"), ("SRAP-WCM-MIB", "apMyIPAddr"), ("SRAP-WCM-MIB", "apSubnetMask"), ("SRAP-WCM-MIB", "apGatewayIPAddr"), ("SRAP-WCM-MIB", "apAdditionalGatewaysTableLength"), ("SRAP-WCM-MIB", "apAdditionalGatewaysIPAddr"), ("SRAP-WCM-MIB", "apDHCPEnable"), ("SRAP-WCM-MIB", "apStaticIPAddrEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
apSystemConfigGroup = apSystemConfigGroup.setStatus('current')
if mibBuilder.loadTexts: apSystemConfigGroup.setDescription('The apSystemConfig group')
apSNMPInfoGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 1, 3)).setObjects(("SRAP-WCM-MIB", "apSNMPRequests"), ("SRAP-WCM-MIB", "apSNMPTraps"), ("SRAP-WCM-MIB", "apAllTrapsEnable"), ("SRAP-WCM-MIB", "apColdBootTrapEnable"), ("SRAP-WCM-MIB", "apAuthenFailureTrapEnable"), ("SRAP-WCM-MIB", "apRFTrapEnable"), ("SRAP-WCM-MIB", "apWarmBootTrapEnable"), ("SRAP-WCM-MIB", "apWCMaxAssocTrapEnable"), ("SRAP-WCM-MIB", "apThresholdAssociatedEnable"), ("SRAP-WCM-MIB", "dslSecurityViolationEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
apSNMPInfoGroup = apSNMPInfoGroup.setStatus('current')
if mibBuilder.loadTexts: apSNMPInfoGroup.setDescription('The apSNMPInfo group')
apRFConfigGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 1, 4)).setObjects(("SRAP-WCM-MIB", "apWlanServiceArea"), ("SRAP-WCM-MIB", "apCountryName"), ("SRAP-WCM-MIB", "apShortPreamble"), ("SRAP-WCM-MIB", "apMaxWCTrigger"), ("SRAP-WCM-MIB", "apWEPAlgorithm"), ("SRAP-WCM-MIB", "apRadioAutoSense"), ("SRAP-WCM-MIB", "apRadioChannel"), ("SRAP-WCM-MIB", "apHighPerformance"), ("SRAP-WCM-MIB", "apTransmitPower"), ("SRAP-WCM-MIB", "apRadioAntenna"), ("SRAP-WCM-MIB", "thresholdAssociated"), ("SRAP-WCM-MIB", "apDataRateMgmt"), ("SRAP-WCM-MIB", "apDataRate5"), ("SRAP-WCM-MIB", "apDataRate11"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
apRFConfigGroup = apRFConfigGroup.setStatus('current')
if mibBuilder.loadTexts: apRFConfigGroup.setDescription('The apRFConfig group')
apRFStatisticsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 2, 1)).setObjects(("SRAP-WCM-MIB", "rfTxUcastFrms"), ("SRAP-WCM-MIB", "rfRxUcastFrms"), ("SRAP-WCM-MIB", "rfTxBcastFrms"), ("SRAP-WCM-MIB", "rfRxBcastFrms"), ("SRAP-WCM-MIB", "rfTxMcastFrms"), ("SRAP-WCM-MIB", "rfRxMcastFrms"), ("SRAP-WCM-MIB", "rfTxEncryptFrms"), ("SRAP-WCM-MIB", "rfRxEncryptFrms"), ("SRAP-WCM-MIB", "rfTxFragFrms"), ("SRAP-WCM-MIB", "rfRxFragFrms"), ("SRAP-WCM-MIB", "rfTxBeacons"), ("SRAP-WCM-MIB", "rfRxBeacons"), ("SRAP-WCM-MIB", "rfTxACK"), ("SRAP-WCM-MIB", "rfRxACK"), ("SRAP-WCM-MIB", "rfTxRTS"), ("SRAP-WCM-MIB", "rfRxRTS"), ("SRAP-WCM-MIB", "rfCTSFailures"), ("SRAP-WCM-MIB", "rfRxCTS"), ("SRAP-WCM-MIB", "rfTxACKFailures"), ("SRAP-WCM-MIB", "rfTxRetrys"), ("SRAP-WCM-MIB", "rfFCSErrors"), ("SRAP-WCM-MIB", "rfRxDuplicateFrms"), ("SRAP-WCM-MIB", "rfWEPUndecryptFrms"), ("SRAP-WCM-MIB", "rfWEPICVErrors"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
apRFStatisticsGroup = apRFStatisticsGroup.setStatus('current')
if mibBuilder.loadTexts: apRFStatisticsGroup.setDescription('The apRFStatistics group')
apEtherItfStatGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 2, 2)).setObjects(("SRAP-WCM-MIB", "apEtherPacketsIns"), ("SRAP-WCM-MIB", "apEtherPacketsOuts"), ("SRAP-WCM-MIB", "apEtherOctetsIns"), ("SRAP-WCM-MIB", "apEtherOctetsOuts"), ("SRAP-WCM-MIB", "apEtherPktsInPerSec"), ("SRAP-WCM-MIB", "apEtherPktsOutPerSec"), ("SRAP-WCM-MIB", "apEtherOctInPerSec"), ("SRAP-WCM-MIB", "apEtherOctOutPerSec"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
apEtherItfStatGroup = apEtherItfStatGroup.setStatus('current')
if mibBuilder.loadTexts: apEtherItfStatGroup.setDescription('The apEtherItfStat group')
apRFItfStatGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 2, 3)).setObjects(("SRAP-WCM-MIB", "apRFFrmsIns"), ("SRAP-WCM-MIB", "apRFFrmsOuts"), ("SRAP-WCM-MIB", "apRFOctetsIns"), ("SRAP-WCM-MIB", "apRFOctetsOuts"), ("SRAP-WCM-MIB", "apRFFrmsInPerSec"), ("SRAP-WCM-MIB", "apRFFrmsOutPerSec"), ("SRAP-WCM-MIB", "apRFOctInPerSec"), ("SRAP-WCM-MIB", "apRFOctOutPerSec"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
apRFItfStatGroup = apRFItfStatGroup.setStatus('current')
if mibBuilder.loadTexts: apRFItfStatGroup.setDescription('The apRFItfStat group')
apTrafficMatrixGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 2, 4)).setObjects(("SRAP-WCM-MIB", "apEnetsNPktsToEnets"), ("SRAP-WCM-MIB", "apRFNPktsToEnets"), ("SRAP-WCM-MIB", "apEnetsNPktsToRF"), ("SRAP-WCM-MIB", "apRFNPktsToRF"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
apTrafficMatrixGroup = apTrafficMatrixGroup.setStatus('current')
if mibBuilder.loadTexts: apTrafficMatrixGroup.setDescription('The apTrafficMatrix group')
apDSPerFreqStatTableGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 2, 5)).setObjects(("SRAP-WCM-MIB", "rfDSFrequency"), ("SRAP-WCM-MIB", "rfDSPerFqPktsSents"), ("SRAP-WCM-MIB", "rfDSPerFqPktsRcvds"), ("SRAP-WCM-MIB", "rfDSPerFqRetries"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
apDSPerFreqStatTableGroup = apDSPerFreqStatTableGroup.setStatus('current')
if mibBuilder.loadTexts: apDSPerFreqStatTableGroup.setDescription('The apDSPerFreqStatTable group')
apFaultMgmtGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 3, 1)).setObjects(("SRAP-WCM-MIB", "apUpdateTemplateStatus"), ("SRAP-WCM-MIB", "apBackupFileName"), ("SRAP-WCM-MIB", "apTemplateOperation"), ("SRAP-WCM-MIB", "apRestoreFileName"), ("SRAP-WCM-MIB", "apSyslogDstIpAddressTableLength"), ("SRAP-WCM-MIB", "apSyslogDstIpAddress"), ("SRAP-WCM-MIB", "apSyslogEnable"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
apFaultMgmtGroup = apFaultMgmtGroup.setStatus('current')
if mibBuilder.loadTexts: apFaultMgmtGroup.setDescription('The apFaultMgmt group')
apSecurityMgmtGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 4, 1)).setObjects(("SRAP-WCM-MIB", "apACLViolations"), ("SRAP-WCM-MIB", "apEAPSupport"), ("SRAP-WCM-MIB", "apAuthServerSupportNumber"), ("SRAP-WCM-MIB", "apKeyServerSupportNumber"), ("SRAP-WCM-MIB", "apAccountServerSupportNumber"), ("SRAP-WCM-MIB", "apAuthServerIPAddr"), ("SRAP-WCM-MIB", "apAuthServerPortNumber"), ("SRAP-WCM-MIB", "apAuthSharedSecret"), ("SRAP-WCM-MIB", "apAuthServerRetransmit"), ("SRAP-WCM-MIB", "apAuthServerTimeout"), ("SRAP-WCM-MIB", "apKeyServerIPAddr"), ("SRAP-WCM-MIB", "apKeyServerPortNumber"), ("SRAP-WCM-MIB", "apAccountServerIPAddr"), ("SRAP-WCM-MIB", "apAccountServerPortNumber"), ("SRAP-WCM-MIB", "apAccountServerSharedSecret"), ("SRAP-WCM-MIB", "apAccountServerRetransmit"), ("SRAP-WCM-MIB", "apAccountServerTimeout"), ("SRAP-WCM-MIB", "apFastReKeying"), ("SRAP-WCM-MIB", "apTFTPServerPortNumber"), ("SRAP-WCM-MIB", "apTFTPRetries"), ("SRAP-WCM-MIB", "apTFTPTimeOutInterval"), ("SRAP-WCM-MIB", "apEncryptionMode"), ("SRAP-WCM-MIB", "apWPAPSKPassPhrase"), ("SRAP-WCM-MIB", "apWEPDefaultKeyID"), ("SRAP-WCM-MIB", "apWEPDefaultKey1"), ("SRAP-WCM-MIB", "apWEPDefaultKey2"), ("SRAP-WCM-MIB", "apWEPDefaultKey3"), ("SRAP-WCM-MIB", "apWEPDefaultKey4"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
apSecurityMgmtGroup = apSecurityMgmtGroup.setStatus('current')
if mibBuilder.loadTexts: apSecurityMgmtGroup.setDescription('The apSecurityMgmt group')
wcmNotificationGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 43, 35, 1, 7, 1, 1, 5, 1)).setObjects(("SRAP-WCM-MIB", "radioTransmitStop"), ("SRAP-WCM-MIB", "radioReceiveStop"), ("SRAP-WCM-MIB", "exceedMaxAssociated"), ("SRAP-WCM-MIB", "exceedtThresholdAssociated"), ("SRAP-WCM-MIB", "dslSecurityViolation"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
wcmNotificationGroup = wcmNotificationGroup.setStatus('current')
if mibBuilder.loadTexts: wcmNotificationGroup.setDescription('The wcm Notifications.')
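# Export all symbols defined by this module for use by other MIB modules.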
mibBuilder.exportSymbols("SRAP-WCM-MIB", apWarmBootTrapEnable=apWarmBootTrapEnable, apEtherPktsOutPerSec=apEtherPktsOutPerSec, apTFTPRetries=apTFTPRetries, wcmNotificationGroup=wcmNotificationGroup, apPerformMgmt=apPerformMgmt, apKeyServerSupportNumber=apKeyServerSupportNumber, apRestoreFileName=apRestoreFileName, apFaultMgmt=apFaultMgmt, apSNMPRequests=apSNMPRequests, apDeviceLoc=apDeviceLoc, apTrafficMatrixGroup=apTrafficMatrixGroup, apAdditionalGatewaysIPAddr=apAdditionalGatewaysIPAddr, apWebUiVer=apWebUiVer, apAuthServerEntry=apAuthServerEntry, apEtherPacketsOuts=apEtherPacketsOuts, apAuthServerSupportNumber=apAuthServerSupportNumber, apEtherOctetsIns=apEtherOctetsIns, apSecurityMgmt=apSecurityMgmt, additionalGatewaysIndex=additionalGatewaysIndex, apRFFrmsIns=apRFFrmsIns, apEncryptionMode=apEncryptionMode, rfTxUcastFrms=rfTxUcastFrms, apThresholdAssociatedEnable=apThresholdAssociatedEnable, rfRxMcastFrms=rfRxMcastFrms, apEtherOctetsOuts=apEtherOctetsOuts, apAuthServerIndex=apAuthServerIndex, rfRxUcastFrms=rfRxUcastFrms, rfRxBeacons=rfRxBeacons, rfTxEncryptFrms=rfTxEncryptFrms, rfDSFrequency=rfDSFrequency, apMyMacAddr=apMyMacAddr, apAuthServerIPAddr=apAuthServerIPAddr, apEtherPktsInPerSec=apEtherPktsInPerSec, apHelpFilePath=apHelpFilePath, rfRxDuplicateFrms=rfRxDuplicateFrms, apDataRate11=apDataRate11, apWEPDefaultKey4=apWEPDefaultKey4, wcmEventsV2=wcmEventsV2, wcmObjs=wcmObjs, apRFOctetsIns=apRFOctetsIns, apAdditionalGatewaysTableLength=apAdditionalGatewaysTableLength, apWEPDefaultKey3=apWEPDefaultKey3, apFaultMgmtGroup=apFaultMgmtGroup, rfTxFragFrms=rfTxFragFrms, apSystemConfigGroup=apSystemConfigGroup, apMyIPAddr=apMyIPAddr, rfFCSErrors=rfFCSErrors, apEncryptionEntry=apEncryptionEntry, apManufactureInfo=apManufactureInfo, exceedtThresholdAssociated=exceedtThresholdAssociated, apAuthServerPortNumber=apAuthServerPortNumber, apDHCPEnable=apDHCPEnable, apRFTrapEnable=apRFTrapEnable, apAuthSharedSecret=apAuthSharedSecret, rfWEPICVErrors=rfWEPICVErrors, apACLViolations=apACLViolations, apAuthServerRetransmit=apAuthServerRetransmit, apProductName=apProductName, apMfgName=apMfgName, radioTransmitStop=radioTransmitStop, apHighPerformance=apHighPerformance, apRFItfStat=apRFItfStat, apAllTrapsEnable=apAllTrapsEnable, apSecurityMgmtGroups=apSecurityMgmtGroups, apKeyServerPortNumber=apKeyServerPortNumber, apStaticIPAddrEnable=apStaticIPAddrEnable, apAccountServerRetransmit=apAccountServerRetransmit, apTrafficMatrix=apTrafficMatrix, rfTxRetrys=rfTxRetrys, rfTxACKFailures=rfTxACKFailures, dslSecurityViolation=dslSecurityViolation, exceedMaxAssociated=exceedMaxAssociated, apPerformMgmtGroups=apPerformMgmtGroups, apEventGroups=apEventGroups, rfTxBcastFrms=rfTxBcastFrms, rfTxMcastFrms=rfTxMcastFrms, apRFFrmsInPerSec=apRFFrmsInPerSec, apTFTPServerPortNumber=apTFTPServerPortNumber, apFaultMgmtGroups=apFaultMgmtGroups, apKeyServerIPAddr=apKeyServerIPAddr, apUpdateTemplateStatus=apUpdateTemplateStatus, apWEPDefaultKey2=apWEPDefaultKey2, apAccountServerIPAddr=apAccountServerIPAddr, apEtherOctOutPerSec=apEtherOctOutPerSec, radioReceiveStop=radioReceiveStop, apWEPAlgorithm=apWEPAlgorithm, apMfgDate=apMfgDate, rfTxBeacons=rfTxBeacons, rfDSPerFqRetries=rfDSPerFqRetries, apDataRateMgmt=apDataRateMgmt, apRFFrmsOutPerSec=apRFFrmsOutPerSec, apRadioAntenna=apRadioAntenna, rfDSPerFqPktsRcvds=rfDSPerFqPktsRcvds, apEtherItfStatGroup=apEtherItfStatGroup, apGatewayIPAddr=apGatewayIPAddr, rfRxEncryptFrms=rfRxEncryptFrms, wcmMIB=wcmMIB, apDeviceName=apDeviceName, apKeyServerTable=apKeyServerTable, 
apWPAPSKPassPhrase=apWPAPSKPassPhrase, apSyslogDstIpAddressTable=apSyslogDstIpAddressTable, apSyslogDstIpAddressIndex=apSyslogDstIpAddressIndex, apEAPSupport=apEAPSupport, apSyslogEnable=apSyslogEnable, apTemplateOperation=apTemplateOperation, apFastReKeying=apFastReKeying, apKeyServerIndex=apKeyServerIndex, apDataRate5=apDataRate5, apDSPerFreqStatTable=apDSPerFreqStatTable, apTransmitPower=apTransmitPower, apSyslogDstIpAddressTableLength=apSyslogDstIpAddressTableLength, apWEPDefaultKey1=apWEPDefaultKey1, apRFStatistics=apRFStatistics, apSerialnumber=apSerialnumber, dslSecurityViolationEnable=dslSecurityViolationEnable, apMaxWCTrigger=apMaxWCTrigger, apAuthenFailureTrapEnable=apAuthenFailureTrapEnable, apAuthServerTable=apAuthServerTable, apManufactureInfoGroup=apManufactureInfoGroup, apEtherPacketsIns=apEtherPacketsIns, wcmEvents=wcmEvents, apEtherItfStat=apEtherItfStat, apRFOctOutPerSec=apRFOctOutPerSec, rfRxRTS=rfRxRTS, apAccountServerTable=apAccountServerTable, apAuthServerTimeout=apAuthServerTimeout, srAPWCMMIBModule=srAPWCMMIBModule, apRadioChannel=apRadioChannel, apSecurityMgmtGroup=apSecurityMgmtGroup, apBackupFileName=apBackupFileName, apRFStatisticsGroup=apRFStatisticsGroup, apRFConfigGroup=apRFConfigGroup, apConfigMgmtGroups=apConfigMgmtGroups, apTFTPTimeOutInterval=apTFTPTimeOutInterval, apRFFrmsOuts=apRFFrmsOuts, rfCTSFailures=rfCTSFailures, rfRxCTS=rfRxCTS, apKeyServerEntry=apKeyServerEntry, apSNMPTraps=apSNMPTraps, apRFItfStatGroup=apRFItfStatGroup, apEtherOctInPerSec=apEtherOctInPerSec, apAccountServerTimeout=apAccountServerTimeout, apRFConfig=apRFConfig, rfTxACK=rfTxACK, apAccountServerSupportNumber=apAccountServerSupportNumber, apCountryName=apCountryName, rfWEPUndecryptFrms=rfWEPUndecryptFrms, apSNMPInfo=apSNMPInfo, rfDSPerFqPktsSents=rfDSPerFqPktsSents, apRadioAutoSense=apRadioAutoSense, apHardwareRev=apHardwareRev, apWCMaxAssocTrapEnable=apWCMaxAssocTrapEnable, apAdditionalGatewaysEntry=apAdditionalGatewaysEntry, apAccountServerEntry=apAccountServerEntry, apSubnetMask=apSubnetMask, apWlanServiceArea=apWlanServiceArea, apAccountServerSharedSecret=apAccountServerSharedSecret, apModelnumber=apModelnumber, apSNMPInfoGroup=apSNMPInfoGroup, rfTxRTS=rfTxRTS, apSyslogDstIpAddress=apSyslogDstIpAddress, apWEPDefaultKeyID=apWEPDefaultKeyID, apConfigMgmt=apConfigMgmt, wcmConf=wcmConf, apAdditionalGatewaysTable=apAdditionalGatewaysTable, apSyslogDstIpAddressEntry=apSyslogDstIpAddressEntry, apShortPreamble=apShortPreamble, rfRxBcastFrms=rfRxBcastFrms, rfRxFragFrms=rfRxFragFrms, thresholdAssociated=thresholdAssociated, rfRxACK=rfRxACK, apSystemConfig=apSystemConfig, apColdBootTrapEnable=apColdBootTrapEnable, apFirmwareVer=apFirmwareVer, apEncryptionTable=apEncryptionTable, apRFOctInPerSec=apRFOctInPerSec, apRFOctetsOuts=apRFOctetsOuts, apRFNPktsToEnets=apRFNPktsToEnets, apAccountServerIndex=apAccountServerIndex, apEnetsNPktsToRF=apEnetsNPktsToRF, apDSPerFreqStatEntry=apDSPerFreqStatEntry, apDSPerFreqStatTableGroup=apDSPerFreqStatTableGroup, apEnetsNPktsToEnets=apEnetsNPktsToEnets, apAccountServerPortNumber=apAccountServerPortNumber, apRFNPktsToRF=apRFNPktsToRF, PYSNMP_MODULE_ID=srAPWCMMIBModule)
| [
"[email protected]"
] | |
d129e4c0812fa19f8e5a6c71e7e94860e1283e5f | c1f1900d0f1522cfb6c0148ccd1138e3a3503ba8 | /generativeopenset/options.py | 1ccde89c0051407dab95ba2371390693424f4dd0 | [] | no_license | yunruiguo/counterfactual-openset | 4252c79bc799de0856cf019b4f13308e737a137c | fdba67466a877ed81de645b6c856d6f0a36006db | refs/heads/master | 2023-02-28T21:54:44.175036 | 2021-02-02T14:02:00 | 2021-02-02T14:02:00 | 335,285,749 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,386 | py | import os
import subprocess
import json
from pprint import pprint
def save_options(options):
# Include the version of the code that saved the options
# (in case the meaning of an option changes in the future)
if 'version' not in options:
options['version'] = get_code_version()
if not os.path.exists(options['result_dir']):
print("Creating result directory {}".format(options['result_dir']))
os.makedirs(options['result_dir'])
filename = os.path.join(options['result_dir'], 'params.json')
with open(filename, 'w') as fp:
print("Saving options to {}".format(filename))
to_save = options.copy()
# Do not save result_dir; always read it from the command line
del to_save['result_dir']
json.dump(to_save, fp, indent=2, sort_keys=True)
def load_options(options):
print("Resuming existing experiment at {} with options:".format(options['result_dir']))
param_path = get_param_path(options['result_dir'])
old_opts = json.load(open(param_path))
options.update(old_opts)
options['result_dir'] = os.path.expanduser(options['result_dir'])
pprint(options)
return options
def get_param_path(result_dir):
if os.path.exists(os.path.join(result_dir, 'params.json')):
return os.path.join(result_dir, 'params.json')
elif os.path.exists(os.path.join(result_dir, 'default_params.json')):
return os.path.join(result_dir, 'default_params.json')
raise ValueError("Could not find {}/params.json".format(result_dir))
def get_current_epoch(result_dir):
checkpoints_path = os.path.join(result_dir, 'checkpoints')
filenames = os.listdir(checkpoints_path)
model_filenames = [f for f in filenames if f.endswith('.pth')]
if not model_filenames:
return 0
def filename_to_epoch(filename):
tokens = filename.rstrip('.pth').split('_')
try:
return int(tokens[-1])
except ValueError:
return 0
return max(filename_to_epoch(f) for f in model_filenames)
def get_code_version():
cwd = os.path.dirname(__file__)
try:
output = subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=cwd)
except subprocess.CalledProcessError:
print("Warning: Failed git rev-parse, current code version unknown")
return "unknown"
return output.strip().decode('utf-8')
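# A minimal usage sketch (hypothetical option names, not from the original repo):
#
#   opts = {'result_dir': '/tmp/exp1', 'learning_rate': 0.001}
#   save_options(opts)                        # writes /tmp/exp1/params.json
#   opts = load_options({'result_dir': '/tmp/exp1'})
#   epoch = get_current_epoch('/tmp/exp1')    # assumes /tmp/exp1/checkpoints exists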
| [
"[email protected]"
] | |
42e89f451bbe4d4030bb30fcf32c0531c44d827a | 147d6678b8c99bd1e18b20814f259dc25a395ca6 | /python daily coding/2020.4.16 (문자열)/2675번 (문자열 반복).py | fd8d8f2aec4445d09f7e86b6a80d085465055c18 | [] | no_license | omy5123/Oh-min-young | 7759cf869720d58fb07edc0e8f5a9b013afacc95 | 7db08ab828cc28cb9f477ea5410a48245a156fef | refs/heads/master | 2021-05-19T07:08:01.379930 | 2021-01-17T07:51:49 | 2021-01-17T07:51:49 | 251,577,901 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,023 | py | """
Problem
Given a string S, write a program that builds a new string P by repeating each character R times, then prints it. That is, repeat the first character R times, then the second character R times, and so on. S contains only QR Code "alphanumeric" characters.
The QR Code "alphanumeric" characters are 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ\$%*+-./: .
Input
The first line contains the number of test cases T (1 ≤ T ≤ 1,000). Each test case consists of the repeat count R (1 ≤ R ≤ 8) and the string S, separated by a space. S is at least 1 character long and no longer than 20 characters.
Output
For each test case, print P.
Sample Input 1
2
3 ABC
5 /HTP
Sample Output 1
AAABBBCCC
/////HHHHHTTTTTPPPPP
"""
t = int(input())
for _ in range(t):
    # each test case is a repeat count R and a string S on one line
    r, s = input().split()
    r = int(r)
    for ch in s:
        print(ch * r, end='')
    print()
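# Equivalent one-liner for the inner loop (same output):
#   print(''.join(ch * r for ch in s))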
| [
"[email protected]"
] | |
72a5fad825c61fa86d5ef8ed291f73492c1c0c5f | df2cbe914f463ad050d7ed26194424afbe3a0a52 | /addons/website_sale_delivery/models/sale_order.py | 8348b35afb5921f18f137f6b7cf52a75f79e7da7 | [
"Apache-2.0"
] | permissive | SHIVJITH/Odoo_Machine_Test | 019ed339e995be980606a2d87a63312ddc18e706 | 310497a9872db7844b521e6dab5f7a9f61d365a4 | refs/heads/main | 2023-07-16T16:23:14.300656 | 2021-08-29T11:48:36 | 2021-08-29T11:48:36 | 401,010,175 | 0 | 0 | Apache-2.0 | 2021-08-29T10:13:58 | 2021-08-29T10:13:58 | null | UTF-8 | Python | false | false | 4,297 | py | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
from odoo import api, fields, models
_logger = logging.getLogger(__name__)
class SaleOrder(models.Model):
_inherit = 'sale.order'
amount_delivery = fields.Monetary(
compute='_compute_amount_delivery',
string='Delivery Amount',
help="The amount without tax.", store=True, tracking=True)
def _compute_website_order_line(self):
super(SaleOrder, self)._compute_website_order_line()
for order in self:
order.website_order_line = order.website_order_line.filtered(lambda l: not l.is_delivery)
@api.depends('order_line.price_unit', 'order_line.tax_id', 'order_line.discount', 'order_line.product_uom_qty')
def _compute_amount_delivery(self):
for order in self:
if self.env.user.has_group('account.group_show_line_subtotals_tax_excluded'):
order.amount_delivery = sum(order.order_line.filtered('is_delivery').mapped('price_subtotal'))
else:
order.amount_delivery = sum(order.order_line.filtered('is_delivery').mapped('price_total'))
def _check_carrier_quotation(self, force_carrier_id=None):
self.ensure_one()
DeliveryCarrier = self.env['delivery.carrier']
if self.only_services:
self.write({'carrier_id': None})
self._remove_delivery_line()
return True
else:
self = self.with_company(self.company_id)
# attempt to use partner's preferred carrier
if not force_carrier_id and self.partner_shipping_id.property_delivery_carrier_id:
force_carrier_id = self.partner_shipping_id.property_delivery_carrier_id.id
carrier = force_carrier_id and DeliveryCarrier.browse(force_carrier_id) or self.carrier_id
available_carriers = self._get_delivery_methods()
if carrier:
if carrier not in available_carriers:
carrier = DeliveryCarrier
else:
                    # set the forced carrier at the beginning of the list so it is verified first below
available_carriers -= carrier
available_carriers = carrier + available_carriers
if force_carrier_id or not carrier or carrier not in available_carriers:
for delivery in available_carriers:
verified_carrier = delivery._match_address(self.partner_shipping_id)
if verified_carrier:
carrier = delivery
break
self.write({'carrier_id': carrier.id})
self._remove_delivery_line()
if carrier:
res = carrier.rate_shipment(self)
if res.get('success'):
self.set_delivery_line(carrier, res['price'])
self.delivery_rating_success = True
self.delivery_message = res['warning_message']
else:
self.set_delivery_line(carrier, 0.0)
self.delivery_rating_success = False
self.delivery_message = res['error_message']
return bool(carrier)
def _get_delivery_methods(self):
address = self.partner_shipping_id
        # searching on website_published also restricts the result to the available websites (website_published is a computed field with a _search method)
return self.env['delivery.carrier'].sudo().search([('website_published', '=', True)]).available_carriers(address)
def _cart_update(self, product_id=None, line_id=None, add_qty=0, set_qty=0, **kwargs):
""" Override to update carrier quotation if quantity changed """
self._remove_delivery_line()
        # When you update a cart, it is not enough to remove the "delivery cost" line.
        # The carrier might also be invalid, e.g. if you bought things that are too heavy
        # -> this may cause a bug if you go to the checkout screen, choose a carrier,
        # then update your cart (the cart becomes uneditable)
self.write({'carrier_id': False})
values = super(SaleOrder, self)._cart_update(product_id, line_id, add_qty, set_qty, **kwargs)
return values
| [
"[email protected]"
] | |
2f7faf8cb1215010b6f4f201ca1e2084f22cdd66 | 09de981a1b1591f85e41147a299885e60f7cea93 | /ivona_api/__init__.py | 12d2e343b511d0b5f83847e836f1f82e3f4ebef5 | [
"MIT",
"Python-2.0"
] | permissive | Pythonity/python-ivona-api | c95951509f7dda90812c2d325beb82ad34560d1f | 490a2e502d4aa769b9f41603eb5d5e5ebf1ea912 | refs/heads/master | 2020-12-18T16:00:19.157330 | 2017-06-15T21:02:07 | 2017-06-15T21:02:07 | 60,565,253 | 10 | 3 | null | 2017-01-20T23:15:10 | 2016-06-06T22:33:22 | Python | UTF-8 | Python | false | false | 416 | py | # -*- coding: utf-8 -*-
"""
ivona_api - Python library that helps you connect to Amazon's IVONA Speech
Cloud from within your Python project.
"""
from __future__ import absolute_import, unicode_literals
from ivona_api.ivona_api import IvonaAPI # noqa
__title__ = 'ivona_api'
__version__ = '0.3.0'
__author__ = 'Pythonity'
__license__ = 'MIT'
__url__ = 'https://github.com/Pythonity/python-ivona-api'
| [
"[email protected]"
] | |
f9ca580abef29cccb94846ae9d25defc6a9972cc | a8828f1b8d443e44be8521d63b212e33c6bd7cbb | /app/sprites/tile_map.py | 16b23e9fc61336084860a295778fd5c873d0a668 | [] | no_license | JordanSlaman/pygame-cc | ab8e156ee7b27296996efc212ab23a18de122325 | fe2ac5ceb2c8f2cdb37dd1efa657723b4a45d1b9 | refs/heads/main | 2023-03-13T05:49:21.705879 | 2021-03-07T01:22:39 | 2021-03-07T01:22:39 | 345,232,003 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,538 | py | from pathlib import Path
tile_sprite_filepath = Path('./sprites/tiles.png')
tile_size = 34
column = [0, 34, 68, 102, 136, 170, 204, 238]
row = [14, 48, 82, 116, 150, 184, 218, 252, 286, 320, 354, 388, 422, 456, 490, 524]
tiles = {
"player_down": (column[0], row[0]),
"player_left": (column[1], row[0]),
"player_up": (column[2], row[0]),
"player_right": (column[3], row[0]),
"player_down_masked": (column[4], row[0]),
"player_left_masked": (column[5], row[0]),
"player_up_masked": (column[6], row[0]),
"player_right_masked": (column[7], row[0]),
"player_down_water": (column[0], row[1]),
"player_left_water": (column[1], row[1]),
"player_up_water": (column[2], row[1]),
"player_right_water": (column[3], row[1]),
"player_drowned": (column[4], row[1]),
"player_burned": (column[5], row[1]),
# "unused": (column[6], row[1]),
# "unused": (column[7], row[1]),
"key_red": (column[0], row[2]),
"key_blue": (column[1], row[2]),
"key_yellow": (column[2], row[2]),
"key_green": (column[3], row[2]),
"key_red_masked": (column[4], row[2]),
"key_blue_masked": (column[5], row[2]),
"key_green_masked": (column[6], row[2]),
"key_yellow_masked": (column[7], row[2]),
"boots_skate": (column[0], row[3]),
"boots_suction": (column[1], row[3]),
"boots_fireproof": (column[2], row[3]),
"boots_flipper": (column[3], row[3]),
"boots_skate_masked": (column[4], row[3]),
"boots_suction_masked": (column[5], row[3]),
"boots_fireproof_masked": (column[6], row[3]),
"boots_flipper_masked": (column[7], row[3]),
"bug_down": (column[0], row[4]),
"bug_left": (column[1], row[4]),
"bug_up": (column[2], row[4]),
"bug_right": (column[3], row[4]),
"bug_down_masked": (column[4], row[4]),
"bug_left_masked": (column[5], row[4]),
"bug_up_masked": (column[6], row[4]),
"bug_right_masked": (column[7], row[4]),
"tank_down": (column[0], row[5]),
"tank_left": (column[1], row[5]),
"tank_up": (column[2], row[5]),
"tank_right": (column[3], row[5]),
"tank_down_masked": (column[4], row[5]),
"tank_left_masked": (column[5], row[5]),
"tank_up_masked": (column[6], row[5]),
"tank_right_masked": (column[7], row[5]),
# ball row
"ship_down": (column[0], row[7]),
"ship_left": (column[1], row[7]),
"ship_up": (column[2], row[7]),
"ship_right": (column[3], row[7]),
"ship_down_masked": (column[4], row[7]),
"ship_left_masked": (column[5], row[7]),
"ship_up_masked": (column[6], row[7]),
"ship_right_masked": (column[7], row[7]),
"alien_down": (column[0], row[8]),
"alien_left": (column[1], row[8]),
"alien_up": (column[2], row[8]),
"alien_right": (column[3], row[8]),
"alien_down_masked": (column[4], row[8]),
"alien_left_masked": (column[5], row[8]),
"alien_up_masked": (column[6], row[8]),
"alien_right_masked": (column[7], row[8]),
# amoeba row
"tile": (column[0], row[10]),
"chip": (column[1], row[10]),
"chip_gate": (column[2], row[10]),
# corner wall things
"button_red": (column[0], row[11]),
"button_blue": (column[1], row[11]),
"button_green": (column[2], row[11]),
"button_grey": (column[3], row[11]),
"info": (column[4], row[11]),
# emitters? Big buttons?
"wall": (column[0], row[12]),
# "wall_blue": (column[1], row[12]),
# "wall_pretty": (column[3], row[12]),
"door_red": (column[4], row[12]),
"door_blue": (column[5], row[12]),
"door_yellow": (column[6], row[12]),
"door_green": (column[7], row[12]),
"water": (column[0], row[13]),
"ice": (column[1], row[13]),
"box_pushable": (column[2], row[12]),
"box_submerged": (column[2], row[13]),
# "static": (column[3], row[13]),
"ice_top_left": (column[4], row[13]),
"ice_top_right": (column[5], row[13]),
"ice_bottom_right": (column[6], row[13]),
"ice_bottom_left": (column[7], row[13]),
"slide_down": (column[0], row[14]),
"slide_left": (column[1], row[14]),
"slide_up": (column[2], row[14]),
"slide_right": (column[3], row[14]),
"slide_spiral": (column[4], row[14]),
"fire": (column[5], row[14]),
# "bomb": (column[6], row[14]),
# "theif": (column[7], row[14]),
# "magic_wall": (column[0], row[15]),
# "magic_tile": (column[1], row[15]),
"exit": (column[2], row[15]),
# "unused": (column[3], row[15]),
# "unused": (column[4], row[15]),
# "blue": (column[7], row[15]),
}
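# A minimal usage sketch (assumption: the sheet at tile_sprite_filepath puts each
# tile's top-left corner at the (x, y) pairs above, with 34x34-pixel tiles):
#
#   import pygame
#   sheet = pygame.image.load(str(tile_sprite_filepath))
#   x, y = tiles["player_down"]
#   image = sheet.subsurface(pygame.Rect(x, y, tile_size, tile_size))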
| [
"[email protected]"
] | |
73873758c4fae5b30bbd029eb70045763949697f | bc758176773f988876d408880b2288de0a1a1532 | /manage.py | 2786e300b207f85a923c4a6d888b9f6294bb6853 | [] | no_license | 15101538237ren/traffic_prediction | 171e8ed66d5d0462e18d635e4380cbac566c5123 | 5e463a098cd4f6439cd5fcf6d7ae6bb36a6536de | refs/heads/master | 2021-03-19T16:35:13.074763 | 2018-04-11T03:31:01 | 2018-04-11T03:31:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 261 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "traffic_prediction.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| [
"[email protected]"
] | |
fd33cdef41235e78f40026197c9486d4bb1b798e | 0fccee4c738449f5e0a8f52ea5acabf51db0e910 | /genfragments/ThirteenTeV/PythiaChargedResonance_WG/PythiaChargedResonance_WGToLNuG_M2600_width5_13TeV-pythia8_cff.py | 83509f85ddff909d3050b28a383fc39e126a530a | [] | no_license | cms-sw/genproductions | f308ffaf3586c19b29853db40e6d662e937940ff | dd3d3a3826343d4f75ec36b4662b6e9ff1f270f4 | refs/heads/master | 2023-08-30T17:26:02.581596 | 2023-08-29T14:53:43 | 2023-08-29T14:53:43 | 11,424,867 | 69 | 987 | null | 2023-09-14T12:41:28 | 2013-07-15T14:18:33 | Python | UTF-8 | Python | false | false | 1,133 | py | import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
comEnergy = cms.double(13000.0),
filterEfficiency = cms.untracked.double(1),
maxEventsToPrint = cms.untracked.int32(0),
pythiaHepMCVerbosity = cms.untracked.bool(False),
pythiaPylistVerbosity = cms.untracked.int32(0),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CUEP8M1SettingsBlock,
processParameters = cms.vstring(
"37:onMode = off",
"37:addChannel = 1 0.00001 101 24 22",
"37:onIfMatch = 24 22",
"37:m0 = 2600",
"37:doForceWidth = on",
"37:mWidth = 130.000000",
"24:onMode = off",
"24:onIfAny = 11 13 15",
"Higgs:useBSM = on",
"HiggsBSM:ffbar2H+- = on"),
parameterSets = cms.vstring(
"pythia8CommonSettings",
"pythia8CUEP8M1Settings",
"processParameters")
)
)
| [
"shubhanshu.chauhan.cern.ch"
] | shubhanshu.chauhan.cern.ch |
4d67763457adce3ac314868828b0af8e54f996a2 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/6/nP2.py | 861daa82b45e4c0c46c0e7caad157a507e5e6890 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    # arguments must be wrapped in standalone double-quote tokens: " word1 word2 "
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            # drop the quote tokens and print the words joined by spaces
            lineRemaining = lineRemaining[1:-1]
            print ' '.join(lineRemaining)
        else:
            # an empty string literal prints a blank line
            print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'nP2':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
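# A hypothetical example program for this interpreter (inferred from the parser
# above, not part of the original repo): a file containing the line
#   nP2 " hello world "
# splits into ['nP2', '"', 'hello', 'world', '"'], so printFunction receives
# ['"', 'hello', 'world', '"'] and prints "hello world". Any line whose first
# token is not 'nP2' prints ERROR and stops.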
"[email protected]"
] | |
19ff523329804f16e2b6d3094c39cb90e0bf0d56 | 9ed4d46aedd4d4acadb48d610e940594b5b7b3fd | /electronics/resistor_equivalence.py | 55e7f2d6b5d2bd87baae998228830e13073f1456 | [
"MIT"
] | permissive | TheAlgorithms/Python | 7596a0e236ed12a61f9db19a7ea68309779cc85b | 421ace81edb0d9af3a173f4ca7e66cc900078c1d | refs/heads/master | 2023-09-01T17:32:20.190949 | 2023-08-29T13:18:10 | 2023-08-29T13:18:10 | 63,476,337 | 184,217 | 48,615 | MIT | 2023-09-14T02:05:29 | 2016-07-16T09:44:01 | Python | UTF-8 | Python | false | false | 1,593 | py | # https://byjus.com/equivalent-resistance-formula/
from __future__ import annotations
def resistor_parallel(resistors: list[float]) -> float:
"""
Req = 1/ (1/R1 + 1/R2 + ... + 1/Rn)
>>> resistor_parallel([3.21389, 2, 3])
0.8737571620498019
>>> resistor_parallel([3.21389, 2, -3])
Traceback (most recent call last):
...
ValueError: Resistor at index 2 has a negative or zero value!
>>> resistor_parallel([3.21389, 2, 0.000])
Traceback (most recent call last):
...
ValueError: Resistor at index 2 has a negative or zero value!
"""
first_sum = 0.00
index = 0
for resistor in resistors:
if resistor <= 0:
msg = f"Resistor at index {index} has a negative or zero value!"
raise ValueError(msg)
first_sum += 1 / float(resistor)
index += 1
return 1 / first_sum
def resistor_series(resistors: list[float]) -> float:
"""
Req = R1 + R2 + ... + Rn
    Calculate the equivalent resistance for any number of resistors in series.
>>> resistor_series([3.21389, 2, 3])
8.21389
>>> resistor_series([3.21389, 2, -3])
Traceback (most recent call last):
...
ValueError: Resistor at index 2 has a negative value!
"""
sum_r = 0.00
index = 0
for resistor in resistors:
sum_r += resistor
if resistor < 0:
msg = f"Resistor at index {index} has a negative value!"
raise ValueError(msg)
index += 1
return sum_r
if __name__ == "__main__":
import doctest
doctest.testmod()
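    # A small worked example (illustrative values only): two 4-ohm resistors in
    # parallel give 2 ohms, and adding a 3-ohm resistor in series gives 5 ohms.
    #   resistor_series([resistor_parallel([4.0, 4.0]), 3.0])  # -> 5.0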
| [
"[email protected]"
] | |
b8f2f746520ef84dcec1b5ea9dbfb14196e27a81 | 37efda4646f478b66674e384e1bc139e7874d972 | /practice/String.py | ee6e9b1a4544ded7ff46560e1f056471c96e2855 | [] | no_license | siberian122/kyoupuro | 02c1c40f7c09ff0c07a1d50b727f860ad269d8b1 | 8bf5e5b354d82f44f54c80f1fc014c9519de3ca4 | refs/heads/master | 2023-04-04T02:45:29.445107 | 2021-04-20T07:37:47 | 2021-04-20T07:37:47 | 299,248,378 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152 | py | s=input()
t = input()
# rotate s one character at a time; if any rotation equals t, the strings match
flag = False
for _ in range(len(s)):
    s = s[-1] + s[:-1]  # move the last character to the front
    if s == t:
        flag = True
if flag:
    print('Yes')
else:
    print('No')
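# An equivalent check without the rotation loop (standard idiom): t is a
# rotation of s exactly when the lengths match and t occurs in s doubled.
#   print('Yes' if len(s) == len(t) and t in s + s else 'No')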
"[email protected]"
] | |
a285d314cf791de350574d787f98fcbe0f98517f | 2487f84e74c068e727783932be59aa79e2fd3f3c | /tutorial/snippets/migrations/0001_initial.py | 8342bc81abf76fa893db88b1ca3383d6a83e3f94 | [] | no_license | jesusmaherrera/my-rest-framework-tutorial | 350a9a34fb02c2f1160cac33e6341140be6c70f9 | 066b416805083d27529d6fe2c6807dc3685f0dd4 | refs/heads/master | 2016-09-06T18:47:00.684356 | 2015-04-11T04:38:15 | 2015-04-11T04:38:15 | 33,751,142 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,237 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Snippet',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('created', models.DateTimeField(auto_now_add=True)),
('title', models.CharField(default=b'', max_length=100, blank=True)),
('code', models.TextField()),
('highlighted', models.TextField()),
('linenos', models.BooleanField(default=False)),
('language', models.CharField(default=b'python', max_length=100, choices=[(b'abap', b'ABAP'), (b'ada', b'Ada'), (b'agda', b'Agda'), (b'ahk', b'autohotkey'), (b'alloy', b'Alloy'), (b'antlr', b'ANTLR'), (b'antlr-as', b'ANTLR With ActionScript Target'), (b'antlr-cpp', b'ANTLR With CPP Target'), (b'antlr-csharp', b'ANTLR With C# Target'), (b'antlr-java', b'ANTLR With Java Target'), (b'antlr-objc', b'ANTLR With ObjectiveC Target'), (b'antlr-perl', b'ANTLR With Perl Target'), (b'antlr-python', b'ANTLR With Python Target'), (b'antlr-ruby', b'ANTLR With Ruby Target'), (b'apacheconf', b'ApacheConf'), (b'apl', b'APL'), (b'applescript', b'AppleScript'), (b'as', b'ActionScript'), (b'as3', b'ActionScript 3'), (b'aspectj', b'AspectJ'), (b'aspx-cs', b'aspx-cs'), (b'aspx-vb', b'aspx-vb'), (b'asy', b'Asymptote'), (b'at', b'AmbientTalk'), (b'autoit', b'AutoIt'), (b'awk', b'Awk'), (b'basemake', b'Base Makefile'), (b'bash', b'Bash'), (b'bat', b'Batchfile'), (b'bbcode', b'BBCode'), (b'befunge', b'Befunge'), (b'blitzbasic', b'BlitzBasic'), (b'blitzmax', b'BlitzMax'), (b'boo', b'Boo'), (b'brainfuck', b'Brainfuck'), (b'bro', b'Bro'), (b'bugs', b'BUGS'), (b'c', b'C'), (b'c-objdump', b'c-objdump'), (b'ca65', b'ca65 assembler'), (b'cbmbas', b'CBM BASIC V2'), (b'ceylon', b'Ceylon'), (b'cfc', b'Coldfusion CFC'), (b'cfengine3', b'CFEngine3'), (b'cfm', b'Coldfusion HTML'), (b'cfs', b'cfstatement'), (b'chai', b'ChaiScript'), (b'chapel', b'Chapel'), (b'cheetah', b'Cheetah'), (b'cirru', b'Cirru'), (b'clay', b'Clay'), (b'clojure', b'Clojure'), (b'clojurescript', b'ClojureScript'), (b'cmake', b'CMake'), (b'cobol', b'COBOL'), (b'cobolfree', b'COBOLFree'), (b'coffee-script', b'CoffeeScript'), (b'common-lisp', b'Common Lisp'), (b'console', b'Bash Session'), (b'control', b'Debian Control file'), (b'coq', b'Coq'), (b'cpp', b'C++'), (b'cpp-objdump', b'cpp-objdump'), (b'croc', b'Croc'), (b'cryptol', b'Cryptol'), (b'csharp', b'C#'), (b'css', b'CSS'), (b'css+django', b'CSS+Django/Jinja'), (b'css+erb', b'CSS+Ruby'), (b'css+genshitext', b'CSS+Genshi Text'), (b'css+lasso', b'CSS+Lasso'), (b'css+mako', b'CSS+Mako'), (b'css+mozpreproc', b'CSS+mozpreproc'), (b'css+myghty', b'CSS+Myghty'), (b'css+php', b'CSS+PHP'), (b'css+smarty', b'CSS+Smarty'), (b'cucumber', b'Gherkin'), (b'cuda', b'CUDA'), (b'cypher', b'Cypher'), (b'cython', b'Cython'), (b'd', b'D'), (b'd-objdump', b'd-objdump'), (b'dart', b'Dart'), (b'delphi', b'Delphi'), (b'dg', b'dg'), (b'diff', b'Diff'), (b'django', b'Django/Jinja'), (b'docker', b'Docker'), (b'dpatch', b'Darcs Patch'), (b'dtd', b'DTD'), (b'duel', b'Duel'), (b'dylan', b'Dylan'), (b'dylan-console', b'Dylan session'), (b'dylan-lid', b'DylanLID'), (b'ebnf', b'EBNF'), (b'ec', b'eC'), (b'ecl', b'ECL'), (b'eiffel', b'Eiffel'), (b'elixir', b'Elixir'), (b'erb', b'ERB'), (b'erl', b'Erlang erl session'), (b'erlang', b'Erlang'), (b'evoque', b'Evoque'), (b'factor', b'Factor'), (b'fan', b'Fantom'), (b'fancy', b'Fancy'), (b'felix', b'Felix'), (b'fortran', b'Fortran'), (b'foxpro', b'FoxPro'), (b'fsharp', b'FSharp'), (b'gap', b'GAP'), (b'gas', b'GAS'), (b'genshi', b'Genshi'), (b'genshitext', b'Genshi Text'), (b'glsl', b'GLSL'), (b'gnuplot', b'Gnuplot'), (b'go', b'Go'), (b'golo', b'Golo'), (b'gooddata-cl', b'GoodData-CL'), (b'gosu', b'Gosu'), (b'groff', b'Groff'), (b'groovy', b'Groovy'), (b'gst', b'Gosu Template'), (b'haml', b'Haml'), (b'handlebars', b'Handlebars'), (b'haskell', b'Haskell'), (b'haxeml', b'Hxml'), (b'html', b'HTML'), (b'html+cheetah', b'HTML+Cheetah'), (b'html+django', b'HTML+Django/Jinja'), (b'html+evoque', 
b'HTML+Evoque'), (b'html+genshi', b'HTML+Genshi'), (b'html+handlebars', b'HTML+Handlebars'), (b'html+lasso', b'HTML+Lasso'), (b'html+mako', b'HTML+Mako'), (b'html+myghty', b'HTML+Myghty'), (b'html+php', b'HTML+PHP'), (b'html+smarty', b'HTML+Smarty'), (b'html+twig', b'HTML+Twig'), (b'html+velocity', b'HTML+Velocity'), (b'http', b'HTTP'), (b'hx', b'Haxe'), (b'hybris', b'Hybris'), (b'hylang', b'Hy'), (b'i6t', b'Inform 6 template'), (b'idl', b'IDL'), (b'idris', b'Idris'), (b'iex', b'Elixir iex session'), (b'igor', b'Igor'), (b'inform6', b'Inform 6'), (b'inform7', b'Inform 7'), (b'ini', b'INI'), (b'io', b'Io'), (b'ioke', b'Ioke'), (b'irc', b'IRC logs'), (b'isabelle', b'Isabelle'), (b'jade', b'Jade'), (b'jags', b'JAGS'), (b'jasmin', b'Jasmin'), (b'java', b'Java'), (b'javascript+mozpreproc', b'Javascript+mozpreproc'), (b'jlcon', b'Julia console'), (b'js', b'JavaScript'), (b'js+cheetah', b'JavaScript+Cheetah'), (b'js+django', b'JavaScript+Django/Jinja'), (b'js+erb', b'JavaScript+Ruby'), (b'js+genshitext', b'JavaScript+Genshi Text'), (b'js+lasso', b'JavaScript+Lasso'), (b'js+mako', b'JavaScript+Mako'), (b'js+myghty', b'JavaScript+Myghty'), (b'js+php', b'JavaScript+PHP'), (b'js+smarty', b'JavaScript+Smarty'), (b'json', b'JSON'), (b'jsonld', b'JSON-LD'), (b'jsp', b'Java Server Page'), (b'julia', b'Julia'), (b'kal', b'Kal'), (b'kconfig', b'Kconfig'), (b'koka', b'Koka'), (b'kotlin', b'Kotlin'), (b'lagda', b'Literate Agda'), (b'lasso', b'Lasso'), (b'lcry', b'Literate Cryptol'), (b'lean', b'Lean'), (b'lhs', b'Literate Haskell'), (b'lidr', b'Literate Idris'), (b'lighty', b'Lighttpd configuration file'), (b'limbo', b'Limbo'), (b'liquid', b'liquid'), (b'live-script', b'LiveScript'), (b'llvm', b'LLVM'), (b'logos', b'Logos'), (b'logtalk', b'Logtalk'), (b'lsl', b'LSL'), (b'lua', b'Lua'), (b'make', b'Makefile'), (b'mako', b'Mako'), (b'maql', b'MAQL'), (b'mask', b'Mask'), (b'mason', b'Mason'), (b'mathematica', b'Mathematica'), (b'matlab', b'Matlab'), (b'matlabsession', b'Matlab session'), (b'minid', b'MiniD'), (b'modelica', b'Modelica'), (b'modula2', b'Modula-2'), (b'monkey', b'Monkey'), (b'moocode', b'MOOCode'), (b'moon', b'MoonScript'), (b'mozhashpreproc', b'mozhashpreproc'), (b'mozpercentpreproc', b'mozpercentpreproc'), (b'mql', b'MQL'), (b'mscgen', b'Mscgen'), (b'mupad', b'MuPAD'), (b'mxml', b'MXML'), (b'myghty', b'Myghty'), (b'mysql', b'MySQL'), (b'nasm', b'NASM'), (b'nemerle', b'Nemerle'), (b'nesc', b'nesC'), (b'newlisp', b'NewLisp'), (b'newspeak', b'Newspeak'), (b'nginx', b'Nginx configuration file'), (b'nimrod', b'Nimrod'), (b'nit', b'Nit'), (b'nixos', b'Nix'), (b'nsis', b'NSIS'), (b'numpy', b'NumPy'), (b'objdump', b'objdump'), (b'objdump-nasm', b'objdump-nasm'), (b'objective-c', b'Objective-C'), (b'objective-c++', b'Objective-C++'), (b'objective-j', b'Objective-J'), (b'ocaml', b'OCaml'), (b'octave', b'Octave'), (b'ooc', b'Ooc'), (b'opa', b'Opa'), (b'openedge', b'OpenEdge ABL'), (b'pan', b'Pan'), (b'pawn', b'Pawn'), (b'perl', b'Perl'), (b'perl6', b'Perl6'), (b'php', b'PHP'), (b'pig', b'Pig'), (b'pike', b'Pike'), (b'plpgsql', b'PL/pgSQL'), (b'postgresql', b'PostgreSQL SQL dialect'), (b'postscript', b'PostScript'), (b'pot', b'Gettext Catalog'), (b'pov', b'POVRay'), (b'powershell', b'PowerShell'), (b'prolog', b'Prolog'), (b'properties', b'Properties'), (b'protobuf', b'Protocol Buffer'), (b'psql', b'PostgreSQL console (psql)'), (b'puppet', b'Puppet'), (b'py3tb', b'Python 3.0 Traceback'), (b'pycon', b'Python console session'), (b'pypylog', b'PyPy Log'), (b'pytb', b'Python Traceback'), (b'python', b'Python'), 
(b'python3', b'Python 3'), (b'qbasic', b'QBasic'), (b'qml', b'QML'), (b'racket', b'Racket'), (b'ragel', b'Ragel'), (b'ragel-c', b'Ragel in C Host'), (b'ragel-cpp', b'Ragel in CPP Host'), (b'ragel-d', b'Ragel in D Host'), (b'ragel-em', b'Embedded Ragel'), (b'ragel-java', b'Ragel in Java Host'), (b'ragel-objc', b'Ragel in Objective C Host'), (b'ragel-ruby', b'Ragel in Ruby Host'), (b'raw', b'Raw token data'), (b'rb', b'Ruby'), (b'rbcon', b'Ruby irb session'), (b'rconsole', b'RConsole'), (b'rd', b'Rd'), (b'rebol', b'REBOL'), (b'red', b'Red'), (b'redcode', b'Redcode'), (b'registry', b'reg'), (b'resource', b'ResourceBundle'), (b'rexx', b'Rexx'), (b'rhtml', b'RHTML'), (b'robotframework', b'RobotFramework'), (b'rql', b'RQL'), (b'rsl', b'RSL'), (b'rst', b'reStructuredText'), (b'rust', b'Rust'), (b'sass', b'Sass'), (b'scala', b'Scala'), (b'scaml', b'Scaml'), (b'scheme', b'Scheme'), (b'scilab', b'Scilab'), (b'scss', b'SCSS'), (b'shell-session', b'Shell Session'), (b'slim', b'Slim'), (b'smali', b'Smali'), (b'smalltalk', b'Smalltalk'), (b'smarty', b'Smarty'), (b'sml', b'Standard ML'), (b'snobol', b'Snobol'), (b'sourceslist', b'Debian Sourcelist'), (b'sp', b'SourcePawn'), (b'sparql', b'SPARQL'), (b'spec', b'RPMSpec'), (b'splus', b'S'), (b'sql', b'SQL'), (b'sqlite3', b'sqlite3con'), (b'squidconf', b'SquidConf'), (b'ssp', b'Scalate Server Page'), (b'stan', b'Stan'), (b'swift', b'Swift'), (b'swig', b'SWIG'), (b'systemverilog', b'systemverilog'), (b'tads3', b'TADS 3'), (b'tcl', b'Tcl'), (b'tcsh', b'Tcsh'), (b'tea', b'Tea'), (b'tex', b'TeX'), (b'text', b'Text only'), (b'todotxt', b'Todotxt'), (b'trac-wiki', b'MoinMoin/Trac Wiki markup'), (b'treetop', b'Treetop'), (b'ts', b'TypeScript'), (b'twig', b'Twig'), (b'urbiscript', b'UrbiScript'), (b'vala', b'Vala'), (b'vb.net', b'VB.net'), (b'vctreestatus', b'VCTreeStatus'), (b'velocity', b'Velocity'), (b'verilog', b'verilog'), (b'vgl', b'VGL'), (b'vhdl', b'vhdl'), (b'vim', b'VimL'), (b'xml', b'XML'), (b'xml+cheetah', b'XML+Cheetah'), (b'xml+django', b'XML+Django/Jinja'), (b'xml+erb', b'XML+Ruby'), (b'xml+evoque', b'XML+Evoque'), (b'xml+lasso', b'XML+Lasso'), (b'xml+mako', b'XML+Mako'), (b'xml+myghty', b'XML+Myghty'), (b'xml+php', b'XML+PHP'), (b'xml+smarty', b'XML+Smarty'), (b'xml+velocity', b'XML+Velocity'), (b'xquery', b'XQuery'), (b'xslt', b'XSLT'), (b'xtend', b'Xtend'), (b'xul+mozpreproc', b'XUL+mozpreproc'), (b'yaml', b'YAML'), (b'yaml+jinja', b'YAML+Jinja'), (b'zephir', b'Zephir')])),
('style', models.CharField(default=b'friendly', max_length=100, choices=[(b'autumn', b'autumn'), (b'borland', b'borland'), (b'bw', b'bw'), (b'colorful', b'colorful'), (b'default', b'default'), (b'emacs', b'emacs'), (b'friendly', b'friendly'), (b'fruity', b'fruity'), (b'igor', b'igor'), (b'manni', b'manni'), (b'monokai', b'monokai'), (b'murphy', b'murphy'), (b'native', b'native'), (b'paraiso-dark', b'paraiso-dark'), (b'paraiso-light', b'paraiso-light'), (b'pastie', b'pastie'), (b'perldoc', b'perldoc'), (b'rrt', b'rrt'), (b'tango', b'tango'), (b'trac', b'trac'), (b'vim', b'vim'), (b'vs', b'vs'), (b'xcode', b'xcode')])),
('owner', models.ForeignKey(related_name='snippets', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('created',),
},
),
]
| [
"[email protected]"
] |