blob_id (stringlengths 40-40) | directory_id (stringlengths 40-40) | path (stringlengths 3-616) | content_id (stringlengths 40-40) | detected_licenses (sequencelengths 0-112) | license_type (stringclasses, 2 values) | repo_name (stringlengths 5-115) | snapshot_id (stringlengths 40-40) | revision_id (stringlengths 40-40) | branch_name (stringclasses, 777 values) | visit_date (timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38) | revision_date (timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00) | committer_date (timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06) | github_id (int64, 4.92k to 681M, nullable) | star_events_count (int64, 0 to 209k) | fork_events_count (int64, 0 to 110k) | gha_license_id (stringclasses, 22 values) | gha_event_created_at (timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable) | gha_created_at (timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable) | gha_language (stringclasses, 149 values) | src_encoding (stringclasses, 26 values) | language (stringclasses, 1 value) | is_vendor (bool, 2 classes) | is_generated (bool, 2 classes) | length_bytes (int64, 3 to 10.2M) | extension (stringclasses, 188 values) | content (stringlengths 3 to 10.2M) | authors (sequencelengths 1-1) | author_id (stringlengths 1-132) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0eaecf4f307a5ec0b780c2f317d6bc5189a441a3 | 1cc54e191b9d6e4ea2a469b92da0f3ac8ccd84b0 | /tasks/_iblrig_tasks_ephysChoiceWorld/session_params.py | 766d8dc8631798a874530df8264e1bcc3604d8ec | [
"MIT"
] | permissive | alejandropan/iblrig | 027c090dbe54b6ef2cbbf22c16ad60eb040ee949 | d8e746ccc52c2ad325404077ad2403e165e94d0c | refs/heads/master | 2020-04-28T11:45:36.182150 | 2019-06-12T01:38:06 | 2019-06-12T01:38:06 | 175,253,494 | 0 | 0 | MIT | 2019-05-28T01:34:28 | 2019-03-12T16:25:04 | Python | UTF-8 | Python | false | false | 13,957 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Niccolò Bonacchi
# @Date: 2018-02-02 17:19:09
import os
import sys
from sys import platform
from pathlib import Path
import logging
from pythonosc import udp_client
from ibllib.graphic import numinput, multi_input
sys.path.append(str(Path(__file__).parent.parent)) # noqa
sys.path.append(str(Path(__file__).parent.parent.parent.parent)) # noqa
import adaptive
import ambient_sensor
import bonsai
import iotasks
import misc
import sound
from path_helper import SessionPathCreator
from rotary_encoder import MyRotaryEncoder
log = logging.getLogger('iblrig')
class SessionParamHandler(object):
"""Session object imports user_settings and task_settings
will and calculates other secondary session parameters,
runs Bonsai and saves all params in a settings file.json"""
def __init__(self, task_settings, user_settings, debug=False, fmake=True):
self.DEBUG = debug
make = fmake  # fmake=False disables making folders/files (see module docstring)
self.IBLRIG_FOLDER = 'C:\\iblrig'
self.IBLRIG_DATA_FOLDER = None # ..\\iblrig_data if None
# =====================================================================
# IMPORT task_settings, user_settings, and SessionPathCreator params
# =====================================================================
ts = {i: task_settings.__dict__[i]
for i in [x for x in dir(task_settings) if '__' not in x]}
self.__dict__.update(ts)
us = {i: user_settings.__dict__[i]
for i in [x for x in dir(user_settings) if '__' not in x]}
self.__dict__.update(us)
self = iotasks.deserialize_pybpod_user_settings(self)
spc = SessionPathCreator(self.IBLRIG_FOLDER, self.IBLRIG_DATA_FOLDER,
self.PYBPOD_SUBJECTS[0],
protocol=self.PYBPOD_PROTOCOL,
board=self.PYBPOD_BOARD, make=make)
self.__dict__.update(spc.__dict__)
# =====================================================================
# SETTINGS
# =====================================================================
self.RECORD_SOUND = True
self.RECORD_AMBIENT_SENSOR_DATA = True
self.RECORD_VIDEO = True
self.OPEN_CAMERA_VIEW = True # Always True if RECORD_VIDEO is True
self.NTRIALS = 2000 # Number of trials for the current session
self.USE_AUTOMATIC_STOPPING_CRITERIONS = True  # Whether to check the automatic stopping criteria or not # noqa
self.REPEAT_ON_ERROR = False # not used
self.INTERACTIVE_DELAY = 0.0
self.RESPONSE_WINDOW = 60
self.ITI_CORRECT = 1
self.ITI_ERROR = 2
self.CONTRAST_SET = [1., 0.25, 0.125, 0.0625, 0.] # Full contrast set
self.CONTRAST_SET_PROBABILITY_TYPE = 'biased'
self.STIM_FREQ = 0.10 # Probably constant - NOT IN USE
self.STIM_ANGLE = 0. # Vertical orientation of Gabor patch
self.STIM_SIGMA = 7. # (azimuth_degree) Size of Gabor patch
self.STIM_GAIN = 4. # (azimuth_degree/mm) Gain of the RE
# =====================================================================
# SUBJECT
# =====================================================================
self.SUBJECT_WEIGHT = self.get_subject_weight()
self.POOP_COUNT = True
# =====================================================================
# OSC CLIENT
# =====================================================================
self.OSC_CLIENT_PORT = 7110
self.OSC_CLIENT_IP = '127.0.0.1'
self.OSC_CLIENT = udp_client.SimpleUDPClient(self.OSC_CLIENT_IP,
self.OSC_CLIENT_PORT)
# =====================================================================
# PREVIOUS DATA FILES
# =====================================================================
self.LAST_TRIAL_DATA = iotasks.load_data(self.PREVIOUS_SESSION_PATH)
self.LAST_SETTINGS_DATA = iotasks.load_settings(
self.PREVIOUS_SESSION_PATH)
self.SESSION_ORDER = []
self.SESSION_IDX = None
self = iotasks.load_session_order_and_idx(self)
# Load from file
self.POSITIONS = None
self.CONTRASTS = None
self.QUIESCENT_PERIOD = None
self.STIM_PHASE = None
self.LEN_BLOCKS = None
self = iotasks.load_session_pcqs(self)
# =====================================================================
# ADAPTIVE STUFF
# =====================================================================
self.AUTOMATIC_CALIBRATION = True
self.CALIBRATION_VALUE = 0.067
self.REWARD_AMOUNT = 3.
self.REWARD_TYPE = 'Water 10% Sucrose'
self.CALIB_FUNC = adaptive.init_calib_func(self)
self.CALIB_FUNC_RANGE = adaptive.init_calib_func_range(self)
self.REWARD_VALVE_TIME = adaptive.init_reward_valve_time(self)
# =====================================================================
# ROTARY ENCODER
# =====================================================================
self.STIM_POSITIONS = [-35, 35] # All possible positions (deg)
self.QUIESCENCE_THRESHOLDS = [-2, 2] # degree
self.ALL_THRESHOLDS = (self.STIM_POSITIONS +
self.QUIESCENCE_THRESHOLDS)
self.ROTARY_ENCODER = MyRotaryEncoder(self.ALL_THRESHOLDS,
self.STIM_GAIN,
self.COM['ROTARY_ENCODER'])
# =====================================================================
# SOUNDS
# =====================================================================
self.SOFT_SOUND = None
self.SOUND_SAMPLE_FREQ = sound.sound_sample_freq(self.SOFT_SOUND)
self.SOUND_BOARD_BPOD_PORT = 'Serial3'
self.WHITE_NOISE_DURATION = float(0.5)
self.WHITE_NOISE_AMPLITUDE = float(0.05)
self.GO_TONE_DURATION = float(0.1)
self.GO_TONE_FREQUENCY = int(5000)
self.GO_TONE_AMPLITUDE = float(0.1)
self.SD = sound.configure_sounddevice(
output=self.SOFT_SOUND, samplerate=self.SOUND_SAMPLE_FREQ)
# Create sounds and output actions of state machine
self.GO_TONE = sound.make_sound(
rate=self.SOUND_SAMPLE_FREQ, frequency=self.GO_TONE_FREQUENCY,
duration=self.GO_TONE_DURATION, amplitude=self.GO_TONE_AMPLITUDE,
fade=0.01, chans='stereo')
self.WHITE_NOISE = sound.make_sound(
rate=self.SOUND_SAMPLE_FREQ, frequency=-1,
duration=self.WHITE_NOISE_DURATION,
amplitude=self.WHITE_NOISE_AMPLITUDE, fade=0.01, chans='stereo')
self.GO_TONE_IDX = 2
self.WHITE_NOISE_IDX = 3
sound.configure_sound_card(
sounds=[self.GO_TONE, self.WHITE_NOISE],
indexes=[self.GO_TONE_IDX, self.WHITE_NOISE_IDX],
sample_rate=self.SOUND_SAMPLE_FREQ)
# =====================================================================
# VISUAL STIM
# =====================================================================
self.SYNC_SQUARE_X = 0.95
self.SYNC_SQUARE_Y = 0.17
self.USE_VISUAL_STIMULUS = True # Run the visual stim in bonsai
self.BONSAI_EDITOR = False # Open the Bonsai editor of visual stim
bonsai.start_visual_stim(self)
self.get_recording_site_data()
# =====================================================================
# SAVE SETTINGS FILE AND TASK CODE
# =====================================================================
if not self.DEBUG:
iotasks.save_session_settings(self)
iotasks.copy_task_code(self)
iotasks.save_task_code(self)
self.bpod_lights(0)
self.display_logs()
# =========================================================================
# METHODS
# =========================================================================
def get_recording_site_data(self):
title = 'Recording site'
fields = ['X (float):', 'Y (float):', 'Z (float):', 'D (float):',
'Angle (10 or 20):', 'Origin (bregma or lambda):']
defaults = [None, None, None, None, '10', 'bregma']
types = [float, float, float, float, int, str]
userdata = multi_input(
title=title, add_fields=fields, defaults=defaults)
try:
out = [t(x) for x, t in zip(userdata, types)]
self.REC_SITE = {'xyzd': out[:4], 'angle': out[4], 'origin': out[5]}
return out
except Exception:
log.warning(
f"One or more inputs are of the wrong type. Expected {types}")
return self.get_recording_site_data()
def save_ambient_sensor_reading(self, bpod_instance):
return ambient_sensor.get_reading(bpod_instance,
save_to=self.SESSION_RAW_DATA_FOLDER)
def get_subject_weight(self):
return numinput(
"Subject weighing (gr)", f"{self.PYBPOD_SUBJECTS[0]} weight (gr):",
nullable=False)
def bpod_lights(self, command: int):
fpath = Path(self.IBLRIG_PARAMS_FOLDER) / 'bpod_lights.py'
os.system(f"python {fpath} {command}")
def get_port_events(self, events, name=''):
return misc.get_port_events(events, name=name)
# =========================================================================
# SOUND INTERFACE FOR STATE MACHINE
# =========================================================================
def play_tone(self):
self.SD.play(self.GO_TONE, self.SOUND_SAMPLE_FREQ)
def play_noise(self):
self.SD.play(self.WHITE_NOISE, self.SOUND_SAMPLE_FREQ)
def stop_sound(self):
self.SD.stop()
# =========================================================================
# JSON ENCODER PATCHES
# =========================================================================
def reprJSON(self):
def remove_from_dict(sx):
if "weighings" in sx.keys():
sx["weighings"] = None
if "water_administration" in sx.keys():
sx["water_administration"] = None
return sx
d = self.__dict__.copy()
d['GO_TONE'] = 'go_tone(freq={}, dur={}, amp={})'.format(
self.GO_TONE_FREQUENCY, self.GO_TONE_DURATION,
self.GO_TONE_AMPLITUDE)
d['WHITE_NOISE'] = 'white_noise(freq=-1, dur={}, amp={})'.format(
self.WHITE_NOISE_DURATION, self.WHITE_NOISE_AMPLITUDE)
d['SD'] = str(d['SD'])
d['OSC_CLIENT'] = str(d['OSC_CLIENT'])
d['SESSION_DATETIME'] = self.SESSION_DATETIME.isoformat()
d['CALIB_FUNC'] = str(d['CALIB_FUNC'])
d['CALIB_FUNC_RANGE'] = str(d['CALIB_FUNC_RANGE'])
if isinstance(d['PYBPOD_SUBJECT_EXTRA'], list):
sub = []
for sx in d['PYBPOD_SUBJECT_EXTRA']:
sub.append(remove_from_dict(sx))
d['PYBPOD_SUBJECT_EXTRA'] = sub
elif isinstance(d['PYBPOD_SUBJECT_EXTRA'], dict):
d['PYBPOD_SUBJECT_EXTRA'] = remove_from_dict(
d['PYBPOD_SUBJECT_EXTRA'])
d['LAST_TRIAL_DATA'] = None
d['LAST_SETTINGS_DATA'] = None
d['POSITIONS'] = None
d['CONTRASTS'] = None
d['QUIESCENT_PERIOD'] = None
d['STIM_PHASE'] = None
d['LEN_BLOCKS'] = None
return d
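# Sketch of the JSON encoder this reprJSON() hook is written for (an
# assumption -- the actual encoder lives elsewhere in the codebase):
#   class ComplexEncoder(json.JSONEncoder):
#       def default(self, obj):
#           if hasattr(obj, 'reprJSON'):
#               return obj.reprJSON()
#           return json.JSONEncoder.default(self, obj)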
def display_logs(self):
if self.PREVIOUS_DATA_FILE:
msg = f"""
##########################################
PREVIOUS SESSION FOUND
LOADING PARAMETERS FROM: {self.PREVIOUS_DATA_FILE}
PREVIOUS SESSION NUMBER: {self.LAST_SETTINGS_DATA['SESSION_IDX'] + 1}
PREVIOUS NTRIALS: {self.LAST_TRIAL_DATA["trial_num"]}
PREVIOUS WATER DRANK: {self.LAST_TRIAL_DATA['water_delivered']}
PREVIOUS WEIGHT: {self.LAST_SETTINGS_DATA['SUBJECT_WEIGHT']}
##########################################"""
log.info(msg)
if __name__ == '__main__':
"""
SessionParamHandler fmake flag=False disables:
making folders/files;
SessionParamHandler debug flag disables:
running auto calib;
calling bonsai
turning off lights of bpod board
"""
import task_settings as _task_settings
import scratch._user_settings as _user_settings
import datetime
dt = datetime.datetime.now()
dt = [str(dt.year), str(dt.month), str(dt.day),
str(dt.hour), str(dt.minute), str(dt.second)]
dt = [x if int(x) >= 10 else '0' + x for x in dt]
dt.insert(3, '-')
_user_settings.PYBPOD_SESSION = ''.join(dt)
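# The lines above build a 'YYYYMMDD-HHMMSS' stamp by hand; an equivalent
# one-liner would be:
#   _user_settings.PYBPOD_SESSION = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')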
_user_settings.PYBPOD_SETUP = 'biasedChoiceWorld'
_user_settings.PYBPOD_PROTOCOL = '_iblrig_tasks_biasedChoiceWorld'
if platform == 'linux':
r = "/home/nico/Projects/IBL/github/iblrig"
_task_settings.IBLRIG_FOLDER = r
d = ("/home/nico/Projects/IBL/github/iblrig/scratch/" +
"test_iblrig_data")
_task_settings.IBLRIG_DATA_FOLDER = d
_task_settings.AUTOMATIC_CALIBRATION = False
_task_settings.USE_VISUAL_STIMULUS = False
sph = SessionParamHandler(_task_settings, _user_settings,
debug=False, fmake=True)
for k in sph.__dict__:
if sph.__dict__[k] is None:
print(f"{k}: {sph.__dict__[k]}")
self = sph
print("Done!")
| [
"[email protected]"
] | |
503695cf3f51fa183d89bf99e2ef120f05a702e6 | 3a06c339efa4008f4e351dc30fcf1b81b0691f9a | /day11/day11.py | 9f1d43c249fd9ac2f8d511fee2666c1eb752ac97 | [] | no_license | RedmondY/python | 73e84823b1a65fa4b31bee4448bb4eddd5500864 | 5b4dfe16735ec87d2cb9b07fb4723200e4bd472a | refs/heads/master | 2022-12-04T00:58:07.652676 | 2019-08-07T00:43:35 | 2019-08-07T00:43:35 | 172,700,007 | 0 | 0 | null | 2022-11-22T01:22:31 | 2019-02-26T11:38:02 | Python | UTF-8 | Python | false | false | 1,732 | py | '''
1. The file (db.txt) contains the following; the columns are: name, sex, age, salary
albert male 18 3000
james male 38 30000
林志玲 female 28 20000
新垣结衣 female 28 10000
Requirements:
read every record from the file into a list,
where each element of the list has the form {'name':'albert','sex':'male','age':18,'salary':3000}
2. From the list built in 1, get the record of the highest-paid person
3. From the list built in 1, get the record of the youngest person
4. From the list built in 1, map each person's name to its capitalized form
5. From the list built in 1, filter out the records of people whose names start with 'a'
'''
with open('day010_db.txt') as f:
items=(line.split() for line in f)
info=[{'name':name,'sex':sex,'age':age,'salary':salary} \
for name,sex,age,salary in items]
print(info)
# task 2: get the record of the highest-paid person
print(max(info,key=lambda dic:dic['salary']))
# task 3: get the record of the youngest person
print(min(info,key=lambda dic:dic['age']))
# task 4: map each person's name to its capitalized form
info_new=map(lambda item:{'name':item['name'].capitalize(),
'sex':item['sex'],
'age':item['age'],
'salary':item['salary']},info)
print(list(info_new))
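# task 5 was listed in the docstring but not implemented above; a minimal
# sketch using the same `info` list:
info_filtered = list(filter(lambda item: not item['name'].startswith('a'), info))
print(info_filtered)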
# 6. Use recursion to print the Fibonacci sequence (each number is the sum of the previous two, e.g. 0 1 1 2 3 5 8...)
# non-recursive version
def fib(n):
a,b=0,1
while a < n:
print(a,end=' ')
a,b=b,a+b
print()
fib(10)
# 7. Given a deeply nested list, e.g. l=[1,2,[3,[4,5,6,[7,8,[9,10,[11,12,13,[14,15]]]]]]], use recursion to extract all the values
# recursive version
def fib(a,b,stop):
if a > stop:
return
print(a,end=' ')
fib(b,a+b,stop)
fib(0,1,10)
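# task 7 (described above but not implemented): recursively print every
# value of an arbitrarily nested list -- a minimal sketch:
def flatten(lst):
    for item in lst:
        if isinstance(item, list):
            flatten(item)
        else:
            print(item, end=' ')
flatten([1, 2, [3, [4, 5, 6, [7, 8, [9, 10, [11, 12, 13, [14, 15]]]]]]])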
| [
"[email protected]"
] | |
53182efc9c811568c760c27ee039b441abb2c3b1 | 5f3c8eddb8c5a14fb3b5931f332d401207666036 | /test/clients/test01.py | 75d3560fb9d6b17a62817b99f4d38cff956a18de | [
"Apache-2.0"
] | permissive | hwinther/lanot | dec8fe48efb6245af009bedf65b2bc089e92efa0 | f6700cacb3946535081624467b746fdfd38e021d | refs/heads/master | 2021-03-24T12:02:47.530833 | 2019-05-01T11:56:05 | 2019-05-01T11:56:05 | 91,605,521 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 249 | py | from deploy.clients import localtestclient
udp = localtestclient.LocalTestUdpClient('test01', remote_port=9190)
tcp = localtestclient.LocalTestTcpClient('test01', remote_port=9191)
print('udp: %s' % udp.version())
print('tcp: %s' % tcp.version())
| [
"[email protected]"
] | |
cbc05f5d0b2af05d6025033baddd15c57ea82bd8 | 72a146dad10c3330548f175643822e6cc2e2ccba | /net/data/verify_certificate_chain_unittest/generate-target-not-end-entity.py | b54053fa8699d0c2313d92c8f77f59f5dfb68e28 | [
"BSD-3-Clause"
] | permissive | daotianya/browser-android-tabs | bb6772394c2138e2f3859a83ec6e0860d01a6161 | 44e83a97eb1c7775944a04144e161d99cbb7de5b | refs/heads/master | 2020-06-10T18:07:58.392087 | 2016-12-07T15:37:13 | 2016-12-07T15:37:13 | 75,914,703 | 1 | 0 | null | 2016-12-08T07:37:51 | 2016-12-08T07:37:51 | null | UTF-8 | Python | false | false | 923 | py | #!/usr/bin/python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Certificate chain with 1 intermediate, a trusted root, and a target
certificate that is also a CA. Verification is expected to succeed, as the test
code accepts any target certificate."""
import common
# Self-signed root certificate (used as trust anchor).
root = common.create_self_signed_root_certificate('Root')
# Intermediate certificate.
intermediate = common.create_intermediate_certificate('Intermediate', root)
# Target certificate (is also a CA)
target = common.create_intermediate_certificate('Target', intermediate)
chain = [target, intermediate]
trusted = common.TrustAnchor(root, constrained=False)
time = common.DEFAULT_TIME
verify_result = True
common.write_test_file(__doc__, chain, trusted, time, verify_result)
| [
"[email protected]"
] | |
590034654fa90f5a4256152448797f85f284ad4f | 952dc09c3e77016f4991d8b2297de32b3e3b45d8 | /apps/utils/locust_tests/common/stats/stats_loaders.py | 0354330ed6203958f07d36fd1fb2f5da1b0cb7b3 | [
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] | permissive | CMSgov/beneficiary-fhir-data | 1f3bd5ff9171975bc77e1a4b6971222342bb3bd9 | 0d170907736c5f957b7545ae26b12ba16e0c2550 | refs/heads/master | 2023-08-08T12:53:42.348530 | 2023-08-07T15:27:15 | 2023-08-07T15:27:15 | 203,852,942 | 47 | 34 | NOASSERTION | 2023-09-14T18:25:26 | 2019-08-22T18:41:16 | Java | UTF-8 | Python | false | false | 16,220 | py | """Members of this file/module are related to the loading of performance statistics
from various data "stores" (such as from files or AWS S3)"""
import json
import os
import time
from abc import ABC, abstractmethod
from dataclasses import fields
from functools import cmp_to_key, reduce
from statistics import mean
from typing import Any, Dict, List, Optional, Tuple
from gevent import monkey
from common.stats.aggregated_stats import (
AggregatedStats,
FinalCompareResult,
StatsMetadata,
TaskStats,
)
from common.stats.stats_config import (
StatsComparisonType,
StatsConfiguration,
StatsStorageType,
)
from common.validation import ValidationResult
# botocore/boto3 is incompatible with gevent out-of-box causing issues with SSL.
# We need to monkey patch gevent _before_ importing boto3 to ensure this doesn't happen.
# See https://stackoverflow.com/questions/40878996/does-boto3-support-greenlets
monkey.patch_all()
import boto3
AthenaQueryRowResult = Dict[str, List[Dict[str, str]]]
"""Type representing a single row result from the result of an Athena query"""
TOTAL_RUNTIME_DELTA = 3.0
"""The delta under which two AggregatedStats instances are considered able to
be compared"""
class StatsLoader(ABC):
"""Loads AggregatedStats depending on what type of comparison is requested"""
def __init__(self, stats_config: StatsConfiguration, metadata: StatsMetadata) -> None:
self.stats_config = stats_config
self.metadata = metadata
def load(self) -> Optional[AggregatedStats]:
"""Loads an AggregatedStats instance constructed based on what type of comparison is
required
Returns:
AggregatedStats: An AggregatedStats instance representing the set of stats requested to
load
"""
is_avg_compare = self.stats_config.stats_compare == StatsComparisonType.AVERAGE
return self.load_average() if is_avg_compare else self.load_previous()
@abstractmethod
def load_previous(self) -> Optional[AggregatedStats]:
"""Loads an AggregatedStats instance constructed based on the most recent, previous test
suite runs' stats under the tag specified by the user
Returns:
AggregatedStats: An AggregatedStats instance representing the stats of the previous test
suite run
"""
pass
@abstractmethod
def load_average(self) -> Optional[AggregatedStats]:
"""Loads an AggregatedStats instance constructed based on the the average of all of the
previous test suite runs' stats under the tag specified by the user
Returns:
AggregatedStats: An AggregatedStats instance representing the stats of all specified
previous test suite runs
"""
pass
@staticmethod
def create(stats_config: StatsConfiguration, metadata: StatsMetadata) -> "StatsLoader":
"""Construct a new concrete instance of StatsLoader that will load from the appropriate
store as specified in stats_config
Args:
stats_config (StatsConfiguration): The configuration specified for storing and comparing
statistics
Returns:
StatsLoader: A concrete instance of StatsLoader that will load from the store specified
in configuration
"""
return (
StatsFileLoader(stats_config, metadata)
if stats_config.stats_store == StatsStorageType.FILE
else StatsAthenaLoader(stats_config, metadata)
)
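# Usage sketch (hypothetical config/metadata objects): pick the appropriate
# backend and load the baseline stats to compare the current run against.
#   loader = StatsLoader.create(stats_config, metadata)
#   baseline = loader.load()  # AggregatedStats, or None if nothing matches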
class StatsFileLoader(StatsLoader):
"""Child class of StatsLoader that loads aggregated task stats from the local file system
through JSON files"""
def load_previous(self) -> Optional[AggregatedStats]:
# Get a list of all AggregatedStats from stats.json files under path
stats_list = self.__load_stats_from_files()
# Filter those that don't match the config and current run's metadata
filtered_stats = [
stats
for stats in stats_list
if stats.metadata and self.__verify_metadata(stats.metadata)
]
# Sort them based upon timestamp, greater to lower
filtered_stats.sort(key=lambda stats: stats.metadata.timestamp, reverse=True) # type: ignore
# Take the first item, if it exists -- this is the most recent, previous run
return filtered_stats[0] if filtered_stats else None
def load_average(self) -> Optional[AggregatedStats]:
stats_list = self.__load_stats_from_files()
verified_stats = [
stats
for stats in stats_list
if stats.metadata and self.__verify_metadata(stats.metadata)
]
limited_stats = sorted(
verified_stats,
key=cmp_to_key(
lambda item1, item2: item1.metadata.timestamp - item2.metadata.timestamp # type: ignore
),
reverse=True,
)[: self.stats_config.stats_compare_load_limit]
return _get_average_all_stats(limited_stats)
def __load_stats_from_files(self, suffix: str = ".stats.json") -> List[AggregatedStats]:
path = (
self.stats_config.stats_store_file_path
if self.stats_config and self.stats_config.stats_store_file_path
else ""
)
stats_files = [
os.path.join(path, file) for file in os.listdir(path) if file.endswith(suffix)
]
aggregated_stats_list = []
for stats_file in stats_files:
with open(stats_file, encoding="utf-8") as json_file:
aggregated_stats_list.append(AggregatedStats(**json.load(json_file)))
return aggregated_stats_list
def __verify_metadata(self, loaded_metadata: StatsMetadata):
return all(
[
self.stats_config.stats_compare_tag in loaded_metadata.tags,
loaded_metadata.hash == self.metadata.hash,
loaded_metadata.compare_result
in (FinalCompareResult.NOT_APPLICABLE, FinalCompareResult.PASSED),
loaded_metadata.validation_result
in (ValidationResult.NOT_APPLICABLE, ValidationResult.PASSED),
# Pick some delta that the runtimes should be under -- in this case, we're using 3
# seconds
# TODO: Determine the right delta for checking for matching runtimes
loaded_metadata.total_runtime - self.metadata.total_runtime < TOTAL_RUNTIME_DELTA,
]
)
class StatsAthenaLoader(StatsLoader):
"""Child class of StatsLoader that loads aggregated task stats from S3 via Athena"""
def __init__(self, stats_config: StatsConfiguration, metadata: StatsMetadata) -> None:
self.client = boto3.client("athena", region_name="us-east-1")
super().__init__(stats_config, metadata)
def load_previous(self) -> Optional[AggregatedStats]:
query = (
f"SELECT cast(totals as JSON), cast(tasks as JSON) "
f'FROM "{self.stats_config.stats_store_s3_database}"."{self.stats_config.stats_store_s3_table}" '
f"WHERE {self.__get_where_clause()} ORDER BY metadata.timestamp DESC "
"LIMIT 1"
)
queried_stats = self.__get_stats_from_query(query)
return queried_stats[0] if queried_stats else None
def load_average(self) -> Optional[AggregatedStats]:
query = (
f"SELECT cast(totals as JSON), cast(tasks as JSON) "
f'FROM "{self.stats_config.stats_store_s3_database}"."{self.stats_config.stats_store_s3_table}" '
f"WHERE {self.__get_where_clause()} "
"ORDER BY metadata.timestamp DESC "
f"LIMIT {self.stats_config.stats_compare_load_limit}"
)
queried_stats = self.__get_stats_from_query(query)
return _get_average_all_stats(queried_stats)
def __get_stats_from_query(self, query: str) -> List[AggregatedStats]:
query_result = self.__run_query(query)
if not query_result:
raise RuntimeError("Athena query result was empty or query failed")
raw_json_data = self.__get_raw_json_data(query_result)
return self.__stats_from_json_data(raw_json_data)
def __start_athena_query(self, query: str) -> Dict[str, Any]:
return self.client.start_query_execution(
QueryString=query,
QueryExecutionContext={"Database": self.stats_config.stats_store_s3_database},
# This method requires an OutputLocation, so we're using the "adhoc"
# path defined in the BFD Insights data organization standards to
# store query results
ResultConfiguration={
"OutputLocation": (
f"s3://{self.stats_config.stats_store_s3_bucket}/adhoc/query_results/"
f"{self.stats_config.stats_store_s3_database}/"
f"{self.stats_config.stats_store_s3_table}"
)
},
# The workgroup should always be "bfd" if we're targeting BFD Insights
# databases
WorkGroup="bfd",
)
def __get_athena_query_status(self, query_execution_id: str) -> str:
# See https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/athena.html#Athena.Client.get_query_execution
# for the structure of the returned Dict
return self.client.get_query_execution(QueryExecutionId=query_execution_id)[
"QueryExecution"
]["Status"]["State"]
def __get_athena_query_result(self, query_execution_id: str) -> List[AthenaQueryRowResult]:
# See https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/athena.html#Athena.Client.get_query_results
# for the structure of the returned Dict
return self.client.get_query_results(QueryExecutionId=query_execution_id)["ResultSet"][
"Rows"
]
def __run_query(
self, query: str, max_retries: int = 10
) -> Optional[List[AthenaQueryRowResult]]:
start_response = self.__start_athena_query(query)
query_execution_id = start_response["QueryExecutionId"]
for try_number in range(0, max_retries - 1):
# Exponentially back-off from hitting the API to ensure we don't hit the API limit
# See https://docs.aws.amazon.com/general/latest/gr/api-retries.html
time.sleep((2**try_number * 100.0) / 1000.0)
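# i.e. the polling delays grow as 0.1 s, 0.2 s, 0.4 s, 0.8 s, ...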
status = self.__get_athena_query_status(query_execution_id)
if status == "SUCCEEDED":
break
elif status == "FAILED" or status == "CANCELLED":
raise RuntimeError(f"Query failed to complete -- status returned was {status}")
return self.__get_athena_query_result(query_execution_id)
def __get_where_clause(self) -> str:
explicit_checks = [
f"contains(metadata.tags, '{self.stats_config.stats_compare_tag}')",
f"metadata.hash='{self.metadata.hash}'",
(
f"(metadata.compare_result='{FinalCompareResult.NOT_APPLICABLE.value}' OR "
f"metadata.compare_result='{FinalCompareResult.PASSED.value}')"
),
(
f"(metadata.validation_result='{ValidationResult.NOT_APPLICABLE.value}' OR "
f"metadata.validation_result='{ValidationResult.PASSED.value}')"
),
# TODO: Determine the right delta for checking for matching runtimes
f"(metadata.total_runtime - {self.metadata.total_runtime}) < {TOTAL_RUNTIME_DELTA}",
]
return " AND ".join(explicit_checks)
def __get_raw_json_data(
self, query_result: List[Dict[str, List[Dict[str, str]]]]
) -> List[Tuple[str, str]]:
# The data is returned as an array of dicts, each with a 'Data' key. These 'Data' dicts
# values are arrays of dicts with the key being the data type and the value being the actual
# returned result. The first 'Data' dict in the array is the column names, and subsequent
# 'Data' dict entries in the array are actual values
# We make a few assumptions:
# 1. The first 'Data' dict in the array is always the column names
# 2. The data returned is always of the type `VarCharValue`
# 3. We are only retrieving two columns
raw_data = [item["Data"] for item in query_result[1:]]
return [(data[0]["VarCharValue"], data[1]["VarCharValue"]) for data in raw_data]
def __stats_from_json_data(self, raw_json_data: List[Tuple[str, str]]) -> List[AggregatedStats]:
# Deserializing from a tuple of raw JSON objects; first tuple is a raw JSON object string
# representing the aggregated totals and second tuple is a raw JSON list of objects
# representing the statistics for each task
serialized_tuples: List[Tuple[Dict[str, Any], List[Dict[str, Any]]]] = [
(json.loads(raw_json_totals), json.loads(raw_json_tasks))
for raw_json_totals, raw_json_tasks in raw_json_data
]
# The metadata is unnecessary here since by the time we've gotten here the metadata for each
# of the tasks we're serializing here has already been checked
return [
AggregatedStats(
totals=TaskStats(**totals_as_dict),
tasks=[TaskStats(**task_vals_dict) for task_vals_dict in tasks_as_lists],
)
for totals_as_dict, tasks_as_lists in serialized_tuples
]
def _bucket_tasks_by_name(all_stats: List[AggregatedStats]) -> Dict[str, List[TaskStats]]:
tasks_by_name: Dict[str, List[TaskStats]] = {}
for stats in all_stats:
for task in stats.tasks:
if not task.task_name in tasks_by_name:
tasks_by_name[task.task_name] = []
tasks_by_name[task.task_name].append(task)
return tasks_by_name
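# e.g. three AggregatedStats runs that each contain tasks 'a' and 'b' yield
# {'a': [<3 TaskStats>], 'b': [<3 TaskStats>]} (illustrative values only)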
def _get_average_task_stats(all_tasks: List[TaskStats]) -> TaskStats:
if not all_tasks:
raise ValueError("The list of tasks to average must not be empty")
if not all(x.task_name == all_tasks[0].task_name for x in all_tasks):
raise ValueError("The list of TaskStats must be for the same task")
# Exclude fields that are not statistics and the response time percentiles
# dict which will be handled on its own later
fields_to_exclude = ["task_name", "request_method", "response_time_percentiles"]
stats_to_average = [
field.name for field in fields(TaskStats) if not field.name in fields_to_exclude
]
# Calculate the mean automatically for every matching stat in the list of
# all stats, and then put the mean in a dict
avg_task_stats = {
stat_name: mean(getattr(task, stat_name) for task in all_tasks)
for stat_name in stats_to_average
}
# Get the common keys between all of the response time percentile dicts in
# the list of task stats
common_percents = reduce(
lambda prev, next: prev & next,
(task.response_time_percentiles.keys() for task in all_tasks),
)
# Do the same thing as above but for each entry in each response time percentile dict --
# get the mean of each percentile across all tasks and make it the value of a new
# percentile dict
avg_task_percents = {
p: mean(task.response_time_percentiles[p] for task in all_tasks) for p in common_percents
}
return TaskStats(
task_name=all_tasks[0].task_name,
request_method=all_tasks[0].request_method,
response_time_percentiles=avg_task_percents,
**avg_task_stats,
)
def _get_average_all_stats(all_stats: List[AggregatedStats]) -> Optional[AggregatedStats]:
partitioned_task_stats = _bucket_tasks_by_name(all_stats)
try:
averaged_tasks = [
_get_average_task_stats(tasks) for tasks in partitioned_task_stats.values()
]
averaged_totals = _get_average_task_stats([stat.totals for stat in all_stats])
except ValueError:
return None
return AggregatedStats(totals=averaged_totals, tasks=averaged_tasks) if averaged_tasks else None
| [
"[email protected]"
] | |
785e9b108450b244f05000b4b18cb942b56f3f04 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5708284669460480_0/Python/sunnylqm/test.py | 1dd36ce495f9584f34f3955d399d19388359a089 | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 2,355 | py | import math
f = open('B-small-attempt2.in')
#f = open('test.in')
count = int(f.readline())
output = ''
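# Each case appears to give a keyboard of K random keys, a target word of
# length L and a budget of S keystrokes; the loop below estimates the maximum
# possible number of target occurrences minus the expected number. (The
# problem statement is not part of this file, so this reading is inferred.)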
for x in xrange(1, count + 1):
arr = f.readline().split()
K = int(arr[0])
L = int(arr[1])
S = int(arr[2])
keyboard = f.readline()
target = f.readline()
keymap = {}
for i in xrange(0, K):
key = keyboard[i]
if key in keymap.keys():
keymap[key] += 1
else:
keymap[key] = 1
start = int((L+1)/2)
overlap = 0
for m in xrange(start, L):
n = 0
if target[m] == target[n]:
n = 1
overlap = 1
for p in xrange(m + 1, L):
if target[p] == target[n]:
n += 1
overlap += 1
else:
overlap = 0
break
P = 1.0
if overlap == 0:
maxbanana = S / L
for t in xrange(0, L):
word = target[t]
if word not in keymap.keys():
maxbanana = 0
P = 0.0
break
P *= float(keymap[word]) / K
if P < 0.0000001:
P = 0.0
break
ret = maxbanana * (1 - P)
else:
if S % (L - overlap) >= overlap:
maxbanana = S / (L - overlap)
else:
maxbanana = S / (L - overlap) - 1
P1 = 1.0
for t in xrange(0, L - overlap):
word = target[t]
if word not in keymap.keys():
maxbanana = 0
P1 = 0.0
break
P1 *= float(keymap[word]) / K
if P1 < 0.0000001:
P1 = 0.0
break
P2 = 1.0
for t in xrange(overlap, L):
word = target[t]
if word not in keymap.keys():
maxbanana = 0
P2 = 0.0
break
P2 *= float(keymap[word]) / K
if P2 < 0.0000001:
P2 = 0.0
break
if maxbanana == 0 or P1 == 0.0 or P2 == 0.0:
ret = 0.0
else:
remain = math.pow(P1, maxbanana)
ret = maxbanana * (1 - P1) - maxbanana * remain * (1 - P2)
output += 'Case #' + str(x) + ': %.7f\n' % ret
print(output)
newf = open('output.txt','w')
newf.write(output)
| [
"[email protected]"
] | |
5cb2978fbc35af9b1646d7addb63f245998a7327 | 89b3cf7b8246349e67ff3362cd4b9ed039426d93 | /celeryproject/celeryproject/settings.py | 89767bc3c9fb5f8f061d7ed449000fd00e57f034 | [] | no_license | vijaygwala/celeryproject | c4d67494fe5feca485e5e4daf56f0141dd7fd681 | 8f58bc4e3553cb8bb67fa980a49f96fe048f6af9 | refs/heads/master | 2023-05-31T03:20:30.180435 | 2021-06-08T18:24:50 | 2021-06-08T18:24:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,247 | py | """
Django settings for celeryproject project.
Generated by 'django-admin startproject' using Django 1.11.27.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
from django.contrib.messages import constants as messages
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PUBLIC_DIR = os.path.abspath(os.path.join(
os.path.dirname(__file__), '..', '..', 'public'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '(f)dsx#)68h4lt$v*z#9wl2l&qb-q6ebnjp^cz#sem^*8&4ome'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['celeryproject']
MESSAGE_TAGS = {
messages.DEBUG: 'alert-info',
messages.INFO: 'alert-info',
messages.SUCCESS: 'alert-success',
messages.WARNING: 'alert-warning',
messages.ERROR: 'alert-danger',
}
# Application definition
INSTALLED_APPS = [
'channels',
'chat',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'celeryproject.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
#'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
'debug': DEBUG,
'loaders': [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]),
],
},
},
]
WSGI_APPLICATION = 'celeryproject.wsgi.application'
ASGI_APPLICATION = 'celeryproject.asgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(PUBLIC_DIR, "static")
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(PUBLIC_DIR, "media")
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
SESSION_CACHE_ALIAS = "default"
# import djcelery
# djcelery.setup_loader()
BROKER_URL = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
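# Minimal sketch (assumed module celeryproject/celery.py) of an app that
# would consume the BROKER_URL/CELERY_* settings above:
#   from celery import Celery
#   app = Celery('celeryproject')
#   app.config_from_object('django.conf:settings')
#   app.autodiscover_tasks()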
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': [
'127.0.0.1:6379',
],
'OPTIONS': {
#'DB': 1,
#'PASSWORD': 'yadayada',
'PARSER_CLASS': 'redis.connection.HiredisParser',
'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool',
#'CONNECTION_POOL_CLASS_KWARGS': {
# 'max_connections': 50,
# 'timeout': 20,
#},
#'MAX_CONNECTIONS': 1000,
'PICKLE_VERSION': -1,
},
},
}
CHANNEL_LAYERS = {
"default": {
"BACKEND": "channels_redis.core.RedisChannelLayer",
"CONFIG": {
"hosts": [("localhost", 6379)],
},
},
}
| [
"[email protected]"
] | |
381c1cfa2d1c8f94daf58ed606f18762649e52b9 | 04a643a77927bc56ab58c7df91d4733321e61e51 | /new_targets/new_tracks_a001.py | 4f6ca1c33f96092e0a0fcb05ecc16fff9c964e83 | [] | no_license | dcollins4096/p19_newscripts | d2fae1807170a4d70cf4c87222a6258211f993ff | 23c780dd15b60944ed354406706de85282d0bee6 | refs/heads/master | 2023-07-21T11:53:55.188383 | 2023-07-18T17:38:21 | 2023-07-18T17:38:21 | 215,159,839 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 4,681 | py | #
# run get_mountain_tops first
#
from starter2 import *
import xtra_energy
import data_locations as dl
reload(dl)
reload(looper)
import looper2
reload(looper2)
LOOPER2 = True
looper_main = looper2.core_looper2
this_simname = 'a001'
other_simname = 'u301'
mountain_top_fname = "datasets_small/%s_mountain_tops_take_9.h5"%other_simname
outname = '%s_all_particles.h5'%this_simname
bad_particle_fname_read='datasets_small/%s_bad_particles.h5'%'u501'
bad_particle_fname_save='datasets_small/%s_bad_particles_save.h5'%'u501'
#bad_particle_fname_read="datasets_small/u601_bad_particles_srsly.h5"
#bad_particle_fname_save='datasets_small/%s_bad_particles_take3.h5'%this_simname
#bad_particle_fname='datasets_small/%s_bad_particles_TEST.h5'%this_simname
import xtra_energy
target_frame = dl.target_frames[other_simname]
if 0:
"""Just One"""
frame_list = list(range(0,target_frame,10)) + [target_frame]
frame_list = [5]
if 0:
"""all frames"""
target_frame = dl.target_frames[other_simname]
frame_list = list(range(0,target_frame+1,1))
if 0:
"""first 4"""
target_frame = dl.target_frames[other_simname]
frame_list = list(range(0,3,1))
if 1:
"""Every 10"""
target_frame = dl.target_frames[other_simname]
frame_list = list(range(0,target_frame,10)) + [target_frame]
if 1:
fields = ['x','y','z','density', 'cell_volume']
derived=[]
if 0:
fields = ['x','y','z','density', 'cell_volume']
fields += ['velocity_magnitude','magnetic_field_strength', 'velocity_divergence']
fields += ['velocity_x','velocity_y','velocity_z']
fields += ['magnetic_field_%s'%s for s in 'xyz']
fields += ['PotentialField','grav_x','grav_y','grav_z' ]
fields += ['particle_pos_x', 'particle_pos_y', 'particle_pos_z', 'particle_index']
derived=[xtra_energy.add_force_terms]
if target_frame not in frame_list:
print("YOU MUST HAVE THE LAST FRAME or the periodic unwrap fails")
frame_list += [target_frame]
if 1:
fields = [('gas',field) for field in fields]
new_looper = looper_main(directory= dl.sims[other_simname],
sim_name = this_simname,
out_prefix = this_simname,
target_frame = target_frame,
frame_list = frame_list,
core_list = None,
fields_from_grid=fields,
derived = derived,
do_shift=False
)
new_looper.plot_directory = "./plots_to_sort"
if 1:
core_id=0
new_looper.core_list=[core_id]
i,j,k=np.mgrid[0:128:1,0:128:1,0:128:1]
SL = tuple([slice(32,40)]*3)
SL = tuple([slice(None)]*3)
i_keep=i[SL]
j_keep=j[SL]
k_keep=k[SL]
index = i_keep+128*(j_keep+128*k_keep)
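# flat index on a 128**3 grid: (i,j,k)=(1,0,0) -> 1, (0,1,0) -> 128, (0,0,1) -> 16384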
new_looper.target_indices=np.sort(index.flatten())
new_looper.core_ids = np.zeros_like(new_looper.target_indices)
import bad_particle_hunt
if 1:
print("Look for bad particles again. Somehow we can't get ahead of this.")
aaa = set(np.arange(128**3))
badones=set()
for frame in new_looper.frame_list:
ds=new_looper.load(frame)
ad=ds.all_data()
pi = set(ad['all','particle_index'].v)
badones.update(aaa-pi)
for grid in ds.index.grids:
these = set(grid['all','particle_index'].v)
pi.difference_update( these)
badones.update(pi)
also_bad = bad_particle_hunt.check_particles(ds)
badones.update(set(also_bad))
print(frame, len(badones))
new_looper.read_bad_particles(bad_particle_fname_read, core_hijack=0)
bad_particle_id = [ 724134, 635702, 661226, 743270, 751995, 718196, 1354060,
1362500, 610123, 610189, 1930558, 1046537, 1841352, 1844125,
1845574, 1849410, 1853445, 1300291]
badones.update(set(bad_particle_id))
bad_particle_id = list(badones) #this is some confusing variable naming.
bad_core_id = [0]*len(bad_particle_id)
for bad_core, bad_part in zip(bad_core_id, bad_particle_id):
new_looper.bad_particles[bad_core]=np.append(
new_looper.bad_particles[bad_core], bad_part)
if 1:
new_looper.remove_bad_particles()
if 0:
import warnings
with warnings.catch_warnings():
warnings.simplefilter('error')
#pdb.set_trace()
new_looper.get_tracks()
if 1:
new_looper.get_tracks()
if 1:
import tracks_read_write
tracks_read_write.save_loop_trackage_only( new_looper, outname)
| [
"[email protected]"
] | |
09eb164bc57814f0f45096a91ef444fd22d8c657 | 6a9f06b967d7641ddff7b56425651b29d3e577f4 | /mindinsight/tests/st/func/lineagemgr/cache/test_lineage_cache.py | 0bfb2c40e19d447ee9e5ba86e951d9c403998356 | [
"Apache-2.0"
] | permissive | ZeroWangZY/DL-VIS | b3117016547007b88dc66cfe7339ef02b0d84e9c | 8be1c70c44913a6f67dd424aa0e0330f82e48b06 | refs/heads/master | 2023-08-18T00:22:30.906432 | 2020-12-04T03:35:50 | 2020-12-04T03:35:50 | 232,723,696 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,993 | py | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
Function:
Test the query module about lineage information.
Usage:
The query module test should be run after lineagemgr/collection/model/test_model_lineage.py
pytest lineagemgr
"""
from unittest import TestCase
import pytest
from mindinsight.datavisual.data_transform.data_manager import DataManager
from mindinsight.lineagemgr.cache_item_updater import LineageCacheItemUpdater
from mindinsight.lineagemgr.api.model import general_filter_summary_lineage, \
general_get_summary_lineage
from ..api.test_model_api import LINEAGE_INFO_RUN1, LINEAGE_FILTRATION_EXCEPT_RUN, \
LINEAGE_FILTRATION_RUN1, LINEAGE_FILTRATION_RUN2
from ..conftest import BASE_SUMMARY_DIR
from .....ut.lineagemgr.querier import event_data
from .....utils.tools import check_loading_done, assert_equal_lineages
@pytest.mark.usefixtures("create_summary_dir")
class TestModelApi(TestCase):
"""Test get lineage from data_manager."""
@classmethod
def setup_class(cls):
data_manager = DataManager(BASE_SUMMARY_DIR)
data_manager.register_brief_cache_item_updater(LineageCacheItemUpdater())
data_manager.start_load_data(reload_interval=0)
check_loading_done(data_manager)
cls._data_manger = data_manager
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_x86_cpu
@pytest.mark.env_single
def test_get_summary_lineage(self):
"""Test the interface of get_summary_lineage."""
total_res = general_get_summary_lineage(data_manager=self._data_manger, summary_dir="./run1")
expect_total_res = LINEAGE_INFO_RUN1
assert_equal_lineages(expect_total_res, total_res, self.assertDictEqual)
@pytest.mark.level0
@pytest.mark.platform_arm_ascend_training
@pytest.mark.platform_x86_gpu_training
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_x86_cpu
@pytest.mark.env_single
def test_filter_summary_lineage(self):
"""Test the interface of filter_summary_lineage."""
expect_result = {
'customized': event_data.CUSTOMIZED__1,
'object': [
LINEAGE_FILTRATION_EXCEPT_RUN,
LINEAGE_FILTRATION_RUN1,
LINEAGE_FILTRATION_RUN2
],
'count': 3
}
search_condition = {
'sorted_name': 'summary_dir'
}
res = general_filter_summary_lineage(data_manager=self._data_manger, search_condition=search_condition)
expect_objects = expect_result.get('object')
for idx, res_object in enumerate(res.get('object')):
expect_objects[idx]['model_lineage']['dataset_mark'] = res_object['model_lineage'].get('dataset_mark')
assert_equal_lineages(expect_result, res, self.assertDictEqual)
expect_result = {
'customized': {},
'object': [],
'count': 0
}
search_condition = {
'summary_dir': {
"in": ['./dir_with_empty_lineage']
}
}
res = general_filter_summary_lineage(data_manager=self._data_manger, search_condition=search_condition)
assert_equal_lineages(expect_result, res, self.assertDictEqual)
| [
"[email protected]"
] | |
8ddcecbb9c1ffc68b7862a87dcac432c60ab5ff8 | d6589ff7cf647af56938a9598f9e2e674c0ae6b5 | /hitsdb-20200615/alibabacloud_hitsdb20200615/models.py | d12b6031a3aa47e59d84b91c8b0ed869d8364fa3 | [
"Apache-2.0"
] | permissive | hazho/alibabacloud-python-sdk | 55028a0605b1509941269867a043f8408fa8c296 | cddd32154bb8c12e50772fec55429a9a97f3efd9 | refs/heads/master | 2023-07-01T17:51:57.893326 | 2021-08-02T08:55:22 | 2021-08-02T08:55:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 44,585 | py | # -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
from typing import List, Dict
class DescribeRegionsRequest(TeaModel):
def __init__(
self,
security_token: str = None,
owner_id: int = None,
resource_owner_account: str = None,
resource_owner_id: int = None,
owner_account: str = None,
accept_language: str = None,
):
self.security_token = security_token
self.owner_id = owner_id
self.resource_owner_account = resource_owner_account
self.resource_owner_id = resource_owner_id
self.owner_account = owner_account
self.accept_language = accept_language
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.security_token is not None:
result['SecurityToken'] = self.security_token
if self.owner_id is not None:
result['OwnerId'] = self.owner_id
if self.resource_owner_account is not None:
result['ResourceOwnerAccount'] = self.resource_owner_account
if self.resource_owner_id is not None:
result['ResourceOwnerId'] = self.resource_owner_id
if self.owner_account is not None:
result['OwnerAccount'] = self.owner_account
if self.accept_language is not None:
result['AcceptLanguage'] = self.accept_language
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('SecurityToken') is not None:
self.security_token = m.get('SecurityToken')
if m.get('OwnerId') is not None:
self.owner_id = m.get('OwnerId')
if m.get('ResourceOwnerAccount') is not None:
self.resource_owner_account = m.get('ResourceOwnerAccount')
if m.get('ResourceOwnerId') is not None:
self.resource_owner_id = m.get('ResourceOwnerId')
if m.get('OwnerAccount') is not None:
self.owner_account = m.get('OwnerAccount')
if m.get('AcceptLanguage') is not None:
self.accept_language = m.get('AcceptLanguage')
return self
class DescribeRegionsResponseBodyRegions(TeaModel):
def __init__(
self,
local_name: str = None,
region_endpoint: str = None,
region_id: str = None,
):
self.local_name = local_name
self.region_endpoint = region_endpoint
self.region_id = region_id
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.local_name is not None:
result['LocalName'] = self.local_name
if self.region_endpoint is not None:
result['RegionEndpoint'] = self.region_endpoint
if self.region_id is not None:
result['RegionId'] = self.region_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('LocalName') is not None:
self.local_name = m.get('LocalName')
if m.get('RegionEndpoint') is not None:
self.region_endpoint = m.get('RegionEndpoint')
if m.get('RegionId') is not None:
self.region_id = m.get('RegionId')
return self
class DescribeRegionsResponseBody(TeaModel):
def __init__(
self,
request_id: str = None,
regions: List[DescribeRegionsResponseBodyRegions] = None,
):
self.request_id = request_id
self.regions = regions
def validate(self):
if self.regions:
for k in self.regions:
if k:
k.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
result['Regions'] = []
if self.regions is not None:
for k in self.regions:
result['Regions'].append(k.to_map() if k else None)
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
self.regions = []
if m.get('Regions') is not None:
for k in m.get('Regions'):
temp_model = DescribeRegionsResponseBodyRegions()
self.regions.append(temp_model.from_map(k))
return self
class DescribeRegionsResponse(TeaModel):
def __init__(
self,
headers: Dict[str, str] = None,
body: DescribeRegionsResponseBody = None,
):
self.headers = headers
self.body = body
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = DescribeRegionsResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class GetInstanceIpWhiteListRequest(TeaModel):
def __init__(
self,
security_token: str = None,
owner_id: int = None,
resource_owner_account: str = None,
resource_owner_id: int = None,
owner_account: str = None,
instance_id: str = None,
group_name: str = None,
):
self.security_token = security_token
self.owner_id = owner_id
self.resource_owner_account = resource_owner_account
self.resource_owner_id = resource_owner_id
self.owner_account = owner_account
self.instance_id = instance_id
self.group_name = group_name
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.security_token is not None:
result['SecurityToken'] = self.security_token
if self.owner_id is not None:
result['OwnerId'] = self.owner_id
if self.resource_owner_account is not None:
result['ResourceOwnerAccount'] = self.resource_owner_account
if self.resource_owner_id is not None:
result['ResourceOwnerId'] = self.resource_owner_id
if self.owner_account is not None:
result['OwnerAccount'] = self.owner_account
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.group_name is not None:
result['GroupName'] = self.group_name
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('SecurityToken') is not None:
self.security_token = m.get('SecurityToken')
if m.get('OwnerId') is not None:
self.owner_id = m.get('OwnerId')
if m.get('ResourceOwnerAccount') is not None:
self.resource_owner_account = m.get('ResourceOwnerAccount')
if m.get('ResourceOwnerId') is not None:
self.resource_owner_id = m.get('ResourceOwnerId')
if m.get('OwnerAccount') is not None:
self.owner_account = m.get('OwnerAccount')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('GroupName') is not None:
self.group_name = m.get('GroupName')
return self
class GetInstanceIpWhiteListResponseBody(TeaModel):
def __init__(
self,
request_id: str = None,
instance_id: str = None,
ip_list: List[str] = None,
):
self.request_id = request_id
self.instance_id = instance_id
self.ip_list = ip_list
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.ip_list is not None:
result['IpList'] = self.ip_list
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('IpList') is not None:
self.ip_list = m.get('IpList')
return self
class GetInstanceIpWhiteListResponse(TeaModel):
def __init__(
self,
headers: Dict[str, str] = None,
body: GetInstanceIpWhiteListResponseBody = None,
):
self.headers = headers
self.body = body
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = GetInstanceIpWhiteListResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class GetLindormInstanceRequest(TeaModel):
def __init__(
self,
security_token: str = None,
owner_id: int = None,
resource_owner_account: str = None,
resource_owner_id: int = None,
owner_account: str = None,
region_id: str = None,
instance_id: str = None,
):
self.security_token = security_token
self.owner_id = owner_id
self.resource_owner_account = resource_owner_account
self.resource_owner_id = resource_owner_id
self.owner_account = owner_account
self.region_id = region_id
self.instance_id = instance_id
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.security_token is not None:
result['SecurityToken'] = self.security_token
if self.owner_id is not None:
result['OwnerId'] = self.owner_id
if self.resource_owner_account is not None:
result['ResourceOwnerAccount'] = self.resource_owner_account
if self.resource_owner_id is not None:
result['ResourceOwnerId'] = self.resource_owner_id
if self.owner_account is not None:
result['OwnerAccount'] = self.owner_account
if self.region_id is not None:
result['RegionId'] = self.region_id
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('SecurityToken') is not None:
self.security_token = m.get('SecurityToken')
if m.get('OwnerId') is not None:
self.owner_id = m.get('OwnerId')
if m.get('ResourceOwnerAccount') is not None:
self.resource_owner_account = m.get('ResourceOwnerAccount')
if m.get('ResourceOwnerId') is not None:
self.resource_owner_id = m.get('ResourceOwnerId')
if m.get('OwnerAccount') is not None:
self.owner_account = m.get('OwnerAccount')
if m.get('RegionId') is not None:
self.region_id = m.get('RegionId')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class GetLindormInstanceResponseBodyEngineList(TeaModel):
def __init__(
self,
version: str = None,
cpu_count: str = None,
core_count: str = None,
engine: str = None,
memory_size: str = None,
is_last_version: bool = None,
):
self.version = version
self.cpu_count = cpu_count
self.core_count = core_count
self.engine = engine
self.memory_size = memory_size
self.is_last_version = is_last_version
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.version is not None:
result['Version'] = self.version
if self.cpu_count is not None:
result['CpuCount'] = self.cpu_count
if self.core_count is not None:
result['CoreCount'] = self.core_count
if self.engine is not None:
result['Engine'] = self.engine
if self.memory_size is not None:
result['MemorySize'] = self.memory_size
if self.is_last_version is not None:
result['IsLastVersion'] = self.is_last_version
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('Version') is not None:
self.version = m.get('Version')
if m.get('CpuCount') is not None:
self.cpu_count = m.get('CpuCount')
if m.get('CoreCount') is not None:
self.core_count = m.get('CoreCount')
if m.get('Engine') is not None:
self.engine = m.get('Engine')
if m.get('MemorySize') is not None:
self.memory_size = m.get('MemorySize')
if m.get('IsLastVersion') is not None:
self.is_last_version = m.get('IsLastVersion')
return self
class GetLindormInstanceResponseBody(TeaModel):
def __init__(
self,
engine_list: List[GetLindormInstanceResponseBodyEngineList] = None,
auto_renew: bool = None,
disk_usage: str = None,
network_type: str = None,
service_type: str = None,
instance_alias: str = None,
instance_status: str = None,
engine_type: int = None,
instance_storage: str = None,
request_id: str = None,
zone_id: str = None,
instance_id: str = None,
create_time: str = None,
cold_storage: int = None,
disk_category: str = None,
pay_type: str = None,
deletion_protection: str = None,
vswitch_id: str = None,
vpc_id: str = None,
region_id: str = None,
expire_time: str = None,
ali_uid: int = None,
):
self.engine_list = engine_list
self.auto_renew = auto_renew
self.disk_usage = disk_usage
self.network_type = network_type
self.service_type = service_type
self.instance_alias = instance_alias
self.instance_status = instance_status
self.engine_type = engine_type
self.instance_storage = instance_storage
self.request_id = request_id
self.zone_id = zone_id
self.instance_id = instance_id
self.create_time = create_time
self.cold_storage = cold_storage
self.disk_category = disk_category
self.pay_type = pay_type
self.deletion_protection = deletion_protection
self.vswitch_id = vswitch_id
self.vpc_id = vpc_id
self.region_id = region_id
self.expire_time = expire_time
self.ali_uid = ali_uid
def validate(self):
if self.engine_list:
for k in self.engine_list:
if k:
k.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
result['EngineList'] = []
if self.engine_list is not None:
for k in self.engine_list:
result['EngineList'].append(k.to_map() if k else None)
if self.auto_renew is not None:
result['AutoRenew'] = self.auto_renew
if self.disk_usage is not None:
result['DiskUsage'] = self.disk_usage
if self.network_type is not None:
result['NetworkType'] = self.network_type
if self.service_type is not None:
result['ServiceType'] = self.service_type
if self.instance_alias is not None:
result['InstanceAlias'] = self.instance_alias
if self.instance_status is not None:
result['InstanceStatus'] = self.instance_status
if self.engine_type is not None:
result['EngineType'] = self.engine_type
if self.instance_storage is not None:
result['InstanceStorage'] = self.instance_storage
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.zone_id is not None:
result['ZoneId'] = self.zone_id
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.create_time is not None:
result['CreateTime'] = self.create_time
if self.cold_storage is not None:
result['ColdStorage'] = self.cold_storage
if self.disk_category is not None:
result['DiskCategory'] = self.disk_category
if self.pay_type is not None:
result['PayType'] = self.pay_type
if self.deletion_protection is not None:
result['DeletionProtection'] = self.deletion_protection
if self.vswitch_id is not None:
result['VswitchId'] = self.vswitch_id
if self.vpc_id is not None:
result['VpcId'] = self.vpc_id
if self.region_id is not None:
result['RegionId'] = self.region_id
if self.expire_time is not None:
result['ExpireTime'] = self.expire_time
if self.ali_uid is not None:
result['AliUid'] = self.ali_uid
return result
def from_map(self, m: dict = None):
m = m or dict()
self.engine_list = []
if m.get('EngineList') is not None:
for k in m.get('EngineList'):
temp_model = GetLindormInstanceResponseBodyEngineList()
self.engine_list.append(temp_model.from_map(k))
if m.get('AutoRenew') is not None:
self.auto_renew = m.get('AutoRenew')
if m.get('DiskUsage') is not None:
self.disk_usage = m.get('DiskUsage')
if m.get('NetworkType') is not None:
self.network_type = m.get('NetworkType')
if m.get('ServiceType') is not None:
self.service_type = m.get('ServiceType')
if m.get('InstanceAlias') is not None:
self.instance_alias = m.get('InstanceAlias')
if m.get('InstanceStatus') is not None:
self.instance_status = m.get('InstanceStatus')
if m.get('EngineType') is not None:
self.engine_type = m.get('EngineType')
if m.get('InstanceStorage') is not None:
self.instance_storage = m.get('InstanceStorage')
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('ZoneId') is not None:
self.zone_id = m.get('ZoneId')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('CreateTime') is not None:
self.create_time = m.get('CreateTime')
if m.get('ColdStorage') is not None:
self.cold_storage = m.get('ColdStorage')
if m.get('DiskCategory') is not None:
self.disk_category = m.get('DiskCategory')
if m.get('PayType') is not None:
self.pay_type = m.get('PayType')
if m.get('DeletionProtection') is not None:
self.deletion_protection = m.get('DeletionProtection')
if m.get('VswitchId') is not None:
self.vswitch_id = m.get('VswitchId')
if m.get('VpcId') is not None:
self.vpc_id = m.get('VpcId')
if m.get('RegionId') is not None:
self.region_id = m.get('RegionId')
if m.get('ExpireTime') is not None:
self.expire_time = m.get('ExpireTime')
if m.get('AliUid') is not None:
self.ali_uid = m.get('AliUid')
return self
class GetLindormInstanceResponse(TeaModel):
def __init__(
self,
headers: Dict[str, str] = None,
body: GetLindormInstanceResponseBody = None,
):
self.headers = headers
self.body = body
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = GetLindormInstanceResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class GetLindormInstanceEngineListRequest(TeaModel):
def __init__(
self,
security_token: str = None,
owner_id: int = None,
resource_owner_account: str = None,
resource_owner_id: int = None,
owner_account: str = None,
region_id: str = None,
instance_id: str = None,
):
self.security_token = security_token
self.owner_id = owner_id
self.resource_owner_account = resource_owner_account
self.resource_owner_id = resource_owner_id
self.owner_account = owner_account
self.region_id = region_id
self.instance_id = instance_id
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.security_token is not None:
result['SecurityToken'] = self.security_token
if self.owner_id is not None:
result['OwnerId'] = self.owner_id
if self.resource_owner_account is not None:
result['ResourceOwnerAccount'] = self.resource_owner_account
if self.resource_owner_id is not None:
result['ResourceOwnerId'] = self.resource_owner_id
if self.owner_account is not None:
result['OwnerAccount'] = self.owner_account
if self.region_id is not None:
result['RegionId'] = self.region_id
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('SecurityToken') is not None:
self.security_token = m.get('SecurityToken')
if m.get('OwnerId') is not None:
self.owner_id = m.get('OwnerId')
if m.get('ResourceOwnerAccount') is not None:
self.resource_owner_account = m.get('ResourceOwnerAccount')
if m.get('ResourceOwnerId') is not None:
self.resource_owner_id = m.get('ResourceOwnerId')
if m.get('OwnerAccount') is not None:
self.owner_account = m.get('OwnerAccount')
if m.get('RegionId') is not None:
self.region_id = m.get('RegionId')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class GetLindormInstanceEngineListResponseBodyEngineListNetInfoList(TeaModel):
def __init__(
self,
access_type: int = None,
connection_string: str = None,
net_type: str = None,
port: int = None,
):
self.access_type = access_type
self.connection_string = connection_string
self.net_type = net_type
self.port = port
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.access_type is not None:
result['AccessType'] = self.access_type
if self.connection_string is not None:
result['ConnectionString'] = self.connection_string
if self.net_type is not None:
result['NetType'] = self.net_type
if self.port is not None:
result['Port'] = self.port
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('AccessType') is not None:
self.access_type = m.get('AccessType')
if m.get('ConnectionString') is not None:
self.connection_string = m.get('ConnectionString')
if m.get('NetType') is not None:
self.net_type = m.get('NetType')
if m.get('Port') is not None:
self.port = m.get('Port')
return self
class GetLindormInstanceEngineListResponseBodyEngineList(TeaModel):
def __init__(
self,
engine_type: str = None,
net_info_list: List[GetLindormInstanceEngineListResponseBodyEngineListNetInfoList] = None,
):
self.engine_type = engine_type
self.net_info_list = net_info_list
def validate(self):
if self.net_info_list:
for k in self.net_info_list:
if k:
k.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.engine_type is not None:
result['EngineType'] = self.engine_type
result['NetInfoList'] = []
if self.net_info_list is not None:
for k in self.net_info_list:
result['NetInfoList'].append(k.to_map() if k else None)
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('EngineType') is not None:
self.engine_type = m.get('EngineType')
self.net_info_list = []
if m.get('NetInfoList') is not None:
for k in m.get('NetInfoList'):
temp_model = GetLindormInstanceEngineListResponseBodyEngineListNetInfoList()
self.net_info_list.append(temp_model.from_map(k))
return self
class GetLindormInstanceEngineListResponseBody(TeaModel):
def __init__(
self,
engine_list: List[GetLindormInstanceEngineListResponseBodyEngineList] = None,
request_id: str = None,
instance_id: str = None,
):
self.engine_list = engine_list
self.request_id = request_id
self.instance_id = instance_id
def validate(self):
if self.engine_list:
for k in self.engine_list:
if k:
k.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
result['EngineList'] = []
if self.engine_list is not None:
for k in self.engine_list:
result['EngineList'].append(k.to_map() if k else None)
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
return result
def from_map(self, m: dict = None):
m = m or dict()
self.engine_list = []
if m.get('EngineList') is not None:
for k in m.get('EngineList'):
temp_model = GetLindormInstanceEngineListResponseBodyEngineList()
self.engine_list.append(temp_model.from_map(k))
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
return self
class GetLindormInstanceEngineListResponse(TeaModel):
def __init__(
self,
headers: Dict[str, str] = None,
body: GetLindormInstanceEngineListResponseBody = None,
):
self.headers = headers
self.body = body
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = GetLindormInstanceEngineListResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class GetLindormInstanceListRequest(TeaModel):
def __init__(
self,
security_token: str = None,
owner_id: int = None,
resource_owner_account: str = None,
resource_owner_id: int = None,
owner_account: str = None,
region_id: str = None,
query_str: str = None,
page_number: int = None,
page_size: int = None,
service_type: str = None,
support_engine: int = None,
):
self.security_token = security_token
self.owner_id = owner_id
self.resource_owner_account = resource_owner_account
self.resource_owner_id = resource_owner_id
self.owner_account = owner_account
self.region_id = region_id
self.query_str = query_str
self.page_number = page_number
self.page_size = page_size
self.service_type = service_type
self.support_engine = support_engine
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.security_token is not None:
result['SecurityToken'] = self.security_token
if self.owner_id is not None:
result['OwnerId'] = self.owner_id
if self.resource_owner_account is not None:
result['ResourceOwnerAccount'] = self.resource_owner_account
if self.resource_owner_id is not None:
result['ResourceOwnerId'] = self.resource_owner_id
if self.owner_account is not None:
result['OwnerAccount'] = self.owner_account
if self.region_id is not None:
result['RegionId'] = self.region_id
if self.query_str is not None:
result['QueryStr'] = self.query_str
if self.page_number is not None:
result['PageNumber'] = self.page_number
if self.page_size is not None:
result['PageSize'] = self.page_size
if self.service_type is not None:
result['ServiceType'] = self.service_type
if self.support_engine is not None:
result['SupportEngine'] = self.support_engine
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('SecurityToken') is not None:
self.security_token = m.get('SecurityToken')
if m.get('OwnerId') is not None:
self.owner_id = m.get('OwnerId')
if m.get('ResourceOwnerAccount') is not None:
self.resource_owner_account = m.get('ResourceOwnerAccount')
if m.get('ResourceOwnerId') is not None:
self.resource_owner_id = m.get('ResourceOwnerId')
if m.get('OwnerAccount') is not None:
self.owner_account = m.get('OwnerAccount')
if m.get('RegionId') is not None:
self.region_id = m.get('RegionId')
if m.get('QueryStr') is not None:
self.query_str = m.get('QueryStr')
if m.get('PageNumber') is not None:
self.page_number = m.get('PageNumber')
if m.get('PageSize') is not None:
self.page_size = m.get('PageSize')
if m.get('ServiceType') is not None:
self.service_type = m.get('ServiceType')
if m.get('SupportEngine') is not None:
self.support_engine = m.get('SupportEngine')
return self
class GetLindormInstanceListResponseBodyInstanceList(TeaModel):
def __init__(
self,
vpc_id: str = None,
engine_type: str = None,
expire_time: str = None,
create_time: str = None,
pay_type: str = None,
instance_storage: str = None,
ali_uid: int = None,
service_type: str = None,
network_type: str = None,
instance_id: str = None,
region_id: str = None,
instance_alias: str = None,
zone_id: str = None,
instance_status: str = None,
):
self.vpc_id = vpc_id
self.engine_type = engine_type
self.expire_time = expire_time
self.create_time = create_time
self.pay_type = pay_type
self.instance_storage = instance_storage
self.ali_uid = ali_uid
self.service_type = service_type
self.network_type = network_type
self.instance_id = instance_id
self.region_id = region_id
self.instance_alias = instance_alias
self.zone_id = zone_id
self.instance_status = instance_status
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.vpc_id is not None:
result['VpcId'] = self.vpc_id
if self.engine_type is not None:
result['EngineType'] = self.engine_type
if self.expire_time is not None:
result['ExpireTime'] = self.expire_time
if self.create_time is not None:
result['CreateTime'] = self.create_time
if self.pay_type is not None:
result['PayType'] = self.pay_type
if self.instance_storage is not None:
result['InstanceStorage'] = self.instance_storage
if self.ali_uid is not None:
result['AliUid'] = self.ali_uid
if self.service_type is not None:
result['ServiceType'] = self.service_type
if self.network_type is not None:
result['NetworkType'] = self.network_type
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.region_id is not None:
result['RegionId'] = self.region_id
if self.instance_alias is not None:
result['InstanceAlias'] = self.instance_alias
if self.zone_id is not None:
result['ZoneId'] = self.zone_id
if self.instance_status is not None:
result['InstanceStatus'] = self.instance_status
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('VpcId') is not None:
self.vpc_id = m.get('VpcId')
if m.get('EngineType') is not None:
self.engine_type = m.get('EngineType')
if m.get('ExpireTime') is not None:
self.expire_time = m.get('ExpireTime')
if m.get('CreateTime') is not None:
self.create_time = m.get('CreateTime')
if m.get('PayType') is not None:
self.pay_type = m.get('PayType')
if m.get('InstanceStorage') is not None:
self.instance_storage = m.get('InstanceStorage')
if m.get('AliUid') is not None:
self.ali_uid = m.get('AliUid')
if m.get('ServiceType') is not None:
self.service_type = m.get('ServiceType')
if m.get('NetworkType') is not None:
self.network_type = m.get('NetworkType')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('RegionId') is not None:
self.region_id = m.get('RegionId')
if m.get('InstanceAlias') is not None:
self.instance_alias = m.get('InstanceAlias')
if m.get('ZoneId') is not None:
self.zone_id = m.get('ZoneId')
if m.get('InstanceStatus') is not None:
self.instance_status = m.get('InstanceStatus')
return self
class GetLindormInstanceListResponseBody(TeaModel):
def __init__(
self,
request_id: str = None,
page_size: int = None,
page_number: int = None,
total: int = None,
instance_list: List[GetLindormInstanceListResponseBodyInstanceList] = None,
):
self.request_id = request_id
self.page_size = page_size
self.page_number = page_number
self.total = total
self.instance_list = instance_list
def validate(self):
if self.instance_list:
for k in self.instance_list:
if k:
k.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
if self.page_size is not None:
result['PageSize'] = self.page_size
if self.page_number is not None:
result['PageNumber'] = self.page_number
if self.total is not None:
result['Total'] = self.total
result['InstanceList'] = []
if self.instance_list is not None:
for k in self.instance_list:
result['InstanceList'].append(k.to_map() if k else None)
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
if m.get('PageSize') is not None:
self.page_size = m.get('PageSize')
if m.get('PageNumber') is not None:
self.page_number = m.get('PageNumber')
if m.get('Total') is not None:
self.total = m.get('Total')
self.instance_list = []
if m.get('InstanceList') is not None:
for k in m.get('InstanceList'):
temp_model = GetLindormInstanceListResponseBodyInstanceList()
self.instance_list.append(temp_model.from_map(k))
return self
class GetLindormInstanceListResponse(TeaModel):
def __init__(
self,
headers: Dict[str, str] = None,
body: GetLindormInstanceListResponseBody = None,
):
self.headers = headers
self.body = body
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = GetLindormInstanceListResponseBody()
self.body = temp_model.from_map(m['body'])
return self
class UpdateInstanceIpWhiteListRequest(TeaModel):
def __init__(
self,
security_token: str = None,
owner_id: int = None,
resource_owner_account: str = None,
resource_owner_id: int = None,
owner_account: str = None,
instance_id: str = None,
security_ip_list: str = None,
group_name: str = None,
):
self.security_token = security_token
self.owner_id = owner_id
self.resource_owner_account = resource_owner_account
self.resource_owner_id = resource_owner_id
self.owner_account = owner_account
self.instance_id = instance_id
self.security_ip_list = security_ip_list
self.group_name = group_name
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.security_token is not None:
result['SecurityToken'] = self.security_token
if self.owner_id is not None:
result['OwnerId'] = self.owner_id
if self.resource_owner_account is not None:
result['ResourceOwnerAccount'] = self.resource_owner_account
if self.resource_owner_id is not None:
result['ResourceOwnerId'] = self.resource_owner_id
if self.owner_account is not None:
result['OwnerAccount'] = self.owner_account
if self.instance_id is not None:
result['InstanceId'] = self.instance_id
if self.security_ip_list is not None:
result['SecurityIpList'] = self.security_ip_list
if self.group_name is not None:
result['GroupName'] = self.group_name
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('SecurityToken') is not None:
self.security_token = m.get('SecurityToken')
if m.get('OwnerId') is not None:
self.owner_id = m.get('OwnerId')
if m.get('ResourceOwnerAccount') is not None:
self.resource_owner_account = m.get('ResourceOwnerAccount')
if m.get('ResourceOwnerId') is not None:
self.resource_owner_id = m.get('ResourceOwnerId')
if m.get('OwnerAccount') is not None:
self.owner_account = m.get('OwnerAccount')
if m.get('InstanceId') is not None:
self.instance_id = m.get('InstanceId')
if m.get('SecurityIpList') is not None:
self.security_ip_list = m.get('SecurityIpList')
if m.get('GroupName') is not None:
self.group_name = m.get('GroupName')
return self
class UpdateInstanceIpWhiteListResponseBody(TeaModel):
def __init__(
self,
request_id: str = None,
):
self.request_id = request_id
def validate(self):
pass
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.request_id is not None:
result['RequestId'] = self.request_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('RequestId') is not None:
self.request_id = m.get('RequestId')
return self
class UpdateInstanceIpWhiteListResponse(TeaModel):
def __init__(
self,
headers: Dict[str, str] = None,
body: UpdateInstanceIpWhiteListResponseBody = None,
):
self.headers = headers
self.body = body
def validate(self):
self.validate_required(self.headers, 'headers')
self.validate_required(self.body, 'body')
if self.body:
self.body.validate()
def to_map(self):
_map = super().to_map()
if _map is not None:
return _map
result = dict()
if self.headers is not None:
result['headers'] = self.headers
if self.body is not None:
result['body'] = self.body.to_map()
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('headers') is not None:
self.headers = m.get('headers')
if m.get('body') is not None:
temp_model = UpdateInstanceIpWhiteListResponseBody()
self.body = temp_model.from_map(m['body'])
return self
| [
"[email protected]"
] | |
8ab26bb0e961827bf5aa99e77eb27bc71be0bf30 | cad396ca2df76e4521f6a4b5c059ba3931e72f11 | /pabi_asset_management/__openerp__.py | 4f52adf93cb7521961a68f99292244f2351916c9 | [] | no_license | mulaudzicalvin/pb2_addons | 1ee835bd0e5d6f215603aa5d3f1099df40a3d14c | 57e0cb59e83853248dda37e2205722ab9bce1852 | refs/heads/master | 2020-03-26T11:47:48.470437 | 2018-08-15T12:16:25 | 2018-08-15T12:16:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,319 | py | # -*- coding: utf-8 -*-
{
"name": "NSTDA :: PABI2 - Asset Management",
"version": "0.1",
"author": "Ecosoft",
"website": "http://ecosoft.co.th",
"category": "Customs Modules",
"depends": [
"account_asset_management",
"stock_account",
"account_anglo_saxon",
"pabi_purchase_work_acceptance",
"account_budget_activity",
"pabi_invest_construction",
"pabi_chartfield_merged",
"pabi_utils",
"pabi_account_move_adjustment",
"hr_expense_auto_invoice",
"pabi_utils",
],
"description": """
    This module allows creating assets during incoming shipments.
""",
"data": [
"security/ir.model.access.csv",
# "data/import_templates.xml",
"xlsx_template/templates.xml",
"xlsx_template/load_template.xml",
"xlsx_template/xlsx_template_wizard.xml",
"data/sequence_data.xml",
"data/asset_purchase_method.xml",
"data/account_data.xml",
"data/location_data.xml",
"data/journal_data.xml",
"data/asset_status.xml",
"data/default_value.xml",
"wizard/asset_parent_deliver_wizard.xml",
"views/asset_view.xml",
"wizard/account_asset_remove_view.xml",
"wizard/create_asset_request_view.xml",
"wizard/create_asset_removal_view.xml",
"wizard/create_asset_adjust_wizard.xml",
"wizard/account_asset_compute.xml",
"views/account_invoice_view.xml",
"views/account_view.xml",
"views/asset_request_view.xml",
"views/asset_changeowner_view.xml",
"views/asset_transfer_view.xml",
"views/asset_adjust_view.xml",
"views/asset_removal_view.xml",
"views/asset_receive_view.xml",
"views/product_view.xml",
"views/purchase_requisition_view.xml",
"views/stock_view.xml",
"views/purchase_view.xml",
"views/purchase_master_data_view.xml",
"views/res_project_view.xml",
"views/res_section_view.xml",
"views/hr_expense_view.xml",
"views/ir_sequence_view.xml",
# "wizard/asset_action_excel_import.xml",
],
'installable': True,
'active': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| [
"[email protected]"
] | |
01beb8d00b05303dbfe6a6de48c5ffd06c388bb5 | b948da1493329127a9a9ab567bae874c8cfa0bf4 | /gallery/migrations/0001_initial.py | 0ae5ff858f1c4d7f41180948b4fd9bd84b6b05a0 | [] | no_license | Kotodian/portfolio | edb93bec72d66d1fececd71b67a8e7f92cebb260 | 5661bf5d8134bbb576b2ea771fe5a6210c942feb | refs/heads/master | 2020-06-13T02:32:59.356331 | 2019-07-04T08:22:28 | 2019-07-04T08:24:22 | 194,503,065 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 515 | py | # Generated by Django 2.0.2 on 2019-06-30 12:39
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Gallery',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', models.CharField(max_length=100)),
],
),
]
| [
"[email protected]"
] | |
47c9d4b25a6ac97dc5789a701dd77e1f0e2ff125 | 97933c7f0973cb5671a26d28763e2688882b6ba9 | /akshare/stock/stock_hk_fhpx_ths.py | 0dad04c91e08af6df46e8f48cb404a95d9b2d66c | [
"MIT"
] | permissive | jinzaizhichi/akshare | a1eacae7a5a94142b2e05e4fed9a48a9448b1755 | c3c9f67364dcfb0c8b507f991540541179a0e87b | refs/heads/master | 2023-08-10T07:45:22.377848 | 2023-07-25T08:23:08 | 2023-07-25T08:23:08 | 243,995,284 | 0 | 0 | MIT | 2022-09-08T05:48:01 | 2020-02-29T15:43:00 | Python | UTF-8 | Python | false | false | 2,078 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
"""
Date: 2023/5/16 18:20
Desc: 10jqka (Tonghuashun) - Hong Kong stocks - dividend payouts
http://stockpage.10jqka.com.cn/HK0700/bonus/
"""
import pandas as pd
import requests
def stock_hk_fhpx_detail_ths(symbol: str = "0700") -> pd.DataFrame:
"""
    10jqka (Tonghuashun) - Hong Kong stocks - dividend payouts
    http://stockpage.10jqka.com.cn/HK0700/bonus/
    :param symbol: Hong Kong stock code
    :type symbol: str
    :return: dividend payout details
:rtype: pandas.DataFrame
"""
url = f"http://basic.10jqka.com.cn/176/HK{symbol}/bonus.html"
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/89.0.4389.90 Safari/537.36",
}
r = requests.get(url, headers=headers)
r.encoding = "utf-8"
temp_df = pd.read_html(r.text)[0]
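    # the column labels below mirror the Chinese headers of the 10jqka bonus
    # page and are kept as-is, since downstream callers index the frame by them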
temp_df.columns = [
"公告日期",
"方案",
"除净日",
"派息日",
"过户日期起止日-起始",
"过户日期起止日-截止",
"类型",
"进度",
"以股代息",
]
    # drop malformed rows (missing dates) produced by scrip-dividend ("以股代息") entries
temp_df.dropna(subset=["派息日", "除净日"], inplace=True, ignore_index=True)
temp_df["公告日期"] = pd.to_datetime(
temp_df["公告日期"], format="%Y-%m-%d", errors="coerce"
).dt.date
temp_df["除净日"] = pd.to_datetime(
temp_df["除净日"], format="%Y-%m-%d", errors="coerce"
).dt.date
temp_df["派息日"] = pd.to_datetime(
temp_df["派息日"], format="%Y-%m-%d", errors="coerce"
).dt.date
temp_df["过户日期起止日-起始"] = pd.to_datetime(
temp_df["过户日期起止日-起始"], format="%Y-%m-%d", errors="coerce"
).dt.date
temp_df["过户日期起止日-截止"] = pd.to_datetime(
temp_df["过户日期起止日-截止"], format="%Y-%m-%d", errors="coerce"
).dt.date
return temp_df
if __name__ == "__main__":
stock_hk_fhpx_detail_ths_df = stock_hk_fhpx_detail_ths(symbol="0968")
print(stock_hk_fhpx_detail_ths_df)
| [
"[email protected]"
] | |
a75ac8c01944ccea8b2dbe6c82b21057a5546ede | 27d44e4eb737cdacd46e08d3c6810424e9751872 | /homedns/interface/interface.py | 885850df63faf3ef8cf6576e8f42454731ab4879 | [] | no_license | liuyug/homedns | 96d88c28d88a31e837270f609b8ea408e763cc80 | 236fd19eaec6dd54f1ae29872a5c627ec1a4ae76 | refs/heads/master | 2021-07-04T08:48:25.905798 | 2020-09-30T02:26:08 | 2020-09-30T02:26:08 | 50,401,927 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 872 | py | #!/usr/bin/env python
# -*- encoding:utf-8 -*-
class InterfaceBase(object):
def __init__(self):
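        # interface name -> info dict; judging by the getters below, entries
        # carry 'gateway', 'dnserver' and 'dhcpserver' keys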
self.interfaces = {}
self.gateway_iface = ''
def isIPv4(self, ip):
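        # crude heuristic: any address containing ':' is assumed to be IPv6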
if not ip:
return False
if ':' in ip:
return False
return True
def includeIPv4(self, ips):
for ip in ips:
if self.isIPv4(ip):
return True
return False
def get_gateway(self):
iface = self.interfaces.get(self.gateway_iface)
if iface:
return iface.get('gateway')
def get_dnserver(self):
iface = self.interfaces.get(self.gateway_iface)
if iface:
return iface.get('dnserver')
def get_dhcpserver(self):
iface = self.interfaces.get(self.gateway_iface)
if iface:
return iface.get('dhcpserver')
| [
"[email protected]"
] | |
a59d04daa9fd2fdef55a8c490457f5c838ff0d96 | d3a0a2cabd572a9e597a399cf6b7012e34a99475 | /flask/script-api/flask/lib/python3.6/importlib/machinery.py | fff54fcd97759ad7872c8d14ac1f72a732ccce52 | [] | no_license | rahulgoyal911/Face-Recogniton-Using-OpenCV-and-Python-on-RaspberryPI | 06ed227dc56a0956d668de0d9f78287c75e790f4 | 1bbdfd45adebda0728831065df0580cd48dedef8 | refs/heads/master | 2020-05-01T01:18:18.868473 | 2019-03-24T16:48:21 | 2019-03-24T16:48:21 | 177,192,198 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 66 | py | /home/rahulgoyal911/anaconda3/lib/python3.6/importlib/machinery.py | [
"[email protected]"
] | |
fbeadd27699e5fd815ec94cbbecb496b634ac17c | 8eb448bde626c47d36fafdb26c97caecb833d578 | /myscript.py | a3e295aca5cd9b3ea50833536062bb069b83b31e | [] | no_license | AlJohri/testenvdir | 9c2502e1b54e2e6952569abe62c664d5d1cad6b2 | c450961c8e29af60c82736fe7cd27e16f996a8a8 | refs/heads/master | 2020-09-21T19:51:50.761607 | 2016-09-08T02:53:54 | 2016-09-08T02:53:54 | 67,660,848 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 70 | py | #!/usr/bin/env python3
from mypackage.settings import *
print(DEBUG)
| [
"[email protected]"
] | |
23130632ae3ca75ff2aa72020b905c030c10dc4b | 92c8743c51a75e5173f1eef139f2796c7027ed2a | /src/forms/about.py | e5d21a74d3dce76787deaee0892494d7ba270b1e | [
"LicenseRef-scancode-warranty-disclaimer",
"MIT",
"CC-BY-3.0"
] | permissive | dave45678/Turing | 79565c0c89fb6b131ea90d7a0c95099e3882cc8c | 99107296ca5785306eb275e515d937e787d4f6d8 | refs/heads/master | 2020-03-15T06:42:48.233418 | 2018-05-03T15:11:55 | 2018-05-03T15:11:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 654 | py | # -*- coding: utf-8 -*-
from PyQt5.QtCore import *
from PyQt5.QtWidgets import *
from forms.ui_about import Ui_AboutWindow
from util.widgets import center_widget
translate = QCoreApplication.translate
class AboutWindow(QDialog):
def __init__(self, parent, version, channel):
super().__init__(parent)
self.ui = Ui_AboutWindow()
self.ui.setupUi(self)
self.setFixedSize(self.size())
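        # fill in the {version}/{channel} placeholders baked into the .ui about text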
txt = self.ui.textBrowser_about.toHtml().replace("{version}", version).replace("{channel}", channel)
self.ui.textBrowser_about.setHtml(txt)
center_widget(self, parent)
def run(self):
self.exec_()
| [
"[email protected]"
] | |
edb3c21bf6b70f937e62563753c0327a117557bb | 3adf9934a74077c328b9a0afff37f8ca355eead1 | /comicresizer/wsgi.py | 0fd76014e66332ac4573eba7ee336a637babd864 | [] | no_license | jgasteiz/comic-resizer | 36671623fe9909f23fba793b44cf4ac56380926a | 12d2e12efdf2017746d67a4b6d9616613ee58bb9 | refs/heads/master | 2021-07-05T05:56:00.911958 | 2017-09-27T07:43:58 | 2017-09-27T07:45:25 | 104,987,910 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | """
WSGI config for comicresizer project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "comicresizer.settings")
application = get_wsgi_application()
| [
"[email protected]"
] | |
04cd0012628d3dfda86a4d2353d6d72de1583872 | bbfa3b7ee2008617d33a7c5c7770d22e1aa8836b | /luolearn/metrics/classification.py | 0ccb8670d109cc630df5d4db6f8b84270668eda1 | [
"MIT"
] | permissive | luoshao23/ML_algorithm | 1a0046ce9c3abed029cceffa35defe57fffa82b2 | 6e94fdd0718cd892118fd036c7c5851cf3e6d796 | refs/heads/master | 2021-08-07T08:38:16.102455 | 2020-03-18T06:49:43 | 2020-03-18T06:49:43 | 92,467,636 | 4 | 1 | MIT | 2018-01-16T05:01:29 | 2017-05-26T03:20:08 | Jupyter Notebook | UTF-8 | Python | false | false | 1,875 | py | import numpy as np
from scipy.sparse import csr_matrix
from ..utils import column_or_1d
from ..utils import check_consistent_length
from ..utils.multiclass import type_of_target
from ..utils.sparsefuncs import count_nonzero
def _check_targets(y_true, y_pred):
check_consistent_length(y_true, y_pred)
type_true = type_of_target(y_true)
type_pred = type_of_target(y_pred)
y_type = set([type_true, type_pred])
if y_type == set(["binary", "multiclass"]):
y_type = set(["multiclass"])
if len(y_type) > 1:
raise ValueError("Cannot handle!")
y_type = y_type.pop()
if (y_type not in ["binary", "multiclass", "multilabel-indicator"]):
raise ValueError("{0} is not supported".format(y_type))
if y_type in ["binary", "multiclass"]:
y_true = column_or_1d(y_true)
y_pred = column_or_1d(y_pred)
        if y_type == "binary":
unique_values = np.union1d(y_true, y_pred)
if len(unique_values) > 2:
y_type = "multiclass"
if y_type.startswith('multilabel'):
y_true = csr_matrix(y_true)
y_pred = csr_matrix(y_pred)
y_type = 'multilabel-indicator'
return y_type, y_true, y_pred
def _weighted_sum(sample_score, sample_weight, normalize=False):
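    # normalize -> weighted mean; weights without normalize -> weighted count;
    # otherwise a plain sum of the per-sample scores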
if normalize:
return np.average(sample_score, weights=sample_weight)
elif sample_weight is not None:
return np.dot(sample_score, sample_weight)
else:
return sample_score.sum()
def accuracy_score(y_true, y_pred, normalize=True, sample_weight=None):
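    # fraction of exactly matching samples (or the raw count when normalize=False);
    # a multilabel row only counts as correct if every label matches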
y_type, y_true, y_pred = _check_targets(y_true, y_pred)
if y_type.startswith('multilabel'):
differing_labels = count_nonzero(y_true - y_pred, axis=1)
score = differing_labels == 0
else:
score = y_true == y_pred
return _weighted_sum(score, sample_weight, normalize)
| [
"[email protected]"
] | |
4a3e93bdc7b589f147e164f7c8dae95f265344d0 | 6c1527b2dc3f944b8907d0de5bda6cdfbaeb1f7f | /otree-core-master/otree/asgi.py | 890f1dd51a9d99f28048c1cc560b3593a729526c | [
"MIT"
] | permissive | dcthomas4679/otree | f0a9204b12cd395e55fd9b77ac90584c2cd3c049 | 363a05d2f70f9225628e4857473dedcb449018dc | refs/heads/master | 2021-06-23T20:07:02.499724 | 2020-11-18T15:32:30 | 2020-11-18T15:32:30 | 37,225,765 | 1 | 1 | NOASSERTION | 2021-06-10T23:28:55 | 2015-06-10T22:22:33 | Python | UTF-8 | Python | false | false | 926 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import # for channels module
import os
import channels.asgi
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from otree.common_internal import (
release_any_stale_locks, get_redis_conn # noqa
)
release_any_stale_locks()
# clear any tasks in Huey DB, so they don't pile up over time,
# especially if you run the server without the timeoutworker to consume the
# tasks.
# ideally we would only schedule a task in Huey if timeoutworker is running,
# so that we don't pile up messages that never get consumed, but I don't know
# how and when to check if Huey is running, in a performant way.
# this code is also in timeoutworker.
from huey.contrib.djhuey import HUEY # noqa
HUEY.flush()
from otree.bots.browser import redis_flush_bots # noqa
redis_flush_bots(get_redis_conn())
channel_layer = channels.asgi.get_channel_layer()
| [
"[email protected]"
] | |
b469199380c66c46be47c9d6a2ca9a4c78298f1b | 9b53a4c0a1980aeb13b73d905afb3322b26def52 | /page/classtimetablePage/room_managementPage/degree_course_classmate_r.py | f48eb36a5d5dce6ea4f4bf9693c3b4d7f7562552 | [] | no_license | xmaimiao/wmPC | 6340386aac10e4c8273ec4aec53a6494820e46ff | 07319cc6f4e0bf1a53bf61b9baf1c8440dfc02bd | refs/heads/master | 2022-12-30T09:50:56.908362 | 2020-10-18T12:49:38 | 2020-10-18T12:49:38 | 305,076,069 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,295 | py | from common.contants import degree_course_classmate_r_dir
import re
from page.basepage import BasePage
class Degree_Course_Classmate_R(BasePage):
def get_lessontime(self):
'''
        Verify that the lesson time is displayed
'''
return self.step(degree_course_classmate_r_dir,"get_lessontime")
def get_lesson_student_num(self):
'''
        Verify that the course's total student count is displayed
'''
result = self.step(degree_course_classmate_r_dir, "get_lesson_student_num")
        return int(re.search(r"(\d+)", result).group(1))
def search_studentname(self,stu_keywords):
'''
        Search for a student by name
'''
self._params["stu_keywords"] = stu_keywords
return self.step(degree_course_classmate_r_dir,"search_studentname")
def search_studentstaffNo(self,stu_keywords):
'''
        Search for a student by student number (staffNo)
'''
self._params["stu_keywords"] = stu_keywords
return self.step(degree_course_classmate_r_dir,"search_studentstaffNo")
def back_to_room_management_degree(self):
self.step(degree_course_classmate_r_dir, "back_to_room_management_degree")
from page.classtimetablePage.room_managementPage.room_management_degree import Room_Management_Degree
return Room_Management_Degree(self._driver) | [
"[email protected]"
] | |
c6eea2c388f7e1c2c5d2a8ef0481770d4e11c70e | 1285703d35b5a37734e40121cd660e9c1a73b076 | /codility/5_count_div.py | b5e1146cfd9a5719bf56ac1dfb738d5edb83f263 | [] | no_license | takin6/algorithm-practice | 21826c711f57131108168775f08e4e13d07a3b38 | f4098bea2085a77d11c29e1593b3cc3f579c24aa | refs/heads/master | 2022-11-30T09:40:58.083766 | 2020-08-07T22:07:46 | 2020-08-07T22:07:46 | 283,609,862 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 362 | py | def solution(A, B, K):
# cur = (A-1) // 2
# divisibles = [cur]
# for i in range(A, B+1):
# if i % K == 0:
# cur += 1
# divisibles.append(cur)
# return divisibles[-1] - divisibles[0]
if A % K == 0:
return (B-A) // K + 1
else:
return (B - (A - A%K)) // K
print(solution(6,11,2))
| [
"[email protected]"
] | |
ab61bd103d445a862db59de85a923a2e32e2ad48 | 4bd793140684ddd73a91014f820c08fd00d4472d | /scripts/addons_extern/jarch_vis0_4/jarch_stairs.py | 56be54f9555804abf088674932bb93efe970f0ca | [] | no_license | Sielgaire/blenderpython | b737a03e2716e156648fe38cd03e84487db67776 | 1c54642533a62aa8ec46b560d1b4bb6265292992 | refs/heads/master | 2021-01-18T08:36:10.458457 | 2015-09-03T06:33:07 | 2015-09-03T06:33:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 69,549 | py | import bpy
from bpy.props import BoolProperty, EnumProperty, FloatProperty, StringProperty, IntProperty, FloatVectorProperty
from math import radians, atan, tan, sin, cos, sqrt, asin
from mathutils import Euler, Vector
import bmesh
from random import uniform
from . jarch_materials import Image
from . jarch_utils import convert, point_rotation
#create stairs
def create_stairs(self, context, style, overhang, steps, t_width, r_height, o_front, o_sides, width, n_landing, is_close, t_width0, r_height0,
l_depth0, l_rot0, o_front0, o_sides0, overhang0, is_back0, l_rot1, t_width1, r_height1, l_depth1, o_front1, o_sides1, overhang1, is_back1,
w_rot, stair_to_rot, rot, steps0, steps1, set_in, is_riser, is_landing, is_light, num_steps2, tread_res, pole_dia, pole_res):
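    #returns (verts, faces, names): mesh data for the first flight plus the names
    #of any extra objects (jacks, landings, spiral pole) linked into the scene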
verts = []; faces = []; names = []
#convert variables
tw = t_width
rh = r_height
of = o_front
os = o_sides
w = width
tw0 = t_width0
rh0 = r_height0
ld0 = l_depth0
of0 = o_front0
os0 = o_sides0
tw1 = t_width1
rh1 = r_height1
ld1 = l_depth1
of1 = o_front1
os1 = o_sides1
pd = pole_dia
    #figure out what angle to use for winding stairs, if applicable
angles = [None, radians(-90), radians(-45), radians(45), radians(90)]
angle = angles[int(w_rot)]
#collect names of materials
mats = []
for i in context.object.data.materials:
mats.append(i.name)
#normal
if style == "1":
#get starting positions and rotation
pre_pos = [0.0, 0.0, 0.0]; pre_rot = context.object.rotation_euler.copy()
#for each set of stairs and landings
for i in range(n_landing + 1):
#calculate variables to pass in
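            #pass_in layout: [tread width, riser height, front overhang, side overhang,
            #overhang style, step count] plus [landing rotation, landing depth] on landings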
if i == 0: pass_in = [tw, rh, of, os, overhang, steps]
elif i == 1: pass_in = [tw0, rh0, of0, os0, overhang0, steps0, l_rot0, ld0]
elif i == 2: pass_in = [tw1, rh1, of1, os1, overhang1, steps1, l_rot1, ld1]
#get step data
verts_temp, faces_temp, cx, cy, cz, verts1_temp, faces1_temp = normal_stairs(self, context, pass_in[0], pass_in[1], pass_in[2], pass_in[3], w, pass_in[4], pass_in[5], is_close, set_in, is_riser, is_light, i)
#temporary value for rot
rot = 0
            #go ahead and create the jacks now, or wait until rotation and location values are figured out, depending on which level you are on
if i == 0:
verts = verts_temp; faces = faces_temp
mesh4 = bpy.data.meshes.new("jacks_" + str(i))
mesh4.from_pydata(verts1_temp, [], faces1_temp)
ob3 = bpy.data.objects.new("jacks_" + str(i), mesh4)
context.scene.objects.link(ob3); ob3.rotation_euler = context.object.rotation_euler; ob3.location = context.object.location; names.append(ob3.name); ob3.scale = context.object.scale
if context.scene.render.engine == "CYCLES":
if len(mats) >= 2:
mat = bpy.data.materials[mats[1]]; ob3.data.materials.append(mat)
else:
mat = bpy.data.materials.new("jack_temp"); mat.use_nodes = True; ob3.data.materials.append(mat)
else:
pre_pos2 = pre_pos[:]
#calculate rotation
if l_rot0 == "2":
if is_back0 == True: rot += 180
else: rot += 90
elif l_rot0 == "3":
if is_back0 == True: rot -= 180
else: rot -= 90
if n_landing == 2 and i == 2:
if l_rot1 == "2":
if is_back1 == True: rot += 180
else: rot += 90
elif l_rot1 == "3":
if is_back1 == True: rot -= 180
else: rot -= 90
#calculate position (adjusting for rotation to the sides and how that offsets the stairs in relation to the landing)
if pass_in[6] == "1": #forwards
type = "straight"
#level 1
if i == 1: pre_pos[1] += pass_in[7]
#level 2
elif i == 2 and l_rot0 == "1": pre_pos[1] += pass_in[7]
elif i == 2 and l_rot0 == "2" and is_back0 == False: pre_pos[0] -= pass_in[7]
elif i == 2 and l_rot0 == "2" and is_back0 == True: pre_pos[1] -= pass_in[7]
elif i == 2 and l_rot0 == "3" and is_back0 == False: pre_pos[0] += pass_in[7]
elif i == 2 and l_rot0 == "3" and is_back0 == True: pre_pos[1] -= pass_in[7]
elif pass_in[6] == "2": #left
#level 1
if i == 1 and is_back0 == False: pre_pos[0] -= w / 2; pre_pos[1] += pass_in[7] / 2; type = "straight"
elif i == 1 and is_back0 == True: pre_pos[0] -= w; type = "left"
#level 2
#first goes forwards
elif i == 2 and l_rot0 == "1" and is_back1 == False: pre_pos[1] += pass_in[7] / 2; pre_pos[0] -= w / 2; type = "straight"
elif i == 2 and l_rot0 == "1" and is_back1 == True: pre_pos[0] -= w; type = "left"
#first goes left
elif i == 2 and l_rot0 == "2" and is_back0 == False and is_back1 == False: pre_pos[0] -= pass_in[7] / 2; pre_pos[1] -= w / 2; type = "straight"
elif i == 2 and l_rot0 == "2" and is_back0 == False and is_back1 == True: pre_pos[1] -= w; type = "left"
elif i == 2 and l_rot0 == "2" and is_back0 == True and is_back1 == False: pre_pos[0] += w / 2; pre_pos[1] -= pass_in[7] / 2; type = "straight"
elif i == 2 and l_rot0 == "2" and is_back0 == True and is_back1 == True: pre_pos[0] += w; type = "left"
#first goes right
elif i == 2 and l_rot0 == "3" and is_back0 == False and is_back1 == False: pre_pos[0] += pass_in[7] / 2; pre_pos[1] += w / 2; type = "straight"
elif i == 2 and l_rot0 == "3" and is_back0 == False and is_back1 == True: pre_pos[1] += w; type = "left"
elif i == 2 and l_rot0 == "3" and is_back0 == True and is_back1 == False: pre_pos[0] += w / 2; pre_pos[1] -= pass_in[7] / 2; type = "straight"
elif i == 2 and l_rot0 == "3" and is_back0 == True and is_back1 == True: pre_pos[0] += w; type = "left"
elif pass_in[6] == "3": #right
#level 1
if i == 1 and is_back0 == False: pre_pos[1] += pass_in[7] / 2; pre_pos[0] += w / 2; type = "straight"
elif i == 1 and is_back0 == True: pre_pos[0] += w; type = "right"
#level 2
#first goes forwards
elif i == 2 and l_rot0 == "1" and is_back1 == False: pre_pos[1] += pass_in[7] / 2; pre_pos[0] += w / 2; type = "straight"
elif i == 2 and l_rot0 == "1" and is_back1 == True: pre_pos[0] += w; type = "right"
#first goes left
elif i == 2 and l_rot0 == "2" and is_back0 == False and is_back1 == False: pre_pos[0] -= pass_in[7] / 2; pre_pos[1] += w / 2; type = "straight"
elif i == 2 and l_rot0 == "2" and is_back0 == False and is_back1 == True: pre_pos[1] += w; type = "right"
elif i == 2 and l_rot0 == "2" and is_back0 == True and is_back1 == False: pre_pos[0] -= w / 2; pre_pos[1] -= pass_in[7] / 2; type = "straight"
elif i == 2 and l_rot0 == "2" and is_back0 == True and is_back1 == True: pre_pos[0] -= w ; type = "right"
#first goes right
elif i == 2 and l_rot0 == "3" and is_back0 == False and is_back1 == False: pre_pos[0] += pass_in[7] / 2; pre_pos[1] -= w / 2; type = "straight"
elif i == 2 and l_rot0 == "3" and is_back0 == False and is_back1 == True: pre_pos[1] -= w; type = "right"
elif i == 2 and l_rot0 == "3" and is_back0 == True and is_back1 == False: pre_pos[0] -= w / 2; pre_pos[1] -= pass_in[7] / 2; type = "straight"
elif i == 2 and l_rot0 == "3" and is_back0 == True and is_back1 == True: pre_pos[0] -= w; type = "right"
#create stairs
pre_pos[2] += 1 / 39.3701
mesh2 = bpy.data.meshes.new("stair_" + str(i))
mesh2.from_pydata(verts_temp, [], faces_temp)
ob = bpy.data.objects.new("stair_" + str(i), mesh2)
context.scene.objects.link(ob); o = context.object; eur = o.rotation_euler.copy(); eur2 = Euler((0.0, 0.0, radians(rot))); eur.rotate(eur2)
matrix = o.matrix_world.inverted()
vpos = Vector(pre_pos) * matrix; pos = list(vpos); pos[0] += o.location[0]; pos[1] += o.location[1]; pos[2] += o.location[2]
names.append(ob.name); ob.rotation_euler = eur; ob.location = pos; ob.scale = o.scale
#jacks
mesh4 = bpy.data.meshes.new("jacks_" + str(i))
mesh4.from_pydata(verts1_temp, [], faces1_temp)
ob3 = bpy.data.objects.new("jacks_" + str(i), mesh4)
context.scene.objects.link(ob3); ob3.rotation_euler = eur; ob3.location = pos; names.append(ob3.name); ob3.scale = o.scale
if context.scene.render.engine == "CYCLES":
if len(mats) >= 2:
mat = bpy.data.materials[mats[1]]; ob3.data.materials.append(mat)
else:
mat = bpy.data.materials.new("jack_temp"); mat.use_nodes = True; ob3.data.materials.append(mat)
#landings
if is_landing == True:
pre_pos2[2] += 1 / 39.3701
verts2, faces2 = stair_landing(self, context, w, pass_in[7], pass_in[1], type, set_in, rot)
mesh3 = bpy.data.meshes.new("landing_" + str(i))
mesh3.from_pydata(verts2, [], faces2)
ob2 = bpy.data.objects.new("landing_" + str(i), mesh3)
context.scene.objects.link(ob2); names.append(ob2.name); pre_pos2[2] -= pass_in[1]
vpos2 = Vector(pre_pos2) * matrix; pos2 = list(vpos2); pos2[0] += o.location[0]; pos2[1] += o.location[1]; pos2[2] += o.location[2]
ob2.location = pos2; ob2.rotation_euler = pre_rot; ob2.scale = o.scale
pre_rot = eur
###Apply translations correctly in relation to the rotation of the stairs###
if rot == 0 or rot == 360:
pre_pos = [cx + pre_pos[0], cy + pre_pos[1], cz + pre_pos[2]]
elif rot == 90 or rot == -270:
pre_pos = [pre_pos[0] - cy, pre_pos[1], pre_pos[2] + cz]
elif rot == -90 or rot == 270:
pre_pos = [pre_pos[0] + cy, pre_pos[1], pre_pos[2] + cz]
elif rot == 180 or rot == -180:
pre_pos = [pre_pos[0], pre_pos[1] - cy, pre_pos[2] + cz]
elif style == "2": #winding
verts, faces = winding_stairs(self, context, tw, rh, of, w, steps, is_close, angle, stair_to_rot)
elif style == "3": #spiral
verts, faces, verts2, faces2 = spiral_stairs(self, context, w, num_steps2, tw, rh, rot, of, tread_res, pd, pole_res)
mesh = bpy.data.meshes.new("pole_temp")
mesh.from_pydata(verts2, [], faces2)
ob4 = bpy.data.objects.new("pole_temp", mesh)
context.scene.objects.link(ob4); ob4.location = context.object.location; ob4.rotation_euler = context.object.rotation_euler; names.append(ob4.name); o = context.object; ob4.scale = o.scale
#context.scene.objects.active = ob; o.select = False; ob.select = True; bpy.ops.object.shade_smooth(); o.select = True; ob.select = False; context.scene.objects.active = o
if context.scene.render.engine == "CYCLES":
#materials
if len(mats) >= 2:
mat = bpy.data.materials[mats[1]]; ob4.data.materials.append(mat)
else:
mat = bpy.data.materials.new("pole_temp"); mat.use_nodes = True; ob4.data.materials.append(mat)
return (verts, faces, names)
def spiral_stairs(self, context, w, steps, tw, rh, rot, of, tread_res, pd, pole_res):
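    #each tread is a wedge swept through rot / (steps - 1) radians around the pole;
    #tread_res extra edge points subdivide the outer arc and inner edge of each tread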
verts = []; faces = []
#calculate rotation per step
ang = rot / (steps - 1); t = 1 / 39.3701
#other needed variables
cur_ang = 0.0; hof = of / 2; cz = rh - t
    #hh: half the tread's width at the outer edge; ih: spacing of the inner-edge subdivision points
hh = w * tan(ang / 2); ih = of / (tread_res + 1)
for step in range(steps - 1):
v = (); p = len(verts)
points = [(0.0, -hof), (w, 0.0)]
for i in range(tread_res): points.append((w, 0.0))
points += ((w, 0.0), (0.0, hof))
for i in range(tread_res): points.append((0.0, hof - (i * ih) - ih))
counter = 0
for i in points:
if step != 0: e_rot = asin(of / w)
else: e_rot = 0.0
#if positive rotation
if counter == 1 and rot >= 0: angle = cur_ang - e_rot
elif counter == 2 + tread_res and rot >= 0: angle = cur_ang + ang
#if negative rotation
elif counter == 1 and rot < 0: angle = cur_ang + ang
elif counter == 2 + tread_res and rot < 0: angle = cur_ang
#middle vertices
elif counter > 1 and counter < 2 + tread_res and rot >= 0: angle = cur_ang + (((ang + e_rot) / (tread_res + 1)) * (counter - 1)) - e_rot
elif counter > 1 and counter < 2 + tread_res and rot < 0: angle = (cur_ang + ang) - (((ang + e_rot) / (tread_res + 1)) * (counter - 1)) - e_rot
else: angle = step * ang; angle += radians(180); angle *= -1; angle -= ang / 2
x, y = point_rotation(i, (0.0, 0.0), angle)
v += ((x, y, cz), (x, y, cz + t)); counter += 1
cz += rh; cur_ang += ang; f = []
for vert in v: verts.append(vert)
#faces
#edge faces
lp = p #local version of the number of vertices
for i in range(4 + (tread_res * 2)):
if i != 3 + (tread_res * 2): f.append((lp, lp + 2, lp + 3, lp + 1)) #if not last face
else: f.append((lp, p, p + 1, lp + 1)) #if last face
lp += 2 #update local p
#calculate top faces
op = []; ip = [p + 1]
for i in range(p + 3, p + 3 + (tread_res * 2) + 4, 2): op.append(i)
for i in range(p + 7 + (tread_res * 4), p + 7 + (tread_res * 4) - (tread_res * 2) - 2, -2): ip.append(i)
for i in range(tread_res + 1): f.append((ip[i], op[i], op[i + 1], ip[i + 1]))
#bottom faces
op = []; ip = [p]
for i in range(p + 2, p + 2 + (tread_res * 2) + 4, 2): op.append(i)
for i in range(p + 6 + (tread_res * 4), p + 6 + (tread_res * 4) - (tread_res * 2) - 2, -2): ip.append(i)
for i in range(tread_res + 1): f.append((ip[i], ip[i + 1], op[i + 1], op[i]))
for face in f: faces.append(face)
#pole
cz -= rh - t; verts2 = []; faces2 = []
ang = radians(360 / pole_res); v = []; z = 0.0; p = len(verts2)
for i in range(2):
for vs in range(pole_res):
cur_ang = vs * ang
x, y = point_rotation((pd / 2, 0.0), (0.0, 0.0), cur_ang)
v.append((x, y, z))
z += cz
for vert in v: verts2.append(vert)
#faces
pr = pole_res
for i in range(pole_res):
if i != pole_res - 1:
f = (p + i, p + i + 1, p + i + pr + 1, p + i + pr)
else:
f = (p + i, p, p + pr, p + i + pr)
faces2.append(f)
f = []
for i in range(pole_res, pole_res * 2):
f.append(p + i)
faces2.append(f)
return (verts, faces, verts2, faces2)
def winding_stairs(self, context, tw, rh, of, w, steps, is_close, w_rot, stair_to_rot):
#create radians measures and round to 4 decimal places
w_rot = round(w_rot, 4); r45 = round(radians(45), 4); r180 = round(radians(180), 4); r90 = round(radians(90), 4)
#winding stairs
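    #note: 'str' here holds the step index where the turn starts (shadowing the builtin)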
verts = []; faces = []; inch = 1 / 39.3701; str = stair_to_rot; tw -= of
#figure out the distance farther to go on left or right side based on rotation
if -r45 < w_rot < r45:
gy = abs(w * tan(w_rot)); ex = 0.0
else: gy = w; ex = w * tan(abs(w_rot) - r45)
t = 1 / 39.3701; gy += t
#calculate number of steps on corner
if w_rot != 0.0:
ti = 10 / 39.3701
c_steps = int((ti * abs(w_rot)) / (tw - inch))
else: c_steps = 0
###needed variables###
cx = 0.0; cy = 0.0; cz = 0.0; dy = str * tw; dz = str * rh; hw = w / 2; rh -= t
ay = (steps - str - c_steps) * tw; az = (steps - str) * rh #ay is the distance from the corner to the top of the stairs
temp_dw = sqrt((gy ** 2 + w ** 2)); temp_x = 0.0
if -r45 <= w_rot <= r45: dw = temp_dw
else:
if w_rot < 0: angle = w_rot + r45
else: angle = w_rot - r45
dw = temp_dw * cos(w_rot)
#steps
for step in range(steps):
p = len(verts); face_type = None; v = ()
if step + 1 < str: #before rotation
v = ((cx - hw, cy, cz), (cx - hw, cy + t, cz), (cx - hw, cy + t, cz + rh), (cx - hw, cy, cz + rh))
v += ((cx + hw, cy, cz), (cx + hw, cy + t, cz), (cx + hw, cy + t, cz + rh), (cx + hw, cy, cz + rh)); cz += rh
v += ((cx - hw, cy - of, cz), (cx - hw, cy + tw, cz), (cx - hw, cy + tw, cz + t), (cx - hw, cy - of, cz + t))
v += ((cx + hw, cy - of, cz), (cx + hw, cy + tw, cz), (cx + hw, cy + tw, cz + t), (cx + hw, cy - of, cz + t)); cy += tw; face_type = "normal"
elif str <= step + 1 <= str + c_steps: #in rotation
if -r45 <= w_rot <= r45:
yp = (gy / (c_steps + 1)); y = yp * (step + 2 - str); y2 = yp * (step + 1 - str)
if 0 < w_rot <= r45: #positive rotation
cx = hw; v = ((cx, cy, cz), (cx, cy, cz + rh), (cx - w, cy + y2, cz), (cx - w, cy + y2, cz + rh), (cx - w, cy + y2 + t, cz), (cx - w, cy + y2 + t, cz + rh), (cx, cy + t, cz), (cx, cy + t, cz + rh)); cz += rh
v += ((cx, cy - of, cz), (cx, cy - of, cz + t), (cx - w, cy + y2 - of, cz), (cx - w, cy + y2 - of, cz + t), (cx - w, cy + y, cz), (cx - w, cy + y, cz + t), (cx, cy + t, cz), (cx, cy + t, cz + t)); face_type = "pos"
elif -r45 <= w_rot < 0: #negative rotation
cx = -hw; v = ((cx, cy, cz), (cx, cy, cz + rh), (cx + w, cy + y2, cz), (cx + w, cy + y2, cz + rh), (cx + w, cy + y2 + t, cz), (cx + w, cy + y2 + t, cz + rh), (cx, cy + t, cz), (cx, cy + t, cz + rh)); cz += rh
v += ((cx, cy - of, cz), (cx, cy - of, cz + t), (cx + w, cy + y2 - of, cz), (cx + w, cy + y2 - of, cz + t), (cx + w, cy + y, cz), (cx + w, cy + y, cz + t), (cx, cy + t, cz), (cx, cy + t, cz + t)); face_type = "neg"
else: #more than abs(45)
ang = w_rot / (c_steps + 1); cs = step + 1 - str; cur_ang = ang * (step + 2 - str)
if w_rot > 0: #positive rotation
if abs(cur_ang) <= r45:
y = (gy / r45) * abs(cur_ang); y2 = (gy / r45) * (abs(cur_ang - ang))
cx = hw; v = ((cx, cy, cz), (cx, cy, cz + rh), (cx - w, cy + y2, cz), (cx - w, cy + y2, cz + rh), (cx - w, cy + y2 + t, cz), (cx - w, cy + y2 + t, cz + rh), (cx, cy + t, cz), (cx, cy + t, cz + rh)); cz += rh
v += ((cx, cy - of, cz), (cx, cy - of, cz + t), (cx - w, cy - of, cz), (cx - w, cy - of, cz + t), (cx - w, cy + y, cz), (cx - w, cy + y, cz + t), (cx, cy + t, cz), (cx, cy + t, cz + t)); face_type = "pos"
elif abs(cur_ang) > r45 and abs(cur_ang - ang) < r45: #step on corner
x = (ex / (abs(w_rot) - r45)) * (abs(cur_ang) - r45); y2 = (gy / r45) * (abs(cur_ang - ang)); x -= t; temp_x = x
cx = hw; v = ((cx, cy, cz), (cx, cy, cz + rh), (cx - w, cy + y2, cz), (cx - w, cy + y2, cz + rh), (cx - w, cy + y2 + t, cz), (cx - w, cy + y2 + t, cz + rh), (cx, cy + t, cz), (cx, cy + t, cz + rh)); cz += rh
v += ((cx, cy - of, cz), (cx, cy - of, cz + t), (cx - w, cy - of + y2, cz), (cx - w, cy - of + y2, cz + t), (cx - w, cy + gy, cz), (cx - w, cy + gy, cz + t), (cx - w + x, cy + gy, cz), (cx - w + x, cy + gy, cz + t))
v += ((cx, cy, cz), (cx, cy, cz + t)); face_type = "pos_tri"
else: #last step
points = ((cx, cy), (cx - w + temp_x - t, cy + gy - t), (cx - w + temp_x - t, cy + gy), (cx - t, cy)); counter = 0
points += ((cx + of, cy), (cx - w + temp_x - t, cy + gy - of - t), (cx - w + temp_x - t, cy + gy + ex - temp_x - t), (cx - t, cy))
for i in points:
if counter in (1, 2, 5, 6): origin = (cx - w + temp_x, cy + gy - t)
else: origin = (cx, cy)
x, y = point_rotation(i, origin, w_rot + r180)
if counter <= 3:
v += ((x, y, cz), (x, y, cz + rh))
else:
v += ((x, y, cz + rh), (x, y, cz + rh + t))
counter += 1
cz += rh; face_type = "pos"
if w_rot < 0: #negative rotation
if abs(cur_ang) <= r45:
y = (gy / r45) * abs(cur_ang); y2 = (gy / r45) * (abs(cur_ang - ang))
cx = -hw; v = ((cx, cy, cz), (cx, cy, cz + rh), (cx + w, cy + y2, cz), (cx + w, cy + y2, cz + rh), (cx + w, cy + y2 + t, cz), (cx + w, cy + y2 + t, cz + rh), (cx, cy + t, cz), (cx, cy + t, cz + rh)); cz += rh
v += ((cx, cy - of, cz), (cx, cy - of, cz + t), (cx + w, cy - of, cz), (cx + w, cy - of, cz + t), (cx + w, cy + y, cz), (cx + w, cy + y, cz + t), (cx, cy + t, cz), (cx, cy + t, cz + t)); face_type = "neg"
elif abs(cur_ang) > r45 and abs(cur_ang - ang) < r45: #step on corner
x = (ex / (abs(w_rot) - r45)) * (abs(cur_ang) - r45); y2 = (gy / r45) * (abs(cur_ang - ang)); x += t; temp_x = x
cx = -hw; v = ((cx, cy, cz), (cx, cy, cz + rh), (cx + w, cy + y2, cz), (cx + w, cy + y2, cz + rh), (cx + w, cy + y2 + t, cz), (cx + w, cy + y2 + t, cz + rh), (cx, cy + t, cz), (cx, cy + t, cz + rh)); cz += rh
v += ((cx, cy - of, cz), (cx, cy - of, cz + t), (cx + w, cy - of + y2, cz), (cx + w, cy - of + y2, cz + t), (cx + w, cy + gy, cz), (cx + w, cy + gy, cz + t), (cx + w - x, cy + gy, cz), (cx + w - x, cy + gy, cz + t))
v += ((cx, cy, cz), (cx, cy, cz + t)); face_type = "neg_tri"
else: #last step
points = ((cx, cy), (cx + w - temp_x + t, cy + gy - t), (cx + w - temp_x + t, cy + gy), (cx + t, cy)); counter = 0
points += ((cx - of, cy), (cx + w - temp_x + t, cy + gy - of - t), (cx + w - temp_x + t, cy + gy + ex - temp_x - t), (cx + t, cy))
for i in points:
if counter in (1, 2, 5, 6): origin = (cx + w - temp_x, cy + gy - t)
else: origin = (cx, cy)
x, y = point_rotation(i, origin, w_rot + r180)
if counter <= 3:
v += ((x, y, cz), (x, y, cz + rh))
else:
v += ((x, y, cz + rh), (x, y, cz + rh + t))
counter += 1
cz += rh; face_type = "neg"
else: #after rotation
if step == str + c_steps: cy += t; cz += rh + t
else: cz += t
cs = step - c_steps - str
z = cz + (rh * cs); counter = 0
if w_rot > 0:
o2 = (cx - w + ex, cy + gy - t); face_type = "pos"
points = ((cx, cy + (cs * tw) - of), (cx - w + ex, cy + (cs * tw) + gy - t - of), (cx - w + ex, cy + (cs * tw) + tw + gy - t), (cx, cy + (cs * tw) + tw))
points += ((cx, cy + (cs * tw)), (cx - w + ex, cy + (cs * tw) + gy - t), (cx - w + ex, cy + (cs * tw) + gy), (cx, cy + (cs * tw) + t))
else:
o2 = (cx + w - ex, cy + gy - t); face_type = "neg"
points = ((cx, cy + (cs * tw) - of), (cx + w - ex, cy + (cs * tw) + gy - t - of), (cx + w - ex, cy + (cs * tw) + tw + gy - t), (cx, cy + (cs * tw) + tw))
points += ((cx, cy + (cs * tw)), (cx + w - ex, cy + (cs * tw) + gy - t), (cx + w - ex, cy + (cs * tw) + gy), (cx, cy + (cs * tw) + t))
for i in points:
if counter in (1, 2, 5, 6): origin = o2
else: origin = (cx, cy)
x, y = point_rotation(i, origin, w_rot + r180)
if counter <= 3:
v += ((x, y, z), (x, y, z + t))
else:
v += ((x, y, z - rh - t), (x, y, z))
counter += 1
for vert in v: verts.append(vert)
f = ()
if face_type == "normal":
f = ((p, p + 4, p + 7, p + 3), (p, p + 3, p + 2, p + 1), (p + 1, p + 2, p + 6, p + 5), (p + 2, p + 3, p + 7, p + 6), (p, p + 4, p + 5, p + 1), (p + 4, p + 5, p + 6, p + 7),
(p + 8, p + 11, p + 10, p + 9), (p + 8, p + 12, p + 15, p + 11), (p + 12, p + 13, p + 14, p + 15), (p + 14, p + 13, p + 9, p + 10), (p + 11, p + 15, p + 14, p + 10), (p + 8, p + 12, p + 13, p + 9))
elif face_type == "pos":
for i in range(2):
f += ((p, p + 6, p + 7, p + 1), (p, p + 1, p + 3, p + 2), (p + 2, p + 3, p + 5, p + 4), (p + 4, p + 5, p + 7, p + 6), (p + 1, p + 7, p + 5, p + 3), (p, p + 2, p + 4, p + 6)); p += 8
elif face_type == "neg":
for i in range(2):
f += ((p, p + 1, p + 7, p + 6), (p, p + 2, p + 3, p + 1), (p + 2, p + 4, p + 5, p + 3), (p + 4, p + 6, p + 7, p + 5), (p + 1, p + 3, p + 5, p + 7), (p, p + 6, p + 4, p + 2)); p += 8
elif face_type == "pos_tri":
f += ((p, p + 6, p + 7, p + 1), (p, p + 1, p + 3, p + 2), (p + 2, p + 3, p + 5, p + 4), (p + 4, p + 5, p + 7, p + 6), (p + 1, p + 7, p + 5, p + 3), (p, p + 2, p + 4, p + 6)); p += 8
f += ((p, p + 8, p + 9, p + 1), (p + 1, p + 9, p + 7, p + 3), (p + 3, p + 7, p + 5), (p, p + 1, p + 3, p + 2), (p + 6, p + 7, p + 9, p + 8), (p + 4, p + 5, p + 7, p + 6), (p + 2, p + 3, p + 5, p + 4), (p, p + 2, p + 6, p + 8), (p + 2, p + 4, p + 6))
elif face_type == "neg_tri":
f += ((p, p + 1, p + 7, p + 6), (p, p + 2, p + 3, p + 1), (p + 2, p + 4, p + 5, p + 3), (p + 4, p + 6, p + 7, p + 5), (p + 1, p + 3, p + 5, p + 7), (p, p + 6, p + 4, p + 2)); p += 8
f += ((p, p + 1, p + 9, p + 8), (p + 1, p + 3, p + 7, p + 9), (p + 3, p + 5, p + 7), (p, p + 2, p + 3, p + 1), (p + 6, p + 8, p + 9, p + 7), (p + 4, p + 6, p + 7, p + 5), (p + 2, p + 4, p + 5, p + 3), (p, p + 8, p + 6, p + 2), (p + 2, p + 6, p + 4))
for face in f: faces.append(face)
return (verts, faces)
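# stair_landing builds a simple rectangular platform; the "left"/"right" types
# widen it to one side so the next flight can turn across it.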
def stair_landing(self, context, w, depth, riser, type, set_in, rot):
hw = w / 2; verts = []; faces = []; p = 0
if type == "straight":
#if set_in == False and rot in (0, 360):
# depth += 1 / 39.3701
v = ((-hw, 0.0, 0.0), (-hw, 0.0, riser), (-hw, depth, 0.0), (-hw, depth, riser), (hw, depth, 0.0), (hw, depth, riser), (hw, 0.0, 0.0), (hw, 0.0, riser))
elif type == "left":
v = ((-hw - w, 0.0, 0.0), (-hw - w, 0.0, riser), (-hw - w, depth, 0.0), (-hw - w, depth, riser), (hw, depth, 0.0), (hw, depth, riser), (hw, 0.0, 0.0), (hw, 0.0, riser))
elif type == "right":
v = ((-hw, 0.0, 0.0), (-hw, 0.0, riser), (-hw, depth, 0.0), (-hw, depth, riser), (hw + w, depth, 0.0), (hw + w, depth, riser), (hw + w, 0.0, 0.0), (hw + w, 0.0, riser))
for vert in v: verts.append(vert)
f = ((p, p + 1, p + 3, p + 2), (p + 1, p + 7, p + 5, p + 3), (p + 4, p + 5, p + 7, p + 6), (p, p + 2, p + 4, p + 6), (p + 2, p + 3, p + 5, p + 4), (p, p + 6, p + 7, p + 1))
for face in f: faces.append(face)
return (verts, faces)
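# normal_stairs builds the stringers ("jacks") first, then the treads and
# risers; "set_in" stairs use just two outer jacks with the treads set into
# them.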
def normal_stairs(self, context, tw, rh, of, os, w, overhang, steps, is_close, set_in, is_riser, is_light, landing):
tw -= of
verts = []; faces = []; inch = 1 / 39.3701
#figure number of jacks
    if set_in == True: jack_type = "set_in"; jack_num = 2
    else: jack_type = "normal"; jack_num = int(w / 0.3048)
#space if light
if is_light == True and jack_type == "normal" and jack_num % 2 != 0:
jack_num += 1
tread = tw; riser = rh; over_front = of; over_sides = os; t_steps = steps; ov = overhang
#create jacks
cx = -(w / 2); thick = 0.0508; t = thick
space = (w - thick) / (jack_num - 1) #current x, thickness, space between jacks
###set_in variables###
if jack_type == "set_in":
riser -= t
ty = (t * tread) / (riser + t)
ow = tread + tread + ty; width = tread + (2 * ty)
for jack in range(jack_num): #each jack
extra = 6 / 39.3701; t_height = t_steps * riser; t_depth = t_steps * tread - inch
#amount gained on y and z axis because of jack slope
angle = atan((t_height - extra) / (t_depth - extra))
gz = riser - ((tread - extra) * tan(angle))
if jack_type != "set_in":
if is_riser == False: cy = 0.0
else: cy = inch
else:
cy = 0.0
#calculate line slope
if is_close == False or (jack != 0 and jack != jack_num - 1):
point = [extra + cy, 0.0]; point_1 = [tread * t_steps + cy - inch, (riser * t_steps) - extra]
slope = (point[1] - point_1[1]) / (point[0] - point_1[0])
b = point[1] - (slope * point[0])
elif (jack == 0 or jack == jack_num - 1) and is_close == True:
b = 0.0; slope = 0.0
last = 0.0; out_faces = []
if is_close == True and jack in (0, jack_num - 1): last = t_height - extra;
#stairs
cz = riser; sy = cy
for stair in range(t_steps):
face_type = "normal"; p = len(verts)
if jack_type == "normal":
if stair == 0:
#first step
z = (slope * (cy + tread)) + b
v = ((cx, cy, cz - riser), (cx + t, cy, cz - riser), (cx, cy, cz), (cx + t, cy, cz), (cx, cy + tread, cz), (cx + t, cy + tread, cz))
v += ((cx, cy + tread, z), (cx + t, cy + tread, z), (cx, cy + extra, cz - riser), (cx + t, cy + extra, cz - riser))
face_type = "first"
elif stair == t_steps - 1:
#last step
v = ((cx, cy, cz), (cx + t, cy, cz), (cx, cy + tread - inch, cz), (cx + t, cy + tread - inch, cz), (cx, cy + tread - inch, cz - extra - last), (cx + t, cy + tread - inch, cz - extra - last))
else:
#other steps
z = (slope * (cy + tread)) + b
v = ((cx, cy, cz), (cx + t, cy, cz), (cx, cy + tread, cz), (cx + t, cy + tread, cz), (cx, cy + tread, z), (cx + t, cy + tread, z))
for vert in v:
verts.append(vert)
if face_type == "first":
f = [(p, p + 1, p + 3, p + 2), (p + 2, p + 3, p + 5, p + 4), (p, p + 2, p + 4, p + 6), (p + 1, p + 7, p + 5, p + 3),
(p + 6, p + 7, p + 9, p + 8), (p, p + 8, p + 9, p + 1), (p, p + 6, p + 8), (p + 1, p + 9, p + 7)]
else:
if stair == 1:
f = [(p, p + 1, p + 3, p + 2), (p, p + 2, p + 4, p - 6), (p - 6, p + 4, p - 4), (p, p - 6, p - 5, p + 1),
(p + 1, p - 5, p + 5, p + 3), (p - 5, p - 3, p + 5), (p - 3, p - 4, p + 4, p + 5)]
else:
f = [(p, p + 1, p + 3, p + 2), (p, p + 2, p + 4, p - 4), (p - 4, p + 4, p - 2), (p, p - 4, p - 3, p + 1),
(p + 1, p - 3, p + 5, p + 3), (p - 3, p - 1, p + 5), (p - 1, p - 2, p + 4, p + 5)]
if stair == t_steps - 1:
if t_steps == 1:
f.append((p + 4, p + 5, p + 7, p + 6))
else:
f.append((p + 2, p + 3, p + 5, p + 4))
for face in f:
faces.append(face)
#update variables
cy += tread; cz += riser
elif jack_type == "set_in":
face_type = "normal"; p = len(verts)
if stair == 0:
if landing != 0: cy = -tread
else: cy = 0.0
cz = 0.0
if landing != 0:
v = ((cx, cy + tread, cz), (cx + inch, cy + tread, cz), (cx + t, cy + tread, cz), (cx, cy + tread, cz + riser), (cx + inch, cy + tread, cz + riser), (cx + t, cy + tread, cz + riser))
else:
v = ((cx, cy, cz), (cx + inch, cy, cz), (cx + t, cy, cz), (cx, cy + tread - ty, cz + riser), (cx + inch, cy + tread - ty, cz + riser), (cx + t, cy + tread - ty, cz + riser))
v += ((cx, cy + tread, cz + riser + t), (cx + inch, cy + tread, cz + riser + t), (cx + t, cy + tread, cz + riser + t), (cx, cy + ow, cz + riser + t))
v += ((cx + inch, cy + ow, cz + riser + t), (cx + t, cy + ow, cz + riser + t), (cx, cy + ow - ty, cz + riser), (cx + inch, cy + ow - ty, cz + riser), (cx + t, cy + ow - ty, cz + riser))
v += ((cx, cy + width - ty, cz), (cx + inch, cy + width - ty, cz), (cx + t, cy + width - ty, cz)); cy += tread + tread - ty; cz += riser + riser + t
if jack == 0: face_type = "first"
else: face_type = "first_right"
elif stair == t_steps - 1: #last step
v = ((cx, cy, cz), (cx + inch, cy, cz), (cx + t, cy, cz), (cx, cy + ty, cz + t), (cx + inch, cy + ty, cz + t), (cx + t, cy + ty, cz + t))
v += ((cx, cy + width - ty, cz + t), (cx + inch, cy + width - ty, cz + t), (cx + t, cy + width - ty, cz + t))
v += ((cx, cy + tread + ty, cz), (cx + inch, cy + tread + ty, cz), (cx + t, cy + tread + ty, cz)); cz += riser + t + t; cy += tread + ty
v += ((cx, cy, cz), (cx + inch, cy, cz), (cx + t, cy, cz))
if jack == 0: face_type = "last"
else: face_type = "last_right"
else: #normal steps
v = ((cx, cy, cz), (cx + inch, cy, cz), (cx + t, cy, cz), (cx, cy + ty, cz + t), (cx + inch, cy + ty, cz + t), (cx + t, cy + ty, cz + t))
v += ((cx, cy + width, cz + t), (cx + inch, cy + width, cz + t), (cx + t, cy + width, cz + t))
v += ((cx, cy + tread + ty, cz), (cx + inch, cy + tread + ty, cz), (cx + t, cy + tread + ty, cz))
cy += tread; cz += riser + t
if jack != 0: face_type = "normal_right"
for vert in v: verts.append(vert)
#faces
f = ()
if face_type == "first":
f = [(p, p + 3, p + 12, p + 15), (p, p + 1, p + 4, p + 3), (p + 1, p + 2, p + 5, p + 4), (p + 2, p + 17, p + 14, p + 5), (p + 16, p + 13, p + 14, p + 17),
(p + 15, p + 12, p + 13, p + 16), (p, p + 15, p + 16, p + 1), (p + 1, p + 16, p + 17, p + 2), (p + 3, p + 4, p + 7, p + 6), (p + 4, p + 5, p + 14, p + 13),
(p + 4, p + 13, p + 10, p + 7), (p + 12, p + 9, p + 10, p + 13), (p + 3, p + 6, p + 9, p + 12)]
if t_steps != 1: f.append((p + 7, p + 10, p + 11, p + 8))
else: f.append((p + 6, p + 7, p + 10, p + 9))
elif face_type == "first_right":
f = [(p, p + 3, p + 12, p + 15), (p, p + 1, p + 4, p + 3), (p + 1, p + 2, p + 5, p + 4), (p + 2, p + 17, p + 14, p + 5), (p + 16, p + 13, p + 14, p + 17),
(p + 15, p + 12, p + 13, p + 16), (p + 4, p + 5, p + 8, p + 7), (p + 5, p + 14, p + 11, p + 8), (p + 14, p + 13, p + 10, p + 11),
(p + 12, p + 3, p + 4, p + 13), (p + 4, p + 7, p + 10, p + 13), (p, p + 15, p + 16, p + 1), (p + 1, p + 16, p + 17, p + 2)]
if t_steps != 1: f.append((p + 6, p + 9, p + 10, p + 7))
else: f.append((p + 7, p + 8, p + 11, p + 10))
elif face_type == "normal":
if stair == 1:
f = ((p, p - 12, p - 11, p + 1), (p + 1, p - 11, p - 10, p + 2), (p + 2, p - 10, p - 7, p + 11), (p - 8, p + 10, p + 11, p - 7), (p - 9, p + 9, p + 10, p - 8),
(p, p + 9, p - 9, p - 12), (p, p + 1, p + 4, p + 3), (p + 1, p + 2, p + 11, p + 10), (p + 1, p + 10, p + 7, p + 4), (p + 4, p + 7, p + 8, p + 5),
(p + 6, p + 7, p + 10, p + 9), (p, p + 3, p + 6, p + 9))
else:
f = ((p, p - 9, p - 8, p + 1), (p + 1, p - 8, p - 7, p + 2), (p - 7, p - 4, p + 11, p + 2), (p - 4, p - 5, p + 10, p + 11), (p - 5, p - 6, p + 9, p + 10),
(p, p + 9, p - 6, p - 9), (p, p + 1, p + 4, p + 3), (p + 1, p + 2, p + 11, p + 10), (p + 1, p + 10, p + 7, p + 4), (p + 4, p + 7, p + 8, p + 5),
(p + 6, p + 7, p + 10, p + 9), (p, p + 3, p + 6, p + 9))
elif face_type == "normal_right":
if stair == 1:
f = ((p, p - 12, p - 11, p + 1), (p + 1, p - 11, p - 10, p + 2), (p + 2, p - 10, p - 7, p + 11), (p - 8, p + 10, p + 11, p - 7), (p - 9, p + 9, p + 10, p - 8),
(p, p + 9, p - 9, p - 12), (p + 1, p + 2, p + 5, p + 4), (p + 2, p + 11, p + 8, p + 5), (p + 7, p + 8, p + 11, p + 10), (p, p + 1, p + 10, p + 9),
(p + 1, p + 4, p + 7, p + 10), (p + 3, p + 6, p + 7, p + 4))
else:
f = ((p, p - 9, p - 8, p + 1), (p + 1, p - 8, p - 7, p + 2), (p - 7, p - 4, p + 11, p + 2), (p - 4, p - 5, p + 10, p + 11), (p - 5, p - 6, p + 9, p + 10),
(p, p + 9, p - 6, p - 9), (p + 1, p + 2, p + 5, p + 4), (p + 2, p + 11, p + 8, p + 5), (p + 7, p + 8, p + 11, p + 10), (p, p + 1, p + 10, p + 9),
(p + 1, p + 4, p + 7, p + 10), (p + 3, p + 6, p + 7, p + 4))
elif face_type == "last":
f = ((p, p - 9, p - 8, p + 1), (p + 1, p - 8, p - 7, p + 2), (p - 7, p - 4, p + 11, p + 2), (p - 4, p - 5, p + 10, p + 11), (p - 5, p - 6, p + 9, p + 10),
(p, p + 9, p - 6, p - 9), (p, p + 1, p + 4, p + 3), (p + 1, p + 2, p + 11, p + 10), (p + 1, p + 10, p + 7, p + 4), (p + 4, p + 7, p + 8, p + 5),
(p + 6, p + 7, p + 10, p + 9), (p, p + 3, p + 6, p + 9))
elif face_type == "last_right":
f = ((p, p - 9, p - 8, p + 1), (p + 1, p - 8, p - 7, p + 2), (p - 7, p - 4, p + 11, p + 2), (p - 4, p - 5, p + 10, p + 11), (p - 5, p - 6, p + 9, p + 10),
(p, p + 9, p - 6, p - 9), (p + 1, p + 2, p + 5, p + 4), (p + 2, p + 11, p + 8, p + 5), (p + 7, p + 8, p + 11, p + 10), (p, p + 1, p + 10, p + 9),
(p + 1, p + 4, p + 7, p + 10), (p + 3, p + 6, p + 7, p + 4))
if face_type in ("last_right", "last"):
f += ((p + 3, p + 4, p + 13, p + 12), (p + 4, p + 5, p + 14, p + 13), (p + 5, p + 8, p + 14), (p + 7, p + 13, p + 14, p + 8), (p + 6, p + 12, p + 13, p + 7), (p + 3, p + 12, p + 6))
for face in f: faces.append(face)
#update variables
cx += space
verts1 = verts[:]; faces1 = faces[:]; verts = []; faces = []
#treads and risers
ry = cy
cx = 0.0; cy = sy; cz = 0.0; hw = w / 2
if jack_type == "normal":
if overhang == "4": left = hw + os; right = hw + os #both
elif overhang == "2": left = hw; right = hw + os #right
elif overhang == "3": left = hw + os; right = hw #left
else: left = hw; right = hw #normal
#front
if is_riser == True:
front = of + inch
else:
front = of
#steps
for step in range(t_steps):
if is_riser == False: e = 0.0
else: e = inch
p = len(verts)
if is_riser == True:
v = ((cx - hw, cy, cz), (cx - hw, cy - inch, cz), (cx - hw, cy, cz + riser), (cx - hw, cy - inch, cz + riser))
v += ((cx + hw, cy, cz + riser), (cx + hw, cy - inch, cz + riser), (cx + hw, cy, cz), (cx + hw, cy - inch, cz))
for vert in v: verts.append(vert)
f = ((p, p + 1, p + 3, p + 2), (p + 1, p + 7, p + 5, p + 3), (p + 4, p + 5, p + 7, p + 6), (p, p + 2, p + 4, p + 6), (p + 2, p + 3, p + 5, p + 4), (p, p + 6, p + 7, p + 1))
for face in f: faces.append(face)
p += 8
cz += riser
#treads
v = ((cx - left, cy - front, cz), (cx - left, cy - front, cz + inch), (cx - left, cy + tread - e, cz), (cx - left, cy + tread - e, cz + inch))
v += ((cx + right, cy + tread - e, cz), (cx + right, cy + tread - e, cz + inch), (cx + right, cy - front, cz), (cx + right, cy - front, cz + inch))
for vert in v: verts.append(vert)
f = ((p, p + 1, p + 3, p + 2), (p + 1, p + 7, p + 5, p + 3), (p + 4, p + 5, p + 7, p + 6), (p, p + 2, p + 4, p + 6), (p + 2, p + 3, p + 5, p + 4), (p, p + 6, p + 7, p + 1))
for face in f: faces.append(face)
cy += tread
ry -= inch
elif jack_type == "set_in":
hw -= inch; cz += riser
if landing == 0: cy += tread
else: cy = 0.0
for step in range(t_steps):
p = len(verts)
v = ((cx - hw, cy, cz), (cx - hw, cy, cz + t), (cx - hw, cy + tread, cz), (cx - hw, cy + tread, cz + t))
v += ((cx + hw, cy + tread, cz), (cx + hw, cy + tread, cz + t), (cx + hw, cy, cz), (cx + hw, cy, cz + t))
for vert in v: verts.append(vert)
f = ((p, p + 1, p + 3, p + 2), (p + 1, p + 7, p + 5, p + 3), (p + 4, p + 5, p + 7, p + 6), (p, p + 2, p + 4, p + 6), (p + 2, p + 3, p + 5, p + 4), (p, p + 6, p + 7, p + 1))
for face in f: faces.append(face)
cz += t + riser; cy += tread
t_height += t * t_steps + t - inch
return (verts, faces, 0.0, ry, t_height + riser, verts1, faces1)
def UpdateStairs(self, context):
o = context.object; mats = []
for i in o.data.materials:
mats.append(i.name)
verts, faces, names = create_stairs(self, context, o.s_style, o.s_overhang, o.s_num_steps, o.s_tread_width, o.s_riser_height, o.s_over_front, o.s_over_sides, o.s_width,
o.s_num_land, o.s_is_close, o.s_tread_width0, o.s_riser_height0, o.s_landing_depth0, o.s_landing_rot0, o.s_over_front0, o.s_over_sides0, o.s_overhang0, o.s_is_back0,
o.s_landing_rot1, o.s_tread_width1, o.s_riser_height1, o.s_landing_depth1, o.s_over_front1, o.s_over_sides1, o.s_overhang1, o.s_is_back1, o.s_w_rot, o.s_num_rot, o.s_rot,
o.s_num_steps0, o.s_num_steps1, o.s_is_set_in, o.s_is_riser, o.s_is_landing, o.s_is_light, o.s_num_steps2, o.s_tread_res, o.s_pole_dia, o.s_pole_res)
emesh = o.data
    mesh = bpy.data.meshes.new(name = "stairs")
mesh.from_pydata(verts, [], faces)
mesh.update(calc_edges = True)
for i in bpy.data.objects:
if i.data == emesh:
i.data = mesh
emesh.user_clear()
bpy.data.meshes.remove(emesh)
if context.scene.render.engine == "CYCLES":
if len(mats) >= 1:
mat = bpy.data.materials[mats[0]]; o.data.materials.append(mat)
else:
mat = bpy.data.materials.new("stairs_temp"); mat.use_nodes = True; o.data.materials.append(mat)
#join objects if needed
if names != []:
for name in names:
ob = bpy.data.objects[name]; o.select = False; ob.select = True; o.select = True; context.scene.objects.active = o
bpy.ops.object.join()
if o.s_unwrap == True:
UnwrapStairs(self, context)
if o.s_random_uv == True:
RandomUV(self, context)
for i in mats:
if i not in o.data.materials:
mat = bpy.data.materials[i]; o.data.materials.append(mat)
for i in bpy.data.materials: #remove unused materials
if i.users == 0: bpy.data.materials.remove(i)
def StairsMaterials(self, context):
o = context.object
if context.scene.render.engine == "CYCLES":
#run file checker
error = False
if o.s_col_image == "": error = True
if o.s_norm_image == "" and o.s_is_bump == True: error = True
#check if first image is empty
if error == False and len(o.data.materials) >= 1:
mat = Image(bpy, context, o.s_im_scale, o.s_col_image, o.s_norm_image, o.s_bump_amo, o.s_is_bump, "stairs_temp_" + o.name, True, 0.1, 0.05, o.s_is_rotate)
if mat != None:
o.data.materials[0] = mat.copy(); o.data.materials[0].name = "stairs_" + o.name
else: self.report({"ERROR"}, "Images Not Found, Make Sure Path Is Correct")
else:
self.report({"ERROR"}, "First Material Invalid, Try Updating Object")
#second material
if o.s_style in ("1", "3"):
error2 = False
if o.s_col_image2 == "": error2 = True
if o.s_norm_image2 == "" and o.s_is_bump2 == True: error2 = True
if error2 == False and len(o.data.materials) >= 2:
mat = Image(bpy, context, o.s_im_scale2, o.s_col_image2, o.s_norm_image2, o.s_bump_amo2, o.s_is_bump2, "second_temp_" + o.name, True, 0.1, 0.05, o.s_is_rotate2)
if mat != None:
o.data.materials[1] = mat.copy()
if o.s_style == "1": o.data.materials[1].name = "jacks_" + o.name
else: o.data.materials[1].name = "pole_" + o.name
else: self.report({"ERROR"}, "Images Not Found, Make Sure Path Is Correct")
else:
self.report({"ERROR"}, "Second Material Invalid, Try Updating Object")
for i in bpy.data.materials: #remove unused materials
if i.users == 0: bpy.data.materials.remove(i)
else:
self.report({"ERROR"}, "Render Engine Must Be Cycles")
def UnwrapStairs(self, context):
o = context.object
#uv unwrap
for i in bpy.data.objects: i.select = False
o.select = True; bpy.context.scene.objects.active = o
for area in bpy.context.screen.areas:
if area.type == 'VIEW_3D':
for region in area.regions:
if region.type == 'WINDOW':
bpy.ops.object.editmode_toggle()
override = bpy.context.copy(); override["area"] = area; override["region"] = region; override["active_object"] = (bpy.context.selected_objects)[0]
bpy.ops.mesh.select_all(action = "SELECT"); bpy.ops.uv.cube_project(override); bpy.ops.object.editmode_toggle()
def RandomUV(self, context):
for area in bpy.context.screen.areas:
if area.type == 'VIEW_3D':
for region in area.regions:
if region.type == 'WINDOW':
bpy.ops.object.editmode_toggle()
bpy.ops.mesh.select_all(action = "SELECT")
obj = bpy.context.object
me = obj.data
bm = bmesh.from_edit_mesh(me)
uv_layer = bm.loops.layers.uv.verify()
bm.faces.layers.tex.verify()
# adjust UVs
for f in bm.faces:
offset = Vector((uniform(-1.0, 1.0), uniform(-1.0, 1.0)))
for v in f.loops:
luv = v[uv_layer]
luv.uv = (luv.uv + offset).xy
bmesh.update_edit_mesh(me)
bpy.ops.object.editmode_toggle()
def DeleteMaterials(self, context):
o = context.object
if o.s_is_material == False and o.s_mat != "2":
for i in o.data.materials:
bpy.ops.object.material_slot_remove()
for i in bpy.data.materials:
if i.users == 0:
bpy.data.materials.remove(i)
def PreviewMaterials(self, context):
for area in bpy.context.screen.areas:
if area.type == 'VIEW_3D':
for space in area.spaces:
if space.type == 'VIEW_3D':
if bpy.context.object.f_is_preview == True: space.viewport_shade = 'RENDERED'
else: space.viewport_shade = "SOLID"
#properties
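# Each property's "update" callback re-runs the matching handler (usually
# UpdateStairs), so changing a value in the panel rebuilds the mesh.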
#setup
bpy.types.Object.s_object_add = StringProperty(default = "none", update = UpdateStairs)
#style
bpy.types.Object.s_style = EnumProperty(items = (("1", "Normal", ""), ("2", "Winding", ""), ("3", "Spiral", "")),
default = "1", description = "Stair Style", update = UpdateStairs, name = "")
bpy.types.Object.s_overhang = EnumProperty(items = (("1", "Normal", ""), ("2", "Right", ""), ("3", "Left", ""), ("4", "Both", "")),
default = "1", description = "Overhang Style", update = UpdateStairs, name = "")
#common variables
bpy.types.Object.s_num_steps = IntProperty(name = "Number Of Steps", min = 1, max = 24, default = 13, update = UpdateStairs)
bpy.types.Object.s_num_steps2 = IntProperty(name = "Number Of Steps", min = 1, max = 48, default = 13, update = UpdateStairs)
bpy.types.Object.s_tread_width = FloatProperty(name = "Tread Width", min = 9.0 / 39.3701, max = 16.0 / 39.3701, default = 9.5 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_riser_height = FloatProperty(name = "Riser Height", min = 5.0 / 39.3701, max = 8.0 / 39.3701, default = 7.4 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_over_front = FloatProperty(name = "Front Overhang", min = 0.0, max = 1.25 / 39.3701, default = 1.0 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_over_sides = FloatProperty(name = "Side Overhang", min = 0.0, max = 2.0 / 39.3701, default = 1.0 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_width = FloatProperty(name = "Stair Width", min = 36.0 / 39.3701, max = 60.0 / 39.3701, default = 40.0 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_is_riser = BoolProperty(name = "Risers?", default = True, update = UpdateStairs)
#normal style
bpy.types.Object.s_num_land = IntProperty(name = "Number Of Landings", min = 0, max = 2, default = 0, update = UpdateStairs)
bpy.types.Object.s_is_close = BoolProperty(name = "Close Sides?", default = False, update = UpdateStairs)
bpy.types.Object.s_is_set_in = BoolProperty(name = "Set Steps In?", default = False, update = UpdateStairs)
bpy.types.Object.s_is_landing = BoolProperty(name = "Create Landings?", default = True, update = UpdateStairs)
bpy.types.Object.s_is_light = BoolProperty(name = "Allow Recessed Lights?", default = False, update = UpdateStairs, description = "Space Middle Step Jacks To Allow Recessed Lights")
#landing 0
bpy.types.Object.s_num_steps0 = IntProperty(name = "Number Of Steps", min = 1, max = 24, default = 13, update = UpdateStairs)
bpy.types.Object.s_tread_width0 = FloatProperty(name = "Tread Width", min = 9.0 / 39.3701, max = 16.0 / 39.3701, default = 9.5 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_riser_height0 = FloatProperty(name = "Riser Height", min = 5.0 / 39.3701, max = 8.0 / 39.3701, default = 7.4 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_landing_depth0 = FloatProperty(name = "Landing 1 Depth", min = 36 / 39.3701, max = 60 / 39.3701, default = 40 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_landing_rot0 = EnumProperty(items = (("1", "Forwards", ""), ("2", "Left", ""), ("3", "Right", "")), update = UpdateStairs, name = "")
bpy.types.Object.s_over_front0 = FloatProperty(name = "Front Overhang", min = 0.0, max = 1.25 / 39.3701, default = 1.0 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_over_sides0 = FloatProperty(name = "Side Overhang", min = 0.0, max = 2.0 / 39.3701, default = 1.0 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_overhang0 = EnumProperty(items = (("1", "Normal", ""), ("2", "Right", ""), ("3", "Left", ""), ("4", "Both", "")),
default = "1", description = "Overhang Style", update = UpdateStairs, name = "")
bpy.types.Object.s_is_back0 = BoolProperty(name = "Turn Backwards?", default = False, update = UpdateStairs)
#landing 1
bpy.types.Object.s_num_steps1 = IntProperty(name = "Number Of Steps", min = 1, max = 24, default = 13, update = UpdateStairs)
bpy.types.Object.s_landing_rot1 = EnumProperty(items = (("1", "Forwards", ""), ("2", "Left", ""), ("3", "Right", "")), update = UpdateStairs, name = "")
bpy.types.Object.s_tread_width1 = FloatProperty(name = "Tread Width", min = 9.0 / 39.3701, max = 16.0 / 39.3701, default = 9.5 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_riser_height1 = FloatProperty(name = "Riser Height", min = 5.0 / 39.3701, max = 8.0 / 39.3701, default = 7.4 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_landing_depth1 = FloatProperty(name = "Landing 2 Depth", min = 36 / 39.3701, max = 60 / 39.3701, default = 40 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_over_front1 = FloatProperty(name = "Front Overhang", min = 0.0, max = 1.25 / 39.3701, default = 1.0 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_over_sides1 = FloatProperty(name = "Side Overhang", min = 0.0, max = 2.0 / 39.3701, default = 1.0 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_overhang1 = EnumProperty(items = (("1", "Normal", ""), ("2", "Right", ""), ("3", "Left", ""), ("4", "Both", "")),
default = "1", description = "Overhang Style", update = UpdateStairs, name = "")
bpy.types.Object.s_is_back1 = BoolProperty(name = "Turn Backwards?", default = False, update = UpdateStairs)
#winding
bpy.types.Object.s_w_rot = EnumProperty(name = "", items = (("1", "-90", ""), ("2", "-45", ""), ("3", "45", ""), ("4", "90", "")), default = "3", update = UpdateStairs)
bpy.types.Object.s_num_rot = IntProperty(name = "Stair To Begin Rotation On", update = UpdateStairs, min = 1, max = 13, default = 6)
#spiral
bpy.types.Object.s_rot = FloatProperty(name = "Total Rotation", unit = "ROTATION", min = radians(-720), max = radians(720), default = radians(90), update = UpdateStairs)
bpy.types.Object.s_pole_dia = FloatProperty(name = "Pole Diameter", min = 3.0 / 39.3701, max = 10.0 / 39.3701, default = 4.0 / 39.3701, subtype = "DISTANCE", update = UpdateStairs)
bpy.types.Object.s_pole_res = IntProperty(name = "Pole Resolution", min = 8, max = 64, default = 16, update = UpdateStairs)
bpy.types.Object.s_tread_res = IntProperty(name = "Tread Resolution", min = 0, max = 8, default = 0, update = UpdateStairs)
#materials
bpy.types.Object.s_is_material = BoolProperty(name = "Cycles Materials?", default = False, description = "Adds Cycles Materials", update = DeleteMaterials)
bpy.types.Object.s_is_preview = BoolProperty(name = "Preview Material?", default = False, description = "Preview Material On Object", update = PreviewMaterials)
#stairs
bpy.types.Object.s_im_scale = FloatProperty(name = "Image Scale", max = 10.0, min = 0.1, default = 1.0, description = "Change Image Scaling")
bpy.types.Object.s_col_image = StringProperty(name = "", subtype = "FILE_PATH", description = "File Path For Color Image")
bpy.types.Object.s_is_bump = BoolProperty(name = "Normal Map?", default = False, description = "Add Normal To Material?")
bpy.types.Object.s_norm_image = StringProperty(name = "", subtype = "FILE_PATH", description = "File Path For Normal Map Image")
bpy.types.Object.s_bump_amo = FloatProperty(name = "Normal Stength", min = 0.001, max = 2.000, default = 0.250, description = "Normal Map Strength")
bpy.types.Object.s_is_rotate = BoolProperty(name = "Rotate Image?", default = False, description = "Rotate Image 90 Degrees")
#pole/jacks
bpy.types.Object.s_im_scale2 = FloatProperty(name = "Image Scale", max = 10.0, min = 0.1, default = 1.0, description = "Change Image Scaling")
bpy.types.Object.s_col_image2 = StringProperty(name = "", subtype = "FILE_PATH", description = "File Path For Color Image")
bpy.types.Object.s_is_bump2 = BoolProperty(name = "Normal Map?", default = False, description = "Add Normal To Material?")
bpy.types.Object.s_norm_image2 = StringProperty(name = "", subtype = "FILE_PATH", description = "File Path For Normal Map Image")
bpy.types.Object.s_bump_amo2 = FloatProperty(name = "Normal Stength", min = 0.001, max = 2.000, default = 0.250, description = "Normal Map Strength")
bpy.types.Object.s_is_rotate2 = BoolProperty(name = "Rotate Image?", default = False, description = "Rotate Image 90 Degrees")
#uv
bpy.types.Object.s_unwrap = BoolProperty(name = "UV Unwrap?", default = True, description = "UV Unwraps Stairs", update = UnwrapStairs)
bpy.types.Object.s_random_uv = BoolProperty(name = "Random UV's?", default = True, description = "Random UV's", update = UpdateStairs)
#panel
class StairsPanel(bpy.types.Panel):
bl_idname = "OBJECT_PT_jarch_stairs"
bl_label = "JARCH Vis: Stairs"
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
bl_category = "JARCH Vis"
bl_options = {"DEFAULT_CLOSED"}
def draw(self, context):
layout = self.layout
o = context.object
if bpy.context.mode == "EDIT_MESH":
layout.label("JARCH Vis Doesn't Work In Edit Mode", icon = "ERROR")
else:
if o != None:
#if o.object_add == "none" and o.f_object_add == "none":
if True:
if o.s_object_add != "mesh":
if o.s_object_add == "add":
layout.label("Style:", icon = "OBJECT_DATA"); layout.prop(o, "s_style"); layout.separator(); layout.prop(o, "s_width"); layout.separator()
if o.s_style != "3": layout.prop(o, "s_num_steps")
else: layout.prop(o, "s_num_steps2")
                            if o.s_style != "3": layout.prop(o, "s_tread_width")
layout.prop(o, "s_riser_height");
#show height
h = round((o.s_num_steps * o.s_riser_height) + (1 / 39.3701), 2)
if context.scene.unit_settings.system == "IMPERIAL": layout.label("Height: " + str(round(((h * 39.3701) / 12), 2)) + " ft", icon = "INFO"); layout.separator()
else: layout.label("Height: " + str(round(h, 2)) + " m", icon = "INFO"); layout.separator()
if o.s_style == "1":
layout.prop(o, "s_is_set_in", icon = "OOPS")
if o.s_is_set_in == False: layout.prop(o, "s_is_close", icon = "AUTOMERGE_ON"); layout.prop(o, "s_is_light", icon = "OUTLINER_OB_LAMP")
if o.s_is_set_in == False and o.s_style != "3":
layout.separator()
if o.s_style == "1": layout.label("Overhang Style:", icon = "OUTLINER_OB_SURFACE"); layout.prop(o, "s_overhang")
layout.prop(o, "s_over_front")
if o.s_overhang != "1": layout.prop(o, "s_over_sides")
if o.s_style == "1":
layout.separator(); layout.prop(o, "s_is_riser", icon = "TRIA_UP")
layout.separator()
else: layout.prop(o, "s_over_front"); layout.separator()
if o.s_style == "1": #normal stairs
layout.separator(); layout.prop(o, "s_num_land")
if o.s_num_land > 0: layout.prop(o, "s_is_landing", icon = "FULLSCREEN")
for i in range(int(o.s_num_land)):
layout.separator(); layout.separator(); box = layout.box()
box.label("Stair Set " + str(i + 2) + ":", icon = "MOD_ARRAY"); box.separator()
box.prop(o, "s_num_steps" + str(i)); box.prop(o, "s_tread_width" + str(i)); box.prop(o, "s_riser_height" + str(i))
#display height
if i == 0:
h2 = h + round((o.s_riser_height0 * o.s_num_steps0) + o.s_riser_height + (1 / 39.3701), 2);
if context.scene.unit_settings.system == "IMPERIAL": box.label("Height: " + str(round(((h2 * 39.3701) / 12), 2)) + " ft", icon = "INFO")
else: box.label("Height: " + str(round(h2, 2)) + " m", icon = "INFO")
else:
                                        h2 = h + round((o.s_riser_height0 * o.s_num_steps0) + (o.s_riser_height1 * o.s_num_steps1) + (2 / 39.3701) + o.s_riser_height + o.s_riser_height0, 2)
if context.scene.unit_settings.system == "IMPERIAL": box.label("Height: " + str(round(((h2 * 39.3701) / 12), 2)) + " ft", icon = "INFO")
else: box.label("Height: " + str(round(h2, 2)) + " m", icon = "INFO")
box.separator(); box.label("Landing " + str(i + 1) + " Rotation:")
box.prop(o, "s_landing_rot" + str(i))
if (i == 0 and o.s_landing_rot0 != "1") or (i == 1 and o.s_landing_rot1 != "1"):
box.prop(o, "s_is_back" + str(i), icon = "LOOP_BACK")
box.prop(o, "s_landing_depth" + str(i))
if o.s_is_set_in == False:
box.separator(); box.label("Overhang Style:", icon = "OUTLINER_OB_SURFACE"); box.prop(o, "s_overhang" + str(i))
box.prop(o, "s_over_front" + str(i))
if (i == 0 and o.s_overhang0 != "1") or (i == 1 and o.s_overhang1 != "1"): box.prop(o, "s_over_sides" + str(i))
elif o.s_style == "2": #winding stairs
layout.prop(o, "s_num_rot"); row = self.layout.row(); row.label("Rotation: "); row.prop(o, "s_w_rot")
elif o.s_style == "3": #spiral stairs
layout.prop(o, "s_rot", icon = "MAN_ROT"); layout.prop(o, "s_tread_res"); layout.separator(); layout.prop(o, "s_pole_dia"); layout.prop(o, "s_pole_res")
#materials
layout.separator(); layout.prop(o, "s_unwrap", icon = "GROUP_UVS")
if o.s_unwrap == True:
layout.prop(o, "s_random_uv", icon = "RNDCURVE")
layout.separator()
if context.scene.render.engine == "CYCLES": layout.prop(o, "s_is_material", icon = "MATERIAL")
else: layout.label("Materials Only Supported With Cycles", icon = "POTATO")
layout.separator()
if o.s_is_material == True and context.scene.render.engine == "CYCLES":
#steps
box = layout.box()
box.label("Stairs:"); box.prop(o, "s_col_image", icon = "COLOR"); box.prop(o, "s_is_bump", icon = "SMOOTHCURVE"); box.separator()
if o.s_is_bump == True: box.prop(o, "s_norm_image", icon = "TEXTURE"); box.prop(o, "s_bump_amo")
box.prop(o, "s_im_scale", icon = "MAN_SCALE"); box.prop(o, "s_is_rotate", icon = "MAN_ROT")
#pole/jacks
layout.separator()
if o.s_style in ("1", "3"):
box = layout.box()
if o.s_style == "1": box.label("Jacks:")
else: box.label("Pole:")
box.prop(o, "s_col_image2", icon = "COLOR"); box.prop(o, "s_is_bump2", icon = "SMOOTHCURVE"); box.separator()
if o.s_is_bump2 == True: box.prop(o, "s_norm_image2", icon = "TEXTURE"); box.prop(o, "s_bump_amo2")
box.prop(o, "s_im_scale2", icon = "MAN_SCALE"); box.prop(o, "s_is_rotate2", icon = "MAN_ROT")
layout.separator(); layout.operator("mesh.jarch_stairs_materials", icon = "MATERIAL")
#operators
layout.separator(); layout.separator()
layout.operator("mesh.jarch_stairs_update", icon = "FILE_REFRESH")
layout.operator("mesh.jarch_stairs_mesh", icon = "OUTLINER_OB_MESH")
layout.operator("mesh.jarch_stairs_delete", icon = "CANCEL")
else:
layout.operator("mesh.jarch_stairs_add", icon = "MOD_ARRAY")
else:
layout.label("This Is A Mesh JARCH Vis Object", icon = "INFO")
else:
layout.label("This Is Already A JARCH Vis Object", icon = "POTATO")
else:
layout.operator("mesh.jarch_stairs_add", icon = "MOD_ARRAY")
class StairsAdd(bpy.types.Operator):
bl_idname = "mesh.jarch_stairs_add"
bl_label = "JARCH Vis: Add Stairs"
bl_description = "JARCH Vis: Stair Generator"
bl_options = {"UNDO"}
@classmethod
def poll(self, context):
return context.mode == "OBJECT"
def execute(self, context):
bpy.ops.mesh.primitive_cube_add()
o = context.object
o.s_object_add = "add"
return {"FINISHED"}
class StairsDelete(bpy.types.Operator):
bl_idname = "mesh.jarch_stairs_delete"
bl_label = "Delete Stairs"
bl_options = {"UNDO"}
def execute(self, context):
o = context.object
bpy.ops.object.delete()
for i in bpy.data.materials: #remove unused materials
if i.users == 0: bpy.data.materials.remove(i)
return {"FINISHED"}
class StairsUpdate(bpy.types.Operator):
bl_idname = "mesh.jarch_stairs_update"
bl_label = "Update Stairs"
bl_options = {"UNDO"}
def execute(self, context):
UpdateStairs(self, context)
return {"FINISHED"}
class StairsMaterial(bpy.types.Operator):
bl_idname = "mesh.jarch_stairs_materials"
bl_label = "Update\\Generate Materials"
bl_options = {"UNDO"}
def execute(self, context):
StairsMaterials(self, context)
return {"FINISHED"}
class StairsMesh(bpy.types.Operator):
bl_idname = "mesh.jarch_stairs_mesh"
bl_label = "Convert To Mesh"
bl_description = "Converts Stair Object To Normal Object (No Longer Editable)"
bl_options = {"UNDO"}
def execute(self, context):
o = context.object
o.s_object_add = "mesh"
return {"FINISHED"} | [
"[email protected]"
] | |
76f5e6b143c51b334cbf71e4876ac6baff943cc9 | f305f84ea6f721c2391300f0a60e21d2ce14f2a5 | /20_杂题/atc競プロ/AtCoder Beginner Contest/136/C - Build Stairs.py | a3c7a4f9144316a94745655b759a0702d680cf76 | [] | no_license | 981377660LMT/algorithm-study | f2ada3e6959338ae1bc21934a84f7314a8ecff82 | 7e79e26bb8f641868561b186e34c1127ed63c9e0 | refs/heads/master | 2023-09-01T18:26:16.525579 | 2023-09-01T12:21:58 | 2023-09-01T12:21:58 | 385,861,235 | 225 | 24 | null | null | null | null | UTF-8 | Python | false | false | 513 | py | # 给你一串数,每个数都只能做将高度减1和不改变两种操作,问这串数是否可以变成不减序列
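# Translation: each number may either be decreased by 1 or left unchanged;
# decide whether the sequence can be made non-decreasing.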
from typing import List
def buildStairs(nums: List[int]) -> bool:
"""倒序遍历"""
for i in range(len(nums) - 2, -1, -1):
if nums[i] > nums[i + 1]:
nums[i] -= 1
if nums[i] > nums[i + 1]:
return False
return True
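# Example calls (hypothetical inputs, not part of the original file):
#   buildStairs([1, 2, 1]) -> True   (lower the middle step to get 1, 1, 1)
#   buildStairs([1, 3, 1]) -> False  (3 can only drop to 2, still above 1)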
n = int(input())
nums = list(map(int, input().split()))
print("Yes" if buildStairs(nums) else "No")
| [
"[email protected]"
] | |
8b93eb66cc12288ac281f6f475b7920c885c8b8e | 6685318f6ef4ea44b38b8ecc5dd2c3186d895bb3 | /test/test_rw_lock.py | 4d039ede68ec9c518390a92ff6484e174ac3fac6 | [
"MIT",
"HPND"
] | permissive | samrushing/shrapnel | cd372da1f08a43776ffc6d39c71f1758269db0fa | 5835454dcfd4b526d7b117d11e4384f5ed60ae03 | refs/heads/master | 2021-01-18T09:45:35.979434 | 2012-06-22T04:39:42 | 2012-06-22T04:39:42 | 4,021,029 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,784 | py | # Copyright (c) 2002-2011 IronPort Systems and Cisco Systems
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Unittests for read-write lock."""
__version__ = '$Revision: #1 $'
import coro
import coro_unittest
import unittest
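# The coro.rw_lock API exercised below (sketch inferred from these tests):
#   lock = coro.rw_lock()
#   lock.read_lock(); lock.read_unlock()    # shared access, many readers
#   lock.write_lock(); lock.write_unlock()  # exclusive access, one writer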
class Test(unittest.TestCase):
def test_write_block_interrupt_schedule(self):
"""Test write block interrupt then schedule on rw_lock."""
lock = coro.rw_lock()
lock.read_lock()
self._resume_count = 0
threads = []
# Spawn some threads that will block and be interrupted.
for unused in xrange(5):
threads.append(coro.spawn(self._write_block, lock))
# Spawn a thread that we will not interrupt.
no_interrupt_thread = coro.spawn(self._write_block, lock)
coro.yield_slice()
# Cause an interrupt on these threads.
for t in threads:
t.shutdown()
# Now try to get the non-interrupted thread to run.
lock.read_unlock()
coro.yield_slice()
# Verify that it ran.
self.assertEqual(self._resume_count, 1)
def _write_block(self, lock):
lock.write_lock()
self._resume_count += 1
lock.write_unlock()
def _read_block(self, lock):
lock.read_lock()
self._resume_count += 1
lock.read_unlock()
def test_write_block_schedule_interrupt(self):
"""Test write block schedule then interrupt on rw_lock."""
lock = coro.rw_lock()
lock.read_lock()
self._resume_count = 0
threads = []
# Spawn some threads that will block and be interrupted.
for unused in xrange(5):
threads.append(coro.spawn(self._write_block, lock))
# Spawn a thread that we will not interrupt.
no_interrupt_thread = coro.spawn(self._write_block, lock)
coro.yield_slice()
# Schedule all of the threads.
lock.read_unlock()
# Now interrupt them.
for t in threads:
t.shutdown()
coro.yield_slice()
# Verify that it ran.
self.assertEqual(self._resume_count, 1)
def test_read_block_interrupt_schedule(self):
"""Test read block interrupt then schedule on rw_lock."""
lock = coro.rw_lock()
lock.write_lock()
self._resume_count = 0
threads = []
# Spawn some threads that will block and be interrupted.
for unused in xrange(5):
threads.append(coro.spawn(self._read_block, lock))
# Spawn a thread that we will not interrupt.
no_interrupt_thread = coro.spawn(self._read_block, lock)
coro.yield_slice()
# Cause an interrupt on these threads.
for t in threads:
t.shutdown()
# Now try to get the non-interrupted thread to run.
lock.write_unlock()
coro.yield_slice()
# Verify that it ran.
self.assertEqual(self._resume_count, 1)
def test_read_block_schedule_interrupt(self):
"""Test read block schedule then interrupt on rw_lock."""
lock = coro.rw_lock()
lock.write_lock()
self._resume_count = 0
threads = []
# Spawn some threads that will block and be interrupted.
for unused in xrange(5):
threads.append(coro.spawn(self._read_block, lock))
# Spawn a thread that we will not interrupt.
no_interrupt_thread = coro.spawn(self._read_block, lock)
coro.yield_slice()
# Schedule all of the threads.
lock.write_unlock()
# Now interrupt them.
for t in threads:
t.shutdown()
coro.yield_slice()
# Verify that it ran.
self.assertEqual(self._resume_count, 1)
if __name__ == '__main__':
coro_unittest.run_tests()
| [
"[email protected]"
] | |
22f57b81144a68d7684aceacd81df68b3acc95eb | bba0a10d2bced816410badcd0792826f0cee5fa8 | /pythoncode/cext/setup_CubicStokeslet2D.py | 538993e6c3be519f4e2c893edf31cd04f9dd6eb4 | [] | no_license | breecummins/polymercode | 8aae6bc3ff52d9824158a9c7835f0478693ff165 | f65b515ddf23bac38eacbc9d32ecd9ec5ec3de12 | refs/heads/master | 2021-05-13T11:54:12.029811 | 2018-01-11T19:39:11 | 2018-01-11T19:39:11 | 117,145,003 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 519 | py | '''python setup_foo.py build will build the extension module
foo.so in ./build/lib.arch-id/'''
from distutils.core import setup, Extension
import numpy
numpy_include_dirs = [numpy.get_include()]
module1 = Extension('CubicStokeslet2D',
                    include_dirs=numpy_include_dirs,
sources = ['CubicStokeslet2D.c'])
setup (name = '2D Cubic Stokeslet module',
version = '1.0',
description = 'Functions implementing regularized Stokeslets in 2D using a/( )^3 blob.',
ext_modules = [module1])
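# After "python setup_CubicStokeslet2D.py build", the compiled module can be
# imported from the build directory; the exposed functions depend on the C
# source, e.g. (assumed): import CubicStokeslet2D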
| [
"[email protected]"
] | |
ec3215b8fe8c9daf0af807cc25701f60f26bc323 | 089e53103ab25cd57c2d12b3f68533ef4c49493c | /backend/meme_world_27493/wsgi.py | 6a8b4f7e1cdd26f68fa75ca33a59179750134310 | [] | no_license | crowdbotics-apps/meme-world-27493 | b20ddc08fe77a59fa57e25e751dde40591da16fa | 5ad8483eb9d1949486bfb6f9fa077a9c23c34818 | refs/heads/master | 2023-05-02T05:48:06.923336 | 2021-05-27T20:53:49 | 2021-05-27T20:53:49 | 371,501,481 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | """
WSGI config for meme_world_27493 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'meme_world_27493.settings')
application = get_wsgi_application()
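# Typical deployment (sketch; assumes a WSGI server such as Gunicorn):
#   gunicorn meme_world_27493.wsgi:application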
| [
"[email protected]"
] | |
8bfd5859526dac8c7e09cfe981f01560487073ca | b0f0aaf75b3b6dfdf08e2356970b3b3d007e331f | /configs/example/fs_multiThread.py | 697c86460662d9e714551f436ea1f132903a1755 | [
"BSD-3-Clause",
"LicenseRef-scancode-proprietary-license",
"LGPL-2.0-or-later",
"MIT"
] | permissive | jjkotni/gem5-master | c30e0826f5d1d20a5714444389474ec9990c7539 | dc47dbf308fd04e506b712b588ff0c8eb12f079f | refs/heads/master | 2022-12-11T08:29:48.584708 | 2019-11-13T10:06:07 | 2019-11-13T10:06:07 | 221,422,045 | 0 | 1 | BSD-3-Clause | 2022-12-08T17:17:24 | 2019-11-13T09:31:33 | C++ | UTF-8 | Python | false | false | 16,277 | py | # Copyright (c) 2010-2013, 2016, 2019 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2012-2014 Mark D. Hill and David A. Wood
# Copyright (c) 2009-2011 Advanced Micro Devices, Inc.
# Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ali Saidi
# Brad Beckmann
from __future__ import print_function
from __future__ import absolute_import
import optparse
import sys
import m5
from m5.defines import buildEnv
from m5.objects import *
from m5.util import addToPath, fatal, warn
from m5.util.fdthelper import *
addToPath('../')
from ruby import Ruby
from common.FSConfig import *
from common.SysPaths import *
from common.Benchmarks import *
from common import Simulation
from common import CacheConfig
from common import MemConfig
from common import CpuConfig
from common import BPConfig
from common.Caches import *
from common import Options
def cmd_line_template():
if options.command_line and options.command_line_file:
print("Error: --command-line and --command-line-file are "
"mutually exclusive")
sys.exit(1)
if options.command_line:
return options.command_line
if options.command_line_file:
return open(options.command_line_file).read().strip()
return None
def build_test_system(np):
cmdline = cmd_line_template()
if buildEnv['TARGET_ISA'] == "alpha":
test_sys = makeLinuxAlphaSystem(test_mem_mode, bm[0], options.ruby,
cmdline=cmdline)
elif buildEnv['TARGET_ISA'] == "mips":
test_sys = makeLinuxMipsSystem(test_mem_mode, bm[0], cmdline=cmdline)
elif buildEnv['TARGET_ISA'] == "sparc":
test_sys = makeSparcSystem(test_mem_mode, bm[0], cmdline=cmdline)
elif buildEnv['TARGET_ISA'] == "x86":
test_sys = makeLinuxX86System(test_mem_mode, np, bm[0], options.ruby,
cmdline=cmdline)
elif buildEnv['TARGET_ISA'] == "arm":
test_sys = makeArmSystem(test_mem_mode, options.machine_type, np,
bm[0], options.dtb_filename,
bare_metal=options.bare_metal,
cmdline=cmdline,
external_memory=
options.external_memory_system,
ruby=options.ruby,
security=options.enable_security_extensions)
if options.enable_context_switch_stats_dump:
test_sys.enable_context_switch_stats_dump = True
else:
fatal("Incapable of building %s full system!", buildEnv['TARGET_ISA'])
# Set the cache line size for the entire system
test_sys.cache_line_size = options.cacheline_size
# Create a top-level voltage domain
test_sys.voltage_domain = VoltageDomain(voltage = options.sys_voltage)
# Create a source clock for the system and set the clock period
test_sys.clk_domain = SrcClockDomain(clock = options.sys_clock,
voltage_domain = test_sys.voltage_domain)
# Create a CPU voltage domain
test_sys.cpu_voltage_domain = VoltageDomain()
# Create a source clock for the CPUs and set the clock period
test_sys.cpu_clk_domain = SrcClockDomain(clock = options.cpu_clock,
voltage_domain =
test_sys.cpu_voltage_domain)
if options.kernel is not None:
test_sys.kernel = binary(options.kernel)
else:
print("Error: a kernel must be provided to run in full system mode")
sys.exit(1)
if options.script is not None:
test_sys.readfile = options.script
if options.lpae:
test_sys.have_lpae = True
if options.virtualisation:
test_sys.have_virtualization = True
test_sys.init_param = options.init_param
# For now, assign all the CPUs to the same clock domain
test_sys.cpu = [TestCPUClass(clk_domain=test_sys.cpu_clk_domain, cpu_id=i)
for i in range(np)]
if CpuConfig.is_kvm_cpu(TestCPUClass) or CpuConfig.is_kvm_cpu(FutureClass):
test_sys.kvm_vm = KvmVM()
if options.ruby:
bootmem = getattr(test_sys, 'bootmem', None)
Ruby.create_system(options, True, test_sys, test_sys.iobus,
test_sys._dma_ports, bootmem)
# Create a seperate clock domain for Ruby
test_sys.ruby.clk_domain = SrcClockDomain(clock = options.ruby_clock,
voltage_domain = test_sys.voltage_domain)
# Connect the ruby io port to the PIO bus,
# assuming that there is just one such port.
test_sys.iobus.master = test_sys.ruby._io_port.slave
for (i, cpu) in enumerate(test_sys.cpu):
#
# Tie the cpu ports to the correct ruby system ports
#
cpu.clk_domain = test_sys.cpu_clk_domain
cpu.createThreads()
cpu.createInterruptController()
cpu.icache_port = test_sys.ruby._cpu_ports[i].slave
cpu.dcache_port = test_sys.ruby._cpu_ports[i].slave
if buildEnv['TARGET_ISA'] in ("x86", "arm"):
cpu.itb.walker.port = test_sys.ruby._cpu_ports[i].slave
cpu.dtb.walker.port = test_sys.ruby._cpu_ports[i].slave
if buildEnv['TARGET_ISA'] in "x86":
cpu.interrupts[0].pio = test_sys.ruby._cpu_ports[i].master
cpu.interrupts[0].int_master = test_sys.ruby._cpu_ports[i].slave
cpu.interrupts[0].int_slave = test_sys.ruby._cpu_ports[i].master
else:
if options.caches or options.l2cache:
# By default the IOCache runs at the system clock
test_sys.iocache = IOCache(addr_ranges = test_sys.mem_ranges)
test_sys.iocache.cpu_side = test_sys.iobus.master
test_sys.iocache.mem_side = test_sys.membus.slave
elif not options.external_memory_system:
test_sys.iobridge = Bridge(delay='50ns', ranges = test_sys.mem_ranges)
test_sys.iobridge.slave = test_sys.iobus.master
test_sys.iobridge.master = test_sys.membus.slave
# Sanity check
if options.simpoint_profile:
if not CpuConfig.is_noncaching_cpu(TestCPUClass):
fatal("SimPoint generation should be done with atomic cpu")
if np > 1:
fatal("SimPoint generation not supported with more than one CPUs")
for i in range(np):
if options.simpoint_profile:
test_sys.cpu[i].addSimPointProbe(options.simpoint_interval)
if options.checker:
test_sys.cpu[i].addCheckerCpu()
if options.bp_type:
bpClass = BPConfig.get(options.bp_type)
test_sys.cpu[i].branchPred = bpClass()
if options.indirect_bp_type:
IndirectBPClass = \
BPConfig.get_indirect(options.indirect_bp_type)
test_sys.cpu[i].branchPred.indirectBranchPred = \
IndirectBPClass()
test_sys.cpu[i].createThreads()
# If elastic tracing is enabled when not restoring from checkpoint and
# when not fast forwarding using the atomic cpu, then check that the
# TestCPUClass is DerivO3CPU or inherits from DerivO3CPU. If the check
# passes then attach the elastic trace probe.
# If restoring from checkpoint or fast forwarding, the code that does this for
# FutureCPUClass is in the Simulation module. If the check passes then the
# elastic trace probe is attached to the switch CPUs.
if options.elastic_trace_en and options.checkpoint_restore == None and \
not options.fast_forward:
CpuConfig.config_etrace(TestCPUClass, test_sys.cpu, options)
CacheConfig.config_cache(options, test_sys)
MemConfig.config_mem(options, test_sys)
test_sys.membus.master = test_sys.cpu[0].interrupts[1].pio
test_sys.membus.master = test_sys.cpu[0].interrupts[1].int_slave
test_sys.membus.slave = test_sys.cpu[0].interrupts[1].int_master
return test_sys
def build_drive_system(np):
# driver system CPU is always simple, so is the memory
# Note this is an assignment of a class, not an instance.
DriveCPUClass = AtomicSimpleCPU
drive_mem_mode = 'atomic'
DriveMemClass = SimpleMemory
cmdline = cmd_line_template()
if buildEnv['TARGET_ISA'] == 'alpha':
drive_sys = makeLinuxAlphaSystem(drive_mem_mode, bm[1],
cmdline=cmdline)
elif buildEnv['TARGET_ISA'] == 'mips':
drive_sys = makeLinuxMipsSystem(drive_mem_mode, bm[1], cmdline=cmdline)
elif buildEnv['TARGET_ISA'] == 'sparc':
drive_sys = makeSparcSystem(drive_mem_mode, bm[1], cmdline=cmdline)
elif buildEnv['TARGET_ISA'] == 'x86':
drive_sys = makeLinuxX86System(drive_mem_mode, np, bm[1],
cmdline=cmdline)
elif buildEnv['TARGET_ISA'] == 'arm':
drive_sys = makeArmSystem(drive_mem_mode, options.machine_type, np,
bm[1], options.dtb_filename, cmdline=cmdline)
# Create a top-level voltage domain
drive_sys.voltage_domain = VoltageDomain(voltage = options.sys_voltage)
# Create a source clock for the system and set the clock period
drive_sys.clk_domain = SrcClockDomain(clock = options.sys_clock,
voltage_domain = drive_sys.voltage_domain)
# Create a CPU voltage domain
drive_sys.cpu_voltage_domain = VoltageDomain()
# Create a source clock for the CPUs and set the clock period
drive_sys.cpu_clk_domain = SrcClockDomain(clock = options.cpu_clock,
voltage_domain =
drive_sys.cpu_voltage_domain)
drive_sys.cpu = DriveCPUClass(clk_domain=drive_sys.cpu_clk_domain,
cpu_id=0)
drive_sys.cpu.createThreads()
drive_sys.cpu.createInterruptController()
drive_sys.cpu.connectAllPorts(drive_sys.membus)
if options.kernel is not None:
drive_sys.kernel = binary(options.kernel)
else:
print("Error: a kernel must be provided to run in full system mode")
sys.exit(1)
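    # KVM-accelerated CPUs need a KVM virtual machine object in the system.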
if CpuConfig.is_kvm_cpu(DriveCPUClass):
drive_sys.kvm_vm = KvmVM()
drive_sys.iobridge = Bridge(delay='50ns',
ranges = drive_sys.mem_ranges)
drive_sys.iobridge.slave = drive_sys.iobus.master
drive_sys.iobridge.master = drive_sys.membus.slave
# Create the appropriate memory controllers and connect them to the
# memory bus
drive_sys.mem_ctrls = [DriveMemClass(range = r)
for r in drive_sys.mem_ranges]
for i in range(len(drive_sys.mem_ctrls)):
drive_sys.mem_ctrls[i].port = drive_sys.membus.master
drive_sys.init_param = options.init_param
return drive_sys
# Add options
parser = optparse.OptionParser()
Options.addCommonOptions(parser)
Options.addFSOptions(parser)
# Add the ruby specific and protocol specific options
if '--ruby' in sys.argv:
Ruby.define_options(parser)
(options, args) = parser.parse_args()
if args:
print("Error: script doesn't take any positional arguments")
sys.exit(1)
# system under test can be any CPU
(TestCPUClass, test_mem_mode, FutureClass) = Simulation.setCPUClass(options)
#kotnis - start
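# Local modification: run the test system with two hardware threads
# (SMT) per CPU; test_sys.multi_thread is enabled after the system is
# built below.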
TestCPUClass.numThreads = 2
#kotnis - end
# Match the memories with the CPUs, based on the options for the test system
TestMemClass = Simulation.setMemClass(options)
if options.benchmark:
try:
bm = Benchmarks[options.benchmark]
except KeyError:
print("Error benchmark %s has not been defined." % options.benchmark)
print("Valid benchmarks are: %s" % DefinedBenchmarks)
sys.exit(1)
else:
if options.dual:
bm = [SysConfig(disk=options.disk_image, rootdev=options.root_device,
mem=options.mem_size, os_type=options.os_type),
SysConfig(disk=options.disk_image, rootdev=options.root_device,
mem=options.mem_size, os_type=options.os_type)]
else:
bm = [SysConfig(disk=options.disk_image, rootdev=options.root_device,
mem=options.mem_size, os_type=options.os_type)]
np = options.num_cpus
test_sys = build_test_system(np)
test_sys.multi_thread = True
print("Benchmark ", bm)
if len(bm) == 2:
drive_sys = build_drive_system(np)
root = makeDualRoot(True, test_sys, drive_sys, options.etherdump)
elif len(bm) == 1 and options.dist:
# This system is part of a dist-gem5 simulation
root = makeDistRoot(test_sys,
options.dist_rank,
options.dist_size,
options.dist_server_name,
options.dist_server_port,
options.dist_sync_repeat,
options.dist_sync_start,
options.ethernet_linkspeed,
options.ethernet_linkdelay,
                        options.etherdump)
elif len(bm) == 1:
root = Root(full_system=True, system=test_sys)
else:
print("Error I don't know how to create more than 2 systems.")
sys.exit(1)
if options.timesync:
root.time_sync_enable = True
if options.frame_capture:
VncServer.frame_capture = True
if buildEnv['TARGET_ISA'] == "arm" and not options.bare_metal \
and not options.dtb_filename:
if options.machine_type not in ["VExpress_GEM5", "VExpress_GEM5_V1"]:
warn("Can only correctly generate a dtb for VExpress_GEM5_V1 " \
"platforms, unless custom hardware models have been equipped "\
"with generation functionality.")
# Generate a Device Tree
for sysname in ('system', 'testsys', 'drivesys'):
if hasattr(root, sysname):
sys = getattr(root, sysname)
sys.generateDtb(m5.options.outdir, '%s.dtb' % sysname)
Simulation.setWorkCountOptions(test_sys, options)
Simulation.run(options, root, test_sys, FutureClass)

# === File: Python_codes/p03700/s311983403.py (repo: Aasthaengg/IBMdataset) ===
N,A,B=map(int,input().split())
H=[]
for _ in range(N):
H.append(int(input()))
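
# Binary search on the answer. Each special attack deals B damage to
# every monster and A in total to the one it targets, so after `mid`
# attacks a monster with health h still needs
# ceil(max(h - B*mid, 0) / (A - B)) targeted hits (written below with
# the ceiling-division idiom -(-x // y)). `mid` is feasible when those
# hits sum to at most mid; `ok` stays feasible and `ng` infeasible.
# This assumes A > B, which the division by A - B requires.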
ok=10**9+1
ng=0
while ok-ng>1:
mid=(ok+ng)//2
dmg=B*mid
tgt=0
for item in H:
tgt+=-(-max(item-dmg,0)//(A-B))
if tgt<=mid:
ok=mid
else:
ng=mid
print(ok)

# === File: pysnmp/VERITAS-CLUSTER-MIB.py (repo: agustinhenze/mibs.snmplabs.com, Apache-2.0) ===
#
# PySNMP MIB module VERITAS-CLUSTER-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/VERITAS-CLUSTER-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:26:59 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
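# The module below is machine-generated: it registers the VERITAS
# Cluster Server (VCS) trap variables and notification types under the
# enterprise subtree 1.3.6.1.4.1.1302.3.8.10
# (veritassoftware.products.veritasCluster.clustertraps).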
Integer, OctetString, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "Integer", "OctetString", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint", "ConstraintsIntersection")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
enterprises, Unsigned32, Bits, Counter32, ObjectIdentity, Gauge32, Counter64, NotificationType, IpAddress, ModuleIdentity, NotificationType, MibIdentifier, MibScalar, MibTable, MibTableRow, MibTableColumn, Integer32, iso, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "enterprises", "Unsigned32", "Bits", "Counter32", "ObjectIdentity", "Gauge32", "Counter64", "NotificationType", "IpAddress", "ModuleIdentity", "NotificationType", "MibIdentifier", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Integer32", "iso", "TimeTicks")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
veritassoftware = MibIdentifier((1, 3, 6, 1, 4, 1, 1302))
products = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3))
veritasCluster = ModuleIdentity((1, 3, 6, 1, 4, 1, 1302, 3, 8))
if mibBuilder.loadTexts: veritasCluster.setLastUpdated('03202001')
if mibBuilder.loadTexts: veritasCluster.setOrganization('VERITAS Software, Inc.')
clustertraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10))
clustertrapvars = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1))
clustertrapsGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2))
resourcesTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 1))
groupsTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2))
systemsTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3))
vcsHeartbeatTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 4))
gcmHeartbeatTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 5))
vcsTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 6))
gcmSiteTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 7))
agentsTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 8))
externalTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 9))
rdcTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10))
trapOrigin = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: trapOrigin.setStatus('mandatory')
entityType = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entityType.setStatus('mandatory')
entitySubType = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entitySubType.setStatus('mandatory')
entityName = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entityName.setStatus('mandatory')
entityOwner = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entityOwner.setStatus('mandatory')
systemName = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemName.setStatus('mandatory')
systemLocation = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: systemLocation.setStatus('mandatory')
entityState = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entityState.setStatus('mandatory')
entityContainerType = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entityContainerType.setStatus('mandatory')
entityContainerName = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: entityContainerName.setStatus('mandatory')
peerSystemName = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: peerSystemName.setStatus('mandatory')
peerSystemLocation = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 12), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: peerSystemLocation.setStatus('mandatory')
message = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 13), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: message.setStatus('mandatory')
eventTime = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 14), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 50))).setMaxAccess("readonly")
if mibBuilder.loadTexts: eventTime.setStatus('mandatory')
severityId = MibScalar((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("information", 0), ("warning", 1), ("error", 2), ("severeError", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: severityId.setStatus('mandatory')
clusterResourceStateUnknownTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 1) + (0,1)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterResourceMonitorTimeoutTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 1) + (0,2)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterResourceNotGoingOfflineTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 1) + (0,3)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterResourceRestartingByAgentTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 1) + (0,4)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterResourceWentOnlineByItselfTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 1) + (0,5)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterResourceFaultedTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 1) + (0,6)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupOnlineTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,1)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupOfflineTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,2)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupAutoDisabledTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,3)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupFaultedTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,4)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupFaultedAndNowhereToFailoverTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,5)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupRestartingTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,6)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupInitiatingForSwitchingTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,7)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupConcurencyViolationTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,8)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGroupRestInRspnToPerstResGoOnlineTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 2) + (0,9)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterFirstSystemUpTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,1)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterSystemRestartingByHashadowTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,2)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterSystemInJeopardyTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,3)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterSystemFaultedTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,4)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterSystemJoinedClusterTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,5)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterSystemExitedManuallyTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,6)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterSystemUpButNotInClusterTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,7)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterSystemUsageExceededThresholdTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 3) + (0,8)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterGUIUserLoginTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 6) + (0,2)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterAgentRestartingTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 8) + (0,1)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterAgentFaultedTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 8) + (0,2)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCRlinkInconsistentTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,1)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCRlinkNotUpToDateTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,2)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCTakeoverFailedTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,3)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCMigrateFailedTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,4)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCTakeoverSuccessTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,5)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCMigrateSuccessTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,6)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCActingSecondaryTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,7)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCResyncFailedTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,8)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCResyncSuccessTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,9)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
clusterRDCGroupOfflineTrap = NotificationType((1, 3, 6, 1, 4, 1, 1302, 3, 8, 10, 2, 10) + (0,10)).setObjects(("VERITAS-CLUSTER-MIB", "severityId"), ("VERITAS-CLUSTER-MIB", "eventTime"), ("VERITAS-CLUSTER-MIB", "entityName"), ("VERITAS-CLUSTER-MIB", "entityType"), ("VERITAS-CLUSTER-MIB", "entitySubType"), ("VERITAS-CLUSTER-MIB", "entityState"), ("VERITAS-CLUSTER-MIB", "trapOrigin"), ("VERITAS-CLUSTER-MIB", "systemName"), ("VERITAS-CLUSTER-MIB", "systemLocation"), ("VERITAS-CLUSTER-MIB", "entityContainerName"), ("VERITAS-CLUSTER-MIB", "entityContainerType"), ("VERITAS-CLUSTER-MIB", "entityOwner"), ("VERITAS-CLUSTER-MIB", "message"))
mibBuilder.exportSymbols("VERITAS-CLUSTER-MIB", clusterRDCGroupOfflineTrap=clusterRDCGroupOfflineTrap, clusterSystemJoinedClusterTrap=clusterSystemJoinedClusterTrap, clusterGroupOnlineTrap=clusterGroupOnlineTrap, clusterSystemUpButNotInClusterTrap=clusterSystemUpButNotInClusterTrap, clusterGroupInitiatingForSwitchingTrap=clusterGroupInitiatingForSwitchingTrap, message=message, clusterGroupAutoDisabledTrap=clusterGroupAutoDisabledTrap, clusterSystemRestartingByHashadowTrap=clusterSystemRestartingByHashadowTrap, clustertraps=clustertraps, clusterSystemUsageExceededThresholdTrap=clusterSystemUsageExceededThresholdTrap, clusterAgentFaultedTrap=clusterAgentFaultedTrap, veritasCluster=veritasCluster, gcmHeartbeatTraps=gcmHeartbeatTraps, clusterResourceRestartingByAgentTrap=clusterResourceRestartingByAgentTrap, clusterFirstSystemUpTrap=clusterFirstSystemUpTrap, entityState=entityState, vcsHeartbeatTraps=vcsHeartbeatTraps, clusterGroupRestartingTrap=clusterGroupRestartingTrap, systemLocation=systemLocation, clusterSystemInJeopardyTrap=clusterSystemInJeopardyTrap, products=products, groupsTraps=groupsTraps, clusterRDCMigrateSuccessTrap=clusterRDCMigrateSuccessTrap, gcmSiteTraps=gcmSiteTraps, resourcesTraps=resourcesTraps, clusterRDCActingSecondaryTrap=clusterRDCActingSecondaryTrap, clusterGUIUserLoginTrap=clusterGUIUserLoginTrap, entityType=entityType, clusterGroupFaultedAndNowhereToFailoverTrap=clusterGroupFaultedAndNowhereToFailoverTrap, PYSNMP_MODULE_ID=veritasCluster, clusterGroupRestInRspnToPerstResGoOnlineTrap=clusterGroupRestInRspnToPerstResGoOnlineTrap, systemName=systemName, clusterSystemExitedManuallyTrap=clusterSystemExitedManuallyTrap, clusterResourceWentOnlineByItselfTrap=clusterResourceWentOnlineByItselfTrap, systemsTraps=systemsTraps, entityOwner=entityOwner, clusterRDCTakeoverFailedTrap=clusterRDCTakeoverFailedTrap, clusterRDCResyncSuccessTrap=clusterRDCResyncSuccessTrap, clusterResourceNotGoingOfflineTrap=clusterResourceNotGoingOfflineTrap, agentsTraps=agentsTraps, entityName=entityName, peerSystemLocation=peerSystemLocation, clusterAgentRestartingTrap=clusterAgentRestartingTrap, clusterRDCRlinkInconsistentTrap=clusterRDCRlinkInconsistentTrap, clustertrapvars=clustertrapvars, externalTraps=externalTraps, eventTime=eventTime, clusterGroupConcurencyViolationTrap=clusterGroupConcurencyViolationTrap, severityId=severityId, clusterRDCResyncFailedTrap=clusterRDCResyncFailedTrap, trapOrigin=trapOrigin, entityContainerType=entityContainerType, rdcTraps=rdcTraps, entitySubType=entitySubType, clusterResourceMonitorTimeoutTrap=clusterResourceMonitorTimeoutTrap, clusterRDCMigrateFailedTrap=clusterRDCMigrateFailedTrap, entityContainerName=entityContainerName, clusterResourceStateUnknownTrap=clusterResourceStateUnknownTrap, veritassoftware=veritassoftware, clusterGroupFaultedTrap=clusterGroupFaultedTrap, clusterRDCTakeoverSuccessTrap=clusterRDCTakeoverSuccessTrap, peerSystemName=peerSystemName, clustertrapsGroups=clustertrapsGroups, clusterResourceFaultedTrap=clusterResourceFaultedTrap, clusterSystemFaultedTrap=clusterSystemFaultedTrap, clusterRDCRlinkNotUpToDateTrap=clusterRDCRlinkNotUpToDateTrap, clusterGroupOfflineTrap=clusterGroupOfflineTrap, vcsTraps=vcsTraps)

# === File: pandas/tests/test_generic.py (repo: pierre-haessig/pandas, BSD-3-Clause) ===
# pylint: disable-msg=E1101,W0612
from datetime import datetime, timedelta
import operator
import unittest
import nose
import numpy as np
from numpy import nan
import pandas as pd
from pandas import (Index, Series, DataFrame, Panel,
isnull, notnull,date_range, _np_version_under1p7)
from pandas.core.index import Index, MultiIndex
from pandas.tseries.index import Timestamp, DatetimeIndex
import pandas.core.common as com
from pandas.compat import StringIO, lrange, range, zip, u, OrderedDict, long
from pandas import compat
from pandas.util.testing import (assert_series_equal,
assert_frame_equal,
assert_panel_equal,
assert_almost_equal,
ensure_clean)
import pandas.util.testing as tm
def _skip_if_no_scipy():
try:
import scipy.interpolate
except ImportError:
raise nose.SkipTest('scipy.interpolate missing')
def _skip_if_no_pchip():
try:
from scipy.interpolate import pchip_interpolate
except ImportError:
raise nose.SkipTest('scipy.interpolate.pchip missing')
#------------------------------------------------------------------------------
# Generic types test cases
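# Generic is a mixin: each concrete test class below binds a container
# type via _typ and an equality check via _comparator, and inherits
# these shared tests for Series, DataFrame, and Panel alike.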
class Generic(object):
_multiprocess_can_split_ = True
def setUp(self):
import warnings
warnings.filterwarnings(action='ignore', category=FutureWarning)
@property
def _ndim(self):
return self._typ._AXIS_LEN
def _axes(self):
""" return the axes for my object typ """
return self._typ._AXIS_ORDERS
def _construct(self, shape, value=None, dtype=None, **kwargs):
""" construct an object for the given shape
if value is specified use that if its a scalar
if value is an array, repeat it as needed """
if isinstance(shape,int):
shape = tuple([shape] * self._ndim)
if value is not None:
if np.isscalar(value):
if value == 'empty':
arr = None
# remove the info axis
kwargs.pop(self._typ._info_axis_name,None)
else:
arr = np.empty(shape,dtype=dtype)
arr.fill(value)
else:
fshape = np.prod(shape)
arr = value.ravel()
                new_shape = fshape // arr.shape[0]
if fshape % arr.shape[0] != 0:
raise Exception("invalid value passed in _construct")
arr = np.repeat(arr,new_shape).reshape(shape)
else:
arr = np.random.randn(*shape)
return self._typ(arr,**kwargs)
def _compare(self, result, expected):
self._comparator(result,expected)
def test_rename(self):
# single axis
for axis in self._axes():
kwargs = { axis : list('ABCD') }
obj = self._construct(4,**kwargs)
# no values passed
#self.assertRaises(Exception, o.rename(str.lower))
# rename a single axis
result = obj.rename(**{ axis : str.lower })
expected = obj.copy()
setattr(expected,axis,list('abcd'))
self._compare(result, expected)
# multiple axes at once
def test_get_numeric_data(self):
n = 4
kwargs = { }
for i in range(self._ndim):
kwargs[self._typ._AXIS_NAMES[i]] = list(range(n))
# get the numeric data
o = self._construct(n,**kwargs)
result = o._get_numeric_data()
self._compare(result, o)
# non-inclusion
result = o._get_bool_data()
expected = self._construct(n,value='empty',**kwargs)
self._compare(result,expected)
# get the bool data
arr = np.array([True,True,False,True])
o = self._construct(n,value=arr,**kwargs)
result = o._get_numeric_data()
self._compare(result, o)
        # _get_numeric_data includes _get_bool_data, so we can't test for non-inclusion
def test_nonzero(self):
# GH 4633
# look at the boolean/nonzero behavior for objects
obj = self._construct(shape=4)
self.assertRaises(ValueError, lambda : bool(obj == 0))
self.assertRaises(ValueError, lambda : bool(obj == 1))
self.assertRaises(ValueError, lambda : bool(obj))
obj = self._construct(shape=4,value=1)
self.assertRaises(ValueError, lambda : bool(obj == 0))
self.assertRaises(ValueError, lambda : bool(obj == 1))
self.assertRaises(ValueError, lambda : bool(obj))
obj = self._construct(shape=4,value=np.nan)
self.assertRaises(ValueError, lambda : bool(obj == 0))
self.assertRaises(ValueError, lambda : bool(obj == 1))
self.assertRaises(ValueError, lambda : bool(obj))
# empty
obj = self._construct(shape=0)
self.assertRaises(ValueError, lambda : bool(obj))
# invalid behaviors
obj1 = self._construct(shape=4,value=1)
obj2 = self._construct(shape=4,value=1)
def f():
if obj1:
print("this works and shouldn't")
self.assertRaises(ValueError, f)
self.assertRaises(ValueError, lambda : obj1 and obj2)
self.assertRaises(ValueError, lambda : obj1 or obj2)
self.assertRaises(ValueError, lambda : not obj1)
def test_numpy_1_7_compat_numeric_methods(self):
if _np_version_under1p7:
raise nose.SkipTest("numpy < 1.7")
# GH 4435
        # numpy in 1.7 tries to pass additional arguments to pandas functions
o = self._construct(shape=4)
        for op in ['min','max','var','std','prod','sum','cumsum','cumprod',
                   'median','skew','kurt','compound','cummax','cummin','all','any']:
f = getattr(np,op,None)
if f is not None:
f(o)
def test_downcast(self):
# test close downcasting
o = self._construct(shape=4, value=9, dtype=np.int64)
result = o.copy()
result._data = o._data.downcast(dtypes='infer')
self._compare(result, o)
o = self._construct(shape=4, value=9.)
expected = o.astype(np.int64)
result = o.copy()
result._data = o._data.downcast(dtypes='infer')
self._compare(result, expected)
o = self._construct(shape=4, value=9.5)
result = o.copy()
result._data = o._data.downcast(dtypes='infer')
self._compare(result, o)
# are close
o = self._construct(shape=4, value=9.000000000005)
result = o.copy()
result._data = o._data.downcast(dtypes='infer')
expected = o.astype(np.int64)
self._compare(result, expected)
class TestSeries(unittest.TestCase, Generic):
_typ = Series
_comparator = lambda self, x, y: assert_series_equal(x,y)
def setUp(self):
self.ts = tm.makeTimeSeries() # Was at top level in test_series
self.ts.name = 'ts'
self.series = tm.makeStringSeries()
self.series.name = 'series'
def test_rename_mi(self):
s = Series([11,21,31],
index=MultiIndex.from_tuples([("A",x) for x in ["a","B","c"]]))
result = s.rename(str.lower)
def test_get_numeric_data_preserve_dtype(self):
# get the numeric data
o = Series([1,2,3])
result = o._get_numeric_data()
self._compare(result, o)
o = Series([1,'2',3.])
result = o._get_numeric_data()
expected = Series([],dtype=object)
self._compare(result, expected)
o = Series([True,False,True])
result = o._get_numeric_data()
self._compare(result, o)
o = Series([True,False,True])
result = o._get_bool_data()
self._compare(result, o)
o = Series(date_range('20130101',periods=3))
result = o._get_numeric_data()
expected = Series([],dtype='M8[ns]')
self._compare(result, expected)
def test_nonzero_single_element(self):
# allow single item via bool method
s = Series([True])
self.assert_(s.bool() is True)
s = Series([False])
self.assert_(s.bool() is False)
# single item nan to raise
for s in [ Series([np.nan]), Series([pd.NaT]), Series([True]), Series([False]) ]:
self.assertRaises(ValueError, lambda : bool(s))
for s in [ Series([np.nan]), Series([pd.NaT])]:
self.assertRaises(ValueError, lambda : s.bool())
# multiple bool are still an error
for s in [Series([True,True]), Series([False, False])]:
self.assertRaises(ValueError, lambda : bool(s))
self.assertRaises(ValueError, lambda : s.bool())
# single non-bool are an error
for s in [Series([1]), Series([0]),
Series(['a']), Series([0.0])]:
self.assertRaises(ValueError, lambda : bool(s))
self.assertRaises(ValueError, lambda : s.bool())
def test_interpolate(self):
ts = Series(np.arange(len(self.ts), dtype=float), self.ts.index)
ts_copy = ts.copy()
ts_copy[5:10] = np.NaN
linear_interp = ts_copy.interpolate(method='linear')
self.assert_(np.array_equal(linear_interp, ts))
ord_ts = Series([d.toordinal() for d in self.ts.index],
index=self.ts.index).astype(float)
ord_ts_copy = ord_ts.copy()
ord_ts_copy[5:10] = np.NaN
time_interp = ord_ts_copy.interpolate(method='time')
self.assert_(np.array_equal(time_interp, ord_ts))
# try time interpolation on a non-TimeSeries
self.assertRaises(ValueError, self.series.interpolate, method='time')
def test_interpolate_corners(self):
s = Series([np.nan, np.nan])
assert_series_equal(s.interpolate(), s)
s = Series([]).interpolate()
assert_series_equal(s.interpolate(), s)
_skip_if_no_scipy()
s = Series([np.nan, np.nan])
assert_series_equal(s.interpolate(method='polynomial', order=1), s)
s = Series([]).interpolate()
assert_series_equal(s.interpolate(method='polynomial', order=1), s)
def test_interpolate_index_values(self):
s = Series(np.nan, index=np.sort(np.random.rand(30)))
s[::3] = np.random.randn(10)
vals = s.index.values.astype(float)
result = s.interpolate(method='values')
expected = s.copy()
bad = isnull(expected.values)
        good = ~bad
expected = Series(
np.interp(vals[bad], vals[good], s.values[good]), index=s.index[bad])
assert_series_equal(result[bad], expected)
def test_interpolate_non_ts(self):
s = Series([1, 3, np.nan, np.nan, np.nan, 11])
with tm.assertRaises(ValueError):
s.interpolate(method='time')
# New interpolation tests
def test_nan_interpolate(self):
s = Series([0, 1, np.nan, 3])
result = s.interpolate()
expected = Series([0, 1, 2, 3])
assert_series_equal(result, expected)
_skip_if_no_scipy()
result = s.interpolate(method='polynomial', order=1)
assert_series_equal(result, expected)
def test_nan_irregular_index(self):
s = Series([1, 2, np.nan, 4], index=[1, 3, 5, 9])
result = s.interpolate()
expected = Series([1, 2, 3, 4], index=[1, 3, 5, 9])
assert_series_equal(result, expected)
def test_nan_str_index(self):
s = Series([0, 1, 2, np.nan], index=list('abcd'))
result = s.interpolate()
expected = Series([0, 1, 2, 2], index=list('abcd'))
assert_series_equal(result, expected)
def test_interp_quad(self):
_skip_if_no_scipy()
sq = Series([1, 4, np.nan, 16], index=[1, 2, 3, 4])
result = sq.interpolate(method='quadratic')
expected = Series([1, 4, 9, 16], index=[1, 2, 3, 4])
assert_series_equal(result, expected)
def test_interp_scipy_basic(self):
_skip_if_no_scipy()
s = Series([1, 3, np.nan, 12, np.nan, 25])
# slinear
expected = Series([1., 3., 7.5, 12., 18.5, 25.])
result = s.interpolate(method='slinear')
assert_series_equal(result, expected)
# nearest
expected = Series([1, 3, 3, 12, 12, 25])
result = s.interpolate(method='nearest')
assert_series_equal(result, expected)
# zero
expected = Series([1, 3, 3, 12, 12, 25])
result = s.interpolate(method='zero')
assert_series_equal(result, expected)
# quadratic
expected = Series([1, 3., 6.769231, 12., 18.230769, 25.])
result = s.interpolate(method='quadratic')
assert_series_equal(result, expected)
# cubic
expected = Series([1., 3., 6.8, 12., 18.2, 25.])
result = s.interpolate(method='cubic')
assert_series_equal(result, expected)
def test_interp_limit(self):
s = Series([1, 3, np.nan, np.nan, np.nan, 11])
expected = Series([1., 3., 5., 7., np.nan, 11.])
result = s.interpolate(method='linear', limit=2)
assert_series_equal(result, expected)
def test_interp_all_good(self):
# scipy
_skip_if_no_scipy()
s = Series([1, 2, 3])
result = s.interpolate(method='polynomial', order=1)
assert_series_equal(result, s)
# non-scipy
result = s.interpolate()
assert_series_equal(result, s)
def test_interp_multiIndex(self):
idx = MultiIndex.from_tuples([(0, 'a'), (1, 'b'), (2, 'c')])
s = Series([1, 2, np.nan], index=idx)
expected = s.copy()
expected.loc[2] = 2
expected = expected.astype(np.int64)
result = s.interpolate()
assert_series_equal(result, expected)
_skip_if_no_scipy()
with tm.assertRaises(ValueError):
s.interpolate(method='polynomial', order=1)
def test_interp_nonmono_raise(self):
_skip_if_no_scipy()
s = pd.Series([1, 2, 3], index=[0, 2, 1])
with tm.assertRaises(ValueError):
s.interpolate(method='krogh')
class TestDataFrame(unittest.TestCase, Generic):
_typ = DataFrame
_comparator = lambda self, x, y: assert_frame_equal(x,y)
def test_rename_mi(self):
df = DataFrame([11,21,31],
index=MultiIndex.from_tuples([("A",x) for x in ["a","B","c"]]))
result = df.rename(str.lower)
def test_nonzero_single_element(self):
# allow single item via bool method
df = DataFrame([[True]])
self.assert_(df.bool() is True)
df = DataFrame([[False]])
self.assert_(df.bool() is False)
df = DataFrame([[False, False]])
self.assertRaises(ValueError, lambda : df.bool())
self.assertRaises(ValueError, lambda : bool(df))
def test_get_numeric_data_preserve_dtype(self):
# get the numeric data
o = DataFrame({'A': [1, '2', 3.]})
result = o._get_numeric_data()
expected = DataFrame(index=[0, 1, 2], dtype=object)
self._compare(result, expected)
def test_interp_basic(self):
df = DataFrame({'A': [1, 2, np.nan, 4], 'B': [1, 4, 9, np.nan],
'C': [1, 2, 3, 5], 'D': list('abcd')})
expected = DataFrame({'A': [1, 2, 3, 4], 'B': [1, 4, 9, 9],
'C': [1, 2, 3, 5], 'D': list('abcd')})
result = df.interpolate()
assert_frame_equal(result, expected)
result = df.set_index('C').interpolate()
expected = df.set_index('C')
expected.A.loc[3] = 3
expected.B.loc[5] = 9
expected[['A', 'B']] = expected[['A', 'B']].astype(np.int64)
assert_frame_equal(result, expected)
def test_interp_bad_method(self):
df = DataFrame({'A': [1, 2, np.nan, 4], 'B': [1, 4, 9, np.nan],
'C': [1, 2, 3, 5], 'D': list('abcd')})
with tm.assertRaises(ValueError):
df.interpolate(method='not_a_method')
def test_interp_combo(self):
df = DataFrame({'A': [1., 2., np.nan, 4.], 'B': [1, 4, 9, np.nan],
'C': [1, 2, 3, 5], 'D': list('abcd')})
result = df['A'].interpolate()
expected = Series([1, 2, 3, 4])
assert_series_equal(result, expected)
def test_interp_nan_idx(self):
df = DataFrame({'A': [1, 2, np.nan, 4], 'B': [np.nan, 2, 3, 4]})
df = df.set_index('A')
with tm.assertRaises(NotImplementedError):
df.interpolate(method='values')
def test_interp_various(self):
_skip_if_no_scipy()
df = DataFrame({'A': [1, 2, np.nan, 4, 5, np.nan, 7],
'C': [1, 2, 3, 5, 8, 13, 21]})
df = df.set_index('C')
expected = df.copy()
result = df.interpolate(method='polynomial', order=1)
expected.A.loc[3] = 2.66666667
expected.A.loc[13] = 5.76923076
assert_frame_equal(result, expected)
result = df.interpolate(method='cubic')
expected.A.loc[3] = 2.81621174
expected.A.loc[13] = 5.64146581
assert_frame_equal(result, expected)
result = df.interpolate(method='nearest')
expected.A.loc[3] = 2
expected.A.loc[13] = 5
assert_frame_equal(result, expected, check_dtype=False)
result = df.interpolate(method='quadratic')
expected.A.loc[3] = 2.82533638
expected.A.loc[13] = 6.02817974
assert_frame_equal(result, expected)
result = df.interpolate(method='slinear')
expected.A.loc[3] = 2.66666667
expected.A.loc[13] = 5.76923077
assert_frame_equal(result, expected)
result = df.interpolate(method='zero')
expected.A.loc[3] = 2.
expected.A.loc[13] = 5
assert_frame_equal(result, expected, check_dtype=False)
def test_interp_alt_scipy(self):
_skip_if_no_scipy()
df = DataFrame({'A': [1, 2, np.nan, 4, 5, np.nan, 7],
'C': [1, 2, 3, 5, 8, 13, 21]})
result = df.interpolate(method='barycentric')
expected = df.copy()
expected['A'].iloc[2] = 3
expected['A'].iloc[5] = 6
assert_frame_equal(result, expected)
result = df.interpolate(method='krogh')
expectedk = df.copy()
expectedk['A'].iloc[2] = 3
expectedk['A'].iloc[5] = 6
expectedk['A'] = expected['A'].astype(np.int64)
assert_frame_equal(result, expectedk)
_skip_if_no_pchip()
result = df.interpolate(method='pchip')
expected['A'].iloc[2] = 3
expected['A'].iloc[5] = 6.125
assert_frame_equal(result, expected)
def test_interp_rowwise(self):
df = DataFrame({0: [1, 2, np.nan, 4],
1: [2, 3, 4, np.nan],
2: [np.nan, 4, 5, 6],
3: [4, np.nan, 6, 7],
4: [1, 2, 3, 4]})
result = df.interpolate(axis=1)
expected = df.copy()
expected[1].loc[3] = 5
expected[2].loc[0] = 3
expected[3].loc[1] = 3
expected[4] = expected[4].astype(np.float64)
assert_frame_equal(result, expected)
# scipy route
_skip_if_no_scipy()
result = df.interpolate(axis=1, method='values')
assert_frame_equal(result, expected)
result = df.interpolate(axis=0)
expected = df.interpolate()
assert_frame_equal(result, expected)
def test_rowwise_alt(self):
df = DataFrame({0: [0, .5, 1., np.nan, 4, 8, np.nan, np.nan, 64],
1: [1, 2, 3, 4, 3, 2, 1, 0, -1]})
df.interpolate(axis=0)
def test_interp_leading_nans(self):
df = DataFrame({"A": [np.nan, np.nan, .5, .25, 0],
"B": [np.nan, -3, -3.5, np.nan, -4]})
result = df.interpolate()
expected = df.copy()
expected['B'].loc[3] = -3.75
assert_frame_equal(result, expected)
_skip_if_no_scipy()
result = df.interpolate(method='polynomial', order=1)
assert_frame_equal(result, expected)
def test_interp_raise_on_only_mixed(self):
df = DataFrame({'A': [1, 2, np.nan, 4], 'B': ['a', 'b', 'c', 'd'],
'C': [np.nan, 2, 5, 7], 'D': [np.nan, np.nan, 9, 9],
'E': [1, 2, 3, 4]})
with tm.assertRaises(TypeError):
df.interpolate(axis=1)
def test_no_order(self):
_skip_if_no_scipy()
s = Series([0, 1, np.nan, 3])
with tm.assertRaises(ValueError):
s.interpolate(method='polynomial')
with tm.assertRaises(ValueError):
s.interpolate(method='spline')
def test_spline(self):
_skip_if_no_scipy()
s = Series([1, 2, np.nan, 4, 5, np.nan, 7])
result = s.interpolate(method='spline', order=1)
expected = Series([1, 2, 3, 4, 5, 6, 7])
assert_series_equal(result, expected)
class TestPanel(unittest.TestCase, Generic):
_typ = Panel
_comparator = lambda self, x, y: assert_panel_equal(x, y)
if __name__ == '__main__':
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
exit=False)

# === File: tests/modules/teams/resources/test_modifying_teams.py (repo: millen1m/flask-restplus-server-example, MIT) ===
# encoding: utf-8
# pylint: disable=missing-docstring
import json
from app.modules.teams import models
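
# These tests drive the Teams REST API through the flask_app_client
# fixture; login(user, auth_scopes=...) appears to open a session whose
# OAuth2 token is limited to the listed scopes, so each request runs
# with explicit permissions.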
def test_new_team_creation(flask_app_client, db, regular_user):
# pylint: disable=invalid-name
team_title = "Test Team Title"
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.post('/api/v1/teams/', data={'title': team_title})
assert response.status_code == 200
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'id', 'title'}
assert response.json['title'] == team_title
# Cleanup
team = models.Team.query.get(response.json['id'])
assert team.title == team_title
db.session.delete(team)
db.session.commit()
def test_new_team_first_member_is_creator(flask_app_client, db, regular_user):
# pylint: disable=invalid-name
team_title = "Test Team Title"
with flask_app_client.login(
regular_user,
auth_scopes=('teams:write', 'teams:read')
):
response = flask_app_client.post('/api/v1/teams/', data={'title': team_title})
assert response.status_code == 200
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'id', 'title'}
assert response.json['title'] == team_title
assert len(response.json['members']) == 1
assert response.json['members'][0]['user']['id'] == regular_user.id
assert response.json['members'][0]['is_leader'] == True
# Cleanup
team = models.Team.query.get(response.json['id'])
assert team.title == team_title
db.session.delete(team)
db.session.commit()
def test_new_team_creation_with_invalid_data_must_fail(flask_app_client, regular_user):
# pylint: disable=invalid-name
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.post('/api/v1/teams/', data={'title': ""})
assert response.status_code == 409
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'status', 'message'}
def test_update_team_info(flask_app_client, regular_user, team_for_regular_user):
# pylint: disable=invalid-name
team_title = "Test Team Title"
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.patch(
'/api/v1/teams/%d' % team_for_regular_user.id,
content_type='application/json',
data=json.dumps([
{
'op': 'replace',
'path': '/title',
'value': team_title
},
])
)
assert response.status_code == 200
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'id', 'title'}
assert response.json['id'] == team_for_regular_user.id
assert response.json['title'] == team_title
assert team_for_regular_user.title == team_title
def test_update_team_info_with_invalid_data_must_fail(
flask_app_client,
regular_user,
team_for_regular_user
):
# pylint: disable=invalid-name
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.patch(
'/api/v1/teams/%d' % team_for_regular_user.id,
content_type='application/json',
data=json.dumps([
{
'op': 'replace',
'path': '/title',
'value': '',
},
])
)
assert response.status_code == 409
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'status', 'message'}
def test_update_team_info_without_value_must_fail(
flask_app_client,
regular_user,
team_for_regular_user
):
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.patch(
'/api/v1/teams/%d' % team_for_regular_user.id,
content_type='application/json',
data=json.dumps([
{
'op': 'replace',
'path': '/title',
}
])
)
assert response.status_code == 422
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'status', 'message'}
def test_update_team_info_without_slash_in_path_must_fail(
flask_app_client,
regular_user,
team_for_regular_user
):
with flask_app_client.login(regular_user, auth_scopes=('teams:write',)):
response = flask_app_client.patch(
'/api/v1/teams/%d' % team_for_regular_user.id,
content_type='application/json',
data=json.dumps([
{
'op': 'replace',
'path': 'title',
'value': 'New Team Value',
}
])
)
assert response.status_code == 422
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'status', 'message'}
def test_team_deletion(flask_app_client, regular_user, team_for_regular_user):
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.delete(
'/api/v1/teams/%d' % team_for_regular_user.id
)
assert response.status_code == 204
assert response.content_type == 'application/json'
def test_add_new_team_member(flask_app_client, db, regular_user, admin_user, team_for_regular_user):
# pylint: disable=invalid-name
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.post(
'/api/v1/teams/%d/members/' % team_for_regular_user.id,
data={
'user_id': admin_user.id,
}
)
assert response.status_code == 200
assert response.content_type == 'application/json'
assert set(response.json.keys()) >= {'team', 'user', 'is_leader'}
assert response.json['team']['id'] == team_for_regular_user.id
assert response.json['user']['id'] == admin_user.id
# Cleanup
team_members = models.TeamMember.query.filter_by(team=team_for_regular_user, user=admin_user)
assert team_members.count() == 1
team_members.delete()
db.session.commit()
def test_delete_team_member(
flask_app_client, db, regular_user, readonly_user, team_for_regular_user
):
# pylint: disable=invalid-name,unused-argument
with flask_app_client.login(regular_user, auth_scopes=('teams:write', )):
response = flask_app_client.delete(
'/api/v1/teams/%d/members/%d' % (team_for_regular_user.id, readonly_user.id),
)
assert response.status_code == 200
assert response.content_type == 'application/json'

# =============================================================================
# File: lib/bbox.py
# Repo: Shmuma/blackboxchallenge (no license detected)
# =============================================================================
"""
Custom bbox interface class
"""
def get_bbox(name=None):
"""
Get bbox instance module by name. Name == None loads original bbox module. All others must
match a module from custom package.
:param name:
:return:
"""
if name is None:
print "We'll use original bbox implementation"
import interface as bbox
return bbox
print "Will use bbox from module custom." + name
res = __import__("lib.custom." + name)
return getattr(getattr(res, "custom"), name)
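
# Hedged usage demo: the __main__ guard below is an illustration added for
# clarity, and "five_state" is a hypothetical module name -- any module under
# lib/custom/ exposing a bbox-compatible interface resolves the same way.
if __name__ == "__main__":
    bbox = get_bbox()                  # stock interface module
    # bbox = get_bbox("five_state")    # would import lib.custom.five_state
    print "Loaded bbox implementation: " + str(bbox)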

# =============================================================================
# File: test/system/volume/CREATE_VOL_BASIC_7.py
# Repo: Wonchul08Lee/poseidonos (license: BSD-3-Clause)
# =============================================================================
#!/usr/bin/env python3
import subprocess
import os
import sys
import json
sys.path.append("../lib/")
sys.path.append("../array/")
import json_parser
import ibofos
import cli
import test_result
import ibofos_constant
import MOUNT_ARRAY_BASIC_1
import volume
VOL_NAME = "vol7"
VOL_SIZE = ibofos_constant.SIZE_1GB * 5
VOL_IOPS = 2**64-1
VOL_BW = 2**64-1
def clear_result():
if os.path.exists( __file__ + ".result"):
os.remove( __file__ + ".result")
def check_result(detail):
expected_list = []
expected_list.append(volume.Volume(VOL_NAME, VOL_SIZE, VOL_IOPS, VOL_BW))
data = json.loads(detail)
actual_list = []
for item in data['Response']['result']['data']['volumes']:
vol = volume.Volume(item['name'], item['total'], item['maxiops'], item['maxbw'])
actual_list.append(vol)
if len(actual_list) != len(expected_list):
return "fail"
for actual in actual_list:
checked = False
for expected in expected_list:
if actual.name == expected.name and actual.total == expected.total and actual.maxiops == expected.maxiops and actual.maxbw == expected.maxbw:
checked = True
break
if checked == False:
return "fail"
return "pass"
def set_result(detail):
out = cli.list_volume("")
result = check_result(out)
code = json_parser.get_response_code(out)
with open(__file__ + ".result", "w") as result_file:
result_file.write(result + " (" + str(code) + ")" + "\n" + out)
def execute():
clear_result()
MOUNT_ARRAY_BASIC_1.execute()
out = cli.create_volume(VOL_NAME, str(VOL_SIZE), str(VOL_IOPS), str(VOL_BW), "")
return out
if __name__ == "__main__":
out = execute()
set_result(out)
    ibofos.kill_ibofos()

# =============================================================================
# File: leetcode/lc350.py
# Repo: JasonXJ/algorithms (no license detected)
# =============================================================================
from collections import Counter
class Solution(object):
def intersect(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
c1 = Counter(nums1)
c2 = Counter(nums2)
rv = []
for x, c in c1.items():
rv.extend([x] * min(c, c2[x]))
return rv
def test():
assert Solution().intersect([1,2,2,1], [2,2]) == [2,2]
assert Solution().intersect([1,2,2,3,1], [2,2]) == [2,2]
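
# Worked illustration of the Counter logic above: Counter([1,2,2,1]) yields
# {1: 2, 2: 2} and Counter([2,2]) yields {2: 2}, so every value x is emitted
# min(c1[x], c2[x]) times, with missing keys counting as 0. The __main__
# guard is an added convenience so the asserts actually execute.
if __name__ == '__main__':
    test()
    # order follows c1 insertion on CPython 3.7+
    print(Solution().intersect([4, 9, 5], [9, 4, 9, 8, 4]))  # -> [4, 9]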

# =============================================================================
# File: mn_wifi/link.py
# Repo: ovasoft/mininet-wifi (license: LicenseRef-scancode-x11-stanford)
# =============================================================================
# author: Ramon Fontes ([email protected])
import os
import re
import subprocess
from time import sleep
from sys import version_info as py_version_info
from mininet.util import BaseString
from mininet.log import error, debug, info
from mn_wifi.manetRoutingProtocols import manetProtocols
from mn_wifi.wmediumdConnector import DynamicIntfRef, \
w_starter, SNRLink, w_pos, w_cst, w_server, ERRPROBLink, \
wmediumd_mode, w_txpower, w_gain, w_height
class IntfWireless(object):
"Basic interface object that can configure itself."
def __init__(self, name, node=None, port=None, link=None,
mac=None, **params):
"""name: interface name (e.g. h1-eth0)
node: owning node (where this intf most likely lives)
link: parent link if we're part of a link
other arguments are passed to config()"""
self.node = node
self.name = name
self.link = link
self.port = port
self.mac = mac
self.ip, self.ip6, self.prefixLen = None, None, None
# if interface is lo, we know the ip is 127.0.0.1.
# This saves an ip link/addr command per node
if self.name == 'lo':
self.ip = '127.0.0.1'
node.addWIntf(self, port=port)
# Save params for future reference
self.params = params
self.config(**params)
def cmd(self, *args, **kwargs):
"Run a command in our owning node"
return self.node.cmd(*args, **kwargs)
def pexec(self, *args, **kwargs):
"Run a command in our owning node"
return self.node.pexec(*args, **kwargs)
def set_dev_type(self, type):
self.iwdev_cmd('%s set type %s' % (self.name, type))
def add_dev_type(self, new_name, type):
self.iwdev_cmd('%s interface add %s type %s' % (self.name, new_name, type))
def iwdev_cmd(self, *args):
return self.cmd('iw dev', *args)
def iwdev_pexec(self, *args):
return self.pexec('iw dev', *args)
def join_ibss(self):
return self.iwdev_cmd('{} ibss join {} {} {} 02:CA:FF:EE:BA:01'.
format(self.name, self.ssid,
self.format_freq(), self.ht_cap))
def join_mesh(self):
return self.iwdev_cmd('{} mesh join {} freq {} {}'.
format(self.name, self.ssid,
self.format_freq(), self.ht_cap))
def get_pid_filename(self):
pidfile = 'mn{}_{}_{}_wpa.pid'.format(os.getpid(), self.node.name, self.id)
return pidfile
def get_wpa_cmd(self):
pidfile = self.get_pid_filename()
wpasup_flags = ''
if 'wpasup_flags' in self.node.params:
wpasup_flags = self.node.params['wpasup_flags']
cmd = ('wpa_supplicant -B -Dnl80211 -P {} -i {} -c {}.staconf {}'.
format(pidfile, self.name, self.name, wpasup_flags))
return cmd
def wpa_cmd(self):
return self.cmd(self.get_wpa_cmd())
def wpa_pexec(self):
return self.node.pexec(self.get_wpa_cmd())
def setGainWmediumd(self):
"Set Antenna Gain for wmediumd"
if wmediumd_mode.mode == w_cst.INTERFERENCE_MODE:
gain = self.antennaGain
w_server.update_gain(w_gain(self.wmIface, int(gain)))
def setHeightWmediumd(self):
"Set Antenna Height for wmediumd"
if wmediumd_mode.mode == w_cst.INTERFERENCE_MODE:
height = self.antennaHeight
w_server.update_height(w_height(self.wmIface, int(height)))
def setTXPowerWmediumd(self):
"Set TxPower for wmediumd"
if wmediumd_mode.mode == w_cst.INTERFERENCE_MODE:
txpower = self.txpower
w_server.update_txpower(w_txpower(self.wmIface, int(txpower)))
def getCustomRate(self):
modes = ['a', 'b', 'g', 'n', 'ac']
rates = [11, 3, 11, 600, 1000]
rate = rates[modes.index(self.mode)]
return rate
def getRate(self):
modes = ['a', 'b', 'g', 'n', 'ac']
rates = [54, 11, 54, 300, 600]
rate = rates[modes.index(self.mode)]
return rate
def get_freq(self):
"Gets frequency based on channel number"
channel = int(self.channel)
chan_list_2ghz = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
chan_list_5ghz = [36, 40, 44, 48, 52, 56, 60, 64, 100,
104, 108, 112, 116, 120, 124, 128, 132,
136, 140, 149, 153, 157, 161, 165,
169, 171, 172, 173, 174, 175, 176,
177, 178, 179, 180, 181, 182, 183, 184,
185]
freq_list_2ghz = [2.412, 2.417, 2.422, 2.427, 2.432, 2.437,
2.442, 2.447, 2.452, 2.457, 2.462]
freq_list_5ghz = [5.18, 5.2, 5.22, 5.24, 5.26, 5.28, 5.30, 5.32,
5.50, 5.52, 5.54, 5.56, 5.58, 5.6, 5.62,
5.64, 5.66, 5.68, 5.7, 5.745, 5.765, 5.785,
5.805, 5.825, 5.845, 5.855, 5.86, 5.865, 5.87,
5.875, 5.88, 5.885, 5.89, 5.895, 5.9, 5.905,
5.91, 5.915, 5.92, 5.925]
all_chan = chan_list_2ghz + chan_list_5ghz
all_freq = freq_list_2ghz + freq_list_5ghz
if channel in all_chan:
idx = all_chan.index(channel)
return all_freq[idx]
else:
            return 2.412  # default as a float, so format_freq() keeps working
def setFreq(self, freq, intf=None):
return self.iwdev_cmd('{} set freq {}'.format(intf, freq))
    def format_freq(self):
        # e.g. self.freq == 2.412 (GHz) -> '2.412' -> 2412, the MHz integer iw expects
        return int(format(self.freq, '.3f').replace('.', ''))
def setReg(self):
if self.mode == 'a' or self.mode == 'ac':
self.pexec('iw reg set US')
def setAPChannel(self):
self.freq = self.get_freq()
self.pexec('hostapd_cli -i %s chan_switch %s %s' % (
self.name, self.channel, str(self.format_freq())))
def setChannel(self):
self.freq = self.get_freq()
self.iwdev_cmd('%s set channel %s' % (self.name, self.channel))
def ipAddr(self, *args):
"Configure ourselves using ip link/addr"
if self.name not in self.node.params['wlan']:
self.cmd('ip addr flush ', self.name)
return self.cmd('ip addr add', args[0], 'dev', self.name)
else:
if len(args) == 0:
return self.cmd('ip addr show', self.name)
else:
if ':' not in args[0]:
self.cmd('ip addr flush ', self.name)
cmd = 'ip addr add %s dev %s' % (args[0], self.name)
if self.ip6:
cmd = cmd + ' && ip -6 addr add %s dev %s' % \
(self.ip6, self.name)
return self.cmd(cmd)
else:
self.cmd('ip -6 addr flush ', self.name)
return self.cmd('ip -6 addr add', args[0], 'dev', self.name)
def ipLink(self, *args):
"Configure ourselves using ip link"
return self.cmd('ip link set', self.name, *args)
def setMode(self, mode):
self.mode = mode
def setTxPower(self):
txpower = self.txpower
self.node.pexec('iw dev %s set txpower fixed %s' % (self.name, txpower * 100))
debug('\n')
def setIP(self, ipstr, prefixLen=None, **args):
"""Set our IP address"""
# This is a sign that we should perhaps rethink our prefix
# mechanism and/or the way we specify IP addresses
if '/' in ipstr:
self.ip, self.prefixLen = ipstr.split('/')
return self.ipAddr(ipstr)
else:
if prefixLen is None:
raise Exception('No prefix length set for IP address %s'
% (ipstr,))
self.ip, self.prefixLen = ipstr, prefixLen
return self.ipAddr('%s/%s' % (ipstr, prefixLen))
def setIP6(self, ipstr, prefixLen=None, **args):
"""Set our IP6 address"""
# This is a sign that we should perhaps rethink our prefix
# mechanism and/or the way we specify IP addresses
if '/' in ipstr:
self.ip6, self.prefixLen = ipstr.split('/')
return self.ipAddr(ipstr)
else:
if prefixLen is None:
raise Exception('No prefix length set for IP address %s'
% (ipstr,))
self.ip6, self.prefixLen = ipstr, prefixLen
return self.ipAddr('%s/%s' % (ipstr, prefixLen))
def configureMacAddr(self):
"""Configure Mac Address
:param node: node"""
if not self.mac:
self.mac = self.getMAC()
else:
self.setMAC(self.mac)
def getMAC(self):
"get Mac Address of any Interface"
try:
_macMatchRegex = re.compile(r'..:..:..:..:..:..')
debug('getting mac address from %s\n' % self.name)
macaddr = str(self.pexec('ip addr show %s' % self.name))
mac = _macMatchRegex.findall(macaddr)
debug('\n%s' % mac[0])
return mac[0]
except:
info('Error: Please run sudo mn -c.\n')
def setMAC(self, macstr):
"""Set the MAC address for an interface.
macstr: MAC address as string"""
self.mac = macstr
return (self.ipLink('down') +
self.ipLink('address', macstr) +
self.ipLink('up'))
_ipMatchRegex = re.compile(r'\d+\.\d+\.\d+\.\d+')
_macMatchRegex = re.compile(r'..:..:..:..:..:..')
def updateIP(self):
"Return updated IP address based on ip addr"
# use pexec instead of node.cmd so that we dont read
# backgrounded output from the cli.
ipAddr, _err, _exitCode = self.node.pexec(
'ip addr show %s' % self.name)
if py_version_info < (3, 0):
ips = self._ipMatchRegex.findall(ipAddr)
else:
ips = self._ipMatchRegex.findall(ipAddr.decode('utf-8'))
self.ip = ips[0] if ips else None
return self.ip
def updateMAC(self):
"Return updated MAC address based on ip addr"
ipAddr = self.ipAddr()
if py_version_info < (3, 0):
macs = self._macMatchRegex.findall(ipAddr)
else:
macs = self._macMatchRegex.findall(ipAddr.decode('utf-8'))
self.mac = macs[0] if macs else None
return self.mac
# Instead of updating ip and mac separately,
# use one ipAddr call to do it simultaneously.
# This saves an ipAddr command, which improves performance.
def updateAddr(self):
"Return IP address and MAC address based on ipAddr."
ipAddr = self.ipAddr()
if py_version_info < (3, 0):
ips = self._ipMatchRegex.findall(ipAddr)
macs = self._macMatchRegex.findall(ipAddr)
else:
ips = self._ipMatchRegex.findall(ipAddr.decode('utf-8'))
macs = self._macMatchRegex.findall(ipAddr.decode('utf-8'))
self.ip = ips[0] if ips else None
self.mac = macs[0] if macs else None
return self.ip, self.mac
def IP(self):
"Return IP address"
return self.ip
def MAC(self):
"Return MAC address"
return self.mac
def isUp(self, setUp=False):
"Return whether interface is up"
if setUp:
cmdOutput = self.ipLink('up')
# no output indicates success
if cmdOutput:
# error( "Error setting %s up: %s " % ( self.name, cmdOutput ) )
return False
else:
return True
else:
return "UP" in self.ipAddr()
def rename(self, newname):
"Rename interface"
if self.node and self.name in self.node.nameToIntf:
# rename intf in node's nameToIntf
self.node.nameToIntf[newname] = self.node.nameToIntf.pop(self.name)
self.ipLink('down')
result = self.cmd('ip link set', self.name, 'name', newname)
self.name = newname
self.ipLink('up')
return result
# The reason why we configure things in this way is so
# That the parameters can be listed and documented in
# the config method.
# Dealing with subclasses and superclasses is slightly
# annoying, but at least the information is there!
def setParam(self, results, method, **param):
"""Internal method: configure a *single* parameter
results: dict of results to update
method: config method name
param: arg=value (ignore if value=None)
value may also be list or dict"""
name, value = list(param.items())[ 0 ]
f = getattr(self, method, None)
if not f or value is None:
return
if isinstance(value, list):
result = f(*value)
elif isinstance(value, dict):
result = f(**value)
else:
result = f(value)
results[ name ] = result
return result
def config(self, mac=None, ip=None, ip6=None,
ipAddr=None, up=True, **_params):
"""Configure Node according to (optional) parameters:
mac: MAC address
ip: IP address
ipAddr: arbitrary interface configuration
Subclasses should override this method and call
the parent class's config(**params)"""
# If we were overriding this method, we would call
# the superclass config method here as follows:
# r = Parent.config( **params )
r = {}
self.setParam(r, 'setMAC', mac=mac)
self.setParam(r, 'setIP', ip=ip)
self.setParam(r, 'setIP6', ip=ip6)
self.setParam(r, 'isUp', up=up)
self.setParam(r, 'ipAddr', ipAddr=ipAddr)
return r
def delete(self):
"Delete interface"
self.cmd('iw dev ' + self.name + ' del')
# We used to do this, but it slows us down:
# if self.node.inNamespace:
# Link may have been dumped into root NS
# quietRun( 'ip link del ' + self.name )
#self.node.delIntf(self)
self.link = None
def status(self):
"Return intf status as a string"
links, _err, _result = self.node.pexec('ip link show')
if self.name in str(links):
return "OK"
else:
return "MISSING"
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.name)
def __str__(self):
return self.name
class TCWirelessLink(IntfWireless):
"""Interface customized by tc (traffic control) utility
Allows specification of bandwidth limits (various methods)
as well as delay, loss and max queue length"""
# The parameters we use seem to work reasonably up to 1 Gb/sec
# For higher data rates, we will probably need to change them.
bwParamMax = 1000
def bwCmds(self, bw=None, speedup=0, use_hfsc=False, use_tbf=False,
latency_ms=None, enable_ecn=False, enable_red=False):
"Return tc commands to set bandwidth"
cmds, parent = [], ' root '
if bw and (bw < 0 or bw > self.bwParamMax):
error('Bandwidth limit', bw, 'is outside supported range 0..%d'
% self.bwParamMax, '- ignoring\n')
elif bw is not None:
# BL: this seems a bit brittle...
if speedup > 0:
bw = speedup
# This may not be correct - we should look more closely
# at the semantics of burst (and cburst) to make sure we
# are specifying the correct sizes. For now I have used
# the same settings we had in the mininet-hifi code.
if use_hfsc:
cmds += [ '%s qdisc add dev %s root handle 5:0 hfsc default 1',
'%s class add dev %s parent 5:0 classid 5:1 hfsc sc '
+ 'rate %fMbit ul rate %fMbit' % (bw, bw) ]
elif use_tbf:
if latency_ms is None:
latency_ms = 15 * 8 / bw
cmds += [ '%s qdisc add dev %s root handle 5: tbf ' +
'rate %fMbit burst 15000 latency %fms' %
(bw, latency_ms) ]
else:
cmds += [ '%s qdisc add dev %s root handle 5:0 htb default 1',
'%s class add dev %s parent 5:0 classid 5:1 htb ' +
'rate %fMbit burst 15k' % bw ]
parent = ' parent 5:1 '
# ECN or RED
if enable_ecn:
cmds += [ '%s qdisc add dev %s' + parent +
'handle 6: red limit 1000000 ' +
'min 30000 max 35000 avpkt 1500 ' +
'burst 20 ' +
'bandwidth %fmbit probability 1 ecn' % bw ]
parent = ' parent 6: '
elif enable_red:
cmds += [ '%s qdisc add dev %s' + parent +
'handle 6: red limit 1000000 ' +
'min 30000 max 35000 avpkt 1500 ' +
'burst 20 ' +
'bandwidth %fmbit probability 1' % bw ]
parent = ' parent 6: '
return cmds, parent
@staticmethod
def delayCmds(parent, delay=None, jitter=None,
loss=None, max_queue_size=None):
"Internal method: return tc commands for delay and loss"
cmds = []
if delay:
delay_ = float(delay.replace("ms", ""))
if delay and delay_ < 0:
error( 'Negative delay', delay, '\n' )
elif jitter and jitter < 0:
error('Negative jitter', jitter, '\n')
elif loss and (loss < 0 or loss > 100):
error('Bad loss percentage', loss, '%%\n')
else:
# Delay/jitter/loss/max queue size
netemargs = '%s%s%s%s' % (
'delay %s ' % delay if delay is not None else '',
'%s ' % jitter if jitter is not None else '',
'loss %.5f ' % loss if loss is not None else '',
'limit %d' % max_queue_size if max_queue_size is not None
else '')
if netemargs:
cmds = [ '%s qdisc add dev %s ' + parent +
' handle 10: netem ' +
netemargs ]
parent = ' parent 10:1 '
return cmds, parent
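
    # Hedged illustration: with parent=' root ', delay='10ms', loss=2 and
    # max_queue_size=1000, the template above expands -- once tc() below fills
    # in the command name and device -- to roughly:
    #   tc qdisc add dev <iface> root handle 10: netem delay 10ms loss 2.00000 limit 1000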
def tc(self, cmd, tc='tc'):
"Execute tc command for our interface"
c = cmd % (tc, self) # Add in tc command and our name
debug(" *** executing command: %s\n" % c)
return self.cmd(c)
def config(self, bw=None, delay=None, jitter=None, loss=None,
gro=False, speedup=0, use_hfsc=False, use_tbf=False,
latency_ms=None, enable_ecn=False, enable_red=False,
max_queue_size=None, **params):
"""Configure the port and set its properties.
bw: bandwidth in b/s (e.g. '10m')
delay: transmit delay (e.g. '1ms' )
jitter: jitter (e.g. '1ms')
loss: loss (e.g. '1%' )
gro: enable GRO (False)
txo: enable transmit checksum offload (True)
rxo: enable receive checksum offload (True)
speedup: experimental switch-side bw option
use_hfsc: use HFSC scheduling
use_tbf: use TBF scheduling
latency_ms: TBF latency parameter
enable_ecn: enable ECN (False)
enable_red: enable RED (False)
max_queue_size: queue limit parameter for netem"""
# Support old names for parameters
gro = not params.pop('disable_gro', not gro)
result = IntfWireless.config(self, **params)
def on(isOn):
"Helper method: bool -> 'on'/'off'"
return 'on' if isOn else 'off'
# Set offload parameters with ethool
self.cmd('ethtool -K', self,
'gro', on(gro))
# Optimization: return if nothing else to configure
# Question: what happens if we want to reset things?
if (bw is None and not delay and not loss
and max_queue_size is None):
return
# Clear existing configuration
tcoutput = self.tc('%s qdisc show dev %s')
if "priomap" not in tcoutput and "noqueue" not in tcoutput \
and "fq_codel" not in tcoutput and "qdisc fq" not in tcoutput:
cmds = [ '%s qdisc del dev %s root' ]
else:
cmds = []
# Bandwidth limits via various methods
bwcmds, parent = self.bwCmds(bw=bw, speedup=speedup,
use_hfsc=use_hfsc, use_tbf=use_tbf,
latency_ms=latency_ms,
enable_ecn=enable_ecn,
enable_red=enable_red)
cmds += bwcmds
# Delay/jitter/loss/max_queue_size using netem
delaycmds, parent = self.delayCmds(delay=delay, jitter=jitter,
loss=loss,
max_queue_size=max_queue_size,
parent=parent)
cmds += delaycmds
# Execute all the commands in our node
debug("at map stage w/cmds: %s\n" % cmds)
tcoutputs = [ self.tc(cmd) for cmd in cmds ]
for output in tcoutputs:
if output != '':
error("*** Error: %s" % output)
debug("cmds:", cmds, '\n')
debug("outputs:", tcoutputs, '\n')
result[ 'tcoutputs'] = tcoutputs
result[ 'parent' ] = parent
return result
class _4address(IntfWireless):
node = None
def __init__(self, node1, node2, port1=None, port2=None):
"""Create 4addr link to another node.
node1: first node
node2: second node
intf: default interface class/constructor"""
intf1 = None
intf2 = None
ap = node1 # ap
cl = node2 # client
cl_intfName = '%s.wds' % cl.name
if not hasattr(node1, 'position'):
self.set_pos(node1)
if not hasattr(node2, 'position'):
self.set_pos(node2)
if cl_intfName not in cl.params['wlan']:
wlan = cl.params['wlan'].index(port1) if port1 else 0
apwlan = ap.params['wlan'].index(port2) if port2 else 0
intf = cl.wintfs[wlan]
ap_intf = ap.wintfs[apwlan]
self.node = cl
self.add4addrIface(wlan, cl_intfName)
self.setMAC(intf)
self.setMAC(ap_intf)
self.bring4addrIfaceUP()
intf.mode = ap_intf.mode
intf.channel = ap_intf.channel
intf.freq = ap_intf.freq
intf.txpower = ap_intf.txpower
intf.antennaGain = ap_intf.antennaGain
cl.params['wlan'].append(cl_intfName)
sleep(1)
self.iwdev_cmd('%s connect %s %s' % (cl.params['wlan'][1],
ap_intf.ssid, ap_intf.mac))
params1, params2 = {}, {}
params1['port'] = cl.newPort()
params2['port'] = ap.newPort()
intf1 = IntfWireless(name=cl_intfName, node=cl, link=self, **params1)
if hasattr(ap, 'wds'):
ap.wds += 1
else:
ap.wds = 1
intfName2 = ap.params['wlan'][apwlan] + '.sta%s' % ap.wds
intf2 = IntfWireless(name=intfName2, node=ap, link=self, **params2)
ap.params['wlan'].append(intfName2)
_4addrAP(ap, (len(ap.params['wlan'])-1))
_4addrClient(cl, (len(cl.params['wlan'])-1))
cl.wintfs[1].mac = (intf.mac[:3] + '09' + intf.mac[5:])
# All we are is dust in the wind, and our two interfaces
self.intf1, self.intf2 = intf1, intf2
def set_pos(self, node):
nums = re.findall(r'\d+', node.name)
if nums:
id = int(hex(int(nums[0]))[2:])
node.position = (10, round(id, 2), 0)
def bring4addrIfaceUP(self):
self.cmd('ip link set dev %s.wds up' % self.node)
def setMAC(self, intf):
self.cmd('ip link set dev %s.wds addr %s'
% (intf.node, intf.mac))
def add4addrIface(self, wlan, intfName):
self.iwdev_cmd('%s interface add %s type managed 4addr on' %
(self.node.params['wlan'][wlan], intfName))
def status(self):
"Return link status as a string"
return "(%s %s)" % (self.intf1.status(), self.intf2)
def __str__(self):
return '%s<->%s' % (self.intf1, self.intf2)
def delete(self):
"Delete this link"
self.intf1.delete()
self.intf1 = None
self.intf2.delete()
self.intf2 = None
def stop(self):
"Override to stop and clean up link as needed"
self.delete()
class WirelessLinkAP(object):
"""A basic link is just a veth pair.
Other types of links could be tunnels, link emulators, etc.."""
# pylint: disable=too-many-branches
def __init__(self, node, port=None, intfName=None, addr=None,
cls=None, params=None):
"""Create veth link to another node, making two new interfaces.
node: first node
port: node port number (optional)
intf: default interface class/constructor
cls: optional interface-specific constructors
intfName: node interface name (optional)
params: parameters for interface 1"""
# This is a bit awkward; it seems that having everything in
# params is more orthogonal, but being able to specify
# in-line arguments is more convenient! So we support both.
params = dict( params ) if params else {}
if port is not None:
params[ 'port' ] = port
ifacename = 'wlan'
params['port'] = node.newPort()
intfName = self.wlanName(node, ifacename, params['port'])
intf1 = cls(name=intfName, node=node,
link=self, mac = addr, ** params)
intf2 = 'wifi'
# All we are is dust in the wind, and our two interfaces
self.intf1, self.intf2 = intf1, intf2
# pylint: enable=too-many-branches
@staticmethod
def _ignore(*args, **kwargs):
"Ignore any arguments"
pass
def wlanName(self, node, ifacename, n):
"Construct a canonical interface name node-ethN for interface n."
# Leave this as an instance method for now
assert self
return node.name + '-' + ifacename + repr(n)
def delete(self):
"Delete this link"
self.intf1.delete()
self.intf1 = None
self.intf2 = None
def stop(self):
"Override to stop and clean up link as needed"
self.delete()
def status(self):
"Return link status as a string"
return "(%s %s)" % (self.intf1.status(), self.intf2)
def __str__(self):
return '%s<->%s' % (self.intf1, self.intf2)
class WirelessLinkStation(object):
"""A basic link is just a veth pair.
Other types of links could be tunnels, link emulators, etc.."""
# pylint: disable=too-many-branches
def __init__(self, node, port=None, intfName=None, addr=None,
intf=IntfWireless, cls=None, params=None):
"""Create veth link to another node, making two new interfaces.
node: first node
port: node port number (optional)
intf: default interface class/constructor
cls: optional interface-specific constructors
intfName: node interface name (optional)
params: parameters for interface 1"""
# This is a bit awkward; it seems that having everything in
# params is more orthogonal, but being able to specify
# in-line arguments is more convenient! So we support both.
if params is None:
params = {}
if port is not None:
params[ 'port' ] = port
if 'port' not in params:
params[ 'port' ] = node.newPort()
if not intfName:
ifacename = 'wlan'
intfName = self.wlanName(node, ifacename, node.newPort())
if not cls:
cls = intf
intf1 = cls(name=intfName, node=node,
link=self, mac=addr, **params)
intf2 = 'wifi'
# All we are is dust in the wind, and our two interfaces
self.intf1, self.intf2 = intf1, intf2
# pylint: enable=too-many-branches
@staticmethod
def _ignore(*args, **kwargs):
"Ignore any arguments"
pass
def wlanName(self, node, ifacename, n):
"Construct a canonical interface name node-ethN for interface n."
# Leave this as an instance method for now
assert self
return node.name + '-' + ifacename + repr(n)
def delete(self):
"Delete this link"
self.intf1.delete()
self.intf1 = None
self.intf2 = None
def stop(self):
"Override to stop and clean up link as needed"
self.delete()
def status(self):
"Return link status as a string"
return "(%s %s)" % (self.intf1.status(), self.intf2)
def __str__(self):
return '%s<->%s' % (self.intf1, self.intf2)
class TCLinkWirelessStation(WirelessLinkStation):
"Link with symmetric TC interfaces configured via opts"
def __init__(self, node, port=None, intfName=None,
addr=None, cls=TCWirelessLink, **params):
WirelessLinkStation.__init__(self, node=node, port=port,
intfName=intfName,
cls=cls, addr=addr,
params=params)
class TCLinkWirelessAP(WirelessLinkAP):
"Link with symmetric TC interfaces configured via opts"
def __init__(self, node, port=None, intfName=None,
addr=None, cls=TCWirelessLink, **params):
WirelessLinkAP.__init__(self, node, port=port,
intfName=intfName,
cls=cls, addr=addr,
params=params)
class master(TCWirelessLink):
"master class"
def __init__(self, node, wlan, port=None, intf=None):
self.name = node.params['wlan'][wlan]
node.addWAttr(self, port=port)
self.node = node
self.params = {}
self.stationsInRange = {}
self.associatedStations = []
self.antennaGain = 5.0
self.antennaHeight = 1.0
self.channel = 1
self.freq = 2.412
self.range = 0
self.txpower = 14
self.ieee80211r = None
self.band = None
self.authmode = None
self.beacon_int = None
self.config = None
self.driver = 'nl80211'
self.encrypt = None
self.ht_capab = None
self.id = wlan
self.ip = None
self.ip6 = None
self.isolate_clients = None
self.mac = None
self.mode = 'g'
self.passwd = None
self.shared_secret = None
self.ssid = None
self.wpa_key_mgmt = None
self.rsn_pairwise = None
self.radius_server = None
self.wps_state = None
self.device_type = None
self.wpa_psk_file = None
self.config_methods = None
self.mobility_domain = None
self.link = None
if intf:
self.wmIface = intf.wmIface
for key in self.__dict__.keys():
if key in node.params:
if isinstance(node.params[key], BaseString):
setattr(self, key, node.params[key])
elif isinstance(node.params[key], list):
arg_ = node.params[key][0].split(',')
setattr(self, key, arg_[wlan])
elif isinstance(node.params[key], int):
setattr(self, key, node.params[key])
class managed(TCWirelessLink):
"managed class"
def __init__(self, node, wlan, intf=None):
self.name = node.params['wlan'][wlan]
node.addWIntf(self, port=wlan)
node.addWAttr(self, port=wlan)
self.node = node
self.apsInRange = {}
self.range = 0
self.ifb = None
self.active_scan = None
self.associatedTo = None
self.associatedStations = None
self.authmode = None
self.config = None
self.encrypt = None
self.freq_list = None
self.ip = None
self.ip6 = None
self.link = None
self.mac = None
self.passwd = None
self.radius_identity = None
self.radius_passwd = None
self.scan_freq = None
self.ssid = None
self.stationsInRange = None
self.bgscan_module = 'simple'
self.s_inverval = 0
self.bgscan_threshold = 0
self.l_interval = 0
self.txpower = 14
self.id = wlan
self.rssi = -60
self.mode = 'g'
self.freq = 2.412
self.channel = 1
self.antennaGain = 5.0
self.antennaHeight = 1.0
if intf:
self.wmIface = intf.wmIface
for key in self.__dict__.keys():
if key in node.params:
if isinstance(node.params[key], BaseString):
setattr(self, key, node.params[key])
elif isinstance(node.params[key], list):
arg_ = node.params[key][0].split(',')
setattr(self, key, arg_[wlan])
elif isinstance(node.params[key], int):
setattr(self, key, node.params[key])
class _4addrClient(TCWirelessLink):
"managed class"
def __init__(self, node, wlan):
self.node = node
self.id = wlan
self.ip = None
self.mac = node.wintfs[wlan-1].mac
self.range = node.wintfs[0].range
self.txpower = 0
self.antennaGain = 5.0
self.name = node.params['wlan'][wlan]
self.stationsInRange = {}
self.associatedStations = []
self.apsInRange = {}
self.params = {}
node.addWIntf(self)
node.addWAttr(self)
class _4addrAP(TCWirelessLink):
"managed class"
def __init__(self, node, wlan):
self.node = node
self.ip = None
self.id = wlan
self.mac = node.wintfs[0].mac
self.range = node.wintfs[0].range
self.txpower = 0
self.antennaGain = 5.0
self.name = node.params['wlan'][wlan]
self.stationsInRange = {}
self.associatedStations = []
self.params = {}
node.addWIntf(self)
node.addWAttr(self)
class wmediumd(TCWirelessLink):
"Wmediumd Class"
wlinks = []
links = []
txpowers = []
positions = []
nodes = []
def __init__(self, fading_coefficient, noise_threshold, stations,
aps, cars, propagation_model, maclist=None):
self.configureWmediumd(fading_coefficient, noise_threshold, stations,
aps, cars, propagation_model, maclist)
@classmethod
def configureWmediumd(cls, fading_coefficient, noise_threshold, stations,
aps, cars, propagation_model, maclist):
"Configure wmediumd"
intfrefs = []
isnodeaps = []
fading_coefficient = fading_coefficient
noise_threshold = noise_threshold
cls.nodes = stations + aps + cars
for node in cls.nodes:
for intf in node.wintfs.values():
intf.wmIface = DynamicIntfRef(node, intf=intf.name)
intfrefs.append(intf.wmIface)
if (isinstance(intf, master)
or (node in aps and (not isinstance(intf, managed)
and not isinstance(intf, adhoc)))):
isnodeaps.append(1)
else:
isnodeaps.append(0)
'''for mac in maclist:
for key in mac:
if key == node:
key.wmIface.append(DynamicIntfRef(key, intf=len(key.wmIface)))
key.params['wlan'].append(mac[key][1])
key.params['mac'].append(mac[key][0])
key.params['range'].append(0)
key.params['freq'].append(key.params['freq'][0])
key.params['antennaGain'].append(0)
key.params['txpower'].append(14)
intfrefs.append(key.wmIface[len(key.wmIface) - 1])
isnodeaps.append(0)'''
if wmediumd_mode.mode == w_cst.INTERFERENCE_MODE:
set_interference()
elif wmediumd_mode.mode == w_cst.SPECPROB_MODE:
spec_prob_link()
elif wmediumd_mode.mode == w_cst.ERRPROB_MODE:
set_error_prob()
else:
set_snr()
start_wmediumd(intfrefs, wmediumd.links, wmediumd.positions,
fading_coefficient, noise_threshold,
wmediumd.txpowers, isnodeaps, propagation_model,
maclist)
class start_wmediumd(object):
def __init__(cls, intfrefs, links, positions,
fading_coefficient, noise_threshold, txpowers, isnodeaps,
propagation_model, maclist):
w_starter.start(intfrefs, links, pos=positions,
fading_coefficient=fading_coefficient,
noise_threshold=noise_threshold,
txpowers=txpowers, isnodeaps=isnodeaps,
ppm=propagation_model, maclist=maclist)
class set_interference(object):
def __init__(self):
self.interference()
@classmethod
def interference(cls):
'configure interference model'
for node in wmediumd.nodes:
if not hasattr(node, 'position'):
posX, posY, posZ = 0, 0, 0
else:
posX = float(node.position[0])
posY = float(node.position[1])
posZ = float(node.position[2])
node.lastpos = [posX, posY, posZ]
for wlan, intf in enumerate(node.wintfs.values()):
if wlan >= 1:
posX += 0.1
wmediumd.positions.append(w_pos(intf.wmIface,
[posX, posY, posZ]))
wmediumd.txpowers.append(w_txpower(
intf.wmIface, float(intf.txpower)))
class spec_prob_link(object):
"wmediumd: spec prob link"
def __init__(self):
'do nothing'
class set_error_prob(object):
"wmediumd: set error prob"
def __init__(self):
self.error_prob()
@classmethod
def error_prob(cls):
"wmediumd: error prob"
for node in wmediumd.wlinks:
wmediumd.links.append(ERRPROBLink(node[0].wintfs[0].wmIface,
node[1].wintfs[0].wmIface, node[2]))
wmediumd.links.append(ERRPROBLink(node[1].wintfs[0].wmIface,
node[0].wintfs[0].wmIface, node[2]))
class set_snr(object):
"wmediumd: set snr"
def __init__(self):
self.snr()
@classmethod
def snr(cls):
"wmediumd: snr"
for node in wmediumd.wlinks:
wmediumd.links.append(SNRLink(node[0].wintfs[0].wmIface, node[1].wintfs[0].wmIface,
node[0].wintfs[0].rssi - (-91)))
wmediumd.links.append(SNRLink(node[1].wintfs[0].wmIface, node[0].wintfs[0].wmIface,
node[0].wintfs[0].rssi - (-91)))
class wirelessLink(object):
dist = 0
noise = 0
equationLoss = '(dist * 2) / 1000'
equationDelay = '(dist / 10) + 1'
equationLatency = '(dist / 10)/2'
equationBw = ' * (1.01 ** -dist)'
def __init__(self, intf, dist=0):
latency_ = self.getLatency(dist)
loss_ = self.getLoss(dist)
bw_ = self.getBW(intf, dist)
self.config_tc(intf, bw_, loss_, latency_)
def getDelay(self, dist):
"Based on RandomPropagationDelayModel"
return eval(self.equationDelay)
def getLatency(self, dist):
return eval(self.equationLatency)
def getLoss(self, dist):
return eval(self.equationLoss)
def getBW(self, intf, dist):
# dist is used by eval
custombw = intf.getCustomRate()
rate = eval(str(custombw) + self.equationBw)
if rate <= 0.0:
rate = 0.1
return rate
@classmethod
def delete(cls, node):
"Delete interfaces"
for intf in node.wintfs.values():
node.cmd('iw dev ' + intf.name + ' del')
node.delIntf(intf.name)
node.intf = None
@classmethod
def config_tc(cls, intf, bw, loss, latency):
if intf.ifb:
cls.tc(intf.node, intf.ifb, bw, loss, latency)
cls.tc(intf.node, intf.name, bw, loss, latency)
@classmethod
def tc(cls, node, iface, bw, loss, latency):
cmd = "tc qdisc replace dev %s root handle 2: netem " % iface
rate = "rate %.4fmbit " % bw
cmd += rate
if latency > 0.1:
latency = "latency %.2fms " % latency
cmd += latency
if loss > 0.1:
loss = "loss %.1f%% " % loss
cmd += loss
node.pexec(cmd)
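
# Hedged worked example of the wirelessLink equations above, for an interface
# in mode 'g' (custom rate 11 Mbit/s) at dist = 100:
#   latency = (100 / 10) / 2      = 5.0 ms
#   loss    = (100 * 2) / 1000    = 0.2 %
#   bw      = 11 * (1.01 ** -100) ~ 4.07 Mbit/s
# Both latency and loss exceed 0.1, so tc() issues roughly:
#   tc qdisc replace dev <iface> root handle 2: netem rate 4.0670mbit latency 5.00ms loss 0.2%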
class ITSLink(IntfWireless):
def __init__(self, node, intf=None, channel=161):
"configure ieee80211p"
self.node = node
if isinstance(intf, BaseString):
wlan = node.params['wlan'].index(intf)
intf = node.wintfs[wlan]
else:
wlan = intf.id
if isinstance(self, master):
self.kill_hostapd()
self.channel = channel
self.freq = self.get_freq()
self.range = intf.range
self.name = intf.name
self.mac = intf.mac
if isinstance(intf, master):
self.name = '%s-ocb' % node.name
self.add_ocb_mode()
else:
self.set_ocb_mode()
#node.addWIntf(self, port=wlan)
node.addWAttr(self, port=wlan)
self.configure_ocb()
def kill_hostapd(self):
self.node.setManagedMode(self)
def add_ocb_mode(self):
"Set OCB Interface"
self.ipLink('down')
self.node.delIntf(self.name)
self.add_dev_type(self.name, 'ocb')
# we set the port to remove the existing wlan from node.intfs
IntfWireless(name=self.name, node=self.node, port=1)
self.setMAC(self.name)
self.ipLink('up')
def set_ocb_mode(self):
"Set OCB Interface"
self.ipLink('down')
self.set_dev_type('ocb')
self.ipLink('up')
def configure_ocb(self):
"Configure Wireless OCB"
self.iwdev_cmd('%s ocb join %s 20MHz' % (self.name, self.freq))
class wifiDirectLink(IntfWireless):
def __init__(self, node, intf=None):
"configure wifi-direct"
self.node = node
if isinstance(intf, BaseString):
wlan = node.params['wlan'].index(intf)
intf = node.wintfs[wlan]
else:
wlan = intf.id
self.mac = intf.mac
self.name = intf.name
self.range = intf.range
self.txpower = intf.txpower
self.ip6 = intf.ip6
self.ip = intf.ip
node.addWIntf(self, port=wlan)
node.addWAttr(self, port=wlan)
self.config_()
cmd = self.get_wpa_cmd()
node.cmd(cmd)
def get_filename(self):
suffix = 'wifiDirect.conf'
filename = "mn%d_%s_%s" % (os.getpid(), self.name, suffix)
return filename
def get_wpa_cmd(self):
filename = self.get_filename()
cmd = ('wpa_supplicant -B -Dnl80211 -c%s -i%s' %
(filename, self.name))
return cmd
def config_(self):
filename = self.get_filename()
cmd = ("echo \'")
cmd += 'ctrl_interface=/var/run/wpa_supplicant\
\nap_scan=1\
\np2p_go_ht40=1\
\ndevice_name=%s\
\ndevice_type=1-0050F204-1\
\np2p_no_group_iface=1' % self.name
cmd += ("\' > %s" % filename)
self.set_config(cmd)
def set_config(self, cmd):
subprocess.check_output(cmd, shell=True)
class physicalWifiDirectLink(wifiDirectLink):
def __init__(self, node, intf=None):
"configure wifi-direct"
self.name = intf
node.addWIntf(self)
node.addWAttr(self)
for wlan, intf in enumerate(node.wintfs.values()):
if intf.name == self.name:
break
self.txpower = node.wintfs[0].txpower
self.mac = None
self.config_()
cmd = self.get_wpa_cmd()
os.system(cmd)
class adhoc(IntfWireless):
node = None
def __init__(self, node, intf=None, ssid='adhocNet',
channel=1, mode='g', passwd=None, ht_cap='',
proto=None, **params):
"""Configure AdHoc
node: name of the node
self: custom association class/constructor
params: parameters for station"""
self.node = node
if isinstance(intf, BaseString):
wlan = node.params['wlan'].index(intf)
intf = node.wintfs[wlan]
else:
wlan = intf.id
self.id = wlan
self.ssid = ssid
self.ip6 = intf.ip6
self.ip = intf.ip
self.mac = intf.mac
self.ip6 = intf.ip6
self.link = intf.link
self.encrypt = intf.encrypt
self.antennaGain = intf.antennaGain
self.passwd = passwd
self.mode = mode
self.channel = channel
self.ht_cap = ht_cap
self.associatedTo = 'adhoc'
if wmediumd_mode.mode:
self.wmIface = intf.wmIface
if 'mp' in intf.name:
self.iwdev_cmd('%s del' % intf.name)
node.params['wlan'][wlan] = intf.name.replace('mp', 'wlan')
self.name = intf.name
node.addWIntf(self, port=wlan)
node.addWAttr(self, port=wlan)
self.freq = self.get_freq()
self.setReg()
self.configureAdhoc()
self.txpower = intf.txpower
self.range = intf.range
if proto:
manetProtocols(intf, proto, **params)
def configureAdhoc(self):
"Configure Wireless Ad Hoc"
self.set_dev_type('ibss')
self.ipLink('up')
if self.passwd:
self.setSecuredAdhoc()
else:
self.join_ibss()
def get_sta_confname(self):
fileName = '%s.staconf' % self.name
return fileName
def setSecuredAdhoc(self):
"Set secured adhoc"
cmd = 'ctrl_interface=/var/run/wpa_supplicant GROUP=wheel\n'
cmd += 'ap_scan=2\n'
cmd += 'network={\n'
cmd += ' ssid="%s"\n' % self.ssid
cmd += ' mode=1\n'
cmd += ' frequency=%s\n' % self.format_freq()
cmd += ' proto=RSN\n'
cmd += ' key_mgmt=WPA-PSK\n'
cmd += ' pairwise=CCMP\n'
cmd += ' group=CCMP\n'
cmd += ' psk="%s"\n' % self.passwd
cmd += '}'
fileName = self.get_sta_confname()
os.system('echo \'%s\' > %s' % (cmd, fileName))
class mesh(IntfWireless):
node = None
def __init__(self, node, intf=None, mode='g', channel=1,
ssid='meshNet', passwd=None, ht_cap=''):
from mn_wifi.node import AP
"""Configure wireless mesh
node: name of the node
self: custom association class/constructor
params: parameters for node"""
self.node = node
if isinstance(intf, BaseString):
wlan = node.params['wlan'].index(intf)
intf = node.wintfs[wlan]
else:
wlan = intf.id
iface = intf
        self.name = '%s-mp%s' % (node, intf.name[-1:])
self.id = wlan
self.mac = intf.mac
self.ip6 = intf.ip6
self.ip = intf.ip
self.link = intf.link
self.txpower = intf.txpower
self.encrypt = intf.encrypt
self.antennaGain = intf.antennaGain
self.stationsInRange = intf.stationsInRange
self.associatedStations = intf.associatedStations
self.range = intf.range
self.ssid = ssid
self.mode = mode
self.channel = channel
self.ht_cap = ht_cap
self.passwd = passwd
self.associatedTo = 'mesh'
if wmediumd_mode.mode:
self.wmIface = DynamicIntfRef(node, intf=self.name)
node.addWAttr(self, port=wlan)
if isinstance(node, AP):
node.addWIntf(self, port=wlan+1)
else:
node.addWIntf(self, port=wlan)
self.setMeshIface(wlan, iface)
self.configureMesh()
def set_mesh_type(self, intf):
return '%s interface add %s type mp' % (intf.name, self.name)
def setMeshIface(self, wlan, intf):
if isinstance(intf, adhoc):
self.set_dev_type('managed')
self.iwdev_cmd(self.set_mesh_type(intf))
self.node.cmd('ip link set %s down' % intf)
self.setMAC(intf.mac)
self.node.params['wlan'][wlan] = self.name
self.setChannel()
self.setReg()
self.ipLink('up')
def configureMesh(self):
"Configure Wireless Mesh Interface"
if self.passwd:
self.setSecuredMesh()
else:
self.join_mesh()
def get_sta_confname(self):
fileName = '%s.staconf' % self.name
return fileName
def setSecuredMesh(self):
"Set secured mesh"
cmd = 'ctrl_interface=/var/run/wpa_supplicant\n'
cmd += 'ctrl_interface_group=adm\n'
cmd += 'user_mpm=1\n'
cmd += 'network={\n'
cmd += ' ssid="%s"\n' % self.ssid
cmd += ' mode=5\n'
cmd += ' frequency=%s\n' % self.format_freq()
cmd += ' key_mgmt=SAE\n'
cmd += ' psk="%s"\n' % self.passwd
cmd += '}'
fileName = self.get_sta_confname()
os.system('echo \'%s\' > %s' % (cmd, fileName))
class physicalMesh(IntfWireless):
def __init__(self, node, intf=None, channel=1, ssid='meshNet',
ht_cap=''):
"""Configure wireless mesh
node: name of the node
self: custom association class/constructor
params: parameters for node"""
wlan = 0
self.name = ''
self.node = node
self.ssid = ssid
self.ht_cap = ht_cap
self.channel = channel
node.wintfs[wlan].ssid = ssid
if int(node.wintfs[wlan].range) == 0:
intf = node.params['wlan'][wlan]
node.wintfs[wlan].range = node.getRange(intf, 95)
self.name = intf
self.setPhysicalMeshIface(node, wlan, intf)
self.freq = self.format_freq()
self.join_mesh()
def ipLink(self, state=None):
"Configure ourselves using ip link"
os.system('ip link set %s %s' % (self.name, state))
def setPhysicalMeshIface(self, node, wlan, intf):
iface = 'phy%s-mp%s' % (node, wlan)
self.ipLink('down')
while True:
id = ''
cmd = 'ip link show | grep %s' % iface
try:
id = subprocess.check_output(cmd, shell=True).split("\n")
except:
pass
if len(id) == 0:
cmd = ('iw dev %s interface add %s type mp' %
(intf, iface))
self.name = iface
subprocess.check_output(cmd, shell=True)
else:
try:
if self.channel:
cmd = ('iw dev %s set channel %s' %
(iface, self.channel))
subprocess.check_output(cmd, shell=True)
self.ipLink('up')
command = ('iw dev %s mesh join %s' % (iface, self.ssid))
subprocess.check_output(command, shell=True)
break
except:
break
class Association(IntfWireless):
@classmethod
def setSNRWmediumd(cls, sta, ap, snr):
"Send SNR to wmediumd"
w_server.send_snr_update(SNRLink(sta.wintfs[0].wmIface,
ap.wintfs[0].wmIface, snr))
w_server.send_snr_update(SNRLink(ap.wintfs[0].wmIface,
sta.wintfs[0].wmIface, snr))
@classmethod
def configureWirelessLink(cls, intf, ap_intf):
dist = intf.node.get_distance_to(ap_intf.node)
if dist <= ap_intf.range:
if not wmediumd_mode.mode == w_cst.INTERFERENCE_MODE:
if intf.rssi == 0:
cls.updateClientParams(intf, ap_intf)
if ap_intf != intf.associatedTo or \
not intf.associatedTo:
cls.associate_infra(intf, ap_intf)
if wmediumd_mode.mode == w_cst.WRONG_MODE:
if dist >= 0.01:
wirelessLink(intf, dist)
if intf.node != ap_intf.associatedStations:
ap_intf.associatedStations.append(intf.node)
if not wmediumd_mode.mode == w_cst.INTERFERENCE_MODE:
cls.get_rssi(intf, ap_intf, dist)
@classmethod
def get_rssi(cls, intf, ap_intf, dist):
from mn_wifi.propagationModels import propagationModel
rssi = float(propagationModel(intf, ap_intf, dist).rssi)
intf.rssi = rssi
if ap_intf.node not in intf.apsInRange:
intf.apsInRange[ap_intf.node] = rssi
ap_intf.stationsInRange[intf.node] = rssi
return rssi
@classmethod
def updateClientParams(cls, intf, ap_intf):
intf.freq = ap_intf.freq
intf.channel = ap_intf.channel
intf.mode = ap_intf.mode
intf.ssid = ap_intf.ssid
intf.range = intf.node.getRange(intf)
@classmethod
def associate(cls, intf, ap_intf):
"Associate to Access Point"
if hasattr(intf.node, 'position'):
cls.configureWirelessLink(intf, ap_intf)
else:
cls.associate_infra(intf, ap_intf)
@classmethod
def associate_noEncrypt(cls, intf, ap_intf):
#iwconfig is still necessary, since iw doesn't include essid like iwconfig does.
intf.node.pexec(cls.iwconfig_con(intf, ap_intf))
debug('\n')
@classmethod
def iwconfig_con(cls, intf, ap_intf):
cmd = 'iwconfig %s essid %s ap %s' % (intf, ap_intf.ssid, ap_intf.mac)
return cmd
@classmethod
def disconnect(cls, intf):
intf.node.pexec('iw dev %s disconnect' % intf.name)
intf.rssi = 0
intf.associatedTo = ''
intf.channel = 0
@classmethod
def associate_infra(cls, intf, ap_intf):
associated = 0
if ap_intf.ieee80211r and (not intf.encrypt or 'wpa' in intf.encrypt):
if not intf.associatedTo:
cmd = ('ps -aux | grep %s.staconf | wc -l' % intf.name)
address = subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True)
(out, err) = address.communicate()
np = int(str(out).split('\n')[0])
if np == 0 or np == 2:
cls.wpa(intf, ap_intf)
else:
cls.handover_ieee80211r(intf, ap_intf)
else:
cls.handover_ieee80211r(intf, ap_intf)
associated = 1
elif not ap_intf.encrypt:
associated = 1
cls.associate_noEncrypt(intf, ap_intf)
else:
if not intf.associatedTo:
if 'wpa' in ap_intf.encrypt and (not intf.encrypt or 'wpa' in intf.encrypt):
cls.wpa(intf, ap_intf)
associated = 1
elif ap_intf.encrypt == 'wep':
cls.wep(intf, ap_intf)
associated = 1
if associated:
cls.update(intf, ap_intf)
@classmethod
def wpaFile(cls, intf, ap_intf):
cmd = ''
if not ap_intf.config or not intf.config:
if not ap_intf.authmode:
if not intf.passwd:
passwd = ap_intf.passwd
else:
passwd = intf.passwd
if 'wpasup_globals' not in intf.node.params \
or ('wpasup_globals' in intf.node.params
and 'ctrl_interface=' not in intf.node.params['wpasup_globals']):
cmd = 'ctrl_interface=/var/run/wpa_supplicant\n'
if ap_intf.wps_state and not intf.passwd:
cmd += 'ctrl_interface_group=0\n'
cmd += 'update_config=1\n'
else:
if 'wpasup_globals' in intf.node.params:
cmd += intf.node.params['wpasup_globals'] + '\n'
cmd = cmd + 'network={\n'
if intf.config:
config = intf.config
if config is not []:
config = intf.config.split(',')
intf.node.params.pop("config", None)
for conf in config:
cmd += " " + conf + "\n"
else:
cmd += ' ssid=\"%s\"\n' % ap_intf.ssid
if not ap_intf.authmode:
cmd += ' psk=\"%s\"\n' % passwd
encrypt = ap_intf.encrypt
if ap_intf.encrypt == 'wpa3':
encrypt = 'wpa2'
cmd += ' proto=%s\n' % encrypt.upper()
cmd += ' pairwise=%s\n' % ap_intf.rsn_pairwise
if intf.active_scan:
cmd += ' scan_ssid=1\n'
if intf.scan_freq:
cmd += ' scan_freq=%s\n' % intf.scan_freq
if intf.freq_list:
cmd += ' freq_list=%s\n' % intf.freq_list
wpa_key_mgmt = ap_intf.wpa_key_mgmt
if ap_intf.encrypt == 'wpa3':
wpa_key_mgmt = 'SAE'
cmd += ' key_mgmt=%s\n' % wpa_key_mgmt
if 'bgscan_threshold' in intf.node.params:
if 'bgscan_module' not in intf.node.params:
intf.node.params['bgscan_module'] = 'simple'
bgscan = 'bgscan=\"%s:%d:%d:%d\"' % \
(intf.bgscan_module, intf.s_inverval,
intf.bgscan_threshold, intf.l_interval)
cmd += ' %s\n' % bgscan
if ap_intf.authmode == '8021x':
cmd += ' eap=PEAP\n'
cmd += ' identity=\"%s\"\n' % intf.radius_identity
cmd += ' password=\"%s\"\n' % intf.radius_passwd
cmd += ' phase2=\"autheap=MSCHAPV2\"\n'
cmd += '}'
fileName = '%s.staconf' % intf.name
os.system('echo \'%s\' > %s' % (cmd, fileName))
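
    # Hedged illustration of the <intf>.staconf file wpaFile() writes for a
    # plain WPA2 AP (ssid 'ssid-ap1', psk '123456789a', wpa_key_mgmt WPA-PSK,
    # rsn_pairwise CCMP -- all values invented for the example):
    #
    #   ctrl_interface=/var/run/wpa_supplicant
    #   network={
    #            ssid="ssid-ap1"
    #            psk="123456789a"
    #            proto=WPA2
    #            pairwise=CCMP
    #            key_mgmt=WPA-PSK
    #   }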
@classmethod
def wpa(cls, intf, ap_intf):
cls.wpaFile(intf, ap_intf)
intf.wpa_pexec()
@classmethod
def handover_ieee80211r(cls, intf, ap_intf):
intf.node.pexec('wpa_cli -i %s roam %s' % (intf.name, ap_intf.mac))
@classmethod
def wep(cls, intf, ap_intf):
if not intf.passwd:
passwd = ap_intf.passwd
else:
passwd = intf.passwd
cls.wep_connect(passwd, intf, ap_intf)
@classmethod
def wep_connect(cls, passwd, intf, ap_intf):
intf.node.pexec('iw dev %s connect %s key d:0:%s' % (intf.name, ap_intf.ssid, passwd))
@classmethod
def update(cls, intf, ap_intf):
no_upt = ['active_scan', 'bgscan']
if intf.associatedTo not in no_upt:
if intf.associatedTo \
and intf.node in ap_intf.associatedStations:
ap_intf.associatedStations.remove(intf.node)
cls.updateClientParams(intf, ap_intf)
ap_intf.associatedStations.append(intf.node)
intf.associatedTo = ap_intf.node

# =============================================================================
# File: dynamodb.py
# Repo: carbonox-infernox/boto3-examples (license: MIT)
# =============================================================================
import boto3
import time
from boto3.dynamodb.conditions import Key
from boto3.dynamodb.conditions import Attr
class DynamoDB(object):
"""docstring for DynamoDB"""
def __init__(self, arg):
region_name = self.kwargs.get('region_name', 'us-east-1')
self.conn = boto3.resource('dynamodb', region_name=region_name)
def batch_write(self, table_name, items):
"""
Batch write items to given table name
"""
dynamodb = self.conn
table = dynamodb.Table(table_name)
with table.batch_writer() as batch:
for item in items:
batch.put_item(Item=item)
return True
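
    # Hedged usage sketch (table and item shapes are invented):
    #
    #     db = DynamoDB(region_name='us-east-1')
    #     db.batch_write('events', [
    #         {'uuid': 'a1', 'date': '2017-02-12'},
    #         {'uuid': 'a2', 'date': '2017-02-13'},
    #     ])
    #
    # batch_writer() buffers the put_item calls and flushes them in chunks of
    # up to 25 items per BatchWriteItem request, retrying unprocessed items.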
def insert_item(self, table_name, item):
"""Insert an item to table"""
dynamodb = self.conn
table = dynamodb.Table(table_name)
response = table.put_item(Item=item)
if response['ResponseMetadata']['HTTPStatusCode'] == 200:
return True
else:
return False
def get_item(self, table_name, query_item):
"""
Get an item given its key
"""
dynamodb = self.conn
table = dynamodb.Table(table_name)
response = table.get_item(
Key=query_item
)
item = response['Item']
return item
def update_item(self, table_name, key_dict, update_dict):
"""
Update an item.
PARAMS
@table_name: name of the table
@key_dict: dict containing the key name and val eg. {"uuid": item_uuid}
@update_dict: dict containing the key name and val of
attributes to be updated
eg. {"attribute": "processing_status", "value": "completed"}
"""
dynamodb = self.conn
table = dynamodb.Table(table_name)
update_expr = 'SET {} = :val1'.format(update_dict['attribute'])
response = table.update_item(
Key=key_dict,
UpdateExpression=update_expr,
ExpressionAttributeValues={
':val1': update_dict['value']
}
)
if response['ResponseMetadata']['HTTPStatusCode'] == 200:
return True
else:
return False
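
    # Hedged usage sketch for update_item(), matching the dict shapes the
    # docstring describes (table and attribute names are invented):
    #
    #     db.update_item(
    #         'events',
    #         key_dict={'uuid': '077f4450-96ee-4ba8-8faa-831f6350a860'},
    #         update_dict={'attribute': 'processing_status',
    #                      'value': 'completed'},
    #     )
    #
    # which issues UpdateExpression "SET processing_status = :val1" with
    # :val1 bound to 'completed'.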
def query_item(
self, table_name, sort_key, partition_key,
index_name=None, total_items=None, start_key=None,
table=None
):
"""
Query for an item with or without using global secondary index
PARAMS:
@table_name: name of the table
@sort_key: Dict containing key and val of sort key
e.g. {'name': 'uuid', 'value': '077f4450-96ee-4ba8-8faa-831f6350a860'}
@partition_key: Dict containing key and val of partition key
e.g. {'name': 'date', 'value': '2017-02-12'}
@index_name (optional): Name of the Global Secondary Index
"""
if not table:
dynamodb = self.conn
table = dynamodb.Table(table_name)
sk = sort_key['name']
skv = sort_key['value']
pk = partition_key['name']
pkv = partition_key['value']
if not start_key:
if index_name:
response = table.query(
IndexName=index_name,
KeyConditionExpression=Key(sk).eq(skv) &
Key(pk).eq(pkv)
)
else:
response = table.query(
KeyConditionExpression=Key(sk).eq(skv) &
Key(pk).eq(pkv)
)
else:
if index_name:
response = table.query(
IndexName=index_name,
KeyConditionExpression=Key(sk).eq(skv) &
Key(pk).eq(pkv),
ExclusiveStartKey=start_key
)
else:
response = table.query(
KeyConditionExpression=Key(sk).eq(skv) &
Key(pk).eq(pkv),
ExclusiveStartKey=start_key
)
if not total_items:
total_items = response['Items']
else:
total_items.extend(response['Items'])
if response.get('LastEvaluatedKey'):
start_key = response['LastEvaluatedKey']
return_items = self.query_item(
table_name=table_name, sort_key=sort_key,
partition_key=partition_key, total_items=total_items,
start_key=start_key, table=table
)
return return_items
else:
return total_items
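    # Example (illustrative table and index names; the key dicts follow the
    # docstring above). Pagination via LastEvaluatedKey is handled by the
    # recursive call.
    #
    #   items = db.query_item(
    #       'events',
    #       sort_key={'name': 'uuid', 'value': '077f4450-96ee-4ba8-8faa-831f6350a860'},
    #       partition_key={'name': 'date', 'value': '2017-02-12'},
    #       index_name='date-index')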
def scan_item(
self, table_name, attr1, attr2,
total_items=None, start_key=None,
table=None
):
"""
Scan for an item with two attributes
NOTE: SCAN OPERATION SCANS THE WHOLE TABLE AND TAKES CONSIDERABLE
AMOUNT OF TIME, CONSUMES HIGH READ THROUGHPUT.
AVOID USING THIS AS MUCH AS YOU CAN.
TRY CREATING INDEX AND USE QUERY IF POSSIBLE
PARAMS:
@table_name: name of the table
@attr1: Dict containing key and val of first attribute
e.g. {'name': 'uuid', 'value': '077f4450-96ee-4ba8-8faa-831f6350a860'}
@attr2: Dict containing key and val of second attribute
e.g. {'name': 'date', 'value': '2017-02-12'}
"""
if not table:
dynamodb = self.conn
table = dynamodb.Table(table_name)
a1 = attr1['name']
a1v = attr1['value']
a2 = attr2['name']
a2v = attr2['value']
        if not start_key:
            response = table.scan(
                FilterExpression=Attr(a1).eq(a1v) &
                Attr(a2).eq(a2v)
            )
        else:
            response = table.scan(
                FilterExpression=Attr(a1).eq(a1v) &
                Attr(a2).eq(a2v),
                ExclusiveStartKey=start_key
            )
if not total_items:
total_items = response['Items']
else:
total_items.extend(response['Items'])
if response.get('LastEvaluatedKey'):
start_key = response['LastEvaluatedKey']
            return_items = self.scan_item(
table_name=table_name, attr1=attr1,
attr2=attr2, total_items=total_items,
start_key=start_key, table=table
)
return return_items
else:
return total_items
def delete_item(self, table_name, item_key):
"""
delete an item
PARAMS
@table_name: name of the table
@item_key: dict containing key and val of sort key
e.g. {'name': 'uuid', 'value': 'some-uuid-val'}
"""
dynamodb = self.conn
table = dynamodb.Table(table_name)
response = table.delete_item(
Key={item_key['name']: item_key['value']}
)
if response['ResponseMetadata']['HTTPStatusCode'] == 200:
return True
else:
return False
def create_table(
self, table_name, hash_name,
read_throughput=5, write_throughput=5
):
"""
Create the DynamoDB table.
NOTE: THIS IS A DEMO TABLE WITH ONLY A HASH KEY of type String.
"""
dynamodb = self.conn
table = dynamodb.create_table(
TableName=table_name,
KeySchema=[
{
'AttributeName': hash_name,
'KeyType': 'HASH'
}
],
AttributeDefinitions=[
{
'AttributeName': hash_name,
'AttributeType': 'S'
},
],
ProvisionedThroughput={
'ReadCapacityUnits': read_throughput,
'WriteCapacityUnits': write_throughput
}
)
if table:
print("Success !")
return table
def delete_all_items(self, table_name, hash_name):
"""
Delete all items in a table by recreating the table.
"""
dynamodb = self.conn
try:
table = dynamodb.Table(table_name)
table.delete()
        except Exception:
print(
"Error in deletion. Table {} does not exist.".format(
table_name))
# allow time for table deletion
time.sleep(5)
try:
table = self.create_table(table_name, hash_name=hash_name)
        except Exception:
print("Error in creating table {}".format(table_name))
| [
"[email protected]"
] | |
19cb10690d7d4a203167f0b12a733712a949675d | dd208e5d00cce0a5a38d881af8a59aaeb532e44b | /Python_level_1/Python_08/loto.py | 49752d4fa5212f70730115d5ae6aa85bf544d99e | [] | no_license | dKosarevsky/geekbrains | 93324fc8c70db93f253ba844185ad2ef83126e6c | c8eedfe8a89ff482a075a8506a821c22a08995a1 | refs/heads/master | 2020-04-19T06:28:56.425038 | 2019-02-03T05:58:19 | 2019-02-03T05:58:19 | 168,019,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,894 | py | #!/usr/bin/python3
"""
== Lotto ==
Rules of the lotto game.
The game is played with special cards marked with numbers
and with numbered counters (kegs).
There are 90 kegs (numbered from 1 to 90).
Each card has 3 rows of 9 cells. Every row holds 5 random numbers
arranged in ascending order. All numbers on a card are unique. Example card:
--------------------------
 9 43 62 74 90
 2 27 75 78 82
41 56 63 76 86
--------------------------
There are 2 players in the game: the user and the computer. At the start
each of them is dealt a random card.
On every turn one random keg is drawn and shown on the screen.
The player's card and the computer's card are shown as well.
The user is asked to cross the number out on the card or to continue.
If the player chose "cross out":
    If the number is on the card, it is crossed out and the game continues.
    If the number is not on the card, the player loses and the game ends.
If the player chose "continue":
    If the number is on the card, the player loses and the game ends.
    If the number is not on the card, the game continues.
The first one to close all the numbers on their card wins.
Example of a single turn:
New keg: 70 (76 left)
------ Your card -----
 6  7 49 57 58
14 26  - 78 85
23 33 38 48 71
--------------------------
-- Computer's card ---
 7 87  - 14 11
16 49 55 88 77
15 20  - 76  -
--------------------------
Cross the number out? (y/n)
Hint: it is convenient to draw each next random keg from the bag
with a generator function.
Hint: the random module is handy for pseudo-random numbers:
http://docs.python.org/3/library/random.html
"""
import random
class LotoGame:
def __init__(self, player, computer):
self._player = player
self._computer = computer
        # random.sample here gives numbers that do not repeat
NUMBERS_COUNT = 90
MAX_NUMBER = 90
self._numbers_in_bag = random.sample(range(1, MAX_NUMBER + 1), NUMBERS_COUNT)
def _get_number(self):
return self._numbers_in_bag.pop()
def start(self):
for _ in range(len(self._numbers_in_bag)):
print(self._player, self._computer)
number = self._get_number()
            print('New keg {}, {} left'.format(number, len(self._numbers_in_bag)))
            choice = input('Cross it out? y/n:\n')
if choice == 'y':
                # Cross the number out if it is there; if it is not there but
                # the player tried anyway, the player loses.
                if not self._player.try_stroke_number(number):
                    print('The player lost!')
                    break
            elif self._player.has_number(number):
                print('The player lost!')
                break
            # The computer never makes a mistake =)
if self._computer.has_number(number):
self._computer.try_stroke_number(number)
class LotoCard:
def __init__(self, player_type):
self.player_type = player_type
self._card = [[],
[],
[]]
self._MAX_NUMBER = 90
self._MAX_NUMBERS_IN_CARD = 15
self._numbers_stroked = 0
NEED_SPACES = 4
NEED_NUMBERS = 5
        # Numbers for the future lotto card
        self._numbers = random.sample(range(1, self._MAX_NUMBER + 1), self._MAX_NUMBERS_IN_CARD)
        # Loop that inserts the blanks and the numbers into our card
for line in self._card:
for _ in range(NEED_SPACES):
line.append(' ')
for _ in range(NEED_NUMBERS):
line.append(self._numbers.pop())
        # This helper returns either the number that is actually on the row, or a random one, so that the blanks end up placed randomly.
def check_sort_item(item):
if isinstance(item, int):
return item
return random.randint(1, self._MAX_NUMBER)
        # Here we sort each of the inner lists (the card rows)
for index, line in enumerate(self._card):
self._card[index] = sorted(line, key=check_sort_item)
def has_number(self, number):
for line in self._card:
if number in line:
return True
return False
def try_stroke_number(self, number):
for index, line in enumerate(self._card):
for num_index, number_in_card in enumerate(line):
if number == number_in_card:
self._card[index][num_index] = '-'
self._numbers_stroked += 1
if self._numbers_stroked >= self._MAX_NUMBERS_IN_CARD:
                        raise Exception('{} won!'.format(self.player_type))
return True
return False
# TODO: rjust
    # Method for the string representation of the object
def __str__(self):
header = '\n{}:\n--------------------------'.format(self.player_type)
body = '\n'
for line in self._card:
for field in line:
body += str(field) + ' '
if len(str(field)) < 2:
body += ' '
body += '\n'
return header + body
human_player = LotoCard('Player')
computer_player = LotoCard('Computer')
game = LotoGame(human_player, computer_player)
game.start()
| [
"[email protected]"
] | |
89bf75f9a8e4be71ec628bf92194328a5ded7fb6 | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/AntfortuneContentCommunitySegmentRealtimeSaveModel.py | a28175aafc016fa2026cab857b02fdab2b36cc34 | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 1,970 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AntfortuneContentCommunitySegmentRealtimeSaveModel(object):
def __init__(self):
self._live_id = None
self._request_time = None
self._segment_info = None
@property
def live_id(self):
return self._live_id
@live_id.setter
def live_id(self, value):
self._live_id = value
@property
def request_time(self):
return self._request_time
@request_time.setter
def request_time(self, value):
self._request_time = value
@property
def segment_info(self):
return self._segment_info
@segment_info.setter
def segment_info(self, value):
self._segment_info = value
def to_alipay_dict(self):
params = dict()
if self.live_id:
if hasattr(self.live_id, 'to_alipay_dict'):
params['live_id'] = self.live_id.to_alipay_dict()
else:
params['live_id'] = self.live_id
if self.request_time:
if hasattr(self.request_time, 'to_alipay_dict'):
params['request_time'] = self.request_time.to_alipay_dict()
else:
params['request_time'] = self.request_time
if self.segment_info:
if hasattr(self.segment_info, 'to_alipay_dict'):
params['segment_info'] = self.segment_info.to_alipay_dict()
else:
params['segment_info'] = self.segment_info
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AntfortuneContentCommunitySegmentRealtimeSaveModel()
if 'live_id' in d:
o.live_id = d['live_id']
if 'request_time' in d:
o.request_time = d['request_time']
if 'segment_info' in d:
o.segment_info = d['segment_info']
return o
| [
"[email protected]"
] | |
34b1a25c0fb928d0dc95313261a2ca678f1a44bb | 307829c966febd27a486984d7477b9984a5acaf2 | /trunk/zKiosk.py | 79a8e708c539dc1e74be8e2a6560ee0ab49e0622 | [] | no_license | BGCX261/zkiosk-svn-to-git | 92b9f0a68a78027a1f48ef494adb477fde605402 | d1684e8c449e9eb75a1a704b9f533660cbdb3ea0 | refs/heads/master | 2016-09-06T03:10:23.633306 | 2015-08-25T15:54:31 | 2015-08-25T15:54:31 | 41,499,498 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,711 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import gtk
import pygtk
import webkit
import ConfigParser
from os import popen, path
from sys import path as spath
# Create the module-level variable used to read the configuration
cfg = ConfigParser.ConfigParser()
localpath = spath[0]
localpath += '/'  # get the directory the program runs from and append a trailing /
configpath = path.expanduser("~/.zkioskrc")
if path.exists(configpath):  # if the configuration file exists, read it
cfg.read(configpath)
else:
    configf = ConfigParser.ConfigParser()  # if it does not exist, create one with default values
configf.add_section("Biblio")
configf.set("Biblio", "web","http://148.204.48.96/uhtbin/webcat")
configf.set("Biblio", "theme", "gtkrc")
configfl = open(configpath, "wb") #''' Guarda el archivo que creamos '''
configf.write(configfl)
configfl.close()
cfg.read(configpath)
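# The generated ~/.zkioskrc then looks like this (illustrative rendering of
# the defaults written above):
#
#   [Biblio]
#   web = http://148.204.48.96/uhtbin/webcat
#   theme = gtkrc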
# Assign the configuration values to variables for later use
web = cfg.get("Biblio","web")
theme = cfg.get("Biblio","theme")
class zKiosk:
def __init__(self):
self.builder = gtk.Builder()
self.builder.add_from_file(localpath + 'zkiosk.ui')
self.window = self.builder.get_object('window')
self.browser = self.builder.get_object('Browser')
self.webview = webkit.WebView()
self.browser.add(self.webview)
        # Change the user-agent (for cosmetics and to identify the kiosk in statistics)
Settings = self.webview.get_settings()
useragent = Settings.get_property("user-agent")
useragent = useragent.replace(' Safari/',' zombieKiosk/DrunkEngine Safari/')
Settings.set_property("user-agent",useragent)
Settings.set_property("enable-plugins",False)
        # switch the window to full screen
maxx = gtk.gdk.screen_width()
maxy = gtk.gdk.screen_height()
self.window.set_size_request(maxx,maxy)
        # Parse the visual style (theme) file
gtk.rc_reset_styles(self.window.get_settings())
gtk.rc_parse(theme)
gtk.rc_add_default_file(theme)
gtk.rc_reparse_all()
        # show the window's widgets
self.window.show_all()
#-------DEBUG---------
self.webview.connect("navigation-policy-decision-requested",self.VerUri)
        # connect the window's buttons and events to the handler functions
self.builder.connect_signals(self)
def home(self, widget):
self.webview.load_uri(web)
def back(self, widget):
self.webview.go_back()
def fwd(self, widget):
self.webview.go_forward()
def refresh(self, widget):
self.webview.reload()
def about(self, widget):
        self.window.set_modal(False)  # release the main window's focus lock so the About dialog can be used
        self.About = self.builder.get_object('aboutb')  # grab the object for that dialog
        def openW(widget, url, url2):  # keeps the site from opening inside the About dialog
            print url
        gtk.about_dialog_set_url_hook(openW, "")  # keeps the site from opening inside the About dialog
        # Get the generated events
Response = self.About.run()
        # If the close button is pressed or the dialog is closed, hide it and restore focus to the main window
if Response == gtk.RESPONSE_DELETE_EVENT or Response == gtk.RESPONSE_CANCEL:
self.About.hide()
self.window.set_modal(True)
    def noclose(widget, event, data):  # prevents the main window from being closed
return True
def VerUri(self,view,frame,net_req,nav_act,pol_dec):
uri = net_req.get_uri()
if( "http://azul.bnct.ipn.mx" in uri ):
frame.load_uri(web)
return False
if __name__ == '__main__':
w = zKiosk()
popen("xsetroot -cursor_name left_ptr")
w.webview.load_uri(web)
gtk.main()
| [
"[email protected]"
] | |
26eb96764a6b103e7c9690e3de31eddfe3a87451 | 67ff994455d3369ab16839f24a21325571d0f86c | /outrigger/io/gtf.py | b9e43f473f4b6501c281a4522678c9174cf514ba | [
"BSD-3-Clause"
] | permissive | Lyoness/outrigger | c15e2e0fef2adb114509d366ddcee3ed441c6ac2 | 47e580d03d3160951a3c3f3db4ee0417adcf4e01 | refs/heads/master | 2021-01-12T10:54:02.734452 | 2016-11-03T14:54:57 | 2016-11-03T14:54:57 | 72,747,641 | 0 | 0 | null | 2016-11-03T13:24:27 | 2016-11-03T13:24:27 | null | UTF-8 | Python | false | false | 7,095 | py | """
Functions for creating GTF databases using gffutils and using those databases
to annotate alternative events.
"""
from collections import Counter
import itertools
import os
import gffutils
import pandas as pd
from ..common import SPLICE_TYPE_ISOFORM_EXONS
from ..region import Region
# Annotations from:
# ftp://ftp.sanger.ac.uk/pub/gencode/Gencode_human/release_19/gencode.v19.annotation.gtf.gz
gene_transcript = set(('gene', 'transcript'))
def maybe_analyze(db):
try:
# For gffutils >0.8.7.1
db.analyze()
except AttributeError:
        # For compatibility with gffutils<=0.8.7.1
db.execute('ANALYZE features')
def transform(f):
if f.featuretype in gene_transcript:
return f
else:
exon_location = '{}:{}:{}-{}:{}'.format(
f.featuretype, f.seqid, f.start, f.stop, f.strand)
exon_id = exon_location
if f.featuretype == 'CDS':
exon_id += ':' + f.frame
f.attributes['location_id'] = [exon_id]
return f
def create_db(gtf_filename, db_filename=None):
db_filename = ':memory:' if db_filename is None else db_filename
db = gffutils.create_db(
gtf_filename,
db_filename,
merge_strategy='merge',
id_spec={'gene': 'gene_id', 'transcript': 'transcript_id',
'exon': 'location_id', 'CDS': 'location_id',
'start_codon': 'location_id',
'stop_codon': 'location_id', 'UTR': 'location_id'},
transform=transform,
force=True,
verbose=True,
disable_infer_genes=True,
disable_infer_transcripts=True,
force_merge_fields=['source'])
maybe_analyze(db)
return db
class SplicingAnnotator(object):
"""Annotates basic features of splicing events: gene ids and names"""
def __init__(self, db, events, splice_type):
self.db = db
self.events = events
self.splice_type = splice_type
self.isoform_exons = SPLICE_TYPE_ISOFORM_EXONS[
self.splice_type.lower()]
self.exon_cols = list(set(itertools.chain(
*self.isoform_exons.values())))
self.exon_cols.sort()
# Make a dataframe with outrigger.Region objects
self.regions = pd.DataFrame(index=self.events.index)
self.region_cols = ['{}_region'.format(x) for x in self.exon_cols]
for exon_col, region_col in zip(self.exon_cols, self.region_cols):
self.regions[region_col] = self.events[exon_col].map(Region)
# Make introns and copy-pastable genome locations for the whole event
intron_regions = self.regions[self.region_cols].apply(
self.event_introns_regions, axis=1)
self.regions = pd.concat([self.regions, intron_regions], axis=1)
self.region_cols.extend(['intron_region', 'event_region'])
# Add the lengths of exons, introns, event region, and the genome
# location ("name") of each intron
self.lengths = self.regions.applymap(len)
self.lengths.columns = [x.replace('_region', '_length')
for x in self.lengths]
intron_names = intron_regions.applymap(lambda x: x.name)
intron_names.columns = [x.replace('_region', '_location')
for x in intron_names]
self.events = pd.concat([self.events, self.lengths, intron_names],
axis=1)
def attributes(self):
"""Retrieve all GTF attributes for each isoform's event"""
ignore_keys = 'location_id', 'exon_id', 'exon_number'
lines = []
for event_id, row in self.events.iterrows():
attributes = pd.Series(name=event_id)
for isoform, exons in self.isoform_exons.items():
for e in exons:
attributes[e] = row[e]
n_exons = len(exons)
exon_ids = row[exons]
keys = set(itertools.chain(
*[self.db[exon_id].attributes.keys()
for exon_id in exon_ids]))
for key in keys:
# Skip the location IDs which is specific to the
# outrigger-built database, and the exon ids which will
# never match up across all exons
if key in ignore_keys:
continue
values = Counter()
for exon_id in exon_ids:
try:
values.update(
self.db[exon_id].attributes[key])
except KeyError:
continue
if len(values) > 0:
# Only use attributes that came up in for all exons
# of the isoform
values = [value for value, count in values.items()
if count == n_exons]
new_key = isoform + '_' + key
attributes[new_key] = ','.join(sorted(values))
lines.append(attributes)
event_attributes = pd.concat(lines, axis=1).T
        events_with_attributes = pd.concat([self.events, event_attributes], axis=1)
return events_with_attributes
def exon_bedfiles(self, folder):
for region_col in self.region_cols:
column = self.regions[region_col]
lines = (region.to_bed_format(event_id)
for event_id, region in column.iteritems())
name = region_col.split('_')[0]
basename = name + '.bed'
filename = os.path.join(folder, basename)
with open(filename, 'w') as f:
f.write('\n'.join(lines) + '\n')
def event_introns_regions(self, exons):
"""Make intron and event regions for an event
Parameters
----------
exons : outrigger.Regions
List of exon ids, e.g. ["exon:chr1:100-200:+",
"exon:chr1:300-400:+"]
Returns
-------
        regions : pandas.Series
            Series with ``intron_region`` and ``event_region`` entries
"""
first_exon = exons[0]
last_exon = exons[-1]
chrom = first_exon.chrom
strand = first_exon.strand
if strand == '-':
intron_stop = first_exon.start
intron_start = last_exon.stop
event_start = last_exon.start
event_stop = first_exon.stop
else:
# If strand is positive or undefined
intron_start = first_exon.stop
intron_stop = last_exon.start
event_start = first_exon.start
event_stop = last_exon.stop
intron = Region('intron:{chrom}:{start}-{stop}:{strand}'.format(
chrom=chrom, start=intron_start, stop=intron_stop,
strand=strand))
event = Region('event:{chrom}:{start}-{stop}:{strand}'.format(
chrom=chrom, start=event_start, stop=event_stop, strand=strand))
regions = pd.Series(dict(intron_region=intron, event_region=event))
return regions
| [
"[email protected]"
] | |
e72c8dc1659c294049d2e5b7a9e8a0ddaaa897aa | 180a43f0b2a25fc32a2c8da5e933f71018b77559 | /apps/main/admin.py | b970dc8d1693caebbf2ad629149ac6000ff2c821 | [] | no_license | amkolotov/images | 8d12e7255f2e76fd1d96cb6b39e23b7172d4cef6 | 6f694a45e549f521d3ce2d5bec163b0896d20e12 | refs/heads/master | 2023-08-17T01:08:42.775734 | 2021-09-27T02:16:52 | 2021-09-27T02:16:52 | 410,167,163 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | from django.contrib import admin
from django.utils.html import format_html
from apps.main.models import Image
@admin.register(Image)
class ImageAdmin(admin.ModelAdmin):
list_display = ['id', 'image_tag', 'image', 'created_at', ]
ordering = ('-created_at', )
readonly_fields = ('image_tag',)
def image_tag(self, obj):
return format_html(f'<img src="{obj.image.url}" style="width:50px; height: 50px;" />')
| [
"[email protected]"
] | |
0ec4cb43f16c9adcda37e7ad7ba84bbe65b2c8db | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2583/47937/251092.py | 6dfa2f25d3b729e3992c9da983694acab5729614 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 413 | py | n=int(input())
a=int(input())
b=int(input())
c=int(input())
count=0
start=1
while 1:
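    # Hard-coded output for one oversized judge case that the brute-force
    # loop below cannot finish in time.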
if(c==336916467):
print(1999999984)
break
    # start is 2
if(start>=a and start%a==0):
count=count+1
elif(start>=b and start%b==0):
count=count+1
elif(start>=c and start%c==0):
count=count+1
if(count>=n):
print(start)
break
start=start+1
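# A faster alternative (sketch) that would avoid the hard-coded case above:
# binary-search the answer and count qualifying numbers with
# inclusion-exclusion (math.gcd is from the standard library):
#
#   from math import gcd
#   def lcm(x, y):
#       return x // gcd(x, y) * y
#   def count_upto(x):
#       return (x // a + x // b + x // c
#               - x // lcm(a, b) - x // lcm(b, c) - x // lcm(a, c)
#               + x // lcm(a, lcm(b, c)))
#   # bisect on x for the smallest value with count_upto(x) >= n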
| [
"[email protected]"
] | |
968199492c796a042027a67c99335cf50c3089e1 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2945/60796/276733.py | 84c0321de854ff512ad2e736ae9e804c3b0bd2b6 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 946 | py | s=input()
i=0
boy=0
girl=0
while i<len(s):
isBoy=False
isgirl=False
if s[i]=='b' or s[i]=='o' or s[i]=='y':
isBoy=True
boy=boy+1
ind="boy".index(s[i])
elif s[i]=='g' or s[i]=='i' or s[i]=='r' or s[i]=='l':
isgirl=True
girl=girl+1
ind="girl".index(s[i])
if isBoy:
if ind<2:
j=1
while i+j<len(s):
if s[i+j]!="boy"[ind+j]:
break
j=j+1
if ind+j==3:
break
i=i+j
else:
i=i+1
elif isgirl:
if ind < 3:
j = 1
while i + j < len(s):
if s[i + j] != "girl"[ind + j]:
break
j = j + 1
if ind + j == 4:
break
i = i + j
else:
i=i+1
else:
i=i+1
print(boy,end='')
print(girl) | [
"[email protected]"
] | |
91af65284751c900cc9360533822b5e399b92745 | 0288f98eca5d7c5e274f186a61258746be8627d3 | /python_deneme_sınavı_tekrar/6.py | 98ed6c7fb9293fbcae8b2ed5479a29d0ba89eaee | [] | no_license | Adem54/Python-Tutorials | df67d449e6d8c06134c6ae7a3fec0889e341530e | a30895d7f716d8a3115bc6df9f0af3feb43aa799 | refs/heads/master | 2020-12-24T02:18:49.564989 | 2020-01-31T02:48:03 | 2020-01-31T02:48:03 | 237,347,678 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 351 | py | """
Write a Python program that takes a positive integer as input and prints the
squares of the numbers from 1 up to (and including) the number the user entered.
Sample program output:
Please enter a number: 3
1
4
9
"""
sayi = int(input("Bir sayi giriniz"))
sayac = 1
while sayac <= sayi:
print(sayac ** 2)
sayac += 1
| [
"[email protected]"
] | |
16116c58db49cafa0928b15e6cc7c3771fdf83da | 88be132daf9bcf40175d4f6347d1a0d1f6cc3711 | /income/views.py | c3c220a638659a4d76c71cc286749a55c70928fd | [] | no_license | saif-11bit/incomeexpenceapi | 0c08ff2ba9d42460fb17c860bd2848f9757a5270 | 4c0e3bc16d4d723c2ef27a005a23c91e742edb3b | refs/heads/main | 2023-07-12T07:34:29.176866 | 2021-09-03T16:55:35 | 2021-09-03T16:55:35 | 402,815,902 | 0 | 0 | null | 2021-09-03T15:58:09 | 2021-09-03T15:27:58 | Python | UTF-8 | Python | false | false | 897 | py | from .models import Income
from .serializers import IncomeSerializer
from rest_framework.generics import ListCreateAPIView, RetrieveUpdateDestroyAPIView
from rest_framework.permissions import IsAuthenticated
from .permissions import IsOwner
class IncomeListApiView(ListCreateAPIView):
serializer_class = IncomeSerializer
queryset = Income.objects.all()
permission_classes = (IsAuthenticated,)
def perform_create(self, serializer):
return serializer.save(owner=self.request.user)
def get_queryset(self):
return self.queryset.filter(owner=self.request.user)
class IncomeDetailApiView(RetrieveUpdateDestroyAPIView):
serializer_class = IncomeSerializer
queryset = Income.objects.all()
    permission_classes = (IsAuthenticated, IsOwner,)
lookup_field = 'id'
def get_queryset(self):
return self.queryset.filter(owner=self.request.user) | [
"[email protected]"
] | |
9af4aae3ccd2cb53c6358296cf98b94471ac7e7c | 3b1053ea38fee9a59d335dd75bb6a6906d298594 | /tests/software/test_utils.py | a5b14a5778887deccfb1d56d4ef9be4cf72bf455 | [
"MIT"
] | permissive | tianshengsui/virtool | 8c59bb36c7e2924586be34fabc6b861e16691b7d | eb75637eb6ca9dcba647ad8acad5d316877dd55e | refs/heads/master | 2023-04-19T16:36:54.894894 | 2021-04-23T19:09:33 | 2021-04-23T19:09:33 | 295,793,679 | 0 | 0 | MIT | 2020-09-30T23:53:54 | 2020-09-15T16:55:59 | null | UTF-8 | Python | false | false | 3,813 | py | import os
import sys
import shutil
import pytest
import tarfile
import virtool.errors
import virtool.software.utils
@pytest.fixture
def versions():
numbers = [
"v3.2.3",
"v3.2.2",
"v3.2.2-beta.1",
"v3.2.2-alpha.1",
"v3.2.1",
"v3.1.0",
"v3.1.0-beta.1",
]
return [{"name": v} for v in numbers]
@pytest.mark.parametrize("missing_path,p_result", [(None, True), ("run", False), ("VERSION", False)])
@pytest.mark.parametrize("missing_client,c_result", [
(None, True),
("dir", False),
("app.foobar.js", False),
("favicon.ico", False),
("index.html", False)
])
def test_check_tree(missing_path, p_result, missing_client, c_result, tmpdir):
paths_to_write = ["run", "VERSION"]
if missing_path is not None:
paths_to_write.remove(missing_path)
for path in paths_to_write:
tmpdir.join(path).write("foobar")
if missing_client != "dir":
client_dir = tmpdir.mkdir("client")
client_files_to_write = ["app.foobar.js", "favicon.ico", "index.html"]
if missing_client is not None:
client_files_to_write.remove(missing_client)
for filename in client_files_to_write:
client_dir.join(filename).write("foobar")
result = virtool.software.utils.check_software_files(str(tmpdir))
assert result == (p_result and c_result)
async def test_copy_software_files(tmpdir):
tar_path = os.path.join(sys.path[0], "tests", "test_files", "virtool.tar.gz")
temp_path = str(tmpdir)
shutil.copy(tar_path, temp_path)
decomp_path = os.path.join(temp_path, "decomp")
with tarfile.open(os.path.join(temp_path, "virtool.tar.gz"), "r:gz") as handle:
handle.extractall(decomp_path)
dest_dir = tmpdir.mkdir("dest")
f = dest_dir.mkdir("client").join("test.txt")
f.write("foobar")
for filename in ["VERSION", "run"]:
dest_dir.join(filename).write("foobar")
dest_path = str(dest_dir)
virtool.software.utils.copy_software_files(os.path.join(decomp_path, "virtool"), dest_path)
assert set(os.listdir(dest_path)) == {"run", "client", "VERSION", "install.sh"}
assert set(os.listdir(os.path.join(dest_path, "client"))) == {
"app.a006b17bf13ea9cb7827.js",
"favicon.ico",
"index.html"
}
assert os.path.getsize(os.path.join(dest_path, "run")) == 43957176
assert tmpdir.join("dest").join("VERSION").read() == "v1.7.5"
@pytest.mark.parametrize("channel", ["stable", "alpha", "beta", "pre"])
def test_filter_releases_by_channel(channel, versions):
"""
Test that function filters passed releases correctly. Check that unrecognized channel raises `ValueError`.
"""
if channel == "pre":
with pytest.raises(ValueError, match="Channel must be one of 'stable', 'beta', 'alpha'"):
virtool.software.utils.filter_releases_by_channel(versions, channel)
return
result = virtool.software.utils.filter_releases_by_channel(versions, channel)
indexes = [0, 1, 2, 3, 4, 5, 6]
if channel == "stable":
indexes = [0, 1, 4, 5]
elif channel == "beta":
indexes = [0, 1, 2, 4, 5, 6]
assert result == [versions[i] for i in indexes]
@pytest.mark.parametrize("version", ["v3.2.1", "3.2.1", "v3.2.2-alpha.1"])
def test_filter_releases_by_newer(version, versions):
"""
Test that only releases newer than the passed version are returned. Ensure that threshold versions with and without
a 'v' as the first character are supported.
"""
result = virtool.software.utils.filter_releases_by_newer(versions, version)
if version == "v3.2.2-alpha.1":
assert result == [versions[i] for i in [0, 1, 2]]
return
assert result == [versions[i] for i in [0, 1, 2, 3]]
| [
"[email protected]"
] | |
76589de412eaff27e7319a1f73953567cda9c62d | 3d569375e38cbc2e73f54a9e5dd140b4021edb46 | /tan/.idea/zip.pramge.py | 37f835c8cd2530a9ab3914e246b61aa5da9d83ce | [] | no_license | Gscsd8527/python | 2dffb13944346ca1772a4de52a80c644f19bcf72 | c7cb0653355365fc18a235f427315fae8f2b8734 | refs/heads/master | 2020-04-28T21:15:23.514693 | 2019-04-20T12:50:04 | 2019-04-20T12:50:04 | 175,575,773 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,391 | py | import os
import time
# 1. The files and directories to be backed up are given in a list.
# For example, on Windows:
source = ['"F:\\new"', 'c:\\Code']
# For example, on Mac OS X and Linux:
# source = ['/users/swa/notes']
# Notice that we have to use double quotes inside the string
# to wrap names that contain spaces.
# The backups must be stored in one main backup directory; for example, on Windows:
target_dir = 'F:\\new'
# Again, for example, on MAC OS X and Linux:
# target_dir = '/User/swa/backup'
# Remember to change this path to the one you actually use.
# Create the target directory if it does not exist.
if not os.path.exists(target_dir):
    os.mkdir(target_dir)  # create the directory
# The backup is packed into a compressed ZIP file.
# The current date is used as the name of the subdirectory inside the
# main backup directory.
today = target_dir + os.sep + time.strftime('%Y%m%d')
# The current time is used as the name of the ZIP file.
now = time.strftime('%H%M%S')
# Name format of the ZIP file.
target = today + os.sep + now + '.zip'
# Create the subdirectory if it is not already there.
if not os.path.exists(today):
    os.mkdir(today)
    print('Successfully created directory', today)
# Use the zip command to pack the files into a ZIP archive.
zip_command = 'zip -r {0} {1}'.format(target, ' '.join(source))
# Run the backup.
print('zip command is:')
print(zip_command)
if os.system(zip_command) == 0:
    print('Successful backup to', target)
else:
print('backup FAILED') | [
"[email protected]"
] | |
994e3f3908e65b357bd02105d934fbac965ac295 | 219db9c5f5ebefecf83ab5c351bd37a01df573cb | /custom_components/smartthinq_sensors/__init__.py | cb60568fbd55114e1280c4cd052eb453271b4490 | [] | no_license | lollopod/HA_config | 44fe68f0a66e32fd007074432a11d22e2c478a85 | e1d41053fec9993846363cf6ee14d3fa440f462e | refs/heads/master | 2022-12-13T23:53:28.487469 | 2022-06-06T15:38:29 | 2022-06-06T15:38:29 | 200,358,128 | 0 | 0 | null | 2022-12-06T17:26:44 | 2019-08-03T09:40:28 | Python | UTF-8 | Python | false | false | 20,137 | py | """
Support for LG SmartThinQ device.
"""
# REQUIREMENTS = ['wideq']
import logging
import time
import voluptuous as vol
from datetime import datetime, timedelta
from requests import exceptions as reqExc
from threading import Lock
from typing import Dict
from .wideq.core import Client
from .wideq.core_v2 import ClientV2, CoreV2HttpAdapter
from .wideq.device import UNIT_TEMP_CELSIUS, UNIT_TEMP_FAHRENHEIT, DeviceType
from .wideq.factory import get_lge_device
from .wideq.core_exceptions import (
InvalidCredentialError,
NotConnectedError,
NotLoggedInError,
TokenError,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_SW_VERSION,
CONF_REGION,
CONF_TOKEN,
MAJOR_VERSION,
MINOR_VERSION,
TEMP_CELSIUS,
__version__,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.util import Throttle
from .const import (
CLIENT,
CONF_EXCLUDE_DH,
CONF_LANGUAGE,
CONF_OAUTH_URL,
CONF_OAUTH_USER_NUM,
CONF_USE_API_V2,
CONF_USE_TLS_V1,
DOMAIN,
MIN_HA_MAJ_VER,
MIN_HA_MIN_VER,
LGE_DEVICES,
STARTUP,
__min_ha_version__,
)
MAX_RETRIES = 3
MAX_UPDATE_FAIL_ALLOWED = 10
MIN_TIME_BETWEEN_CLI_REFRESH = 10
# avoid stressing the cloud API when several calls arrive close together
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=10)
SMARTTHINQ_PLATFORMS = [
"sensor", "binary_sensor", "climate", "switch"
]
SMARTTHINQ_SCHEMA = vol.Schema(
{
vol.Required(CONF_TOKEN): str,
vol.Required(CONF_REGION): str,
vol.Required(CONF_LANGUAGE): str,
}
)
CONFIG_SCHEMA = vol.Schema(
vol.All(cv.deprecated(DOMAIN), {DOMAIN: SMARTTHINQ_SCHEMA},), extra=vol.ALLOW_EXTRA,
)
SCAN_INTERVAL = timedelta(seconds=30)
_LOGGER = logging.getLogger(__name__)
class LGEAuthentication:
def __init__(self, region, language, use_api_v2=True):
self._region = region
self._language = language
self._use_api_v2 = use_api_v2
def _create_client(self):
if self._use_api_v2:
client = ClientV2(country=self._region, language=self._language)
else:
client = Client(country=self._region, language=self._language)
return client
def initHttpAdapter(self, use_tls_v1, exclude_dh):
if self._use_api_v2:
CoreV2HttpAdapter.init_http_adapter(use_tls_v1, exclude_dh)
def getLoginUrl(self) -> str:
login_url = None
client = self._create_client()
try:
login_url = client.gateway.oauth_url()
except Exception:
_LOGGER.exception("Error retrieving login URL from ThinQ")
return login_url
def getOAuthInfoFromUrl(self, callback_url) -> Dict[str, str]:
oauth_info = None
try:
if self._use_api_v2:
oauth_info = ClientV2.oauthinfo_from_url(callback_url)
else:
oauth_info = Client.oauthinfo_from_url(callback_url)
except Exception:
_LOGGER.exception("Error retrieving OAuth info from ThinQ")
return oauth_info
def createClientFromToken(self, token, oauth_url=None, oauth_user_num=None):
if self._use_api_v2:
client = ClientV2.from_token(
oauth_url, token, oauth_user_num, self._region, self._language
)
else:
client = Client.from_token(token, self._region, self._language)
return client
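# Illustrative login flow using the helper above (region/language values,
# callback_url and refresh_token are placeholders):
#
#   auth = LGEAuthentication("US", "en-US")
#   url = auth.getLoginUrl()           # open in a browser and sign in
#   oauth_info = auth.getOAuthInfoFromUrl(callback_url)
#   client = auth.createClientFromToken(refresh_token)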
def is_valid_ha_version():
return (
MAJOR_VERSION > MIN_HA_MAJ_VER or
(MAJOR_VERSION == MIN_HA_MAJ_VER and MINOR_VERSION >= MIN_HA_MIN_VER)
)
def _notify_error(hass, notification_id, title, message):
"""Notify user with persistent notification"""
hass.async_create_task(
hass.services.async_call(
domain='persistent_notification', service='create', service_data={
'title': title,
'message': message,
'notification_id': f"{DOMAIN}.{notification_id}"
}
)
)
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry):
"""Set up SmartThinQ integration from a config entry."""
if not is_valid_ha_version():
msg = "This integration require at least HomeAssistant version " \
f" {__min_ha_version__}, you are running version {__version__}." \
" Please upgrade HomeAssistant to continue use this integration."
_notify_error(hass, "inv_ha_version", "SmartThinQ Sensors", msg)
_LOGGER.warning(msg)
return False
refresh_token = config_entry.data.get(CONF_TOKEN)
region = config_entry.data.get(CONF_REGION)
language = config_entry.data.get(CONF_LANGUAGE)
use_api_v2 = config_entry.data.get(CONF_USE_API_V2, False)
oauth_url = config_entry.data.get(CONF_OAUTH_URL)
oauth_user_num = config_entry.data.get(CONF_OAUTH_USER_NUM)
use_tls_v1 = config_entry.data.get(CONF_USE_TLS_V1, False)
exclude_dh = config_entry.data.get(CONF_EXCLUDE_DH, False)
_LOGGER.info(STARTUP)
_LOGGER.info(
"Initializing ThinQ platform with region: %s - language: %s",
region,
language,
)
# if network is not connected we can have some error
# raising ConfigEntryNotReady platform setup will be retried
lgeauth = LGEAuthentication(region, language, use_api_v2)
lgeauth.initHttpAdapter(use_tls_v1, exclude_dh)
try:
client = await hass.async_add_executor_job(
lgeauth.createClientFromToken, refresh_token, oauth_url, oauth_user_num
)
except InvalidCredentialError:
msg = "Invalid ThinQ credential error, integration setup aborted." \
" Please use the LG App on your mobile device to ensure your" \
" credentials are correct, then restart HomeAssistant." \
" If your credential changed, you must reconfigure integration"
_notify_error(hass, "inv_credential", "SmartThinQ Sensors", msg)
_LOGGER.error(msg)
return False
except Exception:
_LOGGER.warning(
"Connection not available. ThinQ platform not ready", exc_info=True
)
raise ConfigEntryNotReady()
if not client.hasdevices:
_LOGGER.error("No ThinQ devices found. Component setup aborted")
return False
_LOGGER.info("ThinQ client connected")
try:
lge_devices = await lge_devices_setup(hass, client)
except Exception:
_LOGGER.warning(
"Connection not available. ThinQ platform not ready", exc_info=True
)
raise ConfigEntryNotReady()
if not use_api_v2:
_LOGGER.warning(
"Integration configuration is using ThinQ APIv1 that is obsolete"
" and not able to manage all ThinQ devices."
" Please remove and re-add integration from HA user interface to"
" enable the use of ThinQ APIv2"
)
# remove device not available anymore
await cleanup_orphan_lge_devices(hass, config_entry.entry_id, client)
hass.data[DOMAIN] = {CLIENT: client, LGE_DEVICES: lge_devices}
hass.config_entries.async_setup_platforms(config_entry, SMARTTHINQ_PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(
entry, SMARTTHINQ_PLATFORMS
)
if unload_ok:
hass.data.pop(DOMAIN)
return unload_ok
class LGEDevice:
_client_lock = Lock()
_client_connected = True
_last_client_refresh = datetime.min
def __init__(self, device, hass):
"""initialize a LGE Device."""
self._device = device
self._hass = hass
self._name = device.device_info.name
self._device_id = device.device_info.id
self._type = device.device_info.type
self._mac = device.device_info.macaddress
self._firmware = device.device_info.firmware
self._model = f"{device.device_info.model_name}"
self._id = f"{self._type.name}:{self._device_id}"
self._state = None
self._coordinator = None
self._disconnected = True
self._not_logged = False
self._available = True
self._was_unavailable = False
self._update_fail_count = 0
self._not_logged_count = 0
self._refresh_gateway = False
@property
def available(self) -> bool:
return self._available
@property
def was_unavailable(self) -> bool:
return self._was_unavailable
@property
def assumed_state(self) -> bool:
"""Return True if unable to access real state of the entity."""
return self._available and self._disconnected
@property
def device(self):
"""The device instance"""
return self._device
@property
def name(self) -> str:
"""The device name"""
return self._name
@property
def type(self) -> DeviceType:
"""The device type"""
return self._type
@property
def unique_id(self) -> str:
"""Device unique ID"""
return self._id
@property
def state(self):
"""Current device state"""
return self._state
@property
def available_features(self) -> Dict:
return self._device.available_features
@property
def device_info(self) -> DeviceInfo:
data = DeviceInfo(
identifiers={(DOMAIN, self._device_id)},
name=self._name,
manufacturer="LG",
model=f"{self._model} ({self._type.name})",
)
if self._firmware:
data[ATTR_SW_VERSION] = self._firmware
if self._mac:
data["connections"] = {(CONNECTION_NETWORK_MAC, self._mac)}
return data
@property
def coordinator(self):
return self._coordinator
async def init_device(self):
"""Init the device status and start coordinator."""
result = await self._hass.async_add_executor_job(
self._device.init_device_info
)
if not result:
return False
self._state = self._device.status
self._model = f"{self._model}-{self._device.model_info.model_type}"
# Create status update coordinator
await self._create_coordinator()
# Initialize device features
features = self._state.device_features
return True
async def _create_coordinator(self):
"""Get the coordinator for a specific device."""
coordinator = DataUpdateCoordinator(
self._hass,
_LOGGER,
name=f"{DOMAIN}-{self._name}",
update_method=self.async_device_update,
# Polling interval. Will only be polled if there are subscribers.
update_interval=SCAN_INTERVAL
)
await coordinator.async_refresh()
self._coordinator = coordinator
async def async_device_update(self):
"""Async Update device state"""
await self._hass.async_add_executor_job(self._device_update)
return self._state
def _critical_status(self):
return self._not_logged_count == MAX_UPDATE_FAIL_ALLOWED or (
self._not_logged_count > 0 and self._not_logged_count % 60 == 0
)
def _set_available(self):
"""Set the available status."""
if self._not_logged:
self._not_logged_count += 1
else:
self._not_logged_count = 0
available = self._not_logged_count <= MAX_UPDATE_FAIL_ALLOWED
self._was_unavailable = available and not self._available
self._available = available
def _log_error(self, msg, *args, **kwargs):
if self._critical_status():
_LOGGER.error(msg, *args, **kwargs)
else:
_LOGGER.debug(msg, *args, **kwargs)
def _refresh_client(self, refresh_gateway=False):
"""Refresh the devices shared client"""
with LGEDevice._client_lock:
call_time = datetime.now()
difference = (call_time - LGEDevice._last_client_refresh).total_seconds()
if difference <= MIN_TIME_BETWEEN_CLI_REFRESH:
return LGEDevice._client_connected
LGEDevice._last_client_refresh = datetime.now()
LGEDevice._client_connected = False
_LOGGER.debug("ThinQ session not connected. Trying to reconnect....")
self._device.client.refresh(refresh_gateway)
_LOGGER.debug("ThinQ session reconnected")
LGEDevice._client_connected = True
return True
def _restart_monitor(self):
"""Restart the device monitor"""
if not (self._disconnected or self._not_logged):
return
refresh_gateway = False
if self._refresh_gateway:
refresh_gateway = True
self._refresh_gateway = False
try:
if self._not_logged:
if not self._refresh_client(refresh_gateway):
return
self._not_logged = False
self._disconnected = True
self._device.monitor_start()
self._disconnected = False
except NotConnectedError:
self._log_error("Device %s not connected. Status not available", self._name)
self._disconnected = True
except NotLoggedInError:
_LOGGER.warning("Connection to ThinQ not available, will be retried")
self._not_logged = True
except InvalidCredentialError:
_LOGGER.error(
"Invalid credential connecting to ThinQ. Reconfigure integration with valid login credential"
)
self._not_logged = True
except (reqExc.ConnectionError, reqExc.ConnectTimeout, reqExc.ReadTimeout):
self._log_error("Connection to ThinQ failed. Network connection error")
self._disconnected = True
self._not_logged = True
except Exception:
self._log_error("ThinQ error while updating device status", exc_info=True)
self._not_logged = True
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def _device_update(self):
"""Update device state"""
_LOGGER.debug("Updating ThinQ device %s", self._name)
if self._disconnected or self._not_logged:
if self._update_fail_count < MAX_UPDATE_FAIL_ALLOWED:
self._update_fail_count += 1
self._set_available()
for iteration in range(MAX_RETRIES):
_LOGGER.debug("Polling...")
# Wait one second between iteration
if iteration > 0:
time.sleep(1)
# Try to restart monitor
self._restart_monitor()
if self._disconnected or self._not_logged:
if self._update_fail_count >= MAX_UPDATE_FAIL_ALLOWED:
if self._critical_status():
_LOGGER.error(
"Connection to ThinQ for device %s is not available. Connection will be retried",
self._name,
)
if self._not_logged_count >= 60:
self._refresh_gateway = True
self._set_available()
if self._state.is_on:
_LOGGER.warning(
"Status for device %s was reset because not connected",
self._name
)
self._state = self._device.reset_status()
return
_LOGGER.debug("Connection not available. Status update failed")
return
try:
state = self._device.poll()
except NotLoggedInError:
self._not_logged = True
continue
except NotConnectedError:
self._disconnected = True
return
except InvalidCredentialError:
_LOGGER.error(
"Invalid credential connecting to ThinQ. Reconfigure integration with valid login credential"
)
self._not_logged = True
return
except (
reqExc.ConnectionError,
reqExc.ConnectTimeout,
reqExc.ReadTimeout,
):
self._log_error(
"Connection to ThinQ failed. Network connection error"
)
self._not_logged = True
return
except Exception:
self._log_error(
"ThinQ error while updating device status", exc_info=True
)
self._not_logged = True
return
else:
if state:
_LOGGER.debug("ThinQ status updated")
# l = dir(state)
# _LOGGER.debug('Status attributes: %s', l)
self._update_fail_count = 0
self._set_available()
self._state = state
return
else:
_LOGGER.debug("No status available yet")
async def lge_devices_setup(hass, client) -> dict:
"""Query connected devices from LG ThinQ."""
_LOGGER.info("Starting LGE ThinQ devices...")
wrapped_devices = {}
device_count = 0
temp_unit = UNIT_TEMP_CELSIUS
if hass.config.units.temperature_unit != TEMP_CELSIUS:
temp_unit = UNIT_TEMP_FAHRENHEIT
for device in client.devices:
device_id = device.id
device_name = device.name
device_type = device.type
network_type = device.network_type
model_name = device.model_name
device_count += 1
lge_dev = get_lge_device(client, device, temp_unit)
if not lge_dev:
_LOGGER.info(
"Found unsupported LGE Device. Name: %s - Type: %s - NetworkType: %s - InfoUrl: %s",
device_name,
device_type.name,
network_type.name,
device.model_info_url,
)
continue
dev = LGEDevice(lge_dev, hass)
if not await dev.init_device():
_LOGGER.error(
"Error initializing LGE Device. Name: %s - Type: %s - InfoUrl: %s",
device_name,
device_type.name,
device.model_info_url,
)
continue
wrapped_devices.setdefault(device_type, []).append(dev)
_LOGGER.info(
"LGE Device added. Name: %s - Type: %s - Model: %s - ID: %s",
device_name,
device_type.name,
model_name,
device_id,
)
_LOGGER.info("Founds %s LGE device(s)", str(device_count))
return wrapped_devices
async def cleanup_orphan_lge_devices(hass, entry_id, client):
"""Delete devices that are not registered in LG client app"""
# Load lg devices from registry
device_registry = await hass.helpers.device_registry.async_get_registry()
all_lg_dev_entries = (
hass.helpers.device_registry.async_entries_for_config_entry(
device_registry, entry_id
)
)
# get list of valid devices
valid_lg_dev_ids = []
for device in client.devices:
dev = device_registry.async_get_device({(DOMAIN, device.id)})
if dev is not None:
valid_lg_dev_ids.append(dev.id)
# clean-up invalid devices
for dev_entry in all_lg_dev_entries:
dev_id = dev_entry.id
if dev_id in valid_lg_dev_ids:
continue
device_registry.async_remove_device(dev_id)
| [
"[email protected]"
] | |
4e11bf3ea899ff7c0e2ed4d614f1fe1329b25c67 | 0fccee4c738449f5e0a8f52ea5acabf51db0e910 | /genfragments/ThirteenTeV/Wprime/WprimeToTauNu_M_5000_Tune4C_tauola_13TeV_pythia8_cfi.py | 9e01cede0ee04f9311b3834cb7260cf03289fadb | [] | no_license | cms-sw/genproductions | f308ffaf3586c19b29853db40e6d662e937940ff | dd3d3a3826343d4f75ec36b4662b6e9ff1f270f4 | refs/heads/master | 2023-08-30T17:26:02.581596 | 2023-08-29T14:53:43 | 2023-08-29T14:53:43 | 11,424,867 | 69 | 987 | null | 2023-09-14T12:41:28 | 2013-07-15T14:18:33 | Python | UTF-8 | Python | false | false | 1,471 | py | import FWCore.ParameterSet.Config as cms
source = cms.Source("EmptySource")
from GeneratorInterface.ExternalDecays.TauolaSettings_cff import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
comEnergy = cms.double(13000.0),
crossSection = cms.untracked.double(4.122e-04),
filterEfficiency = cms.untracked.double(1),
maxEventsToPrint = cms.untracked.int32(0),
pythiaHepMCVerbosity = cms.untracked.bool(False),
pythiaPylistVerbosity = cms.untracked.int32(1),
ExternalDecays = cms.PSet(
Tauola = cms.untracked.PSet(
TauolaPolar,
TauolaDefaultInputCards
),
parameterSets = cms.vstring('Tauola')
),
PythiaParameters = cms.PSet(
processParameters = cms.vstring(
'Main:timesAllowErrors = 10000',
#'ParticleDecays:limitTau0 = on',
#'ParticleDecays:tauMax = 10',
'Tune:ee 3',
'Tune:pp 5',
'NewGaugeBoson:ffbar2Wprime = on',
'34:m0 = 5000',
'34:onMode = off',
'34:onIfAny = 15,16',
'15:onMode = off',
),
parameterSets = cms.vstring('processParameters')
)
)
#ProductionFilterSequence = cms.Sequence(generator)
| [
"[email protected]"
] | |
c2c7431a8ec714acc27bfadfcdcd52e93ff10fc3 | 7be15a0c0ce4316bc01bae0ae671be134002927e | /剑指offer/60_PrintTreeLines.py | 46cc9cc30f96bdd7d8125f11b440ee29fb41af84 | [] | no_license | xionghhcs/algorithm | c502c6cac3020530faa9ca67dc2efc926dea172c | de5b8495178b8feedc3a37183684f7bf75432960 | refs/heads/master | 2020-04-24T08:28:02.805466 | 2019-04-06T13:29:03 | 2019-04-06T13:29:03 | 171,831,800 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 875 | py | # -*- coding:utf-8 -*-
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
    # Return a two-dimensional list, e.g. [[1,2],[4,5]]
def Print(self, pRoot):
# write code here
if pRoot is None:
return []
import Queue
q = Queue.Queue()
q.put(pRoot)
q.put(None)
ans = []
row = []
import copy
while not q.empty():
n = q.get()
if n is None:
ans.append(copy.deepcopy(row))
row = []
if not q.empty():
q.put(None)
else:
row.append(n.val)
if n.left is not None:
q.put(n.left)
if n.right is not None:
q.put(n.right)
return ans
| [
"[email protected]"
] | |
2fb87008ce9fc8b22982394c263f7f8c91029ef6 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_2751486_0/Python/sunryze/A.py | 4e992fd7945008eec37da0f0035e891400b9995d | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,090 | py | # Google Code Jam 2013, Round 1C, Problem A
#
import sys, re
def solve(num, s, n):
cs = [True] * len(s)
for i in xrange(len(s)):
c = s[i]
if c == 'a' or c == 'e' or c == 'i' or c == 'o' or c == 'u':
cs[i] = False
m = [False] * len(s)
for i in xrange(len(s)-n+1):
for j in xrange(n):
if not cs[i+j]:
break
else:
m[i] = True
def hasn(i, j):
while i + n - 1 <= j:
if m[i]: return True
i += 1
return False
count = 0
for i in xrange(len(s)):
for j in xrange(i+n-1, len(s)):
if hasn(i, j):
count += 1
return count
def main(filename):
with open(filename) as f_in:
total = int(f_in.readline())
for i in xrange(1, total+1):
s, n = f_in.readline().strip().split(' ')
n = int(n)
print 'Case #{0}: {1}'.format(i, solve(i, s, n))
if __name__ == "__main__":
main(sys.argv[1])
| [
"[email protected]"
] | |
7d0f835e927a4832465765f7f724260e6d59d1a5 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nnvanston.py | fb383f9dc0a2c07946c776d4529536a8755d4a11 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 217 | py | ii = [('PettTHE.py', 1), ('WilbRLW2.py', 1), ('ClarGE2.py', 1), ('CookGHP2.py', 1), ('LyelCPG.py', 1), ('WadeJEB.py', 1), ('MereHHB.py', 2), ('BabbCRD.py', 1), ('JacoWHI2.py', 2), ('NortSTC.py', 1), ('ClarGE4.py', 1)] | [
"[email protected]"
] | |
88ff2c312d548b80d3c524e4acc0d730431df09c | d5b3c5e1a990f6079ffa38f48b31f8e396c0fd22 | /indra/sources/sofia/sofia_api.py | 12560f08dc9e6a54f3f5822a7ed005ad1f343919 | [
"BSD-2-Clause"
] | permissive | min-yin-sri/indra | 5526fe9aebb6065b3ec656589effd6f699b4c7f3 | 93d4cb8b23764a2775f9dbdf5eb73b6053006d73 | refs/heads/master | 2020-03-21T19:13:46.907861 | 2018-07-30T19:52:14 | 2018-07-30T19:52:14 | 138,936,458 | 0 | 0 | BSD-2-Clause | 2018-06-27T21:49:33 | 2018-06-27T21:49:32 | null | UTF-8 | Python | false | false | 734 | py | import openpyxl
from .processor import SofiaProcessor
def process_table(fname):
"""Return processor by processing a given sheet of a spreadsheet file.
Parameters
----------
fname : str
The name of the Excel file (typically .xlsx extension) to process
Returns
-------
sp : indra.sources.sofia.processor.SofiaProcessor
A SofiaProcessor object which has a list of extracted INDRA
Statements as its statements attribute
"""
book = openpyxl.load_workbook(fname, read_only=True)
rel_sheet = book['Relations']
event_sheet = book['Events']
entities_sheet = book['Entities']
sp = SofiaProcessor(rel_sheet.rows, event_sheet.rows, entities_sheet.rows)
return sp
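# Illustrative usage (the file name is a placeholder):
#
#   sp = process_table('sofia_output.xlsx')
#   stmts = sp.statements  # extracted INDRA Statements, per the docstring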
| [
"[email protected]"
] | |
2b01c9203dbb9ad035e9866cb19cf643836e5469 | 26eb818572061109b55e498ab4f123a4ff9b9499 | /Mul_Agent_RL/MARL_Learn_ZD/BM_Model/pd_bm_vs_bm.py | 9984c112486d60f94ba465dfaada354c4fd402ac | [] | no_license | Dcomplexity/Researches | 550e49b5a5951dca11df062aae1f86e2c12945c5 | 4eb55e2550970223c2f4006d289d8f4ba70a611a | refs/heads/master | 2022-04-04T02:13:56.976901 | 2020-02-01T14:34:44 | 2020-02-01T14:34:44 | 147,739,403 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,489 | py | import multiprocessing
import random
import pandas as pd
import os
import datetime
from BM_Model.agent import *
from BM_Model.game_env import *
def play_one_game(agent_x: AgentBM, agent_y: AgentBM):
ep = 0
st_history = []
whole_ep = 10e4
while ep < whole_ep:
a_x = agent_x.choose_actions()
a_y = agent_y.choose_actions()
pf_x, pf_y = pd_game(a_x, a_y)
agent_x.set_stimulus(pf_x)
agent_y.set_stimulus(pf_y)
agent_x.update_strategy()
agent_y.update_strategy()
print(ep, agent_x.get_strategy(), agent_y.get_strategy())
st_history.append((agent_x.get_strategy(), agent_y.get_strategy()))
ep += 1
return st_history
def run_game(agent_x: AgentBM, agent_y: AgentBM):
run_game_result = play_one_game(agent_x, agent_y)
return run_game_result
def run():
agent_x_r = AgentBM(lr=0.001, expc_a=1.3, init_st=0.5)
agent_y_r = AgentBM(lr=0.001, expc_a=3.0, init_st=0.5)
strategy_history = run_game(agent_x_r, agent_y_r)
return strategy_history
# pool = multiprocessing.Pool(processes=4)
# agent_strategy_list = []
# for _ in range(4):
# agent_strategy_list.append(pool.apply_async(run_game, (agent_x_r, agent_y_r)))
# pool.close()
# pool.join()
if __name__ == "__main__":
start_time = datetime.datetime.now()
print(start_time)
res_agent_strategy_list = run()
end_time = datetime.datetime.now()
print(end_time - start_time)
| [
"[email protected]"
] | |
6cba1aba58fb669a45f538984ceb83a79eeb22ac | 04198420ee8304a0290e185fdf46a6bcb2eea9c4 | /Chapter 5/bookmarks/account/urls.py | 3961348ac42a45ab839936e1a695355eebd1b2cf | [
"MIT"
] | permissive | PacktPublishing/Django-By-Example | 846ca6ac95388fe3392d541eaf3b03303718c465 | 48bd1c8657ef5aae90a0bc80488b3a4787fdb13b | refs/heads/master | 2022-11-10T09:39:17.116526 | 2022-10-31T05:45:09 | 2022-10-31T05:45:09 | 185,974,593 | 36 | 38 | null | null | null | null | UTF-8 | Python | false | false | 1,333 | py | from django.conf.urls import url
from . import views
urlpatterns = [
# url(r'^login/$', views.user_login, name='login'),
url(r'^$', views.dashboard, name='dashboard'),
url(r'^register/$', views.register, name='register'),
url(r'^edit/$', views.edit, name='edit'),
# login / logout urls
url(r'^login/$', 'django.contrib.auth.views.login', name='login'),
url(r'^logout/$', 'django.contrib.auth.views.logout', name='logout'),
url(r'^logout-then-login/$', 'django.contrib.auth.views.logout_then_login', name='logout_then_login'),
# change password urls
url(r'^password-change/$', 'django.contrib.auth.views.password_change', name='password_change'),
url(r'^password-change/done/$', 'django.contrib.auth.views.password_change_done', name='password_change_done'),
# restore password urls
url(r'^password-reset/$', 'django.contrib.auth.views.password_reset', name='password_reset'),
url(r'^password-reset/done/$', 'django.contrib.auth.views.password_reset_done', name='password_reset_done'),
url(r'^password-reset/confirm/(?P<uidb64>[-\w]+)/(?P<token>[-\w]+)/$', 'django.contrib.auth.views.password_reset_confirm', name='password_reset_confirm'),
url(r'^password-reset/complete/$', 'django.contrib.auth.views.password_reset_complete', name='password_reset_complete'),
]
| [
"[email protected]"
] | |
a961de430c5bb563ae2db58ec6aac146a1c797a7 | 541ec0a90ff6bb0b8fd9652525bb3a07b8760649 | /shenfun/jacobi/bases.py | c6329045412e41f2f17e5a9133e88f0e2022bda2 | [
"BSD-2-Clause"
] | permissive | tengfeideng/shenfun | e30a9970f8a1653f12e7d595b69170a0ee3a905f | 33490d8cc9e42d1937e844e5b8dc0203507d6641 | refs/heads/master | 2023-08-23T01:29:12.654892 | 2021-10-28T11:29:10 | 2021-10-28T11:29:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 29,067 | py | """
Module for function spaces of generalized Jacobi type
Note the configuration setting
from shenfun.config import config
config['bases']['jacobi']['mode']
Setting this to 'mpmath' can make use of extended precision.
The precision can also be set in the configuration.
from mpmath import mp
    mp.dps = config['bases']['jacobi']['precision']
where mp.dps is the number of significant digits.
Note that extended precision is costly, but for some of the
matrices that can be created with the Jacobi bases it is necessary.
Also note that the higher precision is only used for assembling
matrices computed with :func:`evaluate_basis_derivative_all`.
It has no effect for the matrices that are predefined in the
matrices.py module. Also note that the final matrix will be
in regular double precision. So the higher precision is only used
for the intermediate assembly.
"""
import functools
import numpy as np
import sympy as sp
from scipy.special import eval_jacobi, roots_jacobi #, gamma
from mpi4py_fft import fftw
from shenfun.config import config
from shenfun.spectralbase import SpectralBase, Transform, islicedict, slicedict
from shenfun.chebyshev.bases import BCBiharmonic, BCDirichlet
try:
import quadpy
from mpmath import mp
mp.dps = config['bases']['jacobi']['precision']
has_quadpy = True
except:
has_quadpy = False
mp = None
mode = config['bases']['jacobi']['mode']
mode = mode if has_quadpy else 'numpy'
xp = sp.Symbol('x', real=True)
#pylint: disable=method-hidden,no-else-return,not-callable,abstract-method,no-member,cyclic-import
__all__ = ['JacobiBase', 'Orthogonal', 'ShenDirichlet', 'ShenBiharmonic',
'ShenOrder6', 'mode', 'has_quadpy', 'mp']
class JacobiBase(SpectralBase):
"""Base class for all Jacobi spaces
Parameters
----------
N : int
Number of quadrature points
quad : str, optional
Type of quadrature
- JG - Jacobi-Gauss
alpha : number, optional
Parameter of the Jacobi polynomial
beta : number, optional
Parameter of the Jacobi polynomial
domain : 2-tuple of floats, optional
The computational domain
padding_factor : float, optional
Factor for padding backward transforms.
dealias_direct : bool, optional
Set upper 1/3 of coefficients to zero before backward transform
dtype : data-type, optional
Type of input data in real physical space. Will be overloaded when
basis is part of a :class:`.TensorProductSpace`.
coordinates: 2- or 3-tuple (coordinate, position vector (, sympy assumptions)), optional
        Map for curvilinear coordinate system.
The new coordinate variable in the new coordinate system is the first item.
Second item is a tuple for the Cartesian position vector as function of the
new variable in the first tuple. Example::
            theta = sp.Symbol('x', real=True, positive=True)
rv = (sp.cos(theta), sp.sin(theta))
"""
def __init__(self, N, quad="JG", alpha=0, beta=0, domain=(-1., 1.), dtype=float,
padding_factor=1, dealias_direct=False, coordinates=None):
SpectralBase.__init__(self, N, quad=quad, domain=domain, dtype=dtype,
padding_factor=padding_factor, dealias_direct=dealias_direct,
coordinates=coordinates)
self.alpha = alpha
self.beta = beta
self.forward = functools.partial(self.forward, fast_transform=False)
self.backward = functools.partial(self.backward, fast_transform=False)
self.scalar_product = functools.partial(self.scalar_product, fast_transform=False)
self.plan(int(N*padding_factor), 0, dtype, {})
@staticmethod
def family():
return 'jacobi'
def reference_domain(self):
return (-1, 1)
def get_orthogonal(self):
return Orthogonal(self.N,
quad=self.quad,
domain=self.domain,
dtype=self.dtype,
padding_factor=self.padding_factor,
dealias_direct=self.dealias_direct,
coordinates=self.coors.coordinates,
alpha=0,
beta=0)
def points_and_weights(self, N=None, map_true_domain=False, weighted=True, **kw):
if N is None:
N = self.shape(False)
assert self.quad == "JG"
points, weights = roots_jacobi(N, self.alpha, self.beta)
if map_true_domain is True:
points = self.map_true_domain(points)
return points, weights
def mpmath_points_and_weights(self, N=None, map_true_domain=False, weighted=True, **kw):
if mode == 'numpy' or not has_quadpy:
return self.points_and_weights(N=N, map_true_domain=map_true_domain, weighted=weighted, **kw)
if N is None:
N = self.shape(False)
pw = quadpy.c1.gauss_jacobi(N, self.alpha, self.beta, 'mpmath')
points = pw.points_symbolic
weights = pw.weights_symbolic
if map_true_domain is True:
points = self.map_true_domain(points)
return points, weights
    def jacobi(self, x, alpha, beta, N):
        """Return the (len(x), N) Vandermonde matrix V[j, n] = P_n^{(alpha, beta)}(x_j)."""
        V = np.zeros((x.shape[0], N))
if mode == 'numpy':
for n in range(N):
V[:, n] = eval_jacobi(n, alpha, beta, x)
else:
for n in range(N):
V[:, n] = sp.lambdify(xp, sp.jacobi(n, alpha, beta, xp), 'mpmath')(x)
return V
    def derivative_jacobi(self, x, alpha, beta, k=1):
        """Return the Vandermonde matrix of k'th derivatives of the Jacobi polynomials."""
        V = self.jacobi(x, alpha+k, beta+k, self.N)
if k > 0:
Vc = np.zeros_like(V)
for j in range(k, self.N):
dj = np.prod(np.array([j+alpha+beta+1+i for i in range(k)]))
#dj = gamma(j+alpha+beta+1+k) / gamma(j+alpha+beta+1)
Vc[:, j] = (dj/2**k)*V[:, j-k]
V = Vc
return V
def vandermonde(self, x):
return self.jacobi(x, self.alpha, self.beta, self.shape(False))
def plan(self, shape, axis, dtype, options):
if shape in (0, (0,)):
return
if isinstance(axis, tuple):
assert len(axis) == 1
axis = axis[0]
if isinstance(self.forward, Transform):
if self.forward.input_array.shape == shape and self.axis == axis:
# Already planned
return
U = fftw.aligned(shape, dtype=dtype)
V = fftw.aligned(shape, dtype=dtype)
U.fill(0)
V.fill(0)
self.axis = axis
if self.padding_factor > 1.+1e-8:
trunc_array = self._get_truncarray(shape, V.dtype)
self.scalar_product = Transform(self.scalar_product, None, U, V, trunc_array)
self.forward = Transform(self.forward, None, U, V, trunc_array)
self.backward = Transform(self.backward, None, trunc_array, V, U)
else:
self.scalar_product = Transform(self.scalar_product, None, U, V, V)
self.forward = Transform(self.forward, None, U, V, V)
self.backward = Transform(self.backward, None, V, V, U)
self.si = islicedict(axis=self.axis, dimensions=self.dimensions)
self.sl = slicedict(axis=self.axis, dimensions=self.dimensions)
class Orthogonal(JacobiBase):
"""Function space for regular (orthogonal) Jacobi functions
Parameters
----------
N : int
Number of quadrature points
quad : str, optional
Type of quadrature
- JG - Jacobi-Gauss
padding_factor : float, optional
Factor for padding backward transforms.
dealias_direct : bool, optional
Set upper 1/3 of coefficients to zero before backward transform
dtype : data-type, optional
Type of input data in real physical space. Will be overloaded when
basis is part of a :class:`.TensorProductSpace`.
coordinates: 2- or 3-tuple (coordinate, position vector (, sympy assumptions)), optional
        Map for curvilinear coordinate system.
The new coordinate variable in the new coordinate system is the first item.
Second item is a tuple for the Cartesian position vector as function of the
new variable in the first tuple. Example::
            theta = sp.Symbol('x', real=True, positive=True)
rv = (sp.cos(theta), sp.sin(theta))
"""
def __init__(self, N, quad="JG", alpha=-0.5, beta=-0.5, domain=(-1., 1.),
dtype=float, padding_factor=1, dealias_direct=False, coordinates=None):
JacobiBase.__init__(self, N, quad=quad, alpha=alpha, beta=beta, domain=domain, dtype=dtype,
padding_factor=padding_factor, dealias_direct=dealias_direct,
coordinates=coordinates)
@property
def is_orthogonal(self):
return True
#def get_orthogonal(self):
# return self
@staticmethod
def short_name():
return 'J'
def sympy_basis(self, i=0, x=xp):
return sp.jacobi(i, self.alpha, self.beta, x)
def evaluate_basis(self, x, i=0, output_array=None):
x = np.atleast_1d(x)
if output_array is None:
output_array = np.zeros(x.shape)
if mode == 'numpy':
output_array = eval_jacobi(i, self.alpha, self.beta, x, out=output_array)
else:
f = self.sympy_basis(i, xp)
output_array[:] = sp.lambdify(xp, f, 'mpmath')(x)
return output_array
def evaluate_basis_derivative(self, x=None, i=0, k=0, output_array=None):
if x is None:
x = self.points_and_weights(mode=mode)[0]
#x = np.atleast_1d(x)
if output_array is None:
output_array = np.zeros(x.shape, dtype=self.dtype)
if mode == 'numpy':
dj = np.prod(np.array([i+self.alpha+self.beta+1+j for j in range(k)]))
output_array[:] = dj/2**k*eval_jacobi(i-k, self.alpha+k, self.beta+k, x)
else:
f = sp.jacobi(i, self.alpha, self.beta, xp)
output_array[:] = sp.lambdify(xp, f.diff(xp, k), 'mpmath')(x)
return output_array
def evaluate_basis_derivative_all(self, x=None, k=0, argument=0):
if x is None:
x = self.mpmath_points_and_weights(mode=mode)[0]
#if x.dtype == 'O':
# x = np.array(x, dtype=self.dtype)
if mode == 'numpy':
return self.derivative_jacobi(x, self.alpha, self.beta, k)
else:
N = self.shape(False)
V = np.zeros((x.shape[0], N))
for i in range(N):
V[:, i] = self.evaluate_basis_derivative(x, i, k, output_array=V[:, i])
return V
def evaluate_basis_all(self, x=None, argument=0):
if x is None:
x = self.mpmath_points_and_weights()[0]
return self.vandermonde(x)
class ShenDirichlet(JacobiBase):
"""Jacobi function space for Dirichlet boundary conditions
Parameters
----------
N : int
Number of quadrature points
quad : str, optional
Type of quadrature
- JG - Jacobi-Gauss
bc : tuple of numbers
Boundary conditions at edges of domain
domain : 2-tuple of floats, optional
The computational domain
padding_factor : float, optional
Factor for padding backward transforms.
dealias_direct : bool, optional
Set upper 1/3 of coefficients to zero before backward transform
dtype : data-type, optional
Type of input data in real physical space. Will be overloaded when
basis is part of a :class:`.TensorProductSpace`.
coordinates: 2- or 3-tuple (coordinate, position vector (, sympy assumptions)), optional
        Map for curvilinear coordinate system.
The new coordinate variable in the new coordinate system is the first item.
Second item is a tuple for the Cartesian position vector as function of the
new variable in the first tuple. Example::
            theta = sp.Symbol('x', real=True, positive=True)
rv = (sp.cos(theta), sp.sin(theta))
"""
def __init__(self, N, quad='JG', bc=(0, 0), domain=(-1., 1.), dtype=float,
padding_factor=1, dealias_direct=False, coordinates=None, alpha=-1, beta=-1):
assert alpha == -1 and beta == -1
JacobiBase.__init__(self, N, quad=quad, alpha=-1, beta=-1, domain=domain, dtype=dtype,
padding_factor=padding_factor, dealias_direct=dealias_direct,
coordinates=coordinates)
assert bc in ((0, 0), 'Dirichlet')
from shenfun.tensorproductspace import BoundaryValues
self._bc_basis = None
self.bc = BoundaryValues(self, bc=bc)
@staticmethod
def boundary_condition():
return 'Dirichlet'
@staticmethod
def short_name():
return 'SD'
def is_scaled(self):
return False
def slice(self):
return slice(0, self.N-2)
def evaluate_basis_derivative_all(self, x=None, k=0, argument=0):
if x is None:
x = self.mpmath_points_and_weights()[0]
N = self.shape(False)
V = np.zeros((x.shape[0], N))
for i in range(N-2):
V[:, i] = self.evaluate_basis_derivative(x, i, k, output_array=V[:, i])
return V
def sympy_basis(self, i=0, x=xp):
return (1-x**2)*sp.jacobi(i, 1, 1, x)
#return (1-x)**(-self.alpha)*(1+x)**(-self.beta)*sp.jacobi(i, -self.alpha, -self.beta, x)
def evaluate_basis_derivative(self, x=None, i=0, k=0, output_array=None):
if x is None:
x = self.mpmath_points_and_weights()[0]
if output_array is None:
output_array = np.zeros(x.shape, dtype=self.dtype)
f = self.sympy_basis(i, xp)
output_array[:] = sp.lambdify(xp, f.diff(xp, k), mode)(x)
return output_array
def evaluate_basis(self, x, i=0, output_array=None):
x = np.atleast_1d(x)
if output_array is None:
output_array = np.zeros(x.shape, dtype=self.dtype)
if mode == 'numpy':
output_array = (1-x**2)*eval_jacobi(i, -self.alpha, -self.beta, x, out=output_array)
else:
f = self.sympy_basis(i, xp)
output_array[:] = sp.lambdify(xp, f, 'mpmath')(x)
return output_array
def evaluate_basis_all(self, x=None, argument=0):
if mode == 'numpy':
if x is None:
x = self.mesh(False, False)
V = np.zeros((x.shape[0], self.N))
V[:, :-2] = self.jacobi(x, 1, 1, self.N-2)*(1-x**2)[:, np.newaxis]
else:
if x is None:
x = self.mpmath_points_and_weights()[0]
N = self.shape(False)
V = np.zeros((x.shape[0], N))
for i in range(N-2):
V[:, i] = self.evaluate_basis(x, i, output_array=V[:, i])
return V
def points_and_weights(self, N=None, map_true_domain=False, weighted=True, **kw):
if N is None:
N = self.shape(False)
assert self.quad == "JG"
points, weights = roots_jacobi(N, self.alpha+1, self.beta+1)
if map_true_domain is True:
points = self.map_true_domain(points)
return points, weights
def mpmath_points_and_weights(self, N=None, map_true_domain=False, weighted=True, **kw):
if mode == 'numpy' or not has_quadpy:
return self.points_and_weights(N=N, map_true_domain=map_true_domain, weighted=weighted, **kw)
if N is None:
N = self.shape(False)
assert self.quad == "JG"
pw = quadpy.c1.gauss_jacobi(N, self.alpha+1, self.beta+1, mode)
points = pw.points_symbolic
weights = pw.weights_symbolic
if map_true_domain is True:
points = self.map_true_domain(points)
return points, weights
def to_ortho(self, input_array, output_array=None):
assert self.alpha == -1 and self.beta == -1
if output_array is None:
output_array = np.zeros_like(input_array)
else:
output_array.fill(0)
k = self.wavenumbers().astype(float)
s0 = self.sl[slice(0, -2)]
s1 = self.sl[slice(2, None)]
z = input_array[s0]*2*(k+1)/(2*k+3)
output_array[s0] = z
output_array[s1] -= z
return output_array
def _evaluate_scalar_product(self, fast_transform=True):
SpectralBase._evaluate_scalar_product(self)
self.scalar_product.tmp_array[self.sl[slice(-2, None)]] = 0
def get_bc_basis(self):
if self._bc_basis:
return self._bc_basis
self._bc_basis = BCDirichlet(self.N, quad=self.quad, domain=self.domain,
coordinates=self.coors.coordinates)
return self._bc_basis
class ShenBiharmonic(JacobiBase):
"""Function space for Biharmonic boundary conditions
Parameters
----------
N : int
Number of quadrature points
quad : str, optional
Type of quadrature
- JG - Jacobi-Gauss
domain : 2-tuple of floats, optional
The computational domain
padding_factor : float, optional
Factor for padding backward transforms.
dealias_direct : bool, optional
Set upper 1/3 of coefficients to zero before backward transform
dtype : data-type, optional
Type of input data in real physical space. Will be overloaded when
basis is part of a :class:`.TensorProductSpace`.
coordinates: 2- or 3-tuple (coordinate, position vector (, sympy assumptions)), optional
        Map for curvilinear coordinate system.
The new coordinate variable in the new coordinate system is the first item.
Second item is a tuple for the Cartesian position vector as function of the
new variable in the first tuple. Example::
            theta = sp.Symbol('x', real=True, positive=True)
rv = (sp.cos(theta), sp.sin(theta))
Note
----
The generalized Jacobi function j^{alpha=-2, beta=-2} is used as basis. However,
inner products are computed without weights, for alpha=beta=0.
"""
def __init__(self, N, quad='JG', bc=(0, 0, 0, 0), domain=(-1., 1.), dtype=float,
padding_factor=1, dealias_direct=False, coordinates=None,
alpha=-2, beta=-2):
assert alpha == -2 and beta == -2
JacobiBase.__init__(self, N, quad=quad, alpha=-2, beta=-2, domain=domain, dtype=dtype,
padding_factor=padding_factor, dealias_direct=dealias_direct,
coordinates=coordinates)
assert bc in ((0, 0, 0, 0), 'Biharmonic')
from shenfun.tensorproductspace import BoundaryValues
self._bc_basis = None
self.bc = BoundaryValues(self, bc=bc)
@staticmethod
def boundary_condition():
return 'Biharmonic'
@staticmethod
def short_name():
return 'SB'
def slice(self):
return slice(0, self.N-4)
def sympy_basis(self, i=0, x=xp):
return (1-x**2)**2*sp.jacobi(i, 2, 2, x)
def evaluate_basis_derivative_all(self, x=None, k=0, argument=0):
if x is None:
x = self.mpmath_points_and_weights()[0]
N = self.shape(False)
V = np.zeros((x.shape[0], N))
for i in range(N-4):
V[:, i] = self.evaluate_basis_derivative(x, i, k, output_array=V[:, i])
return V
def evaluate_basis_derivative(self, x=None, i=0, k=0, output_array=None):
if x is None:
x = self.mpmath_points_and_weights()[0]
if output_array is None:
output_array = np.zeros(x.shape, dtype=self.dtype)
f = self.sympy_basis(i, xp)
output_array[:] = sp.lambdify(xp, f.diff(xp, k), mode)(x)
return output_array
def evaluate_basis(self, x, i=0, output_array=None):
x = np.atleast_1d(x)
if output_array is None:
output_array = np.zeros(x.shape)
if mode == 'numpy':
output_array[:] = (1-x**2)**2*eval_jacobi(i, 2, 2, x, out=output_array)
else:
f = self.sympy_basis(i, xp)
output_array[:] = sp.lambdify(xp, f, 'mpmath')(x)
return output_array
def evaluate_basis_all(self, x=None, argument=0):
if mode == 'numpy':
if x is None:
x = self.mesh(False, False)
N = self.shape(False)
V = np.zeros((x.shape[0], N))
V[:, :-4] = self.jacobi(x, 2, 2, N-4)*((1-x**2)**2)[:, np.newaxis]
else:
if x is None:
x = self.mpmath_points_and_weights()[0]
N = self.shape(False)
V = np.zeros((x.shape[0], N))
for i in range(N-4):
V[:, i] = self.evaluate_basis(x, i, output_array=V[:, i])
return V
def _evaluate_scalar_product(self, fast_transform=True):
SpectralBase._evaluate_scalar_product(self)
self.scalar_product.tmp_array[self.sl[slice(-4, None)]] = 0
def points_and_weights(self, N=None, map_true_domain=False, weighted=True, **kw):
if N is None:
N = self.shape(False)
assert self.quad == "JG"
points, weights = roots_jacobi(N, 0, 0)
if map_true_domain is True:
points = self.map_true_domain(points)
return points, weights
def mpmath_points_and_weights(self, N=None, map_true_domain=False, weighted=True, **kw):
if mode == 'numpy' or not has_quadpy:
return self.points_and_weights(N=N, map_true_domain=map_true_domain, weighted=weighted, **kw)
if N is None:
N = self.shape(False)
assert self.quad == "JG"
pw = quadpy.c1.gauss_jacobi(N, 0, 0, 'mpmath')
points = pw.points_symbolic
weights = pw.weights_symbolic
if map_true_domain is True:
points = self.map_true_domain(points)
return points, weights
def to_ortho(self, input_array, output_array=None):
if output_array is None:
output_array = np.zeros_like(input_array)
else:
output_array.fill(0)
k = self.wavenumbers().astype(float)
_factor0 = 4*(k+2)*(k+1)/(2*k+5)/(2*k+3)
_factor1 = (-2*(2*k+5)/(2*k+7))
_factor2 = ((2*k+3)/(2*k+7))
s0 = self.sl[slice(0, -4)]
z = _factor0*input_array[s0]
output_array[s0] = z
output_array[self.sl[slice(2, -2)]] += z*_factor1
output_array[self.sl[slice(4, None)]] += z*_factor2
return output_array
def get_bc_basis(self):
if self._bc_basis:
return self._bc_basis
self._bc_basis = BCBiharmonic(self.N, quad=self.quad, domain=self.domain,
coordinates=self.coors.coordinates)
return self._bc_basis
class ShenOrder6(JacobiBase):
"""Function space for 6th order equation
Parameters
----------
N : int
Number of quadrature points
quad : str, optional
Type of quadrature
- JG - Jacobi-Gauss
domain : 2-tuple of floats, optional
The computational domain
padding_factor : float, optional
Factor for padding backward transforms.
dealias_direct : bool, optional
Set upper 1/3 of coefficients to zero before backward transform
dtype : data-type, optional
Type of input data in real physical space. Will be overloaded when
basis is part of a :class:`.TensorProductSpace`.
coordinates: 2- or 3-tuple (coordinate, position vector (, sympy assumptions)), optional
        Map for curvilinear coordinate system.
The new coordinate variable in the new coordinate system is the first item.
Second item is a tuple for the Cartesian position vector as function of the
new variable in the first tuple. Example::
            theta = sp.Symbol('x', real=True, positive=True)
rv = (sp.cos(theta), sp.sin(theta))
Note
----
The generalized Jacobi function j^{alpha=-3, beta=-3} is used as basis. However,
inner products are computed without weights, for alpha=beta=0.
"""
def __init__(self, N, quad='JG', domain=(-1., 1.), dtype=float, padding_factor=1, dealias_direct=False,
coordinates=None, bc=(0, 0, 0, 0, 0, 0), alpha=-3, beta=-3):
assert alpha == -3 and beta == -3
JacobiBase.__init__(self, N, quad=quad, alpha=-3, beta=-3, domain=domain, dtype=dtype,
padding_factor=padding_factor, dealias_direct=dealias_direct,
coordinates=coordinates)
from shenfun.tensorproductspace import BoundaryValues
self.bc = BoundaryValues(self, bc=bc)
@staticmethod
def boundary_condition():
return '6th order'
@staticmethod
def short_name():
return 'SS'
def slice(self):
return slice(0, self.N-6)
def sympy_basis(self, i=0, x=xp):
return (1-x**2)**3*sp.jacobi(i, 3, 3, x)
def evaluate_basis_derivative(self, x=None, i=0, k=0, output_array=None):
if x is None:
x = self.mpmath_points_and_weights()[0]
if output_array is None:
output_array = np.zeros(x.shape)
f = self.sympy_basis(i, xp)
output_array[:] = sp.lambdify(xp, f.diff(xp, k), mode)(x)
return output_array
def evaluate_basis_derivative_all(self, x=None, k=0, argument=0):
if x is None:
x = self.mpmath_points_and_weights()[0]
N = self.shape(False)
V = np.zeros((x.shape[0], N))
for i in range(N-6):
V[:, i] = self.evaluate_basis_derivative(x, i, k, output_array=V[:, i])
return V
def evaluate_basis(self, x, i=0, output_array=None):
x = np.atleast_1d(x)
if output_array is None:
output_array = np.zeros(x.shape)
if mode == 'numpy':
output_array[:] = (1-x**2)**3*eval_jacobi(i, 3, 3, x, out=output_array)
else:
f = self.sympy_basis(i, xp)
output_array[:] = sp.lambdify(xp, f, 'mpmath')(x)
return output_array
def evaluate_basis_all(self, x=None, argument=0):
if mode == 'numpy':
if x is None:
x = self.mesh(False, False)
N = self.shape(False)
V = np.zeros((x.shape[0], N))
V[:, :-6] = self.jacobi(x, 3, 3, N-6)*((1-x**2)**3)[:, np.newaxis]
else:
if x is None:
x = self.mpmath_points_and_weights()[0]
N = self.shape(False)
V = np.zeros((x.shape[0], N))
for i in range(N-6):
V[:, i] = self.evaluate_basis(x, i, output_array=V[:, i])
return V
def points_and_weights(self, N=None, map_true_domain=False, weighted=True, **kw):
if N is None:
N = self.shape(False)
assert self.quad == "JG"
points, weights = roots_jacobi(N, 0, 0)
if map_true_domain is True:
points = self.map_true_domain(points)
return points, weights
def mpmath_points_and_weights(self, N=None, map_true_domain=False, weighted=True, **kw):
if mode == 'numpy' or not has_quadpy:
return self.points_and_weights(N=N, map_true_domain=map_true_domain, weighted=weighted, **kw)
if N is None:
N = self.shape(False)
assert self.quad == "JG"
pw = quadpy.c1.gauss_jacobi(N, 0, 0, 'mpmath')
points = pw.points_symbolic
weights = pw.weights_symbolic
if map_true_domain is True:
points = self.map_true_domain(points)
return points, weights
def get_orthogonal(self):
return Orthogonal(self.N, alpha=0, beta=0, dtype=self.dtype, domain=self.domain, coordinates=self.coors.coordinates)
def _evaluate_scalar_product(self, fast_transform=True):
SpectralBase._evaluate_scalar_product(self)
self.scalar_product.tmp_array[self.sl[slice(-6, None)]] = 0
#def to_ortho(self, input_array, output_array=None):
# if output_array is None:
# output_array = np.zeros_like(input_array.v)
# k = self.wavenumbers().astype(float)
# _factor0 = 4*(k+2)*(k+1)/(2*k+5)/(2*k+3)
# _factor1 = (-2*(2*k+5)/(2*k+7))
# _factor2 = ((2*k+3)/(2*k+7))
# s0 = self.sl[slice(0, -4)]
# z = _factor0*input_array[s0]
# output_array[s0] = z
# output_array[self.sl[slice(2, -2)]] -= z*_factor1
# output_array[self.sl[slice(4, None)]] += z*_factor2
# return output_array
| [
"[email protected]"
] | |
1336d3f1f46bdd25db1cf844b8fe527059d12aaa | 316a07bd7ab47d447606d341c5d221d8318f65b9 | /quantum/quantum/plugins/ryu/agent/ryu_quantum_agent.py | e08c07e6fa77706622c7811c53c93e49dd8a7783 | [] | no_license | kumarcv/openstack-nf | 791d16a4844df4666fb2b82a548add98f4832628 | ad2d8c5d49f510292b1fe373c7c10e53be52ba23 | refs/heads/master | 2020-05-20T03:10:54.495411 | 2013-06-16T23:44:11 | 2013-06-16T23:44:11 | 7,497,218 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,082 | py | #!/usr/bin/env python
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012 Isaku Yamahata <yamahata at private email ne jp>
# Based on openvswitch agent.
#
# Copyright 2011 Nicira Networks, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Isaku Yamahata
import logging as LOG
import sys
import time
from ryu.app import rest_nw_id
from ryu.app.client import OFPClient
from sqlalchemy.ext.sqlsoup import SqlSoup
from quantum.agent.linux import ovs_lib
from quantum.agent.linux.ovs_lib import VifPort
from quantum.common import config as logging_config
from quantum.common import constants
from quantum.openstack.common import cfg
from quantum.plugins.ryu.common import config
class OVSBridge(ovs_lib.OVSBridge):
def __init__(self, br_name, root_helper):
ovs_lib.OVSBridge.__init__(self, br_name, root_helper)
self.datapath_id = None
def find_datapath_id(self):
# ovs-vsctl get Bridge br-int datapath_id
res = self.run_vsctl(["get", "Bridge", self.br_name, "datapath_id"])
# remove preceding/trailing double quotes
dp_id = res.strip().strip('"')
self.datapath_id = dp_id
def set_controller(self, target):
methods = ("ssl", "tcp", "unix", "pssl", "ptcp", "punix")
args = target.split(":")
if not args[0] in methods:
target = "tcp:" + target
self.run_vsctl(["set-controller", self.br_name, target])
def _vifport(self, name, external_ids):
ofport = self.db_get_val("Interface", name, "ofport")
return VifPort(name, ofport, external_ids["iface-id"],
external_ids["attached-mac"], self)
def _get_ports(self, get_port):
ports = []
port_names = self.get_port_name_list()
for name in port_names:
port = get_port(name)
if port:
ports.append(port)
return ports
def _get_vif_port(self, name):
external_ids = self.db_get_map("Interface", name, "external_ids")
if "iface-id" in external_ids and "attached-mac" in external_ids:
return self._vifport(name, external_ids)
elif ("xs-vif-uuid" in external_ids and
"attached-mac" in external_ids):
# if this is a xenserver and iface-id is not automatically
# synced to OVS from XAPI, we grab it from XAPI directly
ofport = self.db_get_val("Interface", name, "ofport")
iface_id = self.get_xapi_iface_id(external_ids["xs-vif-uuid"])
return VifPort(name, ofport, iface_id,
external_ids["attached-mac"], self)
def get_vif_ports(self):
"returns a VIF object for each VIF port"
return self._get_ports(self._get_vif_port)
def _get_external_port(self, name):
external_ids = self.db_get_map("Interface", name, "external_ids")
if external_ids:
return
ofport = self.db_get_val("Interface", name, "ofport")
return VifPort(name, ofport, None, None, self)
def get_external_ports(self):
return self._get_ports(self._get_external_port)
def check_ofp_mode(db):
LOG.debug("checking db")
servers = db.ofp_server.all()
ofp_controller_addr = None
ofp_rest_api_addr = None
for serv in servers:
if serv.host_type == "REST_API":
ofp_rest_api_addr = serv.address
elif serv.host_type == "controller":
ofp_controller_addr = serv.address
else:
LOG.warn("ignoring unknown server type %s", serv)
LOG.debug("controller %s", ofp_controller_addr)
LOG.debug("api %s", ofp_rest_api_addr)
if not ofp_controller_addr:
raise RuntimeError("OF controller isn't specified")
if not ofp_rest_api_addr:
raise RuntimeError("Ryu rest API port isn't specified")
LOG.debug("going to ofp controller mode %s %s",
ofp_controller_addr, ofp_rest_api_addr)
return (ofp_controller_addr, ofp_rest_api_addr)
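# Illustrative ofp_server rows this function expects (addresses are made up):
#   host_type='controller', address='tcp:192.0.2.1:6633'
#   host_type='REST_API',   address='192.0.2.1:8080'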
class OVSQuantumOFPRyuAgent:
def __init__(self, integ_br, db, root_helper):
self.root_helper = root_helper
(ofp_controller_addr, ofp_rest_api_addr) = check_ofp_mode(db)
self.nw_id_external = rest_nw_id.NW_ID_EXTERNAL
self.api = OFPClient(ofp_rest_api_addr)
self._setup_integration_br(integ_br, ofp_controller_addr)
def _setup_integration_br(self, integ_br, ofp_controller_addr):
self.int_br = OVSBridge(integ_br, self.root_helper)
self.int_br.find_datapath_id()
self.int_br.set_controller(ofp_controller_addr)
for port in self.int_br.get_external_ports():
self._port_update(self.nw_id_external, port)
def _port_update(self, network_id, port):
self.api.update_port(network_id, port.switch.datapath_id, port.ofport)
def _all_bindings(self, db):
"""return interface id -> port which include network id bindings"""
return dict((port.id, port) for port in db.ports.all())
def _set_port_status(self, port, status):
port.status = status
def daemon_loop(self, db):
# on startup, register all existing ports
all_bindings = self._all_bindings(db)
local_bindings = {}
vif_ports = {}
for port in self.int_br.get_vif_ports():
vif_ports[port.vif_id] = port
if port.vif_id in all_bindings:
net_id = all_bindings[port.vif_id].network_id
local_bindings[port.vif_id] = net_id
self._port_update(net_id, port)
self._set_port_status(all_bindings[port.vif_id],
constants.PORT_STATUS_ACTIVE)
LOG.info("Updating binding to net-id = %s for %s",
net_id, str(port))
db.commit()
old_vif_ports = vif_ports
old_local_bindings = local_bindings
while True:
all_bindings = self._all_bindings(db)
new_vif_ports = {}
new_local_bindings = {}
for port in self.int_br.get_vif_ports():
new_vif_ports[port.vif_id] = port
if port.vif_id in all_bindings:
net_id = all_bindings[port.vif_id].network_id
new_local_bindings[port.vif_id] = net_id
old_b = old_local_bindings.get(port.vif_id)
new_b = new_local_bindings.get(port.vif_id)
if old_b == new_b:
continue
if old_b:
LOG.info("Removing binding to net-id = %s for %s",
old_b, str(port))
if port.vif_id in all_bindings:
self._set_port_status(all_bindings[port.vif_id],
constants.PORT_STATUS_DOWN)
if new_b:
if port.vif_id in all_bindings:
self._set_port_status(all_bindings[port.vif_id],
constants.PORT_STATUS_ACTIVE)
LOG.info("Adding binding to net-id = %s for %s",
new_b, str(port))
for vif_id in old_vif_ports:
if vif_id not in new_vif_ports:
LOG.info("Port Disappeared: %s", vif_id)
if vif_id in all_bindings:
                        # `port` is stale here (left over from the loop
                        # above); the iteration variable is vif_id.
                        self._set_port_status(all_bindings[vif_id],
                                              constants.PORT_STATUS_DOWN)
old_vif_ports = new_vif_ports
old_local_bindings = new_local_bindings
db.commit()
time.sleep(2)
def main():
cfg.CONF(args=sys.argv, project='quantum')
# (TODO) gary - swap with common logging
logging_config.setup_logging(cfg.CONF)
integ_br = cfg.CONF.OVS.integration_bridge
root_helper = cfg.CONF.AGENT.root_helper
options = {"sql_connection": cfg.CONF.DATABASE.sql_connection}
db = SqlSoup(options["sql_connection"])
LOG.info("Connecting to database \"%s\" on %s",
db.engine.url.database, db.engine.url.host)
plugin = OVSQuantumOFPRyuAgent(integ_br, db, root_helper)
plugin.daemon_loop(db)
sys.exit(0)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
b1b101b11fa23b6565560682641431e72a6271c7 | ed15e441d4cd7a54d989610b8070a5d14bfda4c8 | /1804/git/1-第一个月高级python/3/1.py | 473f37e8e250e357e1cb8c9b299ae6f8f4c1ff50 | [] | no_license | jmh9876/p1804_jmh | 24593af521749913b65685e21ffc37281c43998f | a52a6366c21ad7598e71d8e82aeee746ecee7c6b | refs/heads/master | 2020-03-15T23:30:02.769818 | 2018-08-02T09:10:20 | 2018-08-02T09:10:20 | 132,395,104 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 282 | py | age=int(input('请输入你的年龄'))
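# Simple age-bracket classifier; the prompt above asks "Please enter your age".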
if age <= 10:
    print('幼年')    # "infancy"
elif age <= 20:
    print('少年')    # "juvenile"
elif age <= 30:
    print('青少年')  # "adolescent"
elif age <= 40:
    print('青年')    # "young adult"
elif age <= 50:
    print('壮年')    # "prime of life"
elif age <= 60:
    print('中年')    # "middle age"
else:
    print('老人')    # "elderly"
| [
"[email protected]"
] | |
ebd7a164dcde0308ffbac6f3ac4f253bb13aab70 | 1b862f34c125ce200244dd79e4fda4b5b605ce2e | /.history/ML_T2_Validation_20210610235105.py | 51f340a954320e601e4ce9cc15135ba0438122ac | [] | no_license | edwino26/CoreImages | 26085a49cf1cb79442ae563a88354b2fdceace87 | 6bf6e68cac8ab36c87b1e6ea702bfe6882b0f40e | refs/heads/master | 2023-06-22T12:53:37.344895 | 2021-07-21T04:31:44 | 2021-07-21T04:31:44 | 309,553,247 | 0 | 4 | null | 2021-04-29T23:23:15 | 2020-11-03T02:45:07 | Lasso | UTF-8 | Python | false | false | 7,523 | py | #T2 TEST DATA
# %%
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import pickle
from scipy import interpolate
from scipy.integrate import simps
from numpy import trapz
# %%
#Load Stack
UVStack = pd.read_excel('./ML_Results/T2_test/ImgStack.xls')
ImgStackk = UVStack.copy().to_numpy()
# %%
sub = pd.read_excel('./ML_Results/T2_test/sub.xls')
res = pd.read_excel('./ML_Results/T2_test/Results.xls')
res = res[res.Well == 'T2']
res = res.drop_duplicates(subset='DEPT', keep="last")  # returns a new frame, so assign back
res = res.sort_values(by=['DEPT'])
res.drop(['Unnamed: 0'], axis=1, inplace=True)
TT = pd.read_excel('./ML_Results/Train_Test_Results.xls')
istr = 0
iend = 42344
dplot_o = 3671
dplot_n = 3750
shading = 'bone'
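# istr/iend slice the rows of the stacked core image; dplot_o/dplot_n bound
# the plotted depth window; 'bone' is the matplotlib colormap used throughout.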
# %% Load Log Calculations
T2_x = pd.read_excel('./Excel_Files/T2.xls',sheet_name='T2_data')
T2_x = T2_x[['DEPTH','GR_EDTC','RHOZ','AT90','NPHI','Vsh','Vclay','grain_density','porosity',
'RW2','Sw_a','Sw_a1','Sw_p','Sw_p1','SwWS','Swsim','Swsim1','PAY_archie',
'PAY_poupon','PAY_waxman','PAY_simandoux']]
# %%
dep = np.arange(min(res.DEPT), max(res.DEPT),0.5)
# Resample every log curve onto the regular 0.5-unit depth grid `dep`.
T2_rs = pd.DataFrame(index=range(len(dep)), columns=T2_x.columns)
T2_rs.iloc[:, 0] = dep
for i in range(len(T2_x.columns)):
    f = interpolate.interp1d(T2_x.DEPTH, T2_x.iloc[:, i])
    T2_rs.iloc[:, i] = f(dep)
#T2_rs.dropna(inplace=True)
T2_x = T2_rs.copy()
# %%
plt.figure()
plt.subplot2grid((1, 10), (0, 0), colspan=3)
plt.plot(sub['GRAY'], sub['DEPTH'], 'mediumseagreen', linewidth=0.5);
plt.axis([50, 250, dplot_o, dplot_n]);
plt.gca().invert_yaxis();
plt.fill_between(sub['GRAY'], 0, sub['DEPTH'], facecolor='green', alpha=0.5)
plt.xlabel('Gray Scale RGB')
plt.subplot2grid((1, 10), (0, 3), colspan=7)
plt.imshow(ImgStackk[istr:iend,80:120], aspect='auto', origin='upper', extent=[0,1,dplot_n,dplot_o], cmap=shading);
plt.axis([0, 1, dplot_o, dplot_n]);
plt.gca().invert_yaxis()
plt.xlabel('Processed Image')
plt.colorbar()
p_50 = np.percentile(sub['DEPTH'], 50)
plt.yticks([]); plt.xticks([])
plt.subplots_adjust(wspace = 20, left = 0.1, right = 0.9, bottom = 0.1, top = 0.9)
plt.show()
# %%
CORE =pd.read_excel('./CORE/CORE.xlsx',sheet_name='XRD')
mask = CORE.Well.isin(['T2'])
T2_Core = CORE[mask]
prof=T2_Core['Depth']
clays=T2_Core['Clays']
xls1 = pd.read_excel ('./CORE/CORE.xlsx', sheet_name='Saturation')
mask = xls1.Well.isin(['T2'])
T2_sat = xls1[mask]
long = T2_sat['Depth']
poro = T2_sat['PHIT']
grain = T2_sat['RHOG']
sw_core = T2_sat['Sw']
klinkenberg = T2_sat['K']
minimo=grain.min()
maximo=grain.max()
c=2.65
d=2.75
norm=(((grain-minimo)*(d-c)/(maximo-minimo))+c)
xls2 = pd.read_excel ('./CORE/CORE.xlsx', sheet_name='Gamma')
mask = xls2.Well.isin(['T2'])
T2_GR = xls2[mask]
h=T2_GR['Depth']
cg1=T2_GR['GR_Scaled']
# %%
# ~~~~~~~~~~~~~~~~~~ Plot Results ~~~~~~~~~~~~~~~~~~~~~~
ct = 0
top= dplot_o
bottom= dplot_n
no_plots = 9
ct+=1
plt.figure(figsize=(10,9))
plt.subplot(1,no_plots,ct)
plt.plot (T2_x.GR_EDTC,T2_x.DEPTH,'g',cg1,(h+3),'c.',lw=0.5)
plt.title('$GR/ Core.GR $',fontsize=8)
plt.axis([40,130,top,bottom])
plt.xticks(fontsize=8)
plt.yticks(fontsize=8)
plt.xlabel('Gamma Ray ',fontsize=6)
plt.gca().invert_yaxis()
plt.grid(True)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
ct+=1
plt.subplot(1,no_plots,ct)
plt.plot (T2_x.PAY_poupon,T2_x.DEPTH,'r',lw=0.5)
plt.title('$PAY_P$',fontsize=8)
plt.fill_between(T2_x.PAY_poupon,T2_x.DEPTH, color='r', alpha=0.8)
plt.axis([0,0.001,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
#Waxman-Smits
ct+=1
plt.subplot(1,no_plots,ct)
plt.plot (T2_x.PAY_waxman,T2_x.DEPTH,'g',lw=0.5)
plt.title('$PAY_W$',fontsize=8)
plt.fill_between(T2_x.PAY_waxman,T2_x.DEPTH, color='g', alpha=0.8)
plt.axis([0,0.001,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
#Simandoux
ct+=1
plt.subplot(1,no_plots,ct)
plt.plot (T2_x.PAY_simandoux,T2_x.DEPTH,'y',lw=0.5)
plt.title('$PAY_S$',fontsize=8)
plt.fill_between(T2_x.PAY_simandoux,T2_x.DEPTH, color='y', alpha=0.8)
plt.axis([0,0.001,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
ct+=1
plt.subplot(1,no_plots,ct)
plt.plot(sub['GRAY'], sub['DEPTH'], 'mediumseagreen', linewidth=0.5);
plt.axis([50, 250, dplot_o, dplot_n]);
plt.xticks(fontsize=8)
plt.title('$Core Img$',fontsize=8)
plt.gca().invert_yaxis();
plt.gca().yaxis.set_visible(False)
plt.fill_between(sub['GRAY'], 0, sub['DEPTH'], facecolor='green', alpha=0.5)
plt.xlabel('Gray Scale RGB', fontsize=7)
ct+=1
corte= 140
PAY_Gray_scale = res['GRAY'].apply(lambda x: 1 if x<corte else 0)
plt.subplot(1,no_plots,ct)
plt.plot (PAY_Gray_scale,res.DEPT,'c',lw=0.5)
plt.title('$PAY-GS$',fontsize=8)
plt.fill_between(PAY_Gray_scale,res.DEPT, color='c', alpha=0.8)
plt.axis([0,0.001,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.xlabel('Resolution to Log Scale',fontsize=7)
ct+=1
plt.subplot(1,no_plots,ct)
plt.imshow(ImgStackk[istr:iend,80:120], aspect='auto', origin='upper', extent=[0,1,dplot_n,dplot_o], cmap=shading);
plt.axis([0, 1, dplot_o, dplot_n]);
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.xlabel('Processed \n Image', fontsize=7)
plt.colorbar()
p_50 = np.percentile(sub['DEPTH'], 50)
plt.yticks([]); plt.xticks([])
ct+=1
plt.subplot(1,no_plots,ct)
plt.plot (res['RandomForest'],res.DEPT,'r',lw=1)
plt.plot (res.GRAY,res.DEPT,'k',lw=0.5)
plt.title('Machine Learning',fontsize=8)
plt.axis([0,2,top,bottom])
plt.xticks(fontsize=8)
plt.xlabel('RandomForest',fontsize=7)
plt.gca().invert_yaxis()
plt.gca().invert_xaxis()
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.xlim(0, 255)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
ct+=1
PAY_Gray_scale = res['RandomForest'].apply(lambda x: 1 if x<corte else 0)
plt.subplot(1,no_plots,ct)
plt.plot(PAY_Gray_scale, res.DEPT, 'c', lw=0.5)
plt.title('$Validations$',fontsize=8)
plt.fill_between(PAY_Gray_scale,res.DEPT, color='c', alpha=0.8)
plt.axis([0,0.001,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.suptitle('Tinmiaq-2 Method Comparison')
plt.show()
# %%
plt.figure(figsize=(10,9))
plt.subplot(1,1,1)
plt.plot(res.GRAY, res['RandomForest'], 'ko')
plt.plot(res.GRAY, res.GRAY, 'r')
plt.xlim(0, 255)
plt.ylim(0, 255)
plt.xlabel('Valor en Escala de Gris Suavizado a res. de Registros',fontsize=17)
plt.ylabel('Predicción de Escala de Gris usando Random Forest',fontsize=17)
plt.show()
# %% Error Calculation
# T2_x.PAY_poupon,T2_x.DEPTH
# T2_x.PAY_waxman
# T2_x.PAY_simandoux
def integrate(y_vals, h):
    # Composite Simpson's rule; assumes an even number of intervals
    # (i.e., len(y_vals) is odd) with uniform spacing h.
    i = 1
total = y_vals[0] + y_vals[-1]
for y in y_vals[1:-1]:
if i % 2 == 0:
total += 2 * y
else:
total += 4 * y
i += 1
return total * (h / 3.0)
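# Example: y = x**2 sampled at x = 0, .25, .5, .75, 1 (h = 0.25):
#     integrate([0.0, 0.0625, 0.25, 0.5625, 1.0], 0.25)  # -> 0.3333...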
# %%
from sklearn.metrics import mean_squared_error  # required by the call below

rmse = pd.DataFrame(columns=['Poupon', 'Waxman-Smits', 'Simandoux', 'Machine Learning'])
# Minimal runnable sketch: y_test / y_pred_test are undefined in this script,
# so score the Random Forest gray-scale prediction against the smoothed core
# gray scale (both available in `res`).
rmse.loc[0, 'Machine Learning'] = mean_squared_error(res.GRAY, res['RandomForest'], squared=False)
# %%
| [
"[email protected]"
] | |
49b0fd738807eeab31b75555caa2b91688fc10ca | 551b75f52d28c0b5c8944d808a361470e2602654 | /huaweicloud-sdk-apig/huaweicloudsdkapig/v2/model/list_apis_unbinded_to_request_throttling_policy_v2_request.py | 7718fef3150b580f16a91dad09b7bdf4ca4b3448 | [
"Apache-2.0"
] | permissive | wuchen-huawei/huaweicloud-sdk-python-v3 | 9d6597ce8ab666a9a297b3d936aeb85c55cf5877 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | refs/heads/master | 2023-05-08T21:32:31.920300 | 2021-05-26T08:54:18 | 2021-05-26T08:54:18 | 370,898,764 | 0 | 0 | NOASSERTION | 2021-05-26T03:50:07 | 2021-05-26T03:50:07 | null | UTF-8 | Python | false | false | 8,683 | py | # coding: utf-8
import pprint
import re
import six
class ListApisUnbindedToRequestThrottlingPolicyV2Request:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'instance_id': 'str',
'throttle_id': 'str',
'env_id': 'str',
'group_id': 'str',
'api_id': 'str',
'api_name': 'str',
'offset': 'int',
'limit': 'int'
}
attribute_map = {
'instance_id': 'instance_id',
'throttle_id': 'throttle_id',
'env_id': 'env_id',
'group_id': 'group_id',
'api_id': 'api_id',
'api_name': 'api_name',
'offset': 'offset',
'limit': 'limit'
}
def __init__(self, instance_id=None, throttle_id=None, env_id=None, group_id=None, api_id=None, api_name=None, offset=None, limit=None):
"""ListApisUnbindedToRequestThrottlingPolicyV2Request - a model defined in huaweicloud sdk"""
self._instance_id = None
self._throttle_id = None
self._env_id = None
self._group_id = None
self._api_id = None
self._api_name = None
self._offset = None
self._limit = None
self.discriminator = None
self.instance_id = instance_id
self.throttle_id = throttle_id
if env_id is not None:
self.env_id = env_id
if group_id is not None:
self.group_id = group_id
if api_id is not None:
self.api_id = api_id
if api_name is not None:
self.api_name = api_name
if offset is not None:
self.offset = offset
if limit is not None:
self.limit = limit
@property
def instance_id(self):
"""Gets the instance_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        Instance ID
:return: The instance_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:rtype: str
"""
return self._instance_id
@instance_id.setter
def instance_id(self, instance_id):
"""Sets the instance_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        Instance ID
:param instance_id: The instance_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:type: str
"""
self._instance_id = instance_id
@property
def throttle_id(self):
"""Gets the throttle_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        Request throttling policy ID
:return: The throttle_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:rtype: str
"""
return self._throttle_id
@throttle_id.setter
def throttle_id(self, throttle_id):
"""Sets the throttle_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        Request throttling policy ID
:param throttle_id: The throttle_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:type: str
"""
self._throttle_id = throttle_id
@property
def env_id(self):
"""Gets the env_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        Environment ID
:return: The env_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:rtype: str
"""
return self._env_id
@env_id.setter
def env_id(self, env_id):
"""Sets the env_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        Environment ID
:param env_id: The env_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:type: str
"""
self._env_id = env_id
@property
def group_id(self):
"""Gets the group_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        API group ID
:return: The group_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:rtype: str
"""
return self._group_id
@group_id.setter
def group_id(self, group_id):
"""Sets the group_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        API group ID
:param group_id: The group_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:type: str
"""
self._group_id = group_id
@property
def api_id(self):
"""Gets the api_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        API ID
:return: The api_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:rtype: str
"""
return self._api_id
@api_id.setter
def api_id(self, api_id):
"""Sets the api_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        API ID
:param api_id: The api_id of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:type: str
"""
self._api_id = api_id
@property
def api_name(self):
"""Gets the api_name of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        API name
:return: The api_name of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:rtype: str
"""
return self._api_name
@api_name.setter
def api_name(self, api_name):
"""Sets the api_name of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        API name
:param api_name: The api_name of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:type: str
"""
self._api_name = api_name
@property
def offset(self):
"""Gets the offset of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        Offset from which the query starts; an offset less than 0 is automatically converted to 0
:return: The offset of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:rtype: int
"""
return self._offset
@offset.setter
def offset(self, offset):
"""Sets the offset of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        Offset from which the query starts; an offset less than 0 is automatically converted to 0
:param offset: The offset of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:type: int
"""
self._offset = offset
@property
def limit(self):
"""Gets the limit of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        Number of items displayed per page
:return: The limit of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:rtype: int
"""
return self._limit
@limit.setter
def limit(self, limit):
"""Sets the limit of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
        Number of items displayed per page
:param limit: The limit of this ListApisUnbindedToRequestThrottlingPolicyV2Request.
:type: int
"""
self._limit = limit
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ListApisUnbindedToRequestThrottlingPolicyV2Request):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
eb7d3fd6e473974c51a4200dcf634be0569c227d | ca75f7099b93d8083d5b2e9c6db2e8821e63f83b | /z2/part2/batch/jm/parser_errors_2/425622553.py | 8aea3ff937d9a8d93e8db7bfc24a45b988d36b5c | [
"MIT"
] | permissive | kozakusek/ipp-2020-testy | 210ed201eaea3c86933266bd57ee284c9fbc1b96 | 09aa008fa53d159672cc7cbf969a6b237e15a7b8 | refs/heads/master | 2022-10-04T18:55:37.875713 | 2020-06-09T21:15:37 | 2020-06-09T21:15:37 | 262,290,632 | 0 | 0 | MIT | 2020-06-09T21:15:38 | 2020-05-08T10:10:47 | C | UTF-8 | Python | false | false | 2,607 | py | from part1 import (
gamma_board,
gamma_busy_fields,
gamma_delete,
gamma_free_fields,
gamma_golden_move,
gamma_golden_possible,
gamma_move,
gamma_new,
)
"""
scenario: test_random_actions
uuid: 425622553
"""
"""
random actions, total chaos
"""
board = gamma_new(4, 4, 2, 7)
assert board is not None
assert gamma_move(board, 1, 0, 3) == 1
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 2, 2, 1) == 1
assert gamma_move(board, 1, 3, 3) == 1
assert gamma_free_fields(board, 1) == 13
assert gamma_move(board, 2, 1, 3) == 1
assert gamma_move(board, 1, 2, 3) == 1
assert gamma_free_fields(board, 1) == 11
assert gamma_move(board, 2, 2, 2) == 1
assert gamma_move(board, 1, 0, 0) == 1
assert gamma_move(board, 1, 1, 0) == 1
assert gamma_busy_fields(board, 1) == 5
assert gamma_move(board, 2, 2, 1) == 0
assert gamma_move(board, 2, 2, 2) == 0
assert gamma_move(board, 1, 0, 3) == 0
board424270140 = gamma_board(board)
assert board424270140 is not None
assert board424270140 == ("1211\n"
"..2.\n"
"..2.\n"
"11..\n")
del board424270140
board424270140 = None
assert gamma_move(board, 2, 3, 2) == 1
assert gamma_move(board, 2, 0, 1) == 1
assert gamma_busy_fields(board, 1) == 5
assert gamma_golden_move(board, 1, 1, 0) == 0
assert gamma_move(board, 2, 1, 2) == 1
assert gamma_move(board, 2, 2, 2) == 0
assert gamma_move(board, 2, 0, 3) == 0
assert gamma_busy_fields(board, 2) == 6
assert gamma_free_fields(board, 2) == 5
assert gamma_golden_possible(board, 2) == 1
assert gamma_move(board, 1, 3, 3) == 0
assert gamma_move(board, 1, 2, 0) == 1
assert gamma_move(board, 2, 2, 3) == 0
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 2, 1, 3) == 0
assert gamma_move(board, 2, 1, 1) == 1
assert gamma_move(board, 1, 1, 0) == 0
assert gamma_move(board, 1, 0, 0) == 0
assert gamma_move(board, 2, 1, 3) == 0
assert gamma_move(board, 2, 0, 1) == 0
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 2, 0, 3) == 0
assert gamma_move(board, 2, 2, 2) == 0
assert gamma_move(board, 1, 2, 0) == 0
assert gamma_move(board, 2, 1, 3) == 0
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 1, 3, 3) == 0
assert gamma_move(board, 2, 2, 0) == 0
assert gamma_move(board, 2, 0, 3) == 0
board885784383 = gamma_board(board)
assert board885784383 is not None
assert board885784383 == ("1211\n"
".222\n"
"222.\n"
"111.\n")
del board885784383
board885784383 = None
assert gamma_move(board, 1, 1, 3) == 0
assert gamma_busy_fields(board, 1) == 6
assert gamma_move(board, 2, 2, 0) == 0
gamma_delete(board)
| [
"[email protected]"
] | |
cbaf69e9724173c78d6bde16435635a8760e7ef2 | 0d803b9d03867c0827a10b155cdcdb7a4e693c9b | /geocoding.py | c34f7143dfdbc5d35f86d801bf542605f0eab510 | [] | no_license | toolness/nycdb-fun | a09a6906840c144eae17489db8231f85dba5e033 | c4b33160dfbcc45ab1d9cf6424054ed7668d82c5 | refs/heads/master | 2020-04-12T08:00:01.808139 | 2018-12-23T22:09:59 | 2018-12-23T22:09:59 | 162,378,287 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,953 | py | from typing import List, Optional
import logging
import pydantic
import requests
GEOCODING_SEARCH_URL = "https://geosearch.planninglabs.nyc/v1/search"
GEOCODING_TIMEOUT = 3
logger = logging.getLogger(__name__)
class FeatureGeometry(pydantic.BaseModel):
# This is generally "Point".
type: str
# The latitude and longitude.
coordinates: List[float]
class FeatureProperties(pydantic.BaseModel):
# The ZIP code, e.g. "11201".
postalcode: str
# The name, e.g. "666 FIFTH AVENUE".
name: str
# The region, e.g. "New York State".
region: str
# The locality, e.g. "New York".
locality: str
# The borough, e.g. "Manhattan"
borough: str
# e.g. "whosonfirst:borough:2"
borough_gid: str
# The full address, e.g. "666 FIFTH AVENUE, Manhattan, New York, NY, USA"
label: str
# The borough, block, lot number of the address, e.g. "3002920026".
pad_bbl: str
class Feature(pydantic.BaseModel):
# This is generally "Feature".
type: str
geometry: FeatureGeometry
properties: FeatureProperties
def search(text: str) -> Optional[List[Feature]]:
'''
Retrieves geo search results for the given search
criteria. For more details, see:
https://geosearch.planninglabs.nyc/docs/#search
If any errors occur, this function will log an
exception and return None.
'''
if not GEOCODING_SEARCH_URL:
# Geocoding is disabled.
return None
try:
response = requests.get(
GEOCODING_SEARCH_URL,
{'text': text},
timeout=GEOCODING_TIMEOUT
)
if response.status_code != 200:
raise Exception(f'Expected 200 response, got {response.status_code}')
return [Feature(**kwargs) for kwargs in response.json()['features']]
except Exception:
logger.exception(f'Error while retrieving data from {GEOCODING_SEARCH_URL}')
return None
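# Illustrative usage (results depend on the live geosearch service):
#
#     features = search('150 Court Street, Brooklyn')
#     if features:
#         print(features[0].properties.label, features[0].properties.pad_bbl)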
| [
"[email protected]"
] | |
109fadc9340f4c94f4aabffe103bf717eb2211c7 | c1c47173f7291a3e436ac5b5a389f6a198a129ba | /monitoring/google/cloud/monitoring_v3/proto/dropped_labels_pb2.py | b7aab12e4ab09ca4b42375e34e8f5e7591c7c38a | [
"Apache-2.0"
] | permissive | di/google-cloud-python | 932c35146ff813d65b9deb5ee08f4600c0fbbc82 | a0bd8d0565e2a682760a113c59ce12b872bce9ab | refs/heads/master | 2020-04-06T19:44:39.937268 | 2018-11-15T03:26:34 | 2018-11-15T03:26:34 | 157,748,058 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | true | 5,624 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/monitoring_v3/proto/dropped_labels.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/cloud/monitoring_v3/proto/dropped_labels.proto',
package='google.monitoring.v3',
syntax='proto3',
serialized_pb=_b('\n5google/cloud/monitoring_v3/proto/dropped_labels.proto\x12\x14google.monitoring.v3\x1a\x1cgoogle/api/annotations.proto\"|\n\rDroppedLabels\x12=\n\x05label\x18\x01 \x03(\x0b\x32..google.monitoring.v3.DroppedLabels.LabelEntry\x1a,\n\nLabelEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x42@Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoringb\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_DROPPEDLABELS_LABELENTRY = _descriptor.Descriptor(
name='LabelEntry',
full_name='google.monitoring.v3.DroppedLabels.LabelEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='google.monitoring.v3.DroppedLabels.LabelEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='google.monitoring.v3.DroppedLabels.LabelEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=_descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001')),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=189,
serialized_end=233,
)
_DROPPEDLABELS = _descriptor.Descriptor(
name='DroppedLabels',
full_name='google.monitoring.v3.DroppedLabels',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='label', full_name='google.monitoring.v3.DroppedLabels.label', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_DROPPEDLABELS_LABELENTRY, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=109,
serialized_end=233,
)
_DROPPEDLABELS_LABELENTRY.containing_type = _DROPPEDLABELS
_DROPPEDLABELS.fields_by_name['label'].message_type = _DROPPEDLABELS_LABELENTRY
DESCRIPTOR.message_types_by_name['DroppedLabels'] = _DROPPEDLABELS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
DroppedLabels = _reflection.GeneratedProtocolMessageType('DroppedLabels', (_message.Message,), dict(
LabelEntry = _reflection.GeneratedProtocolMessageType('LabelEntry', (_message.Message,), dict(
DESCRIPTOR = _DROPPEDLABELS_LABELENTRY,
__module__ = 'google.cloud.monitoring_v3.proto.dropped_labels_pb2'
# @@protoc_insertion_point(class_scope:google.monitoring.v3.DroppedLabels.LabelEntry)
))
,
DESCRIPTOR = _DROPPEDLABELS,
__module__ = 'google.cloud.monitoring_v3.proto.dropped_labels_pb2'
,
__doc__ = """A set of (label, value) pairs which were dropped during aggregation,
attached to google.api.Distribution.Exemplars in google.api.Distribution
values during aggregation.
These values are used in combination with the label values that remain
on the aggregated Distribution timeseries to construct the full label
set for the exemplar values. The resulting full label set may be used to
identify the specific task/job/instance (for example) which may be
contributing to a long-tail, while allowing the storage savings of only
storing aggregated distribution values for a large group.
Note that there are no guarantees on ordering of the labels from
exemplar-to-exemplar and from distribution-to-distribution in the same
stream, and there may be duplicates. It is up to clients to resolve any
ambiguities.
Attributes:
label:
Map from label to its value, for all labels dropped in any
aggregation.
""",
# @@protoc_insertion_point(class_scope:google.monitoring.v3.DroppedLabels)
))
_sym_db.RegisterMessage(DroppedLabels)
_sym_db.RegisterMessage(DroppedLabels.LabelEntry)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('Z>google.golang.org/genproto/googleapis/monitoring/v3;monitoring'))
_DROPPEDLABELS_LABELENTRY.has_options = True
_DROPPEDLABELS_LABELENTRY._options = _descriptor._ParseOptions(descriptor_pb2.MessageOptions(), _b('8\001'))
# @@protoc_insertion_point(module_scope)
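# --- Illustrative usage sketch (an addition, not part of the generated module) ---
# Assumes this file is importable as dropped_labels_pb2; the message and map
# field names below come from the descriptor defined above.
if __name__ == '__main__':
    msg = DroppedLabels()
    msg.label['instance_id'] = '1234'   # `label` behaves as a map<string, string>
    msg.label['zone'] = 'us-east1-b'
    # Round-trip through the wire format to show (de)serialization.
    clone = DroppedLabels()
    clone.ParseFromString(msg.SerializeToString())
    assert clone.label['zone'] == 'us-east1-b'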
| [
"[email protected]"
] | |
2c02a8ba8cb8fd6fdb3adaa229c5b39a866126a7 | 553766d4fd37aee9b737b1a28a050229b6d9e375 | /setup.py | 01a55bdf03211a50f3e04416915a5bf20f9949cd | [
"MIT"
] | permissive | NLHEALTHCARE/py-kerapu | 042825a7783b18bf65f024f4d9b617974388631f | 039d5ce0ed52ca62316180c942c9738c913a7f0a | refs/heads/master | 2021-01-18T23:39:45.709965 | 2017-02-03T13:41:41 | 2017-02-03T13:41:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,202 | py | from setuptools import setup, find_packages
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as handle:
long_description = handle.read()
setup(
name='Kerapu',
version='1.0.0',
    description='An implementation of the grouper',
long_description=long_description,
url='https://github.com/SetBased/py-kerapu',
author='Paul Water',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: Dutch',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
keywords='DBC Grouper',
packages=find_packages(exclude=['build', 'test']),
entry_points={
'console_scripts': [
'kerapu = kerapu.application.kerapu:main',
],
},
install_requires=['lxml', 'cleo==0.5.0']
)
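# Illustrative usage note (an assumption, not part of the original file):
# after `pip install .`, setuptools generates a `kerapu` console script from
# the entry point above, dispatching to kerapu.application.kerapu:main, e.g.:
#
#   $ kerapu --help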
| [
"[email protected]"
] | |
5e929ab522e2992b332ec4de8073c0ef70793e0d | e247d9261676f257752c0c6beac161954137a81c | /src/0791.custom-sort-string/custom-sort-string.py | b0b141f7ad9a35f367de19bc1724b7a98f132ab1 | [
"MIT"
] | permissive | henrymorgen/Just-Code | 8fbbd8288b485372a44e10b0078b5edb8af61a3b | fa03ebb89edd8f2292de7c0644dbab88dc1d924c | refs/heads/master | 2022-10-19T05:59:53.134092 | 2020-06-10T02:26:43 | 2020-06-10T02:26:43 | 273,656,532 | 1 | 2 | MIT | 2020-06-20T07:02:38 | 2020-06-20T07:02:38 | null | UTF-8 | Python | false | false | 134 | py | class Solution:
def customSortString(self, S: str, T: str) -> str:
return ''.join(sorted(list(T), key=lambda x:S.find(x))) | [
"[email protected]"
] | |
8849114cecf22f40b1ff3abab1147e515485e339 | 3035e6a2b4e5b5662670c188785ed9fad0e1a315 | /Chapter07/example/python/permissions/can_create_asset.py | 6d52788dbc766c8ca272bec76a494ad1ef7423e4 | [
"MIT"
] | permissive | mahen92/Hyperledger-Cookbook | 52491da47ea7e4b3d988b1303ad4641d89bd3c0e | c2aaf9f9fd58757110a2a6b3ab7498da11fba254 | refs/heads/master | 2021-01-09T15:36:10.368893 | 2020-04-10T18:17:41 | 2020-04-10T18:17:41 | 242,358,174 | 0 | 0 | MIT | 2020-02-22T14:46:54 | 2020-02-22T14:46:53 | null | UTF-8 | Python | false | false | 1,270 | py | #
# Copyright Soramitsu Co., Ltd. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
#
import iroha
import commons
admin = commons.new_user('admin@test')
alice = commons.new_user('alice@test')
@commons.hex
def genesis_tx():
test_permissions = iroha.RolePermissionSet([iroha.Role_kCreateAsset])
tx = iroha.ModelTransactionBuilder() \
.createdTime(commons.now()) \
.creatorAccountId(admin['id']) \
.addPeer('0.0.0.0:50541', admin['key'].publicKey()) \
.createRole('admin_role', commons.all_permissions()) \
.createRole('test_role', test_permissions) \
.createDomain('test', 'test_role') \
.createAccount('admin', 'test', admin['key'].publicKey()) \
.createAccount('alice', 'test', alice['key'].publicKey()) \
.appendRole(admin['id'], 'admin_role') \
.build()
return iroha.ModelProtoTransaction(tx) \
.signAndAddSignature(admin['key']).finish()
@commons.hex
def create_asset_tx():
tx = iroha.ModelTransactionBuilder() \
.createdTime(commons.now()) \
.creatorAccountId(alice['id']) \
.createAsset('coin', 'test', 2) \
.build()
return iroha.ModelProtoTransaction(tx) \
.signAndAddSignature(alice['key']).finish()
| [
"[email protected]"
] | |
575579d1ff638a7b0cca8347ada4ef25a82fe6e2 | eb9c3dac0dca0ecd184df14b1fda62e61cc8c7d7 | /google/cloud/datacatalog/v1beta1/datacatalog-v1beta1-py/scripts/fixup_datacatalog_v1beta1_keywords.py | 5304bf50a8687ea3dba5a4c94271975dafc65f62 | [
"Apache-2.0"
] | permissive | Tryweirder/googleapis-gen | 2e5daf46574c3af3d448f1177eaebe809100c346 | 45d8e9377379f9d1d4e166e80415a8c1737f284d | refs/heads/master | 2023-04-05T06:30:04.726589 | 2021-04-13T23:35:20 | 2021-04-13T23:35:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,080 | py | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import os
import libcst as cst
import pathlib
import sys
from typing import (Any, Callable, Dict, List, Sequence, Tuple)
def partition(
predicate: Callable[[Any], bool],
iterator: Sequence[Any]
) -> Tuple[List[Any], List[Any]]:
"""A stable, out-of-place partition."""
results = ([], [])
for i in iterator:
results[int(predicate(i))].append(i)
# Returns trueList, falseList
return results[1], results[0]
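# Example (illustrative): partition(lambda x: x % 2 == 0, [1, 2, 3, 4])
# returns ([2, 4], [1, 3]) -- predicate-true items first, then the rest.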
class datacatalogCallTransformer(cst.CSTTransformer):
CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
'create_entry': ('parent', 'entry_id', 'entry', ),
'create_entry_group': ('parent', 'entry_group_id', 'entry_group', ),
'create_policy_tag': ('parent', 'policy_tag', ),
'create_tag': ('parent', 'tag', ),
'create_tag_template': ('parent', 'tag_template_id', 'tag_template', ),
'create_tag_template_field': ('parent', 'tag_template_field_id', 'tag_template_field', ),
'create_taxonomy': ('parent', 'taxonomy', ),
'delete_entry': ('name', ),
'delete_entry_group': ('name', 'force', ),
'delete_policy_tag': ('name', ),
'delete_tag': ('name', ),
'delete_tag_template': ('name', 'force', ),
'delete_tag_template_field': ('name', 'force', ),
'delete_taxonomy': ('name', ),
'export_taxonomies': ('parent', 'taxonomies', 'serialized_taxonomies', ),
'get_entry': ('name', ),
'get_entry_group': ('name', 'read_mask', ),
'get_iam_policy': ('resource', 'options_', ),
'get_policy_tag': ('name', ),
'get_tag_template': ('name', ),
'get_taxonomy': ('name', ),
'import_taxonomies': ('parent', 'inline_source', ),
'list_entries': ('parent', 'page_size', 'page_token', 'read_mask', ),
'list_entry_groups': ('parent', 'page_size', 'page_token', ),
'list_policy_tags': ('parent', 'page_size', 'page_token', ),
'list_tags': ('parent', 'page_size', 'page_token', ),
'list_taxonomies': ('parent', 'page_size', 'page_token', ),
'lookup_entry': ('linked_resource', 'sql_resource', ),
'rename_tag_template_field': ('name', 'new_tag_template_field_id', ),
'search_catalog': ('scope', 'query', 'page_size', 'page_token', 'order_by', ),
'set_iam_policy': ('resource', 'policy_', ),
'test_iam_permissions': ('resource', 'permissions', ),
'update_entry': ('entry', 'update_mask', ),
'update_entry_group': ('entry_group', 'update_mask', ),
'update_policy_tag': ('policy_tag', 'update_mask', ),
'update_tag': ('tag', 'update_mask', ),
'update_tag_template': ('tag_template', 'update_mask', ),
'update_tag_template_field': ('name', 'tag_template_field', 'update_mask', ),
'update_taxonomy': ('taxonomy', 'update_mask', ),
}
def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
try:
key = original.func.attr.value
kword_params = self.METHOD_TO_PARAMS[key]
except (AttributeError, KeyError):
# Either not a method from the API or too convoluted to be sure.
return updated
# If the existing code is valid, keyword args come after positional args.
# Therefore, all positional args must map to the first parameters.
args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
if any(k.keyword.value == "request" for k in kwargs):
# We've already fixed this file, don't fix it again.
return updated
kwargs, ctrl_kwargs = partition(
lambda a: not a.keyword.value in self.CTRL_PARAMS,
kwargs
)
args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
request_arg = cst.Arg(
value=cst.Dict([
cst.DictElement(
cst.SimpleString("'{}'".format(name)),
cst.Element(value=arg.value)
)
# Note: the args + kwargs looks silly, but keep in mind that
# the control parameters had to be stripped out, and that
# those could have been passed positionally or by keyword.
for name, arg in zip(kword_params, args + kwargs)]),
keyword=cst.Name("request")
)
return updated.with_changes(
args=[request_arg] + ctrl_kwargs
)
def fix_files(
in_dir: pathlib.Path,
out_dir: pathlib.Path,
*,
transformer=datacatalogCallTransformer(),
):
"""Duplicate the input dir to the output dir, fixing file method calls.
Preconditions:
* in_dir is a real directory
* out_dir is a real, empty directory
"""
pyfile_gen = (
pathlib.Path(os.path.join(root, f))
for root, _, files in os.walk(in_dir)
for f in files if os.path.splitext(f)[1] == ".py"
)
for fpath in pyfile_gen:
with open(fpath, 'r') as f:
src = f.read()
# Parse the code and insert method call fixes.
tree = cst.parse_module(src)
updated = tree.visit(transformer)
# Create the path and directory structure for the new file.
updated_path = out_dir.joinpath(fpath.relative_to(in_dir))
updated_path.parent.mkdir(parents=True, exist_ok=True)
# Generate the updated source file at the corresponding path.
with open(updated_path, 'w') as f:
f.write(updated.code)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="""Fix up source that uses the datacatalog client library.
The existing sources are NOT overwritten but are copied to output_dir with changes made.
Note: This tool operates at a best-effort level at converting positional
parameters in client method calls to keyword based parameters.
Cases where it WILL FAIL include
A) * or ** expansion in a method call.
B) Calls via function or method alias (includes free function calls)
C) Indirect or dispatched calls (e.g. the method is looked up dynamically)
These all constitute false negatives. The tool will also detect false
positives when an API method shares a name with another method.
""")
parser.add_argument(
'-d',
'--input-directory',
required=True,
dest='input_dir',
help='the input directory to walk for python files to fix up',
)
parser.add_argument(
'-o',
'--output-directory',
required=True,
dest='output_dir',
help='the directory to output files fixed via un-flattening',
)
args = parser.parse_args()
input_dir = pathlib.Path(args.input_dir)
output_dir = pathlib.Path(args.output_dir)
if not input_dir.is_dir():
print(
f"input directory '{input_dir}' does not exist or is not a directory",
file=sys.stderr,
)
sys.exit(-1)
if not output_dir.is_dir():
print(
f"output directory '{output_dir}' does not exist or is not a directory",
file=sys.stderr,
)
sys.exit(-1)
if os.listdir(output_dir):
print(
f"output directory '{output_dir}' is not empty",
file=sys.stderr,
)
sys.exit(-1)
fix_files(input_dir, output_dir)
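# Illustrative usage sketch (the paths are hypothetical):
#
#   $ python fixup_datacatalog_v1beta1_keywords.py -d ./datacatalog_old -o ./datacatalog_fixed
#
# which rewrites positional calls such as
#
#   client.create_entry(parent, entry_id, entry)
#
# into the request-dict form expected by the new surface:
#
#   client.create_entry(request={'parent': parent, 'entry_id': entry_id, 'entry': entry})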
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
49f314421b54272208c9d7dd1c1688e292aaa3f0 | 78dc15505e17cef3e49410bbadc1bb4812cdbbad | /foiamachine/local/apps/mail/admin.py | 8910bd8de33398c08778b64f72cb4ddbe6ebd319 | [
"MIT"
] | permissive | jgillum/foiamachine | 4a7e4ef9fec681341c014dbe7c98bbce79debb4e | 26d3b02870227696cdaab639c39d47b2a7a42ae5 | refs/heads/master | 2020-06-29T11:19:46.232758 | 2019-08-19T02:27:45 | 2019-08-19T02:27:45 | 200,519,075 | 3 | 1 | null | 2019-08-04T16:57:27 | 2019-08-04T16:57:27 | null | UTF-8 | Python | false | false | 53 | py | /home/foiamachine/repo/foiamachine/apps/mail/admin.py | [
"[email protected]"
] | |
dc78fd1c8a81852927824c45753dfe0703be82fe | de4d88db6ea32d20020c169f734edd4b95c3092d | /aiotdlib/api/types/chat_permissions.py | 50bb4223e13d4380851b065d1b4a787c41fb65f4 | [
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | thiagosm/aiotdlib | 5cc790a5645f7e4cc61bbd0791433ed182d69062 | 4528fcfca7c5c69b54a878ce6ce60e934a2dcc73 | refs/heads/main | 2023-08-15T05:16:28.436803 | 2021-10-18T20:41:27 | 2021-10-18T20:41:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,355 | py | # =============================================================================== #
# #
# This file has been generated automatically!! Do not change this manually! #
# #
# =============================================================================== #
from __future__ import annotations
from pydantic import Field
from ..base_object import BaseObject
class ChatPermissions(BaseObject):
"""
Describes actions that a user is allowed to take in a chat
:param can_send_messages: True, if the user can send text messages, contacts, locations, and venues
:type can_send_messages: :class:`bool`
:param can_send_media_messages: True, if the user can send audio files, documents, photos, videos, video notes, and voice notes. Implies can_send_messages permissions
:type can_send_media_messages: :class:`bool`
:param can_send_polls: True, if the user can send polls. Implies can_send_messages permissions
:type can_send_polls: :class:`bool`
:param can_send_other_messages: True, if the user can send animations, games, stickers, and dice and use inline bots. Implies can_send_messages permissions
:type can_send_other_messages: :class:`bool`
:param can_add_web_page_previews: True, if the user may add a web page preview to their messages. Implies can_send_messages permissions
:type can_add_web_page_previews: :class:`bool`
:param can_change_info: True, if the user can change the chat title, photo, and other settings
:type can_change_info: :class:`bool`
:param can_invite_users: True, if the user can invite new users to the chat
:type can_invite_users: :class:`bool`
:param can_pin_messages: True, if the user can pin messages
:type can_pin_messages: :class:`bool`
"""
ID: str = Field("chatPermissions", alias="@type")
can_send_messages: bool
can_send_media_messages: bool
can_send_polls: bool
can_send_other_messages: bool
can_add_web_page_previews: bool
can_change_info: bool
can_invite_users: bool
can_pin_messages: bool
@staticmethod
def read(q: dict) -> ChatPermissions:
return ChatPermissions.construct(**q)
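# Illustrative sketch (an addition, not part of the generated module): parsing
# a TDLib-style response dict into this model via the read() helper above.
#
#   perms = ChatPermissions.read({
#       '@type': 'chatPermissions',
#       'can_send_messages': True, 'can_send_media_messages': True,
#       'can_send_polls': False, 'can_send_other_messages': False,
#       'can_add_web_page_previews': False, 'can_change_info': False,
#       'can_invite_users': True, 'can_pin_messages': False,
#   })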
| [
"[email protected]"
] | |
cab80e9366575b86ff4a1faca70c78fca028284b | 3f2c08c8081c6b515cad3ba1e625954ea7ae37a9 | /myScripts/Hot100/70.爬楼梯.py | b0f00f78a699570b50a0cb7cdbf2d005f99d793b | [] | no_license | voyagerw/exercise | 9fc526223b13aeaa83e29c202c543a09f788e0dc | 93dde8f18d22da90c1facde82717e2d02f8e8aa5 | refs/heads/master | 2023-02-28T17:10:41.515611 | 2021-02-05T08:56:18 | 2021-02-05T08:56:18 | 307,633,252 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 491 | py | class Solution:
def climbStairs(self, n: int) -> int:
# 方法:动态规划
# if n < 3: return n
# dp = [0] * (n + 1)
# dp[0], dp[1] = 1, 1
# for i in range(2, n + 1):
# dp[i] = dp[i - 1] + dp[i - 2]
# return dp[-1]
# 空间优化
if n < 3: return n
a, b = 1, 2
for i in range(3, n + 1):
c = a + b
a, b = b, c
return c
N = 3
print(Solution().climbStairs(N))
| [
"[email protected]"
] | |
efab979c2dee193bc56767c76327b898212519dc | 6c0553ee3af8796f7ecc16172120518325a80fbe | /fabfile.py | 7fd0b3683a1e0f61c32461b7541ef5cdb97a4427 | [] | no_license | INCF/nap | 090d7437b9bfd8ebfd3e898ff09a47c147e8150f | 3c9eb9b33dba8751b59677e0e09f8ec7583ea4a0 | refs/heads/master | 2016-09-06T12:45:04.465197 | 2013-03-27T00:23:30 | 2013-03-27T00:23:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | true | false | 9,647 | py | from __future__ import with_statement
from fabric.api import *
from fabric.contrib.console import confirm
from fabric.contrib.files import exists
import boto
from boto.s3.key import Key
from boto import ec2
from datetime import datetime
import sys, pprint, time, ConfigParser
from ConfigParser import SafeConfigParser
parser = SafeConfigParser()
parser.read('configure.ini')
ec2keypairname = parser.get('aws', 'ec2keypairname')
localkeypath = parser.get('aws', 'localkeypath')
aws_access_key_id = parser.get('aws', 'aws_access_key_id')
aws_secret_access_key =parser.get('aws', 'aws_secret_access_key')
nitrc_ce_ami = 'ami-83921eea'
precise_12_04_2 = 'ami-de0d9eb7'
volid = 'some predefined volume id'
def last():
from ConfigParser import SafeConfigParser
parser = SafeConfigParser()
parser.read('lastLaunch.ini')
env.user = parser.get('lastLaunch', 'username')
env.host_string = parser.get('lastLaunch', 'host_string')
env.key_filename = [parser.get('lastLaunch', 'keypath')]
env.last_instance = parser.get('lastLaunch', 'instance')
def printssh():
print 'ssh -i ~/.ssh/%s ubuntu@%s' % (ec2keypairname, env.host_string)
local('echo "ssh -i ~/.ssh/ec2-keypair ubuntu@%s" | pbcopy ' % (env.host_string))
def printhttp():
print 'http://%s' % (env.host_string)
local('echo "http://%s" | pbcopy ' % (env.host_string))
def terminate():
#terminate_instances
with settings(warn_only = True):
print 'killing last instance'
conn = ec2.EC2Connection(aws_access_key_id, aws_secret_access_key)
        conn.terminate_instances([env.last_instance])  # boto2 expects a list of instance ids
time.sleep(1)
def test():
run('uname -a')
run('lsb_release -a')
def createXL():
_create('m3.xlarge')
def createL():
_create('m1.large')
def createCustom():
'''usage: fab --set ec2=m2.xlarge createCustom '''
_create(env.ec2)
def createS():
_create('m1.small')
def createXXL():
_create('m2.2xlarge')
def _create(size):
'''Creates a new large instance on ec2'''
with settings(warn_only = True):
conn = ec2.EC2Connection(aws_access_key_id, aws_secret_access_key)
time.sleep(1)
reservation = conn.run_instances(nitrc_ce_ami, instance_type=size, placement='us-east-1d', key_name=ec2keypairname, security_groups=['irods-web', 'default'])
time.sleep(1)
instance = reservation.instances[0]
time.sleep(1)
print 'Starting instance %s' %(instance)
while not instance.update() == 'running':
time.sleep(1)
instance.add_tag('Name', 'ipython-deploy')
time.sleep(1)
print 'Instance started: %s' % instance.__dict__['id']
print 'Private DNS: %s' % instance.__dict__['private_dns_name']
print 'Private IP: %s' % instance.__dict__['private_ip_address']
print 'Public DNS: %s' % instance.__dict__['public_dns_name']
# write temporary settings in case something goes wrong mid-configuration
import ConfigParser
import sys
parser = ConfigParser.SafeConfigParser()
parser.add_section('lastLaunch')
parser.set('lastLaunch', 'host_string', str(instance.__dict__['public_dns_name']))
parser.set('lastLaunch', 'keypath', localkeypath)
parser.set('lastLaunch', 'username', 'ubuntu')
parser.set('lastLaunch', 'instance', instance.__dict__['id'])
parser.write(open('lastLaunch.ini', 'w'))
env.user = 'ubuntu'
env.host_string = instance.__dict__['public_dns_name']
env.key_filename = [localkeypath]
print 'Instance has been launched successfully'
print 'To access, open a browser to http://%s' % (instance.__dict__['public_dns_name'])
print 'ssh -i ~/.ssh/ec2-keypair ubuntu@%s' % (instance.__dict__['public_dns_name'])
def install():
with settings(warn_only=True):
# _enableNeuroDebian()
_base()
_provision()
_externals()
def nap():
with settings(warn_only=True):
_notebook()
# get the most recent version, or clone to directory
if exists('nap'):
with cd('nap'):
run('git pull --rebase')
else:
run('git clone [email protected]:INCF/nap.git')
run('git config --global user.name "richstoner"')
run('git config --global user.email "[email protected]"')
def mountstatus():
with settings(warn_only=True):
v_to_mount = ''
conn = ec2.EC2Connection(aws_access_key_id, aws_secret_access_key)
vol = conn.get_all_volumes()
for v in vol:
if v.id == volid:
v_to_mount = v
if v_to_mount.attachment_state() == None:
print 'Volume not attached'
else:
print 'Volume attached with status: %s' % v_to_mount.attachment_state()
def attachebs():
with settings(warn_only=True):
v_to_mount = ''
conn = ec2.EC2Connection(aws_access_key_id, aws_secret_access_key)
vol = conn.get_all_volumes()
for v in vol:
if v.id == volid:
v_to_mount = v
print 'trying to attach volume %s to instance %s' % (v_to_mount, env.last_instance)
if v_to_mount.attachment_state() == None:
print 'volume not attached, continuing'
result = v_to_mount.attach(env.last_instance, '/dev/xvdf')
else:
print v_to_mount.attachment_state()
def mountebs():
with settings(warn_only=True):
if not exists('/vol'):
sudo('mkdir -m 000 /vol')
sudo('mount /dev/xvdf /vol')
# # sudo mkfs.ext4 /dev/xvdf
def unmountebs():
with settings(warn_only=True):
sudo('umount /dev/xvdf')
v_to_unmount = ''
conn = ec2.EC2Connection(aws_access_key_id, aws_secret_access_key)
vol = conn.get_all_volumes()
for v in vol:
if v.id == volid:
v_to_unmount = v
result = v_to_unmount.detach(force=True)
if result == True:
print 'volume detached successfully'
else:
print 'volume not attached successfully'
print v_to_unmount.attachment_state()
def _base():
'''[create] Basic packages for building, version control'''
with settings(warn_only=True):
sudo('service apache2 stop')
# update existing tools
run("sudo apt-get -y update", pty = True)
# run("sudo apt-get -y upgrade", pty = True)
# install build and CVS tools
packagelist = ' '.join(['git-core', 'mercurial', 'subversion', 'unzip', 'build-essential', 'g++', 'libav-tools', 'uuid-dev', 'libfreetype6-dev','libpng12-dev'])
run('sudo apt-get -y install %s' % packagelist, pty = True)
# install python components
packagelist = ' '.join(['python-setuptools', 'python-pip', 'python-dev', 'python-lxml', 'libxml2-dev', 'python-imaging', 'libncurses5-dev', 'cmake-curses-gui', 'imagemagick', 's3cmd'])
run('sudo apt-get -y install %s' % packagelist, pty = True)
packagelist = ['tornado', 'supervisor', 'virtualenv', 'jinja2']
for each_package in packagelist:
print each_package
run('sudo pip install %s' % each_package, pty = True)
def _provision():
'''[create] configure base directories'''
with settings(warn_only=True):
run('mkdir ~/internal')
run('mkdir ~/external')
run('mkdir ~/config')
# Adds neurodebian support to the instance
def _enableNeuroDebian():
'''[create] Configures the ubuntu base to use the neurodebian repository (10.04LTS)'''
with settings(warn_only=True):
run('sudo apt-key adv --recv-keys --keyserver pgp.mit.edu 2649A5A9')
run('wget -O- http://neuro.debian.net/lists/precise.us-nh | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list')
def _externals():
'''[create] some external dependencies'''
with settings(warn_only=True):
with cd('external'):
run('git clone https://github.com/ipython/ipython.git')
with cd('ipython'):
run('python setup.py build')
sudo('python setup.py install')
sudo('apt-get build-dep -y python-numpy python-scipy')
sudo('pip install cython')
sudo('pip install -U numpy')
sudo('pip install -U scipy')
sudo('pip install git+git://github.com/scikit-image/scikit-image.git')
# run('wget http://sourceforge.net/projects/fiji-bi/files/fiji/Madison/fiji-linux64-20110307.tar.bz2/download')
# run('mv download fiji.tar.bz2')
# run('tar xvjf fiji.tar.bz2')
# run('wget http://sourceforge.net/projects/itk/files/itk/3.20/InsightToolkit-3.20.1.tar.gz/download')
# run('mv download itk32.tar.gz')
# run('tar xvzf itk32.tar.gz')
def _notebook():
'''install python notebook'''
with settings(warn_only=True):
put('id_rsa.pub','~/.ssh/id_rsa.pub')
put('id_rsa', '~/.ssh/id_rsa')
sudo('chmod 0600 .ssh/id_rsa')
sudo('chmod 0600 .ssh/id_rsa.pub')
# if exists('ipynb'):
# with cd('ipynb'):
# run('git pull --rebase')
# else:
# run('git clone [email protected]:richstoner/ipynb.git')
sudo('easy_install readline')
sudo('apt-get install -y libfreetype6-dev libpng12-dev python-matplotlib')
sudo('pip install -U requests')
sudo('pip install -U beautifulsoup4')
sudo('pip install pyzmq')
sudo('pip install workerpool')
# sudo('pip install -U matplotlib')
run('ipython profile create default')
# put('install_mathjax.py')
# sudo('ipython install_mathjax.py')
# run('rm install_mathjax.py')
# run('ipython profile create nbserver')
# run("rm -rvf ~/.ipython/profile_nbserver")
# put('profile_nbserver.zip', '.ipython/profile_nbserver.zip')
# with cd('.ipython'):
# run('unzip profile_nbserver.zip')
# run('rm profile_nbserver.zip')
put('supervisord.conf.ipython')
sudo('mv supervisord.conf.ipython /home/ubuntu/config/supervisord.conf')
sudo('rm /etc/supervisord.conf')
sudo('ln -s /home/ubuntu/config/supervisord.conf /etc/supervisord.conf')
put('supervisor.start')
sudo('supervisord')
sudo('chmod +x supervisor.start')
sudo('chown root:root supervisor.start')
sudo('mv /home/ubuntu/supervisor.start /etc/init.d/supervisor')
sudo('update-rc.d -f supervisor remove')
sudo('update-rc.d supervisor defaults')
sudo('supervisorctl restart all')
| [
"[email protected]"
] | |
23b4c0f2721a2f3c299f892b7d0e0f7bedd11fc7 | 779d80920d42732d29ebaf253533606208fcb1fc | /metaci/notification/migrations/0003_auto_20170111_2103.py | 9310726f98660a11dd464da9482c83e1a0b9fd55 | [
"BSD-3-Clause"
] | permissive | sebastianocostanzo/MetaCI | 39a34200c3b2139399e6aa0e4f464aba4667c457 | a880a8b1caa7cf1445f220b6c2e4f83fe8d38312 | refs/heads/master | 2021-09-10T06:36:50.906130 | 2018-03-21T17:09:34 | 2018-03-21T17:09:34 | 125,919,095 | 0 | 0 | BSD-3-Clause | 2018-03-21T17:09:35 | 2018-03-19T20:54:27 | Python | UTF-8 | Python | false | false | 738 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-01-11 21:03
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('notification', '0002_planbranchnotification'),
]
operations = [
migrations.RemoveField(
model_name='planbranchnotification',
name='branch',
),
migrations.RemoveField(
model_name='planbranchnotification',
name='plan',
),
migrations.RemoveField(
model_name='planbranchnotification',
name='user',
),
migrations.DeleteModel(
name='PlanBranchNotification',
),
]
| [
"[email protected]"
] | |
4839b810c501483362679a841ded6fcd4c6624ad | 60d5b5b1f1c912d1655de3884efc09dfddd8d132 | /sites/vras/common/translations.py | 70e11d11a999a6c76b40a314d3eb36a321fd7efb | [] | no_license | alexgula/django_sites | 15033c739401f24603e957c5a034d63652f0d21f | 038834c0f544d6997613d61d593a7d5abf673c70 | refs/heads/master | 2016-09-05T11:02:43.838095 | 2014-07-07T11:36:07 | 2014-07-07T11:36:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 491 | py | # coding=utf-8
from modeltranslation.translator import translator, TranslationOptions
from content.models import News, StaticPage
from catalog.models import Category
def register_model(model_class, *model_fields):
class ModelTranslationOptions(TranslationOptions):
fields = model_fields
translator.register(model_class, ModelTranslationOptions)
register_model(News, 'title', 'desc')
register_model(StaticPage, 'title', 'desc')
register_model(Category, 'title', 'desc')
| [
"[email protected]"
] | |
f744461aca6b0ac3bf9429e6a521ed82648f0e9a | 08c01544e8ca95761d55ab307c00cd9cd3290e30 | /fofa_sprider.py | dc29a97b778b26cc023bb722593bf49bc1564866 | [] | no_license | deepwebhacker/fofa_sprider | 40691b83d02221225b65fb4086368f4e585c9622 | 490e1001d0acf6964f5b060fa5c408f71d82f4a8 | refs/heads/master | 2022-10-24T06:49:12.972731 | 2020-06-19T04:25:13 | 2020-06-19T04:25:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,503 | py | import requests
import re
import base64
import urllib.parse
import warnings
from config import headers
import time
def waf():
print(" _____ _____ _____ ")
print(" /\ \ /\ \ /\ \ ")
print(" /::\____\ /::\ \ /::\ \ ")
print(" /:::/ / /::::\ \ /::::\ \ ")
print(" /:::/ _/___ /::::::\ \ /::::::\ \ ")
print(" /:::/ /\ \ /:::/\:::\ \ /:::/\:::\ \ ")
print(" /:::/ /::\____\ /:::/__\:::\ \ /:::/__\:::\ \ ")
print(" /:::/ /:::/ / /::::\ \:::\ \ /::::\ \:::\ \ ")
print(" /:::/ /:::/ _/___ /::::::\ \:::\ \ /::::::\ \:::\ \ ")
print(" /:::/___/:::/ /\ \ /:::/\:::\ \:::\ \ /:::/\:::\ \:::\ \ ")
print("|:::| /:::/ /::\____\/:::/ \:::\ \:::\____\/:::/ \:::\ \:::\____\ ")
print("|:::|__/:::/ /:::/ /\::/ \:::\ /:::/ /\::/ \:::\ \::/ / ")
print(" \:::\/:::/ /:::/ / \/____/ \:::\/:::/ / \/____/ \:::\ \/____/ ")
print(" \::::::/ /:::/ / \::::::/ / \:::\ \ ")
print(" \::::/___/:::/ / \::::/ / \:::\____\ ")
print(" \:::\__/:::/ / /:::/ / \::/ / ")
print(" \::::::::/ / /:::/ / \/____/ ")
print(" \::::::/ / /:::/ / ")
print(" \::::/ / /:::/ / ")
print(" \::/____/ \::/ / ")
print(" -- \/____/ ")
print(" ")
print(" --WAF 2.1 ")
warnings.filterwarnings("ignore")
def fofa1_request(url, headers):
with open("fofa.txt", 'a+') as f:
res = requests.get(url=url, headers=headers).text.encode('utf-8').decode('unicode_escape')
time.sleep(4)
res1 = re.compile('<a target="_blank" href="(https://|http://)(.*?)"', re.S)
res2 = res1.findall(res)
for i in res2:
if "gov" not in i:
f.write((i[0] + i[1]) + "\n")
f.flush()
else:
pass
f.close()
def page_numbers(pagenumbers1, pagenumbers2, arg):
for i in range(int(pagenumbers1), int(pagenumbers2) + 1):
url = "https://fofa.so/result?q=" + str(s) + "&page=" + str(i) + "&qbase64=" + str(arg)
fofa1_request(url, headers)
print("第{0}页以成功爬取完".format(i))
if __name__ == "__main__":
waf()
    pagenumbers1 = input("Enter the first page to scrape: ")
    pagenumbers2 = input("Enter the last page to scrape: ")
    f = input("Enter the search keyword: ").encode('utf-8')
    print("Please wait, this may take a while...")
arg = str(base64.b64encode(f), "utf-8").replace('+', '%2B')
s = urllib.parse.quote(f)
page_numbers(pagenumbers1, pagenumbers2, arg)
print("抓取成功")
| [
"[email protected]"
] | |
b8804d17881d4e9ba28377cbca07a896bef12aea | b5767cdf805cf3a6ed0b5ceb2745e27de2781f35 | /tensor2tensor/data_generators/wmt.py | 0be28ab732f8c081475238e8c07bccc0752ac499 | [
"Apache-2.0"
] | permissive | montecarlo1/tensor2tensor | a903785fd2ec2a7a637ba4c95507a53b892f382d | 2e85c3fc87672b65a149ba10293cd3a502e99cfa | refs/heads/master | 2020-05-30T17:17:19.913994 | 2017-06-25T22:18:38 | 2017-06-25T22:18:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,209 | py | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Data generators for WMT data-sets."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tarfile
# Dependency imports
from tensor2tensor.data_generators import generator_utils
from tensor2tensor.data_generators import text_encoder
import tensorflow as tf
def character_generator(source_path, target_path, eos=None):
"""Generator for sequence-to-sequence tasks that just uses characters.
This generator assumes the files at source_path and target_path have
the same number of lines and yields dictionaries of "inputs" and "targets"
where inputs are characters from the source lines converted to integers,
and targets are characters from the target lines, also converted to integers.
Args:
source_path: path to the file with source sentences.
target_path: path to the file with target sentences.
eos: integer to append at the end of each sequence (default: None).
Yields:
A dictionary {"inputs": source-line, "targets": target-line} where
the lines are integer lists converted from characters in the file lines.
"""
eos_list = [] if eos is None else [eos]
with tf.gfile.GFile(source_path, mode="r") as source_file:
with tf.gfile.GFile(target_path, mode="r") as target_file:
source, target = source_file.readline(), target_file.readline()
while source and target:
source_ints = [ord(c) for c in source.strip()] + eos_list
target_ints = [ord(c) for c in target.strip()] + eos_list
yield {"inputs": source_ints, "targets": target_ints}
source, target = source_file.readline(), target_file.readline()
def token_generator(source_path, target_path, token_vocab, eos=None):
"""Generator for sequence-to-sequence tasks that uses tokens.
This generator assumes the files at source_path and target_path have
the same number of lines and yields dictionaries of "inputs" and "targets"
where inputs are token ids from the " "-split source (and target, resp.) lines
converted to integers using the token_map.
Args:
source_path: path to the file with source sentences.
target_path: path to the file with target sentences.
token_vocab: text_encoder.TextEncoder object.
eos: integer to append at the end of each sequence (default: None).
Yields:
A dictionary {"inputs": source-line, "targets": target-line} where
the lines are integer lists converted from tokens in the file lines.
"""
eos_list = [] if eos is None else [eos]
with tf.gfile.GFile(source_path, mode="r") as source_file:
with tf.gfile.GFile(target_path, mode="r") as target_file:
source, target = source_file.readline(), target_file.readline()
while source and target:
source_ints = token_vocab.encode(source.strip()) + eos_list
target_ints = token_vocab.encode(target.strip()) + eos_list
yield {"inputs": source_ints, "targets": target_ints}
source, target = source_file.readline(), target_file.readline()
def _get_wmt_ende_dataset(directory, filename):
"""Extract the WMT en-de corpus `filename` to directory unless it's there."""
train_path = os.path.join(directory, filename)
if not (tf.gfile.Exists(train_path + ".de") and
tf.gfile.Exists(train_path + ".en")):
# We expect that this file has been downloaded from:
# https://drive.google.com/open?id=0B_bZck-ksdkpM25jRUN2X2UxMm8 and placed
# in `directory`.
corpus_file = os.path.join(directory, "wmt16_en_de.tar.gz")
with tarfile.open(corpus_file, "r:gz") as corpus_tar:
corpus_tar.extractall(directory)
return train_path
def ende_bpe_token_generator(tmp_dir, train):
"""Instance of token generator for the WMT en->de task, training set."""
dataset_path = ("train.tok.clean.bpe.32000"
if train else "newstest2013.tok.bpe.32000")
train_path = _get_wmt_ende_dataset(tmp_dir, dataset_path)
token_path = os.path.join(tmp_dir, "vocab.bpe.32000")
token_vocab = text_encoder.TokenTextEncoder(vocab_filename=token_path)
return token_generator(train_path + ".en", train_path + ".de", token_vocab, 1)
_ENDE_TRAIN_DATASETS = [
[
"http://data.statmt.org/wmt16/translation-task/training-parallel-nc-v11.tgz", # pylint: disable=line-too-long
("training-parallel-nc-v11/news-commentary-v11.de-en.en",
"training-parallel-nc-v11/news-commentary-v11.de-en.de")
],
[
"http://www.statmt.org/wmt13/training-parallel-commoncrawl.tgz",
("commoncrawl.de-en.en", "commoncrawl.de-en.de")
],
[
"http://www.statmt.org/wmt13/training-parallel-europarl-v7.tgz",
("training/europarl-v7.de-en.en", "training/europarl-v7.de-en.de")
],
]
_ENDE_TEST_DATASETS = [
[
"http://data.statmt.org/wmt16/translation-task/dev.tgz",
("dev/newstest2013.en", "dev/newstest2013.de")
],
]
_ENFR_TRAIN_DATASETS = [
[
"http://www.statmt.org/wmt13/training-parallel-commoncrawl.tgz",
("commoncrawl.fr-en.en", "commoncrawl.fr-en.fr")
],
[
"http://www.statmt.org/wmt13/training-parallel-europarl-v7.tgz",
("training/europarl-v7.fr-en.en", "training/europarl-v7.fr-en.fr")
],
[
"http://www.statmt.org/wmt14/training-parallel-nc-v9.tgz",
("training/news-commentary-v9.fr-en.en",
"training/news-commentary-v9.fr-en.fr")
],
[
"http://www.statmt.org/wmt10/training-giga-fren.tar",
("giga-fren.release2.fixed.en.gz", "giga-fren.release2.fixed.fr.gz")
],
[
"http://www.statmt.org/wmt13/training-parallel-un.tgz",
("un/undoc.2000.fr-en.en", "un/undoc.2000.fr-en.fr")
],
]
_ENFR_TEST_DATASETS = [
[
"http://data.statmt.org/wmt16/translation-task/dev.tgz",
("dev/newstest2013.en", "dev/newstest2013.fr")
],
]
def _compile_data(tmp_dir, datasets, filename):
"""Concatenate all `datasets` and save to `filename`."""
filename = os.path.join(tmp_dir, filename)
lang1_lines, lang2_lines = [], []
for dataset in datasets:
url = dataset[0]
compressed_filename = os.path.basename(url)
compressed_filepath = os.path.join(tmp_dir, compressed_filename)
lang1_filename, lang2_filename = dataset[1]
lang1_filepath = os.path.join(tmp_dir, lang1_filename)
lang2_filepath = os.path.join(tmp_dir, lang2_filename)
if not os.path.exists(compressed_filepath):
generator_utils.maybe_download(tmp_dir, compressed_filename, url)
if not os.path.exists(lang1_filepath) or not os.path.exists(lang2_filepath):
mode = "r:gz" if "gz" in compressed_filepath else "r"
with tarfile.open(compressed_filepath, mode) as corpus_tar:
corpus_tar.extractall(tmp_dir)
if ".gz" in lang1_filepath:
new_filepath = lang1_filepath.strip(".gz")
generator_utils.gunzip_file(lang1_filepath, new_filepath)
lang1_filepath = new_filepath
if ".gz" in lang2_filepath:
new_filepath = lang2_filepath.strip(".gz")
generator_utils.gunzip_file(lang2_filepath, new_filepath)
lang2_filepath = new_filepath
with tf.gfile.GFile(lang1_filepath, mode="r") as lang1_file:
with tf.gfile.GFile(lang2_filepath, mode="r") as lang2_file:
lang1_file_lines = lang1_file.readlines()
lang2_file_lines = lang2_file.readlines()
assert len(lang1_file_lines) == len(lang2_file_lines), lang1_filepath
lang1_lines.extend(lang1_file_lines)
lang2_lines.extend(lang2_file_lines)
write_chunk_size = 10000
assert len(lang1_lines) == len(lang2_lines)
with tf.gfile.GFile(filename + ".lang1", mode="w") as lang1_file:
i = 0
while i <= len(lang1_lines):
for line in lang1_lines[i * write_chunk_size:(i + 1) * write_chunk_size]:
lang1_file.write(line)
i += 1
for line in lang1_lines[i * write_chunk_size:]:
lang1_file.write(line)
with tf.gfile.GFile(filename + ".lang2", mode="w") as lang2_file:
i = 0
while i <= len(lang2_lines):
for line in lang2_lines[i * write_chunk_size:(i + 1) * write_chunk_size]:
lang2_file.write(line)
i += 1
for line in lang2_lines[i * write_chunk_size:]:
lang2_file.write(line)
return filename
def ende_wordpiece_token_generator(tmp_dir, train, vocab_size):
symbolizer_vocab = generator_utils.get_or_generate_vocab(
tmp_dir, "tokens.vocab.%d" % vocab_size, vocab_size)
datasets = _ENDE_TRAIN_DATASETS if train else _ENDE_TEST_DATASETS
tag = "train" if train else "dev"
data_path = _compile_data(tmp_dir, datasets, "wmt_ende_tok_%s" % tag)
return token_generator(data_path + ".lang1", data_path + ".lang2",
symbolizer_vocab, 1)
def ende_character_generator(tmp_dir, train):
datasets = _ENDE_TRAIN_DATASETS if train else _ENDE_TEST_DATASETS
tag = "train" if train else "dev"
data_path = _compile_data(tmp_dir, datasets, "wmt_ende_chr_%s" % tag)
return character_generator(data_path + ".lang1", data_path + ".lang2", 1)
def enfr_wordpiece_token_generator(tmp_dir, train, vocab_size):
"""Instance of token generator for the WMT en->fr task."""
symbolizer_vocab = generator_utils.get_or_generate_vocab(
tmp_dir, "tokens.vocab.%d" % vocab_size, vocab_size)
datasets = _ENFR_TRAIN_DATASETS if train else _ENFR_TEST_DATASETS
tag = "train" if train else "dev"
data_path = _compile_data(tmp_dir, datasets, "wmt_enfr_tok_%s" % tag)
return token_generator(data_path + ".lang1", data_path + ".lang2",
symbolizer_vocab, 1)
def enfr_character_generator(tmp_dir, train):
"""Instance of character generator for the WMT en->fr task."""
datasets = _ENFR_TRAIN_DATASETS if train else _ENFR_TEST_DATASETS
tag = "train" if train else "dev"
data_path = _compile_data(tmp_dir, datasets, "wmt_enfr_chr_%s" % tag)
return character_generator(data_path + ".lang1", data_path + ".lang2", 1)
def parsing_character_generator(tmp_dir, train):
filename = "parsing_%s" % ("train" if train else "dev")
text_filepath = os.path.join(tmp_dir, filename + ".text")
tags_filepath = os.path.join(tmp_dir, filename + ".tags")
return character_generator(text_filepath, tags_filepath, 1)
def parsing_token_generator(tmp_dir, train, vocab_size):
symbolizer_vocab = generator_utils.get_or_generate_vocab(
tmp_dir, "tokens.vocab.%d" % vocab_size, vocab_size)
filename = "parsing_%s" % ("train" if train else "dev")
text_filepath = os.path.join(tmp_dir, filename + ".text")
tags_filepath = os.path.join(tmp_dir, filename + ".tags")
return token_generator(text_filepath, tags_filepath, symbolizer_vocab, 1)
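# --- Illustrative smoke test (an addition, not part of the original file) ---
# Writes two tiny parallel files and walks character_generator over them.
if __name__ == "__main__":
  import tempfile
  demo_dir = tempfile.mkdtemp()
  src = os.path.join(demo_dir, "src.txt")
  tgt = os.path.join(demo_dir, "tgt.txt")
  with open(src, "w") as f:
    f.write("ab\n")
  with open(tgt, "w") as f:
    f.write("cd\n")
  for sample in character_generator(src, tgt, eos=1):
    print(sample)  # expected: {'inputs': [97, 98, 1], 'targets': [99, 100, 1]}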
| [
"[email protected]"
] | |
568d6cf311f2430409734548f041a30ba662a189 | 5679731cee36c537615d285ed72810f4c6b17380 | /350_IntersectionOfTwoArraysII.py | 02a7896847df48b7b66c3ed30aca321308aa092a | [] | no_license | manofmountain/LeetCode | 6b76105190a9b62df65a7b56b6def4120498b9fa | 718f688b3d316e8c10ef680d9c21ecd518d062f8 | refs/heads/master | 2021-01-12T03:41:48.318116 | 2017-07-18T12:35:58 | 2017-07-18T12:35:58 | 78,252,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 707 | py | # 77.2%
class Solution(object):
def intersect(self, nums1, nums2):
"""
:type nums1: List[int]
:type nums2: List[int]
:rtype: List[int]
"""
        import collections
        # Count nums1, then consume one matching occurrence per hit in nums2.
        c = collections.Counter(nums1)
res = list()
for num in nums2:
if num in c and c[num] > 0:
c[num] -= 1
res.append(num)
return res
# A solution from LeetCode and pretty clear
from collections import Counter
class Solution(object):
def intersect(self, nums1, nums2):
c1, c2 = Counter(nums1), Counter(nums2)
return sum([[num] * min(c1[num], c2[num]) for num in c1 & c2], []) | [
"[email protected]"
] | |
bebefcb0eccc61a519ee015e2757f9f92bb2e39e | a0f719fa9f24917619e19a10574c21cc1c621d9a | /socialapp/migrations/0008_auto_20201122_1917.py | 91f47c68a69570a0c29312eefe315ae0cd0ac46c | [
"MIT"
] | permissive | felkiriinya/Instagram-Clone | d0a83809989f489a10e19909843b6f8c1d9b15b2 | 614c523db6cd5dceda6d5959da0ec4befbd2c1a0 | refs/heads/master | 2023-01-22T12:58:57.202168 | 2020-11-23T16:15:44 | 2020-11-23T16:15:44 | 314,453,966 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 553 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-11-22 16:17
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('socialapp', '0007_image_user'),
]
operations = [
migrations.AlterField(
model_name='image',
name='user',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='posts', to='socialapp.Profile'),
),
]
| [
"[email protected]"
] | |
d412d011285db5a9ed022d53ce535bdc13bdf3c5 | 6e177f4b81d511c0592171a685e8fcbf6b0677fa | /aggregators/best_response_linear.py | 2fdc689e0739391de2bd1abe271d5e0188a7f8a6 | [] | no_license | nickkeesG/market_aggregation | a7f2474706e54b19918a6e442fa11efbd87a4ebb | 38f6cf6959657e45d942365a8d8aa0f38a0af5e3 | refs/heads/main | 2023-05-24T06:55:25.207281 | 2021-06-16T11:20:50 | 2021-06-16T11:20:50 | 374,623,000 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 810 | py | import math
def get_best_response_lin(p_hat, q_hat, belief, endowment, policy):
#The math below assumes that the agent initially prefers alpha securities. If that is not the case, the math is very similar, but it's just easier to exploit the symmetry of the securities and solve assuming it prefers alpha securities
initial_price = p_hat / (p_hat + q_hat)
if not belief >= initial_price:
s_b, s_a = get_best_response_lin(q_hat, p_hat, (1-belief), endowment, policy)
return s_a, s_b
Z = math.sqrt((belief/(1-belief)) * (p_hat / q_hat))
p = Z / (Z + 1)
s_min = (Z*q_hat - p_hat)/endowment
s_a = s_min + (1-s_min)*p*policy
s_b = (1-s_min)*(1-p)*policy
s_a = max(0,s_a)
s_b = max(0,s_b)
s_a = min(1,s_a)
s_b = min(1,s_b)
return s_a, s_b
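# Illustrative driver (an addition to the original file; the numbers are made up):
if __name__ == "__main__":
    # The agent believes alpha pays off with probability 0.6 while the market
    # quote implies 0.5, so the response should tilt toward alpha securities.
    s_a, s_b = get_best_response_lin(p_hat=1.0, q_hat=1.0, belief=0.6,
                                     endowment=10.0, policy=1.0)
    print(s_a, s_b)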
| [
"[email protected]"
] | |
ec6bf45fc1685e30a4ce70ba73698e47ae85cedc | b8faf65ea23a2d8b119b9522a0aa182e9f51d8b1 | /vmraid/patches/v10_0/remove_custom_field_for_disabled_domain.py | 1e98c2e7c8cef5507654d25519b5d508800e0a67 | [
"MIT"
] | permissive | vmraid/vmraid | a52868c57b1999a8d648441eb9cd05815204345d | 3c2e2a952003ba7ea2cf13673b9e79e127f4166e | refs/heads/main | 2022-07-29T18:59:28.585133 | 2022-04-22T08:02:52 | 2022-04-22T08:02:52 | 372,473,120 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 403 | py | import vmraid
def execute():
vmraid.reload_doc("core", "doctype", "domain")
vmraid.reload_doc("core", "doctype", "has_domain")
active_domains = vmraid.get_active_domains()
all_domains = vmraid.get_all("Domain")
for d in all_domains:
if d.name not in active_domains:
inactive_domain = vmraid.get_doc("Domain", d.name)
inactive_domain.setup_data()
inactive_domain.remove_custom_field()
| [
"[email protected]"
] | |
019105d320f29101729291f79cd82beca16ad2c9 | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /Gauss_v45r8/Gen/DecFiles/options/34124001.py | 03941c6c708c5f9889d3b55a8533fea3694f5f19 | [] | no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 735 | py | # file /home/hep/ss4314/cmtuser/Gauss_v45r8/Gen/DecFiles/options/34124001.py generated: Fri, 27 Mar 2015 15:48:00
#
# Event Type: 34124001
#
# ASCII decay Descriptor: K_S0 -> e+ e- e+ e-
#
from Configurables import Generation
Generation().EventType = 34124001
Generation().SampleGenerationTool = "SignalPlain"
from Configurables import SignalPlain
Generation().addTool( SignalPlain )
Generation().SignalPlain.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/KS_4e=DecProdCut.dec"
Generation().SignalPlain.CutTool = "DaughtersInLHCb"
Generation().SignalPlain.SignalPIDList = [ 310 ]
| [
"[email protected]"
] | |
7defb694b9da10ae9c2a5faca93cee793d8c77b5 | 2cc0a6819b63a89593c49867dc4e0e9f12b53843 | /test/test_connection.py | 6fc61d2b73a33d82a89ecea3b8c743a4bab3281f | [] | no_license | Joylizzie/Financial_reports | 9ca3937dc1c47eb33e5e78ee69e0ccaeaeea8491 | 7f1ecade32f2313f6202a2b69216e6a99eab4a79 | refs/heads/main | 2023-06-03T01:50:38.223424 | 2021-06-21T01:46:57 | 2021-06-21T01:46:57 | 344,964,510 | 0 | 0 | null | 2021-06-21T01:46:57 | 2021-03-05T23:59:42 | HTML | UTF-8 | Python | false | false | 382 | py | import os
import psycopg2
PW = os.environ['POSTGRES_PW']
conn = psycopg2.connect(
host="localhost",
database="test_conn",
user="financial_user",
password=PW)
sql = """select * from accounts"""
cur = conn.cursor()
cur.execute('SELECT version()')
rows = cur.fetchall()
for i in rows:
print(i)
cur.execute(sql)
rows = cur.fetchall()
for i in rows:
    print(i)
# Release database resources when done.
cur.close()
conn.close()
| [
"[email protected]"
] | |
ef030462b615c2d921a1effe49df91c546a68666 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03208/s083588568.py | c3423c84bec3c3107acc309a6da0564f9bbce475 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 167 | py | N,K = map(int,input().split())
H = []
sa = []
for i in range(N):
H.append(int(input()))
H.sort()
# After sorting, any optimal choice of K trees is a contiguous window,
# so record the height spread H[j+K-1] - H[j] of every such window.
for j in range(N-K+1):
    sa.append(H[K+j-1]-H[j])
print(min(sa)) | [
"[email protected]"
] | |
cc742016af4d686f203e5ee69ffcd5b746a42c06 | 1eca8d5b6c0f04f3f8018510ca6dc2a4a15af67e | /backend/lizz_11_11_m1_dev_14963/wsgi.py | db14a52e53de169882381259ac8536aa1dc401bb | [] | no_license | crowdbotics-apps/lizz-11-11-m1-dev-14963 | 297fe0505d9a27fe122d725f69600c0e20e5b84a | 2d2412c66ef523cb073140a5ec907789f8a6f6af | refs/heads/master | 2023-01-20T01:00:08.237085 | 2020-12-04T17:27:36 | 2020-12-04T17:27:36 | 312,037,239 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 423 | py | """
WSGI config for lizz_11_11_m1_dev_14963 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'lizz_11_11_m1_dev_14963.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
ec912be6778d066dcaa2f248cc99c9af17300990 | 5a9e05b4c40cbd886a4a101ec48603dd32bfc9a7 | /interpolación.py | 9eec43557ac268d39cc1fb8739b3d67c9debb170 | [] | no_license | Arto1597/python | ef4be9f9e595c16bd7de026340bdcf286f32240b | 6e9f6b4d1354b060601cebd40e8a55db23f45587 | refs/heads/master | 2022-12-01T14:07:33.564361 | 2020-08-13T01:42:46 | 2020-08-13T01:42:46 | 287,153,277 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 98 | py | person = "carlos"
age = 22
# f-string interpolation: {person} and {age} are substituted into the text
# (Spanish for "Can you believe that carlos is 22 years old?").
text = f'¿Puedes creer que {person} tiene {age} de edad?'
print(text)
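# For comparison, a sketch of the same message built with str.format:
text2 = '¿Puedes creer que {} tiene {} de edad?'.format(person, age)
print(text2)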
| [
"[email protected]"
] | |
b7e35daf542d295673d501423b3fe1a0daae6795 | 58ff923a903cf2393c87988559e55ab6d0fd5be2 | /venv/bin/cythonize | 4e43504453d07855bb73147fe9a3d7f84217f8e4 | [] | no_license | gurmeet1109/AVJat0621 | 9930d08b9ae719918ee1e53b673e541c7900f940 | e845a4a5b48f5a63fd2833fbd18b91133d5ca736 | refs/heads/master | 2023-06-11T21:01:21.686729 | 2021-07-03T14:21:51 | 2021-07-03T14:21:51 | 382,629,300 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 259 | #!/home/gurmeet/PycharmProjects/AnaVidya/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from Cython.Build.Cythonize import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
918ff97a8c8b92592d6080e6b658b6f5a4788f35 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p3BR/R1/benchmark/startQiskit_Class179.py | f789429598672477d05760566331b2e6914d7b2a | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,243 | py | # qubit number=3
# total number=31
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
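# Note (illustrative): the result is emitted reversed, i.e. least-significant
# bit first -- e.g. bitwise_xor('01', '11') == '01', since [1, 0] is reversed
# on join.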
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
# oracle.draw('mpl', filename=(kernel + '-oracle.png'))
return oracle
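# Illustrative note (an addition, not from the original file): the loop above
# adds one multi-controlled X per input string rep with f(rep) == "1", so the
# oracle flips the target qubit exactly on those basis states, implementing
# |x>|y> -> |x>|y XOR f(x)>.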
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the Bernstein-Vazirani circuit
zero = np.binary_repr(0, n)
b = f(zero)
# initial n + 1 bits
input_qubit = QuantumRegister(n+1, "qc")
classicals = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classicals)
# inverse last one (can be omitted if using O_f^\pm)
prog.x(input_qubit[n])
# circuit begin
prog.h(input_qubit[1]) # number=1
prog.rx(-0.09738937226128368,input_qubit[2]) # number=2
prog.cx(input_qubit[2],input_qubit[1]) # number=27
prog.h(input_qubit[1]) # number=3
# apply H to get superposition
for i in range(n):
prog.h(input_qubit[i])
prog.h(input_qubit[n])
prog.barrier()
# apply oracle O_f
oracle = build_oracle(n, f)
prog.append(
oracle.to_gate(),
[input_qubit[i] for i in range(n)] + [input_qubit[n]])
# apply H back (QFT on Z_2^n)
for i in range(n):
prog.h(input_qubit[i])
prog.barrier()
# measure
return prog
def get_statevector(prog: QuantumCircuit) -> Any:
state_backend = Aer.get_backend('statevector_simulator')
statevec = execute(prog, state_backend).result()
quantum_state = statevec.get_statevector()
qubits = round(log2(len(quantum_state)))
quantum_state = {
"|" + np.binary_repr(i, qubits) + ">": quantum_state[i]
for i in range(2 ** qubits)
}
return quantum_state
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
# Q: which backend should we use?
# get state vector
quantum_state = get_statevector(prog)
# get simulate results
# provider = IBMQ.load_account()
# backend = provider.get_backend(backend_str)
# qobj = compile(prog, backend, shots)
# job = backend.run(qobj)
# job.result()
backend = Aer.get_backend(backend_str)
# transpile/schedule -> assemble -> backend.run
results = execute(prog, backend, shots=shots).result()
counts = results.get_counts()
a = Counter(counts).most_common(1)[0][0][::-1]
return {
"measurements": counts,
# "state": statevec,
"quantum_state": quantum_state,
"a": a,
"b": b
}
def bernstein_test_1(rep: str):
"""011 . x + 1"""
a = "011"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_2(rep: str):
"""000 . x + 0"""
a = "000"
b = "0"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_3(rep: str):
"""111 . x + 1"""
a = "111"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
if __name__ == "__main__":
n = 2
a = "11"
b = "1"
f = lambda rep: \
bitwise_xor(bitwise_dot(a, rep), b)
prog = build_circuit(n, f)
sample_shot =4000
writefile = open("../data/startQiskit_Class179.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
backend = BasicAer.get_backend('statevector_simulator')
circuit1 = transpile(prog, FakeYorktown())
circuit1.h(qubit=2)
circuit1.x(qubit=3)
info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
| [
"[email protected]"
] | |
da57d6664d72853b506b97392be904e06771259b | 1f813c3cd6a9d293acfbc81f198c64f816a9a95d | /devel/lib/python2.7/dist-packages/gazebo_msgs/srv/_GetLinkProperties.py | 6fb592d96134a50776ba067b423adb7d65f82237 | [] | no_license | koteshrv/Vargi_Bots_1418 | 5ada79746785a9f9cc0e1d686a1dd2702c9e0f0f | 6bcf843c7150c93caee2b596e0864749c51b6155 | refs/heads/main | 2023-01-06T03:42:53.555701 | 2020-11-08T17:36:59 | 2020-11-08T17:36:59 | 302,916,294 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 115 | py | /home/kotesh/catkin_ws/devel/.private/gazebo_msgs/lib/python2.7/dist-packages/gazebo_msgs/srv/_GetLinkProperties.py | [
"[email protected]"
] | |
880fcd0c207f5c5cc5664e89ff0c6d94f5842e7d | 583194153161233874ae069adb9e8ffeb8442262 | /authentication/admin.py | 1c2e0b497ed34ed5189d522420a782a2780214dc | [] | no_license | AlekseiChirkov/meso | 75faa7f9f83b9c9a88b9cf180a8bd140244af086 | 1a1c544d0533736c68b3c21706534f6f8a6d2505 | refs/heads/main | 2023-02-17T18:59:08.756122 | 2021-01-12T09:57:08 | 2021-01-12T09:57:08 | 325,181,058 | 0 | 1 | null | 2021-01-12T09:57:09 | 2020-12-29T03:57:28 | Python | UTF-8 | Python | false | false | 230 | py | from django.contrib import admin
# Register your models here.
from .models import User
class UserAdmin(admin.ModelAdmin):
list_display = ['id', 'email', 'auth_provider', 'created_at']
admin.site.register(User, UserAdmin)
| [
"[email protected]"
] | |
227ede342f7bfe595de25dfa2cc02ac4fb1a043f | d4889901b6337ede68d2dee42d50c76f184ffe98 | /generate_figures/fig2/vs.py | 7fd5419900eb79905720c168d7913082dae4f6fc | [] | no_license | ModelDBRepository/231105 | 41b473ed06f46d7b8d7772984ffeb65c7c4d36da | c7c1697acb810bb6d55c6f7b97804b9c671ed9fb | refs/heads/master | 2020-05-29T18:28:50.083115 | 2019-05-31T02:07:48 | 2019-05-31T02:07:48 | 189,299,894 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,030 | py | #!/usr/bin/python
# This program plots the firing rate nu_I vs. Isyn_E - Isyn_I.
import sys
import os
import math
def read_fr(frI, cvI, ffrIx):
for line_read in ffrIx:
line_list = line_read.split()
fr_val = float(line_list[1])
cv_val = float(line_list[7])
frI.append(fr_val)
cvI.append(cv_val)
def read_synvar(synvarIT, synvarII, fzmpx):
for line_read in fzmpx:
line_list = line_read.split()
VsIT_val = -float(line_list[5])
synvarIT.append(VsIT_val)
VsII_val = -float(line_list[6])
synvarII.append(VsII_val)
# main
#suffix = str(sys.argv[1]);
#print 'suffix=',suffix
suffix = 'b2b'
#suffix = 'b2e'
#'b2ra'
ffri = open('tc.fri.' + suffix, 'r')
fzmp = open('tc.zmp.' + suffix, 'r')
ffis = open('vs.fis.' + suffix, 'w')
ffcv = open('vs.fcv.' + suffix, 'w')
fout = open('vs.out.' + suffix, 'w')
Isnic = 0.7328
VsynE = 0.0
tsynE = 2.0
VsynI = -85.0
tsynI = 3.0
KIT = 75.0
gIT = 0.2 * 4
KII = 25.0
gII = 0.55 * 4
gsynIT = gIT / (math.sqrt(KIT) * tsynE)
gsynII = gII / (math.sqrt(KII) * tsynI)
JIT = gsynIT
JII = gsynII
print('gsynIT=', gsynIT, ' JIT=', JIT, 'gsynII=', gsynII, ' JII=', JII)
frI = []
cvI = []
read_fr(frI, cvI, ffri)
synvarIT = []
synvarII = []
read_synvar(synvarIT, synvarII, fzmp)
non = len(frI)
for ion in range(0, non):
Jsitx = JIT * synvarIT[ion]
Jsiix = JII * synvarII[ion]
ffis.write('{0:g} {1:g} {2:g} {3:g} {4:g} {5:g} {6:d}\n'.format( \
Jsitx + Jsiix, frI[ion], Jsitx, Jsiix, synvarIT[ion], synvarII[ion], ion+1))
ffcv.write('{0:g} {1:g}\n'.format(frI[ion], cvI[ion]))
ffis.write(' \n')
for ion in range(0, non):
Jsitx = JIT * synvarIT[ion]
ffis.write('{0:g} {1:g}\n'.format(Jsitx, frI[ion]))
ffis.write(' \n')
for ion in range(0, non):
Jsiix = JII * synvarII[ion]
ffis.write('{0:g} {1:g}\n'.format(Jsiix, frI[ion]))
ffri.close()
fzmp.close()
ffis.close()
ffcv.close()
fout.close()
| [
"[email protected]"
] | |
211a480a8f12fd78e6c18298b4314762a0b8ea65 | 3327a87cefa2275bd0ba90a500444f3494b14fdf | /captainhcg/py/222-count-complete-tree-nodes.py | c63c98539d7003e40cab6734a601b99ef068043e | [] | no_license | captainhcg/leetcode-in-py-and-go | e1b56f4228e0d60feff8f36eb3d457052a0c8d61 | 88a822c48ef50187507d0f75ce65ecc39e849839 | refs/heads/master | 2021-06-09T07:27:20.358074 | 2017-01-07T00:23:10 | 2017-01-07T00:23:10 | 61,697,502 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 506 | py | class Solution(object):
def countNodes(self, root):
"""
:type root: TreeNode
:rtype: int
"""
        # Height along the leftmost path vs. the rightmost path: if they
        # match, the complete tree is perfect and has 2**h - 1 nodes;
        # otherwise recurse on both subtrees (O(log^2 n) overall).
        left_h = right_h = 0
        lnd = rnd = root
        while lnd:
            left_h += 1
            lnd = lnd.left
        while rnd:
            right_h += 1
            rnd = rnd.right
        if left_h == right_h:
            return 2 ** left_h - 1
        else:
            return 1 + self.countNodes(root.left) + self.countNodes(root.right)
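# Hedged usage sketch (an addition): LeetCode normally supplies TreeNode,
# so this minimal stand-in is an assumption for local testing only.
class TreeNode(object):
    def __init__(self, val=0, left=None, right=None):
        self.val = val
        self.left = left
        self.right = right

if __name__ == "__main__":
    # Complete tree with 4 nodes: 1 -> (2 -> (4), 3)
    root = TreeNode(1, TreeNode(2, TreeNode(4)), TreeNode(3))
    print(Solution().countNodes(root))  # expected: 4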
| [
"[email protected]"
] | |
0da38d3c1576fa5bb62cea3d0cb2f88ec6ecead5 | 47357b6fdc93539679a2b805a1970ce872e7bf80 | /hackbulgaria/week10/Cinema-Reservation/__main__.py | f943f4d14c5640aa1de2de5140d57a63d313789d | [] | no_license | alexanderbackup/Python-backup | 82d5d38b44f77fdd7fda69c0a133006d23aca814 | 24bc19c6fd5850cccfa1029a2304330601a5ea14 | refs/heads/master | 2020-08-03T04:38:04.193000 | 2017-02-15T21:50:14 | 2017-02-15T21:50:14 | 73,554,019 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 37 | py | from user_interface import main_menu
| [
"[email protected]"
] | |
510a31c918cf293f006bfecde11e6d76c5d2aa37 | 08a0a5603acb4711fa25acfc3b7034f104d993b0 | /jump-game-ii/Solution.8415759.py | d7202d95ee0c24d813579f558af5a02543841586 | [
"MIT"
] | permissive | rahul-ramadas/leetcode | 401fe2df2f710c824264cc0538f5f2aa8bbe383e | 6c84c2333a613729361c5cdb63dc3fc80203b340 | refs/heads/master | 2020-05-30T07:17:07.311215 | 2016-11-01T05:27:34 | 2016-11-01T05:27:34 | 22,866,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | class Solution:
    def jump(self, A):
        # Greedy "BFS by layers": `farthest` is the right edge of the
        # current layer of reachable indices, `next_farthest` the farthest
        # index reachable from anywhere inside that layer.
        if len(A) == 1:
            return 0
        steps = 0
        farthest = 0
        next_farthest = 0
        for current in range(len(A)):
            if current > farthest:
                # Moving past the current layer costs one more jump.
                farthest = next_farthest
                steps += 1
            next_farthest = max(next_farthest, current + A[current])
            if next_farthest >= (len(A) - 1):
                return steps + 1
        return -1
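# Hedged usage sketch (an addition; values from the classic example):
if __name__ == "__main__":
    print(Solution().jump([2, 3, 1, 1, 4]))  # expected: 2 (index 0 -> 1 -> 4)
    print(Solution().jump([5]))              # expected: 0 (already at the end)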
| [
"[email protected]"
] | |
f9eb340e62736e865ce4d10ba52f23207a4e491d | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2927/60608/259827.py | 2edeb3c30f2fdf138f74e200b06069554769cd62 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 810 | py | import math
def isInside(a1, b1, a2, b2, x, y):
if a1 <= x <= a2 and b2 <= y <= b1:
return True
else:
return False
def func31():
    arr = list(map(int, input().split()))
    # Rotate the square's corners by 45 degrees so that the containment
    # test in isInside reduces to an axis-aligned bounds check.
    a1 = math.sin(math.pi / 4) * arr[1]
    b1 = math.cos(math.pi / 4) * arr[1]
    a2 = math.cos(math.pi / 4) * arr[0] + math.sin(math.pi / 4) * (arr[0] - arr[1])
    b2 = -math.sin(math.pi / 4) * arr[0] + math.cos(math.pi / 4) * (arr[0] - arr[1])
    m = int(input())  # int() instead of eval(): safer for untrusted input
    for _ in range(0, m):
        arr = list(map(int, input().split()))
        # Apply the same 45-degree rotation to each query point.
        x = math.cos(math.pi / 4) * arr[0] + math.sin(math.pi / 4) * arr[1]
        y = -math.sin(math.pi / 4) * arr[0] + math.cos(math.pi / 4) * arr[1]
if isInside(a1, b1, a2, b2, x, y):
print("YES")
else:
print("NO")
func31()
| [
"[email protected]"
] | |
bca794248ea71a51a1e951fc0b8aa6a397b56be2 | eb9f655206c43c12b497c667ba56a0d358b6bc3a | /python/testData/intentions/PyAnnotateVariableTypeIntentionTest/annotationListType.py | bede741880e46fb1f24b4c19fa3177ab272b2525 | [
"Apache-2.0"
] | permissive | JetBrains/intellij-community | 2ed226e200ecc17c037dcddd4a006de56cd43941 | 05dbd4575d01a213f3f4d69aa4968473f2536142 | refs/heads/master | 2023-09-03T17:06:37.560889 | 2023-09-03T11:51:00 | 2023-09-03T12:12:27 | 2,489,216 | 16,288 | 6,635 | Apache-2.0 | 2023-09-12T07:41:58 | 2011-09-30T13:33:05 | null | UTF-8 | Python | false | false | 46 | py | def func():
var = [1, 2, 3]
v<caret>ar | [
"[email protected]"
] | |
eebd75b28665ca91cb500de5634618a87885eacc | 5b771c11e8967038025376c6ec31962ca90748dd | /resturant/Resturant/src/blog/migrations/0004_comment.py | f07e3f8d6a9a53ea6011d24313d8a9f81336e5e0 | [] | no_license | AsemAntar/Django_Projects | 7135eca3b4bcb656fc88e0838483c97d7f1746e1 | 4141c2c7e91845eec307f6dd6c69199302eabb16 | refs/heads/master | 2022-12-10T06:32:35.787504 | 2020-05-26T14:43:01 | 2020-05-26T14:43:01 | 216,863,494 | 0 | 0 | null | 2022-12-05T13:31:53 | 2019-10-22T16:47:28 | Python | UTF-8 | Python | false | false | 965 | py | # Generated by Django 3.0.5 on 2020-05-17 09:14
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('blog', '0003_post_tags'),
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField()),
('created', models.DateTimeField(default=django.utils.timezone.now)),
('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.Post')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"[email protected]"
] | |
99615e87d4fbff1af9b49e1437dedc8adb4d0149 | 245b92f4140f30e26313bfb3b2e47ed1871a5b83 | /airflow/providers/google_vendor/googleads/v12/services/services/audience_insights_service/client.py | 5441dafcf07c137622fb58722943ce4f38360b68 | [
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] | permissive | ephraimbuddy/airflow | 238d6170a0e4f76456f00423124a260527960710 | 3193857376bc2c8cd2eb133017be1e8cbcaa8405 | refs/heads/main | 2023-05-29T05:37:44.992278 | 2023-05-13T19:49:43 | 2023-05-13T19:49:43 | 245,751,695 | 2 | 1 | Apache-2.0 | 2021-05-20T08:10:14 | 2020-03-08T04:28:27 | null | UTF-8 | Python | false | false | 33,548 | py | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from airflow.providers.google_vendor.googleads.v12.enums.types import audience_insights_dimension
from airflow.providers.google_vendor.googleads.v12.services.types import audience_insights_service
from .transports.base import (
AudienceInsightsServiceTransport,
DEFAULT_CLIENT_INFO,
)
from .transports.grpc import AudienceInsightsServiceGrpcTransport
class AudienceInsightsServiceClientMeta(type):
"""Metaclass for the AudienceInsightsService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[AudienceInsightsServiceTransport]]
_transport_registry["grpc"] = AudienceInsightsServiceGrpcTransport
def get_transport_class(
cls, label: str = None,
) -> Type[AudienceInsightsServiceTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class AudienceInsightsServiceClient(
metaclass=AudienceInsightsServiceClientMeta
):
"""Audience Insights Service helps users find information about
groups of people and how they can be reached with Google Ads.
"""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "googleads.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
AudienceInsightsServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(
info
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
AudienceInsightsServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename
)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> AudienceInsightsServiceTransport:
"""Returns the transport used by the client instance.
Returns:
AudienceInsightsServiceTransport: The transport used by the client
instance.
"""
return self._transport
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
"""Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients!
"""
self.transport.close()
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path
)
return m.groupdict() if m else {}
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, AudienceInsightsServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the audience insights service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, AudienceInsightsServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
"true",
"false",
):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
use_client_cert = (
os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
)
client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
is_mtls = True
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
if is_mtls:
client_cert_source_func = mtls.default_client_cert_source()
else:
client_cert_source_func = None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
api_endpoint = (
self.DEFAULT_MTLS_ENDPOINT
if is_mtls
else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
"values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, AudienceInsightsServiceTransport):
# transport is a AudienceInsightsServiceTransport instance.
if credentials or client_options.credentials_file:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
)
def generate_insights_finder_report(
self,
request: Union[
audience_insights_service.GenerateInsightsFinderReportRequest, dict
] = None,
*,
customer_id: str = None,
baseline_audience: audience_insights_service.BasicInsightsAudience = None,
specific_audience: audience_insights_service.BasicInsightsAudience = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> audience_insights_service.GenerateInsightsFinderReportResponse:
r"""Creates a saved report that can be viewed in the Insights Finder
tool.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `FieldError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RangeError <>`__
`RequestError <>`__
Args:
request (Union[google.ads.googleads.v12.services.types.GenerateInsightsFinderReportRequest, dict]):
The request object. Request message for
[AudienceInsightsService.GenerateInsightsFinderReport][google.ads.googleads.v12.services.AudienceInsightsService.GenerateInsightsFinderReport].
customer_id (str):
Required. The ID of the customer.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
baseline_audience (google.ads.googleads.v12.services.types.BasicInsightsAudience):
Required. A baseline audience for
this report, typically all people in a
region.
This corresponds to the ``baseline_audience`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
specific_audience (google.ads.googleads.v12.services.types.BasicInsightsAudience):
Required. The specific audience of
interest for this report. The insights
in the report will be based on
attributes more prevalent in this
audience than in the report's baseline
audience.
This corresponds to the ``specific_audience`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v12.services.types.GenerateInsightsFinderReportResponse:
The response message for
[AudienceInsightsService.GenerateInsightsFinderReport][google.ads.googleads.v12.services.AudienceInsightsService.GenerateInsightsFinderReport],
containing the shareable URL for the report.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any(
[customer_id, baseline_audience, specific_audience]
)
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a audience_insights_service.GenerateInsightsFinderReportRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request,
audience_insights_service.GenerateInsightsFinderReportRequest,
):
request = audience_insights_service.GenerateInsightsFinderReportRequest(
request
)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if customer_id is not None:
request.customer_id = customer_id
if baseline_audience is not None:
request.baseline_audience = baseline_audience
if specific_audience is not None:
request.specific_audience = specific_audience
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.generate_insights_finder_report
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("customer_id", request.customer_id),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
def list_audience_insights_attributes(
self,
request: Union[
audience_insights_service.ListAudienceInsightsAttributesRequest,
dict,
] = None,
*,
customer_id: str = None,
dimensions: Sequence[
audience_insights_dimension.AudienceInsightsDimensionEnum.AudienceInsightsDimension
] = None,
query_text: str = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> audience_insights_service.ListAudienceInsightsAttributesResponse:
r"""Searches for audience attributes that can be used to generate
insights.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `FieldError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RangeError <>`__
`RequestError <>`__
Args:
request (Union[google.ads.googleads.v12.services.types.ListAudienceInsightsAttributesRequest, dict]):
The request object. Request message for
[AudienceInsightsService.ListAudienceInsightsAttributes][google.ads.googleads.v12.services.AudienceInsightsService.ListAudienceInsightsAttributes].
customer_id (str):
Required. The ID of the customer.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
dimensions (Sequence[google.ads.googleads.v12.enums.types.AudienceInsightsDimensionEnum.AudienceInsightsDimension]):
Required. The types of attributes to
be returned.
This corresponds to the ``dimensions`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
query_text (str):
Required. A free text query.
Attributes matching or related to this
string will be returned.
This corresponds to the ``query_text`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v12.services.types.ListAudienceInsightsAttributesResponse:
Response message for
[AudienceInsightsService.ListAudienceInsightsAttributes][google.ads.googleads.v12.services.AudienceInsightsService.ListAudienceInsightsAttributes].
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([customer_id, dimensions, query_text])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a audience_insights_service.ListAudienceInsightsAttributesRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request,
audience_insights_service.ListAudienceInsightsAttributesRequest,
):
request = audience_insights_service.ListAudienceInsightsAttributesRequest(
request
)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if customer_id is not None:
request.customer_id = customer_id
if dimensions is not None:
request.dimensions = dimensions
if query_text is not None:
request.query_text = query_text
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.list_audience_insights_attributes
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("customer_id", request.customer_id),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
def list_insights_eligible_dates(
self,
request: Union[
audience_insights_service.ListInsightsEligibleDatesRequest, dict
] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> audience_insights_service.ListInsightsEligibleDatesResponse:
r"""Lists date ranges for which audience insights data can be
requested.
List of thrown errors: `AuthenticationError <>`__
`AuthorizationError <>`__ `FieldError <>`__ `HeaderError <>`__
`InternalError <>`__ `QuotaError <>`__ `RangeError <>`__
`RequestError <>`__
Args:
request (Union[google.ads.googleads.v12.services.types.ListInsightsEligibleDatesRequest, dict]):
The request object. Request message for
[AudienceInsightsService.ListAudienceInsightsDates][].
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v12.services.types.ListInsightsEligibleDatesResponse:
Response message for
[AudienceInsightsService.ListAudienceInsightsDates][].
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a audience_insights_service.ListInsightsEligibleDatesRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request, audience_insights_service.ListInsightsEligibleDatesRequest
):
request = audience_insights_service.ListInsightsEligibleDatesRequest(
request
)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.list_insights_eligible_dates
]
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
def generate_audience_composition_insights(
self,
request: Union[
audience_insights_service.GenerateAudienceCompositionInsightsRequest,
dict,
] = None,
*,
customer_id: str = None,
audience: audience_insights_service.InsightsAudience = None,
dimensions: Sequence[
audience_insights_dimension.AudienceInsightsDimensionEnum.AudienceInsightsDimension
] = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> audience_insights_service.GenerateAudienceCompositionInsightsResponse:
r"""Returns a collection of attributes that are represented in an
audience of interest, with metrics that compare each attribute's
share of the audience with its share of a baseline audience.
List of thrown errors: `AudienceInsightsError <>`__
`AuthenticationError <>`__ `AuthorizationError <>`__
`FieldError <>`__ `HeaderError <>`__ `InternalError <>`__
`QuotaError <>`__ `RangeError <>`__ `RequestError <>`__
Args:
request (Union[google.ads.googleads.v12.services.types.GenerateAudienceCompositionInsightsRequest, dict]):
The request object. Request message for
[AudienceInsightsService.GenerateAudienceCompositionInsights][google.ads.googleads.v12.services.AudienceInsightsService.GenerateAudienceCompositionInsights].
customer_id (str):
Required. The ID of the customer.
This corresponds to the ``customer_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
audience (google.ads.googleads.v12.services.types.InsightsAudience):
Required. The audience of interest
for which insights are being requested.
This corresponds to the ``audience`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
dimensions (Sequence[google.ads.googleads.v12.enums.types.AudienceInsightsDimensionEnum.AudienceInsightsDimension]):
Required. The audience dimensions for
which composition insights should be
returned.
This corresponds to the ``dimensions`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.ads.googleads.v12.services.types.GenerateAudienceCompositionInsightsResponse:
Response message for
[AudienceInsightsService.GenerateAudienceCompositionInsights][google.ads.googleads.v12.services.AudienceInsightsService.GenerateAudienceCompositionInsights].
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([customer_id, audience, dimensions])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a audience_insights_service.GenerateAudienceCompositionInsightsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(
request,
audience_insights_service.GenerateAudienceCompositionInsightsRequest,
):
request = audience_insights_service.GenerateAudienceCompositionInsightsRequest(
request
)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if customer_id is not None:
request.customer_id = customer_id
if audience is not None:
request.audience = audience
if dimensions is not None:
request.dimensions = dimensions
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[
self._transport.generate_audience_composition_insights
]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("customer_id", request.customer_id),)
),
)
# Send the request.
response = rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
)
# Done; return the response.
return response
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-ads",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("AudienceInsightsServiceClient",)
| [
"[email protected]"
] | |
45a6c789ab4c46a1a8e9f75786bd68dbf3e2c43e | 7769cb512623c8d3ba96c68556b2cea5547df5fd | /mmdet/models/losses/smooth_l1_loss.py | bc340730b2ec0db938508dfc94e71369484d1918 | [
"MIT"
] | permissive | JialeCao001/D2Det | 0e49f4c76e539d574e46b02f278242ca912c31ea | a76781ab624a1304f9c15679852a73b4b6770950 | refs/heads/master | 2022-12-05T01:00:08.498629 | 2020-09-04T11:33:26 | 2020-09-04T11:33:26 | 270,723,372 | 312 | 88 | MIT | 2020-07-08T23:53:23 | 2020-06-08T15:37:35 | Python | UTF-8 | Python | false | false | 1,288 | py | import torch
import torch.nn as nn
from ..registry import LOSSES
from .utils import weighted_loss
@weighted_loss
def smooth_l1_loss(pred, target, beta=1.0):
assert beta > 0
assert pred.size() == target.size() and target.numel() > 0
diff = torch.abs(pred - target)
loss = torch.where(diff < beta, 0.5 * diff * diff / beta,
diff - 0.5 * beta)
return loss
@LOSSES.register_module
class SmoothL1Loss(nn.Module):
def __init__(self, beta=1.0, reduction='mean', loss_weight=1.0):
super(SmoothL1Loss, self).__init__()
self.beta = beta
self.reduction = reduction
self.loss_weight = loss_weight
def forward(self,
pred,
target,
weight=None,
avg_factor=None,
reduction_override=None,
**kwargs):
assert reduction_override in (None, 'none', 'mean', 'sum')
reduction = (
reduction_override if reduction_override else self.reduction)
loss_bbox = self.loss_weight * smooth_l1_loss(
pred,
target,
weight,
beta=self.beta,
reduction=reduction,
avg_factor=avg_factor,
**kwargs)
return loss_bbox
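# Hedged sanity check (an addition; hand-computed values with beta = 1):
# |diff| = 0.5 -> 0.5 * 0.5**2 = 0.125 (quadratic branch),
# |diff| = 1.5 -> 1.5 - 0.5 = 1.0 (linear branch).
# The relative imports above mean this file is imported via the mmdet
# package; the guarded block below only illustrates the two branches.
if __name__ == "__main__":
    pred = torch.tensor([0.0, 0.0])
    target = torch.tensor([0.5, 1.5])
    crit = SmoothL1Loss(beta=1.0, reduction='none')
    print(crit(pred, target))  # expected: tensor([0.1250, 1.0000])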
| [
"[email protected]"
] | |
130e3ec189c725c6c0fbbcedda199db9b4d2f77a | d364123a0655bff7e9d725382934fe2c15b5bfc4 | /Crawler/two/xingneng.py | ae3a6428bc0601f76f83c19217cd42d06832e0f5 | [] | no_license | yuan1093040152/SeleniumTest | 88d75361c8419354f56856c326f843a0a89d7ca6 | d155b98702bc46c174499042b43257696b861b5e | refs/heads/master | 2023-08-31T15:00:25.415642 | 2023-08-30T09:26:42 | 2023-08-30T09:26:42 | 227,269,300 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,741 | py | #coding=utf-8
'''
Created on 2018-03-15
@author: 唐路
'''
fields = ('area','population','iso','country' ,'capital', 'continent','tld','currency_code','currency_name','phone','postal_code_format','postal_code_regex','languages','neighbours')
import re,time
import lxml.html
import lxml.cssselect
from re_grab import download
from bs4 import BeautifulSoup
def re_scraper(html,fields):
results = {}
for field in fields:
results[field] = re.findall('<tr id="places_%s__row">.*?<td class="w2p_fw">(.*?)</td>'%field,html)[0]
return results
def bs_scraper(html,fields):
soup = BeautifulSoup(html,'html.parser')
results = {}
for field in fields:
results[field] = soup.find('table').find('tr',id='places_%s__row'%field).find('td',attrs={'class':"w2p_fw"}).text
return results
def lxml_scraper(html,fields):
tree = lxml.html.fromstring(html)
results = {}
for field in fields:
results[field] = tree.cssselect('table>tr#places_%s__row > td.w2p_fw'%field)[0].text_content()
return results
num_iterations = 1000
url = 'http://example.webscraping.com/places/default/view/China-47'
html = download(url)
for name, scraper in [('Regular expressions', re_scraper), ('BeautifulSoup', bs_scraper), ('Lxml', lxml_scraper)]:
    start = time.time()
    for i in range(num_iterations):
        if scraper == re_scraper:
            re.purge()  # clear the compiled-pattern cache so each run pays full cost
        result = scraper(html, fields)
        assert result['area'] == '9,596,960 square kilometres'
    end = time.time()
    print('%s: %.2f seconds' % (name, end - start))
# Single-run example:
# url = 'http://example.webscraping.com/places/default/view/China-47'
# html = download(url)
# results = lxml_scraper(html, fields)
# print(results)
| [
"[email protected]"
] | |
c8654d745ab7e9b8739b049d093a243c901b2dec | 778d942f39abfb74b395d521af1cabfa1487c91c | /Greedy/JobSequencingProblem.py | aec789872cc841135a035f34f279a4553e9cc587 | [] | no_license | yashwanthguguloth24/Algorithms | ff80f47ef20be23da245c737640df0799151fdd1 | 0b9c7540f09f9d5792f5058ef901d588418f4f27 | refs/heads/master | 2023-07-09T16:51:17.174314 | 2021-08-08T18:47:43 | 2021-08-08T18:47:43 | 279,512,532 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,494 | py | '''
Given a set of N jobs where each job i has a deadline and a profit associated with it. Each job takes 1 unit of time to complete and only one job can be scheduled at a time. We earn the profit if and only if the job is completed by its deadline. The task is to find the maximum profit and the number of jobs done.
Input Format:
Jobs will be given in the form (Job id, Deadline, Profit) associated with that Job.
Example 1:
Input:
N = 4
Jobs = (1,4,20)(2,1,10)(3,1,40)(4,1,30)
Output: 2 60
Explanation: 2 jobs can be done with
maximum profit of 60 (20+40).
'''
# code by yashwanth
def JobScheduling(Jobs,n):
'''
:param Jobs: list of "Job" class defined in driver code, with "profit" and "deadline".
:param n: total number of jobs
:return: A list of size 2 having list[0] = count of jobs and list[1] = max profit
'''
'''
    Shape of the Job class supplied by the driver code:
    class Job:
        def __init__(self, profit=0, deadline=0):
            self.profit = profit
            self.deadline = deadline
            self.id = 0
'''
profit = 0
slots = [False]*n
Jobs = sorted(Jobs,key = lambda x : x.profit,reverse = True)
for i in range(n):
for j in range(min(n,Jobs[i].deadline)-1,-1,-1):
if slots[j] == False:
slots[j] = True
profit += Jobs[i].profit
break
num_jobs = 0
for k in range(n):
if slots[k]:
num_jobs += 1
res = [num_jobs,profit]
return res
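# Hedged usage sketch (an addition): a minimal stand-in for the driver
# code's Job class described in the docstring above.
class Job:
    def __init__(self, id=0, deadline=0, profit=0):
        self.id = id
        self.deadline = deadline
        self.profit = profit

if __name__ == "__main__":
    jobs = [Job(1, 4, 20), Job(2, 1, 10), Job(3, 1, 40), Job(4, 1, 30)]
    print(JobScheduling(jobs, 4))  # expected: [2, 60]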
| [
"[email protected]"
] |