blob_id (stringlengths, 40–40) | directory_id (stringlengths, 40–40) | path (stringlengths, 3–616) | content_id (stringlengths, 40–40) | detected_licenses (sequencelengths, 0–112) | license_type (stringclasses, 2 values) | repo_name (stringlengths, 5–115) | snapshot_id (stringlengths, 40–40) | revision_id (stringlengths, 40–40) | branch_name (stringclasses, 777 values) | visit_date (timestamp[us], 2015-08-06 10:31:46 – 2023-09-06 10:44:38) | revision_date (timestamp[us], 1970-01-01 02:38:32 – 2037-05-03 13:00:00) | committer_date (timestamp[us], 1970-01-01 02:38:32 – 2023-09-06 01:08:06) | github_id (int64, 4.92k–681M, nullable) | star_events_count (int64, 0–209k) | fork_events_count (int64, 0–110k) | gha_license_id (stringclasses, 22 values) | gha_event_created_at (timestamp[us], 2012-06-04 01:52:49 – 2023-09-14 21:59:50, nullable) | gha_created_at (timestamp[us], 2008-05-22 07:58:19 – 2023-08-21 12:35:19, nullable) | gha_language (stringclasses, 149 values) | src_encoding (stringclasses, 26 values) | language (stringclasses, 1 value) | is_vendor (bool, 2 classes) | is_generated (bool, 2 classes) | length_bytes (int64, 3–10.2M) | extension (stringclasses, 188 values) | content (stringlengths, 3–10.2M) | authors (sequencelengths, 1–1) | author_id (stringlengths, 1–132) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
9367c9fc788b09d6bf9c8369612096e5c5ffa3fa | 55647a80c8b412af9df0ba3f50595cc2f29c25e6 | /res/scripts/client/AvatarInputHandler/AimingSystems/StrategicAimingSystem.py | 2ec9e5a041a24e763dee53002932f7d06da6e9d5 | [] | no_license | cnsuhao/WOT-0.9.17-CT | 0035eb6070fb4fab8d8ee9f8bbc676c10d511cfb | d1f932d8cabaf8aa21708622e87f83c8d24d6451 | refs/heads/master | 2021-06-08T18:11:07.039293 | 2016-11-19T19:12:37 | 2016-11-19T19:12:37 | null | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 2,452 | py | # 2016.11.19 19:47:39 Central Europe (standard time)
# Embedded file name: scripts/client/AvatarInputHandler/AimingSystems/StrategicAimingSystem.py
import BigWorld
import Math
from Math import Vector3, Matrix
import math
from AvatarInputHandler import mathUtils, AimingSystems
from AvatarInputHandler.AimingSystems import IAimingSystem
from AvatarInputHandler.cameras import _clampPoint2DInBox2D
class StrategicAimingSystem(IAimingSystem):
_LOOK_DIR = Vector3(0, -math.cos(0.001), math.sin(0.001))
height = property(lambda self: self.__height)
heightFromPlane = property(lambda self: self.__heightFromPlane)
def __init__(self, height, yaw):
self._matrix = mathUtils.createRotationMatrix((yaw, 0, 0))
self.__planePosition = Vector3(0, 0, 0)
self.__height = height
self.__heightFromPlane = 0.0
def destroy(self):
pass
def enable(self, targetPos):
self.updateTargetPos(targetPos)
def disable(self):
pass
def getDesiredShotPoint(self, terrainOnlyCheck = False):
return AimingSystems.getDesiredShotPoint(self._matrix.translation, Vector3(0, -1, 0), True, True, terrainOnlyCheck)
def handleMovement(self, dx, dy):
shift = self._matrix.applyVector(Vector3(dx, 0, dy))
self.__planePosition += Vector3(shift.x, 0, shift.z)
self.__updateMatrix()
def updateTargetPos(self, targetPos):
self.__planePosition.x = targetPos.x
self.__planePosition.z = targetPos.z
self.__updateMatrix()
def __updateMatrix(self):
bb = BigWorld.player().arena.arenaType.boundingBox
pos2D = _clampPoint2DInBox2D(bb[0], bb[1], Math.Vector2(self.__planePosition.x, self.__planePosition.z))
self.__planePosition.x = pos2D[0]
self.__planePosition.z = pos2D[1]
collPoint = BigWorld.wg_collideSegment(BigWorld.player().spaceID, self.__planePosition + Math.Vector3(0, 1000.0, 0), self.__planePosition + Math.Vector3(0, -250.0, 0), 3)
self.__heightFromPlane = 0.0 if collPoint is None else collPoint[0][1]
self._matrix.translation = self.__planePosition + Vector3(0, self.__heightFromPlane + self.__height, 0)
return
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client\AvatarInputHandler\AimingSystems\StrategicAimingSystem.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.11.19 19:47:39 Central Europe (standard time)
| [
"[email protected]"
] | |
e83f53e3b09d4c31e6cddb4686f5993e3a6dc7b9 | 3899a37d1f500f7935cd04079e0b293bd64fe1cb | /docs/conf.py | 9902ebaf5dadaec57cdc679f1cbc45f4be1e8a5b | [
"MIT"
] | permissive | jubaer145/nlpaug | 06d5fa83d68537f6485ed5afccfe2ece056aae8b | b631660f1997fc503258735ec011ffbe164d12af | refs/heads/master | 2023-06-02T03:45:18.094793 | 2021-06-20T21:17:13 | 2021-06-20T21:17:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,645 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# nlpaug documentation build configuration file, created by
# sphinx-quickstart on Wed Aug 7 07:37:05 2019.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import sys, os
from unittest.mock import MagicMock
sys.path.append(os.path.abspath('..'))
# Mock module to bypass pip install
class Mock(MagicMock):
@classmethod
def __getattr__(cls, name):
return MagicMock()
MOCK_MODULES = [
'librosa', 'librosa.display', 'numpy', 'nltk', 'matplotlib', 'matplotlib.pyplot',
'setuptools', 'python-dotenv', 'nltk.corpus', 'torch', 'transformers']
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
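# With the mocks registered, importing any of the listed packages during the
# docs build succeeds even when they are not installed, e.g. (illustration only):
#   import librosa              # resolved from sys.modules, no ImportError
#   librosa.display.specshow    # attribute access simply returns a MagicMock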
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.doctest',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
'sphinx.ext.autodoc']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'nlpaug'
copyright = '2019, Edward Ma'
author = 'Edward Ma'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.1.4'
# The full version, including alpha/beta/rc tags.
release = '1.1.4'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'nlpaugdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'nlpaug.tex', 'nlpaug Documentation',
'Edward Ma', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'nlpaug', 'nlpaug Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'nlpaug', 'nlpaug Documentation',
author, 'nlpaug', 'One line description of project.',
'Miscellaneous'),
]
| [
"[email protected]"
] | |
6150cb6eab8ab3c168a1eead8a17ce4cc4735cb6 | e9348d1689215220b7820134a82c2afdf8aed107 | /backend/young_waterfall_29324/urls.py | 19b2e1e3e6e251b7ff93a5593048f905ce1b2e58 | [] | no_license | crowdbotics-apps/young-waterfall-29324 | 8bf2accb9197c45f59ac717b2ec4fe289830b3f8 | ea74f174180c6af5acca25a82397daa7c48eb7c2 | refs/heads/master | 2023-06-26T05:12:51.154938 | 2021-08-01T20:50:29 | 2021-08-01T20:50:29 | 391,735,458 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,253 | py | """young_waterfall_29324 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("modules/", include("modules.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
]
admin.site.site_header = "Young Waterfall"
admin.site.site_title = "Young Waterfall Admin Portal"
admin.site.index_title = "Young Waterfall Admin"
# swagger
api_info = openapi.Info(
title="Young Waterfall API",
default_version="v1",
description="API documentation for Young Waterfall App",
)
schema_view = get_schema_view(
api_info,
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
urlpatterns += [path("", TemplateView.as_view(template_name='index.html'))]
urlpatterns += [re_path(r"^(?:.*)/?$",
TemplateView.as_view(template_name='index.html'))]
| [
"[email protected]"
] | |
98911ce8c4bc073fa0ada3fad0c3d1e3231ad68e | 13c2f109585a033a1acecdd912a3142802170921 | /Python_Object_Serialization_Context_Manager.py | 2566f5ec1aeb15199b4802d5e018e7fa67a537bf | [] | no_license | VakinduPhilliam/Hierachy_Serialization | 88175764e24d03602eca06e8df13223e8ec4dd7e | 61d534b23bc3e072356cb33fd763b0cbb6320896 | refs/heads/master | 2020-05-24T15:59:41.674047 | 2019-11-01T15:02:08 | 2019-11-01T15:02:08 | 187,346,172 | 0 | 0 | null | null | null | null | WINDOWS-1252 | Python | false | false | 1,953 | py | # Python object serialization
# The 'pickle' module implements binary protocols for serializing and de-serializing
# a Python object structure.
# “Pickling” is the process whereby a Python object hierarchy is converted into a byte stream,
# and “unpickling” is the inverse operation, whereby a byte stream (from a binary file or bytes-like
# object) is converted back into an object hierarchy.
# Pickling (and unpickling) is alternatively known as “serialization”, “marshalling,” or “flattening”;
# however, to avoid confusion, the terms used here are “pickling” and “unpickling”.
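# A minimal round-trip sketch (standard library only, added for illustration
# alongside the sqlite3 example below):
import pickle
sample = {'numbers': [1, 2, 3], 'nested': {'flag': True}}
blob = pickle.dumps(sample) # object hierarchy -> byte stream ("pickling")
assert pickle.loads(blob) == sample # byte stream -> object hierarchy ("unpickling")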
# sqlite3 — DB-API 2.0 interface for SQLite databases
# SQLite is a C library that provides a lightweight disk-based database that doesn’t require a separate
# server process and allows accessing the database using a nonstandard variant of the SQL query language.
# Some applications can use SQLite for internal data storage.
# It’s also possible to prototype an application using SQLite and then port the code to a larger database
# such as PostgreSQL or Oracle.
# Using the connection as a context manager
# Connection objects can be used as context managers that automatically commit or rollback transactions.
# In the event of an exception, the transaction is rolled back; otherwise, the transaction is committed:
import sqlite3
con = sqlite3.connect(":memory:")
con.execute("create table person (id integer primary key, firstname varchar unique)")
# Successful, con.commit() is called automatically afterwards
with con:
con.execute("insert into person(firstname) values (?)", ("Joe",))
# con.rollback() is called after the with block finishes with an exception, the
# exception is still raised and must be caught
try:
with con:
con.execute("insert into person(firstname) values (?)", ("Joe",))
except sqlite3.IntegrityError:
print("couldn't add Joe twice")
| [
"[email protected]"
] | |
471f69a116bb3f8ea26d0e157151b03c8573d7fb | 4586fcc1afd15f04dbb269899a5b954adcd8d60e | /bin/ldgp.py | b825bbe162b265a0c48f0c32c7daf4bf04ca4e6c | [] | no_license | gautamits/rgbd | d0f1435a2b91b2aa0e848688d3c1c12fc1c77931 | a055a6b718a1e20957f20f19a0c49bbfa63cbd08 | refs/heads/master | 2021-01-20T05:59:43.891910 | 2017-11-25T09:16:34 | 2017-11-25T09:16:34 | 87,881,081 | 0 | 0 | null | 2017-04-25T19:22:50 | 2017-04-11T02:51:16 | Python | UTF-8 | Python | false | false | 1,636 | py | import cv2
import numpy as np
def dist(x,y):
return np.sqrt(np.sum((x-y)**2)) #this function returns euclidean distance between two one dimensional arrays
#this function returns histogram of image,
def hist(a):
    hist, bin_edges = np.histogram(a, bins = range(65)) # 6-bit LDGP codes span 0-63, so 65 edges give one bin per code
return hist
#this function returns ldgp of an image
def ldgp(i):
    if i.ndim == 3: # color images carry a channel axis; convert to grayscale first
        i=cv2.cvtColor(i,cv2.COLOR_BGR2GRAY)
height,width=i.shape
#zero padding
first=np.pad(i,((0,0),(1,0)),'constant')
second=np.pad(i,((0,1),(1,0)),'constant')
third=np.pad(i,((0,1),(0,0)),'constant')
fourth=np.pad(i,((0,1),(0,1)),'constant')
first=first[:,0:width]
second=second[1:height+1,0:width]
third=third[1:height+1,:]
fourth=fourth[1:height+1,1:width+1]
first=i-first #gradient at 0 degree
second=i-second #gradient at 45 degree
third=i-third #gradient at 90 degree
fourth=i-fourth # gradient at 135 degree
combo1=32*np.array( first >= second, dtype=int) #binary arrays being converted to decimal
combo2=16*np.array( first >= third, dtype=int)
combo3=8*np.array( first >= fourth, dtype=int)
combo4=4*np.array( second >= third, dtype=int)
combo5=2*np.array( second >= fourth, dtype=int)
combo6=np.array( third >= fourth, dtype=int)
ldgp=combo1+combo2+combo3+combo4+combo5+combo6
ldgp=np.array(ldgp,dtype='uint8')
return ldgp #final ldgp returned
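# Example usage (hypothetical file name; with the 64-bin histogram above this
# yields a 64-dimensional LDGP descriptor):
if __name__ == '__main__':
    img = cv2.imread('face.png') # any 8-bit BGR or grayscale image
    if img is not None:
        print(hist(ldgp(img)))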
| [
"[email protected]"
] | |
e8a6c6d6bc56b44d9ac2fae0497b557fe4c040d9 | b87ea98bc166cade5c78d246aeb0e23c59183d56 | /samples/openapi3/client/petstore/python-nextgen-aiohttp/setup.py | d584a44727dd30a7685acc7a8fbbfecd38037804 | [
"Apache-2.0"
] | permissive | holisticon/openapi-generator | 88f8e6a3d7bc059c8f56563c87f6d473694d94e5 | 6a67551ea54a1aa9a49eb48ee26b4e9bb7fb1272 | refs/heads/master | 2023-05-12T02:55:19.037397 | 2023-04-14T08:31:59 | 2023-04-14T08:31:59 | 450,034,139 | 1 | 0 | Apache-2.0 | 2022-01-20T09:34:14 | 2022-01-20T09:34:13 | null | UTF-8 | Python | false | false | 1,473 | py | # coding: utf-8
"""
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
"""
from setuptools import setup, find_packages # noqa: H301
# To install the library, run the following
#
# python setup.py install
#
# prerequisite: setuptools
# http://pypi.python.org/pypi/setuptools
NAME = "petstore-api"
VERSION = "1.0.0"
PYTHON_REQUIRES = ">=3.7"
REQUIRES = [
"urllib3 >= 1.25.3",
"python-dateutil",
"aiohttp >= 3.0.0",
"pem>=19.3.0",
"pycryptodome>=3.9.0",
"pydantic >= 1.10.5, < 2",
"aenum"
]
setup(
name=NAME,
version=VERSION,
description="OpenAPI Petstore",
author="OpenAPI Generator community",
author_email="[email protected]",
url="",
keywords=["OpenAPI", "OpenAPI-Generator", "OpenAPI Petstore"],
install_requires=REQUIRES,
packages=find_packages(exclude=["test", "tests"]),
include_package_data=True,
license="Apache-2.0",
long_description_content_type='text/markdown',
long_description="""\
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
"""
)
| [
"[email protected]"
] | |
17040ccedb5c26efb123bc8a9513defa32f9b4dc | f92fbb5ecbcd0adf4998e19d9d27e49386f898ab | /rls/algorithms/single/modelbased/planet.py | be9147eddd57a4a5b08109d7b2682e733572c12f | [
"Apache-2.0"
] | permissive | tonylibing/RLs | 26e5dedbe7e36704ac98fa8efd00184059cdc717 | 21607d93e26f3be7a1243a642ed7e76178c856ae | refs/heads/master | 2023-08-02T06:14:19.142614 | 2021-09-15T16:20:28 | 2021-09-15T16:20:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,145 | py | #!/usr/bin/env python3
# encoding: utf-8
from typing import Dict, List, NoReturn, Union
import numpy as np
import torch as t
from torch import distributions as td
from rls.algorithms.base.sarl_off_policy import SarlOffPolicy
from rls.common.data import Data, get_first_vector, get_first_visual
from rls.common.decorator import iton
from rls.nn.dreamer import DenseModel, RecurrentStateSpaceModel
from rls.nn.utils import OPLR
class PlaNet(SarlOffPolicy):
'''
Learning Latent Dynamics for Planning from Pixels, http://arxiv.org/abs/1811.04551
'''
policy_mode = 'off-policy'
def __init__(self,
stoch_dim=30,
deter_dim=200,
model_lr=6e-4,
kl_free_nats=3,
kl_scale=1.0,
reward_scale=1.0,
cem_horizon=12,
cem_iter_nums=10,
cem_candidates=1000,
cem_tops=100,
action_sigma=0.3,
network_settings=dict(),
**kwargs):
super().__init__(**kwargs)
assert self.is_continuous == True, 'assert self.is_continuous == True'
self.cem_horizon = cem_horizon
self.cem_iter_nums = cem_iter_nums
self.cem_candidates = cem_candidates
self.cem_tops = cem_tops
assert self.use_rnn == False, 'assert self.use_rnn == False'
if self.obs_spec.has_visual_observation \
and len(self.obs_spec.visual_dims) == 1 \
and not self.obs_spec.has_vector_observation:
visual_dim = self.obs_spec.visual_dims[0]
# TODO: optimize this
assert visual_dim[0] == visual_dim[1] == 64, 'visual dimension must be [64, 64, *]'
self._is_visual = True
elif self.obs_spec.has_vector_observation \
and len(self.obs_spec.vector_dims) == 1 \
and not self.obs_spec.has_visual_observation:
self._is_visual = False
else:
raise ValueError("please check the observation type")
self.stoch_dim = stoch_dim
self.deter_dim = deter_dim
self.kl_free_nats = kl_free_nats
self.kl_scale = kl_scale
self.reward_scale = reward_scale
self._action_sigma = action_sigma
self._network_settings = network_settings
if self.obs_spec.has_visual_observation:
from rls.nn.dreamer import VisualDecoder, VisualEncoder
self.obs_encoder = VisualEncoder(self.obs_spec.visual_dims[0],
**network_settings['obs_encoder']['visual']).to(self.device)
self.obs_decoder = VisualDecoder(self.decoder_input_dim,
self.obs_spec.visual_dims[0],
**network_settings['obs_decoder']['visual']).to(self.device)
else:
from rls.nn.dreamer import VectorEncoder
self.obs_encoder = VectorEncoder(self.obs_spec.vector_dims[0],
**network_settings['obs_encoder']['vector']).to(self.device)
self.obs_decoder = DenseModel(self.decoder_input_dim,
self.obs_spec.vector_dims[0],
**network_settings['obs_decoder']['vector']).to(self.device)
self.rssm = self._dreamer_build_rssm()
"""
p(r_t | s_t, h_t)
Reward model to predict reward from state and rnn hidden state
"""
self.reward_predictor = DenseModel(self.decoder_input_dim,
1,
**network_settings['reward']).to(self.device)
self.model_oplr = OPLR([self.obs_encoder, self.rssm, self.obs_decoder, self.reward_predictor],
model_lr, **self._oplr_params)
self._trainer_modules.update(obs_encoder=self.obs_encoder,
obs_decoder=self.obs_decoder,
reward_predictor=self.reward_predictor,
rssm=self.rssm,
model_oplr=self.model_oplr)
@property
def decoder_input_dim(self):
return self.stoch_dim + self.deter_dim
def _dreamer_build_rssm(self):
return RecurrentStateSpaceModel(self.stoch_dim,
self.deter_dim,
self.a_dim,
self.obs_encoder.h_dim,
**self._network_settings['rssm']).to(self.device)
@iton
def select_action(self, obs):
if self._is_visual:
obs = get_first_visual(obs)
else:
obs = get_first_vector(obs)
# Compute starting state for planning
# while taking information from current observation (posterior)
embedded_obs = self.obs_encoder(obs) # [B, *]
state_posterior = self.rssm.posterior(self.rnncs['hx'], embedded_obs) # dist # [B, *]
# Initialize action distribution
mean = t.zeros((self.cem_horizon, 1, self.n_copys, self.a_dim)) # [H, 1, B, A]
stddev = t.ones((self.cem_horizon, 1, self.n_copys, self.a_dim)) # [H, 1, B, A]
# Iteratively improve action distribution with CEM
for itr in range(self.cem_iter_nums):
action_candidates = mean + stddev * t.randn(self.cem_horizon, self.cem_candidates, self.n_copys, self.a_dim) # [H, N, B, A]
action_candidates = action_candidates.reshape(self.cem_horizon, -1, self.a_dim) # [H, N*B, A]
# Initialize reward, state, and rnn hidden state
# These are for parallel exploration
total_predicted_reward = t.zeros((self.cem_candidates*self.n_copys, 1)) # [N*B, 1]
state = state_posterior.sample((self.cem_candidates,)) # [N, B, *]
state = state.view(-1, state.shape[-1]) # [N*B, *]
rnn_hidden = self.rnncs['hx'].repeat((self.cem_candidates, 1)) # [B, *] => [N*B, *]
            # Compute total predicted reward by open-loop prediction using prior
for _t in range(self.cem_horizon):
next_state_prior, rnn_hidden = self.rssm.prior(state, t.tanh(action_candidates[_t]), rnn_hidden)
state = next_state_prior.sample() # [N*B, *]
post_feat = t.cat([state, rnn_hidden], -1) # [N*B, *]
total_predicted_reward += self.reward_predictor(post_feat).mean # [N*B, 1]
# update action distribution using top-k samples
total_predicted_reward = total_predicted_reward.view(self.cem_candidates, self.n_copys, 1) # [N, B, 1]
_, top_indexes = total_predicted_reward.topk(self.cem_tops, dim=0, largest=True, sorted=False) # [N', B, 1]
action_candidates = action_candidates.view(self.cem_horizon, self.cem_candidates, self.n_copys, -1) # [H, N, B, A]
top_action_candidates = action_candidates[:, top_indexes, t.arange(self.n_copys).reshape(self.n_copys, 1), t.arange(self.a_dim)] # [H, N', B, A]
mean = top_action_candidates.mean(dim=1, keepdim=True) # [H, 1, B, A]
stddev = top_action_candidates.std(dim=1, unbiased=False, keepdim=True) # [H, 1, B, A]
# Return only first action (replan each state based on new observation)
actions = t.tanh(mean[0].squeeze(0)) # [B, A]
actions = self._exploration(actions)
_, self.rnncs_['hx'] = self.rssm.prior(state_posterior.sample(),
actions,
self.rnncs['hx'])
return actions, Data(action=actions)
def _exploration(self, action: t.Tensor) -> t.Tensor:
"""
:param action: action to take, shape (1,) (if categorical), or (action dim,) (if continuous)
:return: action of the same shape passed in, augmented with some noise
"""
sigma = self._action_sigma if self._is_train_mode else 0.
noise = t.randn(*action.shape) * sigma
return t.clamp(action + noise, -1, 1)
@iton
def _train(self, BATCH):
T, B = BATCH.action.shape[:2]
if self._is_visual:
obs_ = get_first_visual(BATCH.obs_)
else:
obs_ = get_first_vector(BATCH.obs_)
# embed observations with CNN
embedded_observations = self.obs_encoder(obs_) # [T, B, *]
# initialize state and rnn hidden state with 0 vector
state, rnn_hidden = self.rssm.init_state(shape=B) # [B, S], [B, D]
# compute state and rnn hidden sequences and kl loss
kl_loss = 0
states, rnn_hiddens = [], []
for l in range(T):
# if the begin of this episode, then reset to 0.
# No matther whether last episode is beened truncated of not.
state = state * (1. - BATCH.begin_mask[l]) # [B, S]
rnn_hidden = rnn_hidden * (1. - BATCH.begin_mask[l]) # [B, D]
next_state_prior, next_state_posterior, rnn_hidden = self.rssm(state,
BATCH.action[l],
rnn_hidden,
embedded_observations[l]) # a, s_
state = next_state_posterior.rsample() # [B, S] posterior of s_
states.append(state) # [B, S]
rnn_hiddens.append(rnn_hidden) # [B, D]
kl_loss += self._kl_loss(next_state_prior, next_state_posterior)
kl_loss /= T # 1
# compute reconstructed observations and predicted rewards
post_feat = t.cat([t.stack(states, 0), t.stack(rnn_hiddens, 0)], -1) # [T, B, *]
obs_pred = self.obs_decoder(post_feat) # [T, B, C, H, W] or [T, B, *]
reward_pred = self.reward_predictor(post_feat) # [T, B, 1], s_ => r
# compute loss for observation and reward
obs_loss = -t.mean(obs_pred.log_prob(obs_)) # [T, B] => 1
# [T, B, 1]=>1
reward_loss = -t.mean(reward_pred.log_prob(BATCH.reward).unsqueeze(-1))
# add all losses and update model parameters with gradient descent
model_loss = self.kl_scale*kl_loss + obs_loss + self.reward_scale * reward_loss # 1
self.model_oplr.optimize(model_loss)
summaries = dict([
['LEARNING_RATE/model_lr', self.model_oplr.lr],
['LOSS/model_loss', model_loss],
['LOSS/kl_loss', kl_loss],
['LOSS/obs_loss', obs_loss],
['LOSS/reward_loss', reward_loss]
])
return t.ones_like(BATCH.reward), summaries
def _initial_rnncs(self, batch: int) -> Dict[str, np.ndarray]:
return {'hx': np.zeros((batch, self.deter_dim))}
def _kl_loss(self, prior_dist, post_dist):
# 1
return td.kl_divergence(prior_dist, post_dist).clamp(min=self.kl_free_nats).mean()
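# Toy illustration of the CEM refit loop used in `select_action` above, on a
# one-dimensional quadratic reward (standalone sketch, not part of the algorithm):
if __name__ == '__main__':
    mean, std = t.zeros(1), t.ones(1)
    for _ in range(10):
        candidates = mean + std * t.randn(1000, 1) # sample action candidates
        rewards = -(candidates - 0.5) ** 2 # stand-in for predicted return
        top = candidates[rewards.squeeze(-1).topk(100).indices] # keep the elite set
        mean, std = top.mean(0), top.std(0) # refit the sampling distribution
    print(mean) # converges toward the optimum at 0.5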
| [
"[email protected]"
] | |
a9ede54f8c311163f3f1593922779cde560263bc | 3d0838cc0d3cca599c2dfb6bea3274ff3cabe0ac | /discore/models/_channel.py | 316c280d8e0f64347a51b6e2f534a923fc679e15 | [
"MIT"
] | permissive | geek-space-hq/discore | 6a799f411c81b580f0b3e0aac238e5dcf48d899c | 45f4870426e635353b3621f5089880cbb30c683c | refs/heads/develop | 2022-12-15T20:51:50.904945 | 2020-09-16T16:05:36 | 2020-09-16T16:05:36 | 295,093,463 | 3 | 0 | null | 2020-09-16T09:41:21 | 2020-09-13T06:36:35 | Python | UTF-8 | Python | false | false | 5,679 | py | from __future__ import annotations
from datetime import datetime
from enum import Enum, IntEnum
from typing import TYPE_CHECKING, List, Optional, Union
from pydantic import BaseModel
from pydantic.fields import Field
if TYPE_CHECKING:
from ._emoji import Emoji
from ._guild import GuildMember, Role
from ._user import User, UserMentioned
class Channel(BaseModel):
id: str
type: int
guild_id: Optional[str] = None
position: Optional[int] = None
permission_overwrites: Optional["Overwrite"] = None
name: Optional[str] = None
topic: Optional[str] = None
nsfw: bool = Field(default=False)
last_message_id: Optional[str] = None
bitrate: Optional[int] = None
user_limit: Optional[int] = None
rate_limit_per_user: Optional[int] = None
recipients: Optional[List["User"]] = None
icon: Optional[str] = None
parent_id: Optional[str] = None
last_pin_timestamp: Optional[datetime] = None
class ChannelType(IntEnum):
GUILD_TEXT = 0
DM = 1
GUILD_VOICE = 2
GROUP_DM = 3
GUILD_CATEGORY = 4
GUILD_NEWS = 5
GUILD_STORE = 6
class Message(BaseModel):
id: str
channel_id: str
    author: "User"
content: str
timestamp: datetime
tts: bool
mention_everyone: bool
mentions: List["UserMentioned"]
mention_roles: List["Role"]
attachments: List["Attachment"]
embeds: List["Embed"]
pinned: bool
type: "MessageType"
guild_id: Optional[str] = None
member: Optional["GuildMember"] = None
mention_channels: Optional[List["ChannelMention"]] = None
reactions: Optional[List["Reaction"]] = None
nonce: Optional[Union[int, str]] = None
webhook_id: Optional[str] = None
activity: Optional["MessageActivity"] = None
application: Optional["MessageApplication"] = None
message_reference: Optional["MessageReference"] = None
flags: Optional[int] = None
class MessageType(IntEnum):
DEFAULT = 0
RECIPIENT_ADD = 1
RECIPIENT_REMOVE = 2
CALL = 3
CHANNEL_NAME_CHANGE = 4
CHANNEL_ICON_CHANGE = 5
CHANNEL_PINNED_MESSAGE = 6
GUILD_MEMBER_JOIN = 7
USER_PREMIUM_GUILD_SUBSCRIPTION = 8
USER_PREMIUM_GUILD_SUBSCRIPTION_TIER_1 = 9
USER_PREMIUM_GUILD_SUBSCRIPTION_TIER_2 = 10
USER_PREMIUM_GUILD_SUBSCRIPTION_TIER_3 = 11
CHANNEL_FOLLOW_ADD = 12
GUILD_DISCOVERY_DISQUALIFIED = 14
GUILD_DISCOVERY_REQUALIFIED = 15
class MessageActivity(BaseModel):
type: int
party_id: Optional[str] = None
class MessageApplication(BaseModel):
id: str
description: str
name: str
cover_image: Optional[str] = None
icon: Optional[str] = None
class MessageReference(BaseModel):
channel_id: str
message_id: Optional[str] = None
guild_id: Optional[str] = None
class MessageActivityType(IntEnum):
JOIN = 1
SPECTATE = 2
LISTEN = 3
JOIN_REQUEST = 5
class MessageFlag(IntEnum):
CROSSPOSTED = 1 << 0
IS_CROSSPOST = 1 << 1
SUPPRESS_EMBEDS = 1 << 2
SOURCE_MESSAGE_DELETED = 1 << 3
URGENT = 1 << 4
class FollowedChannel(BaseModel):
channel_id: str
webhook_id: str
class Reaction(BaseModel):
count: int
me: bool
emoji: "Emoji"
class OverwriteReceiving(BaseModel):
id: str
type: str
allow: int
allow_new: str
deny: int
deny_new: str
class OverwriteSending(BaseModel):
id: str
type: str
allow: Union[int, str]
deny: Union[int, str]
Overwrite = Union[OverwriteReceiving, OverwriteSending]
class Embed(BaseModel):
title: Optional[str] = None
type: Optional["EmbedType"] = None
description: Optional[str] = None
url: Optional[str] = None
timestamp: Optional[datetime] = None
color: Optional[int] = None
footer: Optional["EmbedFooter"] = None
image: Optional["EmbedImage"] = None
thumbnail: Optional["EmbedThumbnail"] = None
video: Optional["EmbedVideo"] = None
provider: Optional["EmbedProvider"] = None
author: Optional["EmbedAuthor"] = None
fields_: Optional[List["EmbedField"]] = Field(default=None, alias="fields")
class EmbedType(str, Enum):
rich = "rich"
image = "image"
video = "video"
gifv = "gifv"
article = "article"
link = "link"
class EmbedThumbnail(BaseModel):
url: Optional[str] = None
proxy_url: Optional[str] = None
height: Optional[int] = None
width: Optional[int] = None
class EmbedVideo(BaseModel):
url: Optional[str] = None
height: Optional[int] = None
width: Optional[int] = None
class EmbedImage(BaseModel):
url: Optional[str] = None
proxy_url: Optional[str] = None
height: Optional[int] = None
width: Optional[int] = None
class EmbedProvider(BaseModel):
name: Optional[str] = None
url: Optional[str] = None
class EmbedAuthor(BaseModel):
name: Optional[str] = None
url: Optional[str] = None
icon_url: Optional[str] = None
proxy_icon_url: Optional[str] = None
class EmbedFooter(BaseModel):
text: str
icon_url: Optional[str] = None
proxy_icon_url: Optional[str] = None
class EmbedField(BaseModel):
name: str
value: str
inline: Optional[bool] = None
class Attachment(BaseModel):
id: str
filename: str
size: int
url: str
proxy_url: str
height: Optional[int] = None
width: Optional[int] = None
class ChannelMention(BaseModel):
id: str
guild_id: str
type: "ChannelType"
name: str
class AllowedMentionType(str, Enum):
ROLE_MENTIONS = "roles"
USER_MENTIONS = "users"
    EVERYONE_MENTIONS = "everyone"
class AllowedMention(BaseModel):
parse: "AllowedMentionType"
roles: List[str]
users: List[str]
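# Minimal usage sketch (hypothetical values; assumes pydantic v1 semantics,
# which the Field(..., alias=...) style above targets):
if __name__ == '__main__':
    field = EmbedField(name="latency", value="42 ms", inline=True)
    ref = MessageReference(channel_id="1234567890")
    print(field.json(), ref.json())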
| [
"[email protected]"
] | |
a07cab13bbac62cbe9da389c04efe73253dd55ba | c6b1919498776cfc408076246390e2bba56f4c4e | /devops_tool/settings.py | e422d3a5d42866f11728863fdae9c727b4dd35e6 | [] | no_license | huozhihui/devops_tool | f2ceaf7f1828853e43859645f5ab36a00b0fa7df | 0eb7b4a14203e30bb2c262075864cec0db21829f | refs/heads/master | 2020-05-20T19:02:47.855055 | 2017-04-18T05:25:59 | 2017-04-18T05:25:59 | 84,509,976 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,196 | py | """
Django settings for devops_tool project.
Generated by 'django-admin startproject' using Django 1.10.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'xv*oxmw8)_0jw=e!f6bi1bop1#cpi4_2=jy2da04gf*1!h2he*'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
LOGIN_REDIRECT_URL = '/role_manage'
# custom
# ===================================
TMP = os.path.join(BASE_DIR, 'tmp')
LOCALE_PATHS = (
os.path.join(BASE_DIR, 'locale'),
)
UPLOAD_FILE = (
os.path.join(TMP, 'upload_file')
)
# ANSIBLE = "/etc/ansible"
# ANSIBLE_ROLES = os.path.join(ANSIBLE, 'roles')
# ANSIBLE_YAMLS = os.path.join(ANSIBLE)
ANSIBLE = "/Users/huozhihui/huo/paas_deploy"
ANSIBLE_ROLES = os.path.join(ANSIBLE, 'roles')
ANSIBLE_YAMLS = ANSIBLE
ANSIBLE_HOSTS = ANSIBLE
ANSIBLE_INIT_USER = 'ubunt'
ANSIBLE_INIT_PASS = 'huo244'
# ===================================
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'channels',
'ext_command',
'client',
'workflow',
'client.templatetags.ext_template',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'devops_tool.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
# 'DIRS': [os.path.join(os.path.dirname(__file__), 'templates').replace('\\', '/')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'devops_tool.wsgi.application'
CHANNEL_LAYERS = {
"default": {
"BACKEND": "asgiref.inmemory.ChannelLayer",
"ROUTING": "devops_tool.routing.channel_routing",
},
}
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
# LANGUAGE_CODE = 'en-us'
LANGUAGE_CODE = 'zh_cn'
TIME_ZONE = 'Asia/Shanghai'
# TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = False
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': os.path.join(BASE_DIR, 'console.log'),
},
# 'console': {
# 'level': 'INFO',
# 'class': 'logging.StreamHandler',
# # 'formatter': 'simple'
# },
},
'loggers': {
'django.request': {
'handlers': ['file'],
'level': 'INFO',
'propagate': False,
},
},
}
# LOGGING = {
# 'version': 1,
# 'disable_existing_loggers': False,
# 'handlers': {
# 'console': {
# 'class': 'logging.StreamHandler',
# # 'level': 'INFO',
# # 'filename': os.path.join(BASE_DIR, 'console.log'),
# # 'maxBytes': 1024 * 1024 * 15, # 15MB
# # 'backupCount': 10,
# },
# },
# 'loggers': {
# 'django': {
# 'handlers': ['console'],
# 'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
# },
# },
# }
# LOGGING = {
# 'version': 1,
# 'disable_existing_loggers': False,
# 'filters': {
# 'require_debug_false': {
# '()': 'django.utils.log.RequireDebugFalse'
# }
# },
# 'handlers': {
# 'mail_admins': {
# 'level': 'ERROR',
# 'filters': ['require_debug_false'],
# 'class': 'django.utils.log.AdminEmailHandler'
# },
# 'applogfile': {
# 'level':'INFO',
# 'class':'logging.handlers.RotatingFileHandler',
# 'filename': os.path.join(BASE_DIR, 'APPNAME.log'),
# 'maxBytes': 1024*1024*15, # 15MB
# 'backupCount': 10,
# },
# },
# 'loggers': {
# 'django.request': {
# 'handlers': ['applogfile'],
# 'level': 'INFO',
# 'propagate': True,
# },
# }
# }
| [
"[email protected]"
] | |
6af287c25e7567cc97a37b41e9b9df7d8d589d3a | 69427716f39ddb8541b7dca39d26015a26e04104 | /学习脚本/Python基础学习脚本/select_socket_server.py | 619aaeec0a826ffbd3a7f9299ce892eb9ef5e5a3 | [] | no_license | xiatian0918/auto_scripts | a0fa80f3ec8a5e49e1b049ebed39a8ae3e7cdf7a | 413c614260340557cf9e615b1339eae68a8f9acf | refs/heads/master | 2020-05-14T13:32:46.556775 | 2020-01-21T00:18:56 | 2020-01-21T00:18:56 | 181,812,978 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 606 | py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
# author: xiatian
import select,socket,sys,queue
server = socket.socket()
server.bind(('localhost',9000))
server.listen(1000)
server.setblocking(False) # non-blocking
inputs = [server,]
outputs = []
while True:
    readable, writeable, exceptional = select.select(inputs, outputs, inputs)
    print(readable, writeable, exceptional)
    for r in readable:
        if r is server: # the listening socket is readable: a new connection arrived
            conn, addr = server.accept()
            print("new connection", addr)
            inputs.append(conn)
        else:
            data = r.recv(1024)
            if data:
                print("received data", data)
            else: # empty read means the client closed the connection
                inputs.remove(r)
                r.close()
| [
"[email protected]"
] | |
7f9ea1866114fe062661f28006ec80d13194dd03 | a8062308fb3bf6c8952257504a50c3e97d801294 | /problems/N875_Koko_Eating_Bananas.py | 2d632a0fdaa6c0078dec7406cb6fa8e0e852a916 | [] | no_license | wan-catherine/Leetcode | 650d697a873ad23c0b64d08ad525bf9fcdb62b1b | 238995bd23c8a6c40c6035890e94baa2473d4bbc | refs/heads/master | 2023-09-01T00:56:27.677230 | 2023-08-31T00:49:31 | 2023-08-31T00:49:31 | 143,770,000 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 878 | py | class Solution(object):
def minEatingSpeed(self, piles, H):
"""
:type piles: List[int]
:type H: int
:rtype: int
"""
length = len(piles)
if length == H:
return max(piles)
right = max(piles)
total = sum(piles)
if total <= H:
return 1
left = total // H
while left < right:
mid = (right - left) // 2 + left
if self.helper(mid, piles, H):
right = mid
else:
left = mid + 1
return left
def helper(self, value, piles, H):
hours = 0
for pile in piles:
if pile % value:
hours += pile // value + 1
else:
hours += pile // value
if hours > H:
return False
else:
return True
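# Quick sanity check (example 1 from the LeetCode 875 problem statement):
if __name__ == '__main__':
    assert Solution().minEatingSpeed([3, 6, 7, 11], 8) == 4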
| [
"[email protected]"
] | |
85a8fe446187a595ad11c4c0a6dba3786a9af595 | 5e6d8b9989247801718dd1f10009f0f7f54c1eb4 | /sdk/python/pulumi_azure_native/web/v20201001/web_app_auth_settings.py | 8c0f10d3d946c6e637d1c161b9fabc2e4c33aae2 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | vivimouret29/pulumi-azure-native | d238a8f91688c9bf09d745a7280b9bf2dd6d44e0 | 1cbd988bcb2aa75a83e220cb5abeb805d6484fce | refs/heads/master | 2023-08-26T05:50:40.560691 | 2021-10-21T09:25:07 | 2021-10-21T09:25:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 85,144 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
__all__ = ['WebAppAuthSettingsArgs', 'WebAppAuthSettings']
@pulumi.input_type
class WebAppAuthSettingsArgs:
def __init__(__self__, *,
name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
aad_claims_authorization: Optional[pulumi.Input[str]] = None,
additional_login_params: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_audiences: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_external_redirect_urls: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
auth_file_path: Optional[pulumi.Input[str]] = None,
client_id: Optional[pulumi.Input[str]] = None,
client_secret: Optional[pulumi.Input[str]] = None,
client_secret_certificate_thumbprint: Optional[pulumi.Input[str]] = None,
client_secret_setting_name: Optional[pulumi.Input[str]] = None,
default_provider: Optional[pulumi.Input['BuiltInAuthenticationProvider']] = None,
enabled: Optional[pulumi.Input[bool]] = None,
facebook_app_id: Optional[pulumi.Input[str]] = None,
facebook_app_secret: Optional[pulumi.Input[str]] = None,
facebook_app_secret_setting_name: Optional[pulumi.Input[str]] = None,
facebook_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
git_hub_client_id: Optional[pulumi.Input[str]] = None,
git_hub_client_secret: Optional[pulumi.Input[str]] = None,
git_hub_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
git_hub_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
google_client_id: Optional[pulumi.Input[str]] = None,
google_client_secret: Optional[pulumi.Input[str]] = None,
google_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
google_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
is_auth_from_file: Optional[pulumi.Input[str]] = None,
issuer: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
microsoft_account_client_id: Optional[pulumi.Input[str]] = None,
microsoft_account_client_secret: Optional[pulumi.Input[str]] = None,
microsoft_account_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
microsoft_account_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
runtime_version: Optional[pulumi.Input[str]] = None,
token_refresh_extension_hours: Optional[pulumi.Input[float]] = None,
token_store_enabled: Optional[pulumi.Input[bool]] = None,
twitter_consumer_key: Optional[pulumi.Input[str]] = None,
twitter_consumer_secret: Optional[pulumi.Input[str]] = None,
twitter_consumer_secret_setting_name: Optional[pulumi.Input[str]] = None,
unauthenticated_client_action: Optional[pulumi.Input['UnauthenticatedClientAction']] = None,
validate_issuer: Optional[pulumi.Input[bool]] = None):
"""
The set of arguments for constructing a WebAppAuthSettings resource.
:param pulumi.Input[str] name: Name of web app.
:param pulumi.Input[str] resource_group_name: Name of the resource group to which the resource belongs.
:param pulumi.Input[str] aad_claims_authorization: Gets a JSON string containing the Azure AD Acl settings.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_login_params: Login parameters to send to the OpenID Connect authorization endpoint when
a user logs in. Each parameter must be in the form "key=value".
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_audiences: Allowed audience values to consider when validating JWTs issued by
Azure Active Directory. Note that the <code>ClientID</code> value is always considered an
allowed audience, regardless of this setting.
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_external_redirect_urls: External URLs that can be redirected to as part of logging in or logging out of the app. Note that the query string part of the URL is ignored.
This is an advanced setting typically only needed by Windows Store application backends.
Note that URLs within the current domain are always implicitly allowed.
:param pulumi.Input[str] auth_file_path: The path of the config file containing auth settings.
If the path is relative, base will the site's root directory.
:param pulumi.Input[str] client_id: The Client ID of this relying party application, known as the client_id.
This setting is required for enabling OpenID Connection authentication with Azure Active Directory or
other 3rd party OpenID Connect providers.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
:param pulumi.Input[str] client_secret: The Client Secret of this relying party application (in Azure Active Directory, this is also referred to as the Key).
This setting is optional. If no client secret is configured, the OpenID Connect implicit auth flow is used to authenticate end users.
Otherwise, the OpenID Connect Authorization Code Flow is used to authenticate end users.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
:param pulumi.Input[str] client_secret_certificate_thumbprint: An alternative to the client secret, that is the thumbprint of a certificate used for signing purposes. This property acts as
a replacement for the Client Secret. It is also optional.
:param pulumi.Input[str] client_secret_setting_name: The app setting name that contains the client secret of the relying party application.
:param pulumi.Input['BuiltInAuthenticationProvider'] default_provider: The default authentication provider to use when multiple providers are configured.
This setting is only needed if multiple providers are configured and the unauthenticated client
action is set to "RedirectToLoginPage".
:param pulumi.Input[bool] enabled: <code>true</code> if the Authentication / Authorization feature is enabled for the current app; otherwise, <code>false</code>.
:param pulumi.Input[str] facebook_app_id: The App ID of the Facebook app used for login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
:param pulumi.Input[str] facebook_app_secret: The App Secret of the Facebook app used for Facebook Login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
:param pulumi.Input[str] facebook_app_secret_setting_name: The app setting name that contains the app secret used for Facebook Login.
:param pulumi.Input[Sequence[pulumi.Input[str]]] facebook_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Facebook Login authentication.
This setting is optional.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
:param pulumi.Input[str] git_hub_client_id: The Client Id of the GitHub app used for login.
This setting is required for enabling Github login
:param pulumi.Input[str] git_hub_client_secret: The Client Secret of the GitHub app used for Github Login.
This setting is required for enabling Github login.
:param pulumi.Input[str] git_hub_client_secret_setting_name: The app setting name that contains the client secret of the Github
app used for GitHub Login.
:param pulumi.Input[Sequence[pulumi.Input[str]]] git_hub_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of GitHub Login authentication.
This setting is optional
:param pulumi.Input[str] google_client_id: The OpenID Connect Client ID for the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[str] google_client_secret: The client secret associated with the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[str] google_client_secret_setting_name: The app setting name that contains the client secret associated with
the Google web application.
:param pulumi.Input[Sequence[pulumi.Input[str]]] google_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Google Sign-In authentication.
This setting is optional. If not specified, "openid", "profile", and "email" are used as default scopes.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[str] is_auth_from_file: "true" if the auth config settings should be read from a file,
"false" otherwise
:param pulumi.Input[str] issuer: The OpenID Connect Issuer URI that represents the entity which issues access tokens for this application.
When using Azure Active Directory, this value is the URI of the directory tenant, e.g. https://sts.windows.net/{tenant-guid}/.
This URI is a case-sensitive identifier for the token issuer.
More information on OpenID Connect Discovery: http://openid.net/specs/openid-connect-discovery-1_0.html
:param pulumi.Input[str] kind: Kind of resource.
:param pulumi.Input[str] microsoft_account_client_id: The OAuth 2.0 client ID that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
:param pulumi.Input[str] microsoft_account_client_secret: The OAuth 2.0 client secret that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
:param pulumi.Input[str] microsoft_account_client_secret_setting_name: The app setting name containing the OAuth 2.0 client secret that was created for the
app used for authentication.
:param pulumi.Input[Sequence[pulumi.Input[str]]] microsoft_account_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Microsoft Account authentication.
This setting is optional. If not specified, "wl.basic" is used as the default scope.
Microsoft Account Scopes and permissions documentation: https://msdn.microsoft.com/en-us/library/dn631845.aspx
:param pulumi.Input[str] runtime_version: The RuntimeVersion of the Authentication / Authorization feature in use for the current app.
The setting in this value can control the behavior of certain features in the Authentication / Authorization module.
:param pulumi.Input[float] token_refresh_extension_hours: The number of hours after session token expiration that a session token can be used to
call the token refresh API. The default is 72 hours.
:param pulumi.Input[bool] token_store_enabled: <code>true</code> to durably store platform-specific security tokens that are obtained during login flows; otherwise, <code>false</code>.
The default is <code>false</code>.
:param pulumi.Input[str] twitter_consumer_key: The OAuth 1.0a consumer key of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
:param pulumi.Input[str] twitter_consumer_secret: The OAuth 1.0a consumer secret of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
:param pulumi.Input[str] twitter_consumer_secret_setting_name: The app setting name that contains the OAuth 1.0a consumer secret of the Twitter
application used for sign-in.
:param pulumi.Input['UnauthenticatedClientAction'] unauthenticated_client_action: The action to take when an unauthenticated client attempts to access the app.
:param pulumi.Input[bool] validate_issuer: Gets a value indicating whether the issuer should be a valid HTTPS url and be validated as such.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if aad_claims_authorization is not None:
pulumi.set(__self__, "aad_claims_authorization", aad_claims_authorization)
if additional_login_params is not None:
pulumi.set(__self__, "additional_login_params", additional_login_params)
if allowed_audiences is not None:
pulumi.set(__self__, "allowed_audiences", allowed_audiences)
if allowed_external_redirect_urls is not None:
pulumi.set(__self__, "allowed_external_redirect_urls", allowed_external_redirect_urls)
if auth_file_path is not None:
pulumi.set(__self__, "auth_file_path", auth_file_path)
if client_id is not None:
pulumi.set(__self__, "client_id", client_id)
if client_secret is not None:
pulumi.set(__self__, "client_secret", client_secret)
if client_secret_certificate_thumbprint is not None:
pulumi.set(__self__, "client_secret_certificate_thumbprint", client_secret_certificate_thumbprint)
if client_secret_setting_name is not None:
pulumi.set(__self__, "client_secret_setting_name", client_secret_setting_name)
if default_provider is not None:
pulumi.set(__self__, "default_provider", default_provider)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if facebook_app_id is not None:
pulumi.set(__self__, "facebook_app_id", facebook_app_id)
if facebook_app_secret is not None:
pulumi.set(__self__, "facebook_app_secret", facebook_app_secret)
if facebook_app_secret_setting_name is not None:
pulumi.set(__self__, "facebook_app_secret_setting_name", facebook_app_secret_setting_name)
if facebook_o_auth_scopes is not None:
pulumi.set(__self__, "facebook_o_auth_scopes", facebook_o_auth_scopes)
if git_hub_client_id is not None:
pulumi.set(__self__, "git_hub_client_id", git_hub_client_id)
if git_hub_client_secret is not None:
pulumi.set(__self__, "git_hub_client_secret", git_hub_client_secret)
if git_hub_client_secret_setting_name is not None:
pulumi.set(__self__, "git_hub_client_secret_setting_name", git_hub_client_secret_setting_name)
if git_hub_o_auth_scopes is not None:
pulumi.set(__self__, "git_hub_o_auth_scopes", git_hub_o_auth_scopes)
if google_client_id is not None:
pulumi.set(__self__, "google_client_id", google_client_id)
if google_client_secret is not None:
pulumi.set(__self__, "google_client_secret", google_client_secret)
if google_client_secret_setting_name is not None:
pulumi.set(__self__, "google_client_secret_setting_name", google_client_secret_setting_name)
if google_o_auth_scopes is not None:
pulumi.set(__self__, "google_o_auth_scopes", google_o_auth_scopes)
if is_auth_from_file is not None:
pulumi.set(__self__, "is_auth_from_file", is_auth_from_file)
if issuer is not None:
pulumi.set(__self__, "issuer", issuer)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if microsoft_account_client_id is not None:
pulumi.set(__self__, "microsoft_account_client_id", microsoft_account_client_id)
if microsoft_account_client_secret is not None:
pulumi.set(__self__, "microsoft_account_client_secret", microsoft_account_client_secret)
if microsoft_account_client_secret_setting_name is not None:
pulumi.set(__self__, "microsoft_account_client_secret_setting_name", microsoft_account_client_secret_setting_name)
if microsoft_account_o_auth_scopes is not None:
pulumi.set(__self__, "microsoft_account_o_auth_scopes", microsoft_account_o_auth_scopes)
if runtime_version is not None:
pulumi.set(__self__, "runtime_version", runtime_version)
if token_refresh_extension_hours is not None:
pulumi.set(__self__, "token_refresh_extension_hours", token_refresh_extension_hours)
if token_store_enabled is not None:
pulumi.set(__self__, "token_store_enabled", token_store_enabled)
if twitter_consumer_key is not None:
pulumi.set(__self__, "twitter_consumer_key", twitter_consumer_key)
if twitter_consumer_secret is not None:
pulumi.set(__self__, "twitter_consumer_secret", twitter_consumer_secret)
if twitter_consumer_secret_setting_name is not None:
pulumi.set(__self__, "twitter_consumer_secret_setting_name", twitter_consumer_secret_setting_name)
if unauthenticated_client_action is not None:
pulumi.set(__self__, "unauthenticated_client_action", unauthenticated_client_action)
if validate_issuer is not None:
pulumi.set(__self__, "validate_issuer", validate_issuer)
@property
@pulumi.getter
def name(self) -> pulumi.Input[str]:
"""
Name of web app.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: pulumi.Input[str]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
Name of the resource group to which the resource belongs.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="aadClaimsAuthorization")
def aad_claims_authorization(self) -> Optional[pulumi.Input[str]]:
"""
        Gets a JSON string containing the Azure AD ACL settings.
"""
return pulumi.get(self, "aad_claims_authorization")
@aad_claims_authorization.setter
def aad_claims_authorization(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "aad_claims_authorization", value)
@property
@pulumi.getter(name="additionalLoginParams")
def additional_login_params(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Login parameters to send to the OpenID Connect authorization endpoint when
a user logs in. Each parameter must be in the form "key=value".
"""
return pulumi.get(self, "additional_login_params")
@additional_login_params.setter
def additional_login_params(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "additional_login_params", value)
@property
@pulumi.getter(name="allowedAudiences")
def allowed_audiences(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Allowed audience values to consider when validating JWTs issued by
Azure Active Directory. Note that the <code>ClientID</code> value is always considered an
allowed audience, regardless of this setting.
"""
return pulumi.get(self, "allowed_audiences")
@allowed_audiences.setter
def allowed_audiences(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "allowed_audiences", value)
@property
@pulumi.getter(name="allowedExternalRedirectUrls")
def allowed_external_redirect_urls(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
External URLs that can be redirected to as part of logging in or logging out of the app. Note that the query string part of the URL is ignored.
This is an advanced setting typically only needed by Windows Store application backends.
Note that URLs within the current domain are always implicitly allowed.
"""
return pulumi.get(self, "allowed_external_redirect_urls")
@allowed_external_redirect_urls.setter
def allowed_external_redirect_urls(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "allowed_external_redirect_urls", value)
@property
@pulumi.getter(name="authFilePath")
def auth_file_path(self) -> Optional[pulumi.Input[str]]:
"""
The path of the config file containing auth settings.
        If the path is relative, the base will be the site's root directory.
"""
return pulumi.get(self, "auth_file_path")
@auth_file_path.setter
def auth_file_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "auth_file_path", value)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> Optional[pulumi.Input[str]]:
"""
The Client ID of this relying party application, known as the client_id.
        This setting is required for enabling OpenID Connect authentication with Azure Active Directory or
other 3rd party OpenID Connect providers.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
"""
return pulumi.get(self, "client_id")
@client_id.setter
def client_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_id", value)
@property
@pulumi.getter(name="clientSecret")
def client_secret(self) -> Optional[pulumi.Input[str]]:
"""
The Client Secret of this relying party application (in Azure Active Directory, this is also referred to as the Key).
This setting is optional. If no client secret is configured, the OpenID Connect implicit auth flow is used to authenticate end users.
Otherwise, the OpenID Connect Authorization Code Flow is used to authenticate end users.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
"""
return pulumi.get(self, "client_secret")
@client_secret.setter
def client_secret(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_secret", value)
@property
@pulumi.getter(name="clientSecretCertificateThumbprint")
def client_secret_certificate_thumbprint(self) -> Optional[pulumi.Input[str]]:
"""
        An alternative to the client secret, that is, the thumbprint of a certificate used for signing purposes. This property acts as
a replacement for the Client Secret. It is also optional.
"""
return pulumi.get(self, "client_secret_certificate_thumbprint")
@client_secret_certificate_thumbprint.setter
def client_secret_certificate_thumbprint(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_secret_certificate_thumbprint", value)
@property
@pulumi.getter(name="clientSecretSettingName")
def client_secret_setting_name(self) -> Optional[pulumi.Input[str]]:
"""
The app setting name that contains the client secret of the relying party application.
"""
return pulumi.get(self, "client_secret_setting_name")
@client_secret_setting_name.setter
def client_secret_setting_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_secret_setting_name", value)
@property
@pulumi.getter(name="defaultProvider")
def default_provider(self) -> Optional[pulumi.Input['BuiltInAuthenticationProvider']]:
"""
The default authentication provider to use when multiple providers are configured.
This setting is only needed if multiple providers are configured and the unauthenticated client
action is set to "RedirectToLoginPage".
"""
return pulumi.get(self, "default_provider")
@default_provider.setter
def default_provider(self, value: Optional[pulumi.Input['BuiltInAuthenticationProvider']]):
pulumi.set(self, "default_provider", value)
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
"""
<code>true</code> if the Authentication / Authorization feature is enabled for the current app; otherwise, <code>false</code>.
"""
return pulumi.get(self, "enabled")
@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "enabled", value)
@property
@pulumi.getter(name="facebookAppId")
def facebook_app_id(self) -> Optional[pulumi.Input[str]]:
"""
The App ID of the Facebook app used for login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_app_id")
@facebook_app_id.setter
def facebook_app_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "facebook_app_id", value)
@property
@pulumi.getter(name="facebookAppSecret")
def facebook_app_secret(self) -> Optional[pulumi.Input[str]]:
"""
The App Secret of the Facebook app used for Facebook Login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_app_secret")
@facebook_app_secret.setter
def facebook_app_secret(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "facebook_app_secret", value)
@property
@pulumi.getter(name="facebookAppSecretSettingName")
def facebook_app_secret_setting_name(self) -> Optional[pulumi.Input[str]]:
"""
The app setting name that contains the app secret used for Facebook Login.
"""
return pulumi.get(self, "facebook_app_secret_setting_name")
@facebook_app_secret_setting_name.setter
def facebook_app_secret_setting_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "facebook_app_secret_setting_name", value)
@property
@pulumi.getter(name="facebookOAuthScopes")
def facebook_o_auth_scopes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Facebook Login authentication.
This setting is optional.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_o_auth_scopes")
@facebook_o_auth_scopes.setter
def facebook_o_auth_scopes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "facebook_o_auth_scopes", value)
@property
@pulumi.getter(name="gitHubClientId")
def git_hub_client_id(self) -> Optional[pulumi.Input[str]]:
"""
        The Client ID of the GitHub app used for login.
        This setting is required for enabling GitHub Login.
"""
return pulumi.get(self, "git_hub_client_id")
@git_hub_client_id.setter
def git_hub_client_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "git_hub_client_id", value)
@property
@pulumi.getter(name="gitHubClientSecret")
def git_hub_client_secret(self) -> Optional[pulumi.Input[str]]:
"""
        The Client Secret of the GitHub app used for GitHub Login.
        This setting is required for enabling GitHub Login.
"""
return pulumi.get(self, "git_hub_client_secret")
@git_hub_client_secret.setter
def git_hub_client_secret(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "git_hub_client_secret", value)
@property
@pulumi.getter(name="gitHubClientSecretSettingName")
def git_hub_client_secret_setting_name(self) -> Optional[pulumi.Input[str]]:
"""
        The app setting name that contains the client secret of the GitHub
app used for GitHub Login.
"""
return pulumi.get(self, "git_hub_client_secret_setting_name")
@git_hub_client_secret_setting_name.setter
def git_hub_client_secret_setting_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "git_hub_client_secret_setting_name", value)
@property
@pulumi.getter(name="gitHubOAuthScopes")
def git_hub_o_auth_scopes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The OAuth 2.0 scopes that will be requested as part of GitHub Login authentication.
        This setting is optional.
"""
return pulumi.get(self, "git_hub_o_auth_scopes")
@git_hub_o_auth_scopes.setter
def git_hub_o_auth_scopes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "git_hub_o_auth_scopes", value)
@property
@pulumi.getter(name="googleClientId")
def google_client_id(self) -> Optional[pulumi.Input[str]]:
"""
The OpenID Connect Client ID for the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_client_id")
@google_client_id.setter
def google_client_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "google_client_id", value)
@property
@pulumi.getter(name="googleClientSecret")
def google_client_secret(self) -> Optional[pulumi.Input[str]]:
"""
The client secret associated with the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_client_secret")
@google_client_secret.setter
def google_client_secret(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "google_client_secret", value)
@property
@pulumi.getter(name="googleClientSecretSettingName")
def google_client_secret_setting_name(self) -> Optional[pulumi.Input[str]]:
"""
The app setting name that contains the client secret associated with
the Google web application.
"""
return pulumi.get(self, "google_client_secret_setting_name")
@google_client_secret_setting_name.setter
def google_client_secret_setting_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "google_client_secret_setting_name", value)
@property
@pulumi.getter(name="googleOAuthScopes")
def google_o_auth_scopes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Google Sign-In authentication.
This setting is optional. If not specified, "openid", "profile", and "email" are used as default scopes.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_o_auth_scopes")
@google_o_auth_scopes.setter
def google_o_auth_scopes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "google_o_auth_scopes", value)
@property
@pulumi.getter(name="isAuthFromFile")
def is_auth_from_file(self) -> Optional[pulumi.Input[str]]:
"""
"true" if the auth config settings should be read from a file,
"false" otherwise
"""
return pulumi.get(self, "is_auth_from_file")
@is_auth_from_file.setter
def is_auth_from_file(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "is_auth_from_file", value)
@property
@pulumi.getter
def issuer(self) -> Optional[pulumi.Input[str]]:
"""
The OpenID Connect Issuer URI that represents the entity which issues access tokens for this application.
When using Azure Active Directory, this value is the URI of the directory tenant, e.g. https://sts.windows.net/{tenant-guid}/.
This URI is a case-sensitive identifier for the token issuer.
More information on OpenID Connect Discovery: http://openid.net/specs/openid-connect-discovery-1_0.html
"""
return pulumi.get(self, "issuer")
@issuer.setter
def issuer(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "issuer", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter(name="microsoftAccountClientId")
def microsoft_account_client_id(self) -> Optional[pulumi.Input[str]]:
"""
The OAuth 2.0 client ID that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
"""
return pulumi.get(self, "microsoft_account_client_id")
@microsoft_account_client_id.setter
def microsoft_account_client_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "microsoft_account_client_id", value)
@property
@pulumi.getter(name="microsoftAccountClientSecret")
def microsoft_account_client_secret(self) -> Optional[pulumi.Input[str]]:
"""
The OAuth 2.0 client secret that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
"""
return pulumi.get(self, "microsoft_account_client_secret")
@microsoft_account_client_secret.setter
def microsoft_account_client_secret(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "microsoft_account_client_secret", value)
@property
@pulumi.getter(name="microsoftAccountClientSecretSettingName")
def microsoft_account_client_secret_setting_name(self) -> Optional[pulumi.Input[str]]:
"""
The app setting name containing the OAuth 2.0 client secret that was created for the
app used for authentication.
"""
return pulumi.get(self, "microsoft_account_client_secret_setting_name")
@microsoft_account_client_secret_setting_name.setter
def microsoft_account_client_secret_setting_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "microsoft_account_client_secret_setting_name", value)
@property
@pulumi.getter(name="microsoftAccountOAuthScopes")
def microsoft_account_o_auth_scopes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Microsoft Account authentication.
This setting is optional. If not specified, "wl.basic" is used as the default scope.
Microsoft Account Scopes and permissions documentation: https://msdn.microsoft.com/en-us/library/dn631845.aspx
"""
return pulumi.get(self, "microsoft_account_o_auth_scopes")
@microsoft_account_o_auth_scopes.setter
def microsoft_account_o_auth_scopes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "microsoft_account_o_auth_scopes", value)
@property
@pulumi.getter(name="runtimeVersion")
def runtime_version(self) -> Optional[pulumi.Input[str]]:
"""
The RuntimeVersion of the Authentication / Authorization feature in use for the current app.
The setting in this value can control the behavior of certain features in the Authentication / Authorization module.
"""
return pulumi.get(self, "runtime_version")
@runtime_version.setter
def runtime_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "runtime_version", value)
@property
@pulumi.getter(name="tokenRefreshExtensionHours")
def token_refresh_extension_hours(self) -> Optional[pulumi.Input[float]]:
"""
The number of hours after session token expiration that a session token can be used to
call the token refresh API. The default is 72 hours.
"""
return pulumi.get(self, "token_refresh_extension_hours")
@token_refresh_extension_hours.setter
def token_refresh_extension_hours(self, value: Optional[pulumi.Input[float]]):
pulumi.set(self, "token_refresh_extension_hours", value)
@property
@pulumi.getter(name="tokenStoreEnabled")
def token_store_enabled(self) -> Optional[pulumi.Input[bool]]:
"""
<code>true</code> to durably store platform-specific security tokens that are obtained during login flows; otherwise, <code>false</code>.
The default is <code>false</code>.
"""
return pulumi.get(self, "token_store_enabled")
@token_store_enabled.setter
def token_store_enabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "token_store_enabled", value)
@property
@pulumi.getter(name="twitterConsumerKey")
def twitter_consumer_key(self) -> Optional[pulumi.Input[str]]:
"""
The OAuth 1.0a consumer key of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
"""
return pulumi.get(self, "twitter_consumer_key")
@twitter_consumer_key.setter
def twitter_consumer_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "twitter_consumer_key", value)
@property
@pulumi.getter(name="twitterConsumerSecret")
def twitter_consumer_secret(self) -> Optional[pulumi.Input[str]]:
"""
The OAuth 1.0a consumer secret of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
"""
return pulumi.get(self, "twitter_consumer_secret")
@twitter_consumer_secret.setter
def twitter_consumer_secret(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "twitter_consumer_secret", value)
@property
@pulumi.getter(name="twitterConsumerSecretSettingName")
def twitter_consumer_secret_setting_name(self) -> Optional[pulumi.Input[str]]:
"""
The app setting name that contains the OAuth 1.0a consumer secret of the Twitter
application used for sign-in.
"""
return pulumi.get(self, "twitter_consumer_secret_setting_name")
@twitter_consumer_secret_setting_name.setter
def twitter_consumer_secret_setting_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "twitter_consumer_secret_setting_name", value)
@property
@pulumi.getter(name="unauthenticatedClientAction")
def unauthenticated_client_action(self) -> Optional[pulumi.Input['UnauthenticatedClientAction']]:
"""
The action to take when an unauthenticated client attempts to access the app.
"""
return pulumi.get(self, "unauthenticated_client_action")
@unauthenticated_client_action.setter
def unauthenticated_client_action(self, value: Optional[pulumi.Input['UnauthenticatedClientAction']]):
pulumi.set(self, "unauthenticated_client_action", value)
@property
@pulumi.getter(name="validateIssuer")
def validate_issuer(self) -> Optional[pulumi.Input[bool]]:
"""
        Gets a value indicating whether the issuer should be a valid HTTPS URL and be validated as such.
"""
return pulumi.get(self, "validate_issuer")
@validate_issuer.setter
def validate_issuer(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "validate_issuer", value)
class WebAppAuthSettings(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
aad_claims_authorization: Optional[pulumi.Input[str]] = None,
additional_login_params: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_audiences: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_external_redirect_urls: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
auth_file_path: Optional[pulumi.Input[str]] = None,
client_id: Optional[pulumi.Input[str]] = None,
client_secret: Optional[pulumi.Input[str]] = None,
client_secret_certificate_thumbprint: Optional[pulumi.Input[str]] = None,
client_secret_setting_name: Optional[pulumi.Input[str]] = None,
default_provider: Optional[pulumi.Input['BuiltInAuthenticationProvider']] = None,
enabled: Optional[pulumi.Input[bool]] = None,
facebook_app_id: Optional[pulumi.Input[str]] = None,
facebook_app_secret: Optional[pulumi.Input[str]] = None,
facebook_app_secret_setting_name: Optional[pulumi.Input[str]] = None,
facebook_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
git_hub_client_id: Optional[pulumi.Input[str]] = None,
git_hub_client_secret: Optional[pulumi.Input[str]] = None,
git_hub_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
git_hub_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
google_client_id: Optional[pulumi.Input[str]] = None,
google_client_secret: Optional[pulumi.Input[str]] = None,
google_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
google_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
is_auth_from_file: Optional[pulumi.Input[str]] = None,
issuer: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
microsoft_account_client_id: Optional[pulumi.Input[str]] = None,
microsoft_account_client_secret: Optional[pulumi.Input[str]] = None,
microsoft_account_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
microsoft_account_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
runtime_version: Optional[pulumi.Input[str]] = None,
token_refresh_extension_hours: Optional[pulumi.Input[float]] = None,
token_store_enabled: Optional[pulumi.Input[bool]] = None,
twitter_consumer_key: Optional[pulumi.Input[str]] = None,
twitter_consumer_secret: Optional[pulumi.Input[str]] = None,
twitter_consumer_secret_setting_name: Optional[pulumi.Input[str]] = None,
unauthenticated_client_action: Optional[pulumi.Input['UnauthenticatedClientAction']] = None,
validate_issuer: Optional[pulumi.Input[bool]] = None,
__props__=None):
"""
Configuration settings for the Azure App Service Authentication / Authorization feature.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] aad_claims_authorization: Gets a JSON string containing the Azure AD ACL settings.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_login_params: Login parameters to send to the OpenID Connect authorization endpoint when
a user logs in. Each parameter must be in the form "key=value".
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_audiences: Allowed audience values to consider when validating JWTs issued by
Azure Active Directory. Note that the <code>ClientID</code> value is always considered an
allowed audience, regardless of this setting.
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_external_redirect_urls: External URLs that can be redirected to as part of logging in or logging out of the app. Note that the query string part of the URL is ignored.
This is an advanced setting typically only needed by Windows Store application backends.
Note that URLs within the current domain are always implicitly allowed.
        :param pulumi.Input[str] auth_file_path: The path of the config file containing auth settings.
               If the path is relative, the base will be the site's root directory.
:param pulumi.Input[str] client_id: The Client ID of this relying party application, known as the client_id.
               This setting is required for enabling OpenID Connect authentication with Azure Active Directory or
other 3rd party OpenID Connect providers.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
:param pulumi.Input[str] client_secret: The Client Secret of this relying party application (in Azure Active Directory, this is also referred to as the Key).
This setting is optional. If no client secret is configured, the OpenID Connect implicit auth flow is used to authenticate end users.
Otherwise, the OpenID Connect Authorization Code Flow is used to authenticate end users.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
        :param pulumi.Input[str] client_secret_certificate_thumbprint: An alternative to the client secret, that is, the thumbprint of a certificate used for signing purposes. This property acts as
a replacement for the Client Secret. It is also optional.
:param pulumi.Input[str] client_secret_setting_name: The app setting name that contains the client secret of the relying party application.
:param pulumi.Input['BuiltInAuthenticationProvider'] default_provider: The default authentication provider to use when multiple providers are configured.
This setting is only needed if multiple providers are configured and the unauthenticated client
action is set to "RedirectToLoginPage".
:param pulumi.Input[bool] enabled: <code>true</code> if the Authentication / Authorization feature is enabled for the current app; otherwise, <code>false</code>.
:param pulumi.Input[str] facebook_app_id: The App ID of the Facebook app used for login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
:param pulumi.Input[str] facebook_app_secret: The App Secret of the Facebook app used for Facebook Login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
:param pulumi.Input[str] facebook_app_secret_setting_name: The app setting name that contains the app secret used for Facebook Login.
:param pulumi.Input[Sequence[pulumi.Input[str]]] facebook_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Facebook Login authentication.
This setting is optional.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
        :param pulumi.Input[str] git_hub_client_id: The Client ID of the GitHub app used for login.
               This setting is required for enabling GitHub Login.
        :param pulumi.Input[str] git_hub_client_secret: The Client Secret of the GitHub app used for GitHub Login.
               This setting is required for enabling GitHub Login.
        :param pulumi.Input[str] git_hub_client_secret_setting_name: The app setting name that contains the client secret of the GitHub
app used for GitHub Login.
:param pulumi.Input[Sequence[pulumi.Input[str]]] git_hub_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of GitHub Login authentication.
               This setting is optional.
:param pulumi.Input[str] google_client_id: The OpenID Connect Client ID for the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[str] google_client_secret: The client secret associated with the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[str] google_client_secret_setting_name: The app setting name that contains the client secret associated with
the Google web application.
:param pulumi.Input[Sequence[pulumi.Input[str]]] google_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Google Sign-In authentication.
This setting is optional. If not specified, "openid", "profile", and "email" are used as default scopes.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
:param pulumi.Input[str] is_auth_from_file: "true" if the auth config settings should be read from a file,
"false" otherwise
:param pulumi.Input[str] issuer: The OpenID Connect Issuer URI that represents the entity which issues access tokens for this application.
When using Azure Active Directory, this value is the URI of the directory tenant, e.g. https://sts.windows.net/{tenant-guid}/.
This URI is a case-sensitive identifier for the token issuer.
More information on OpenID Connect Discovery: http://openid.net/specs/openid-connect-discovery-1_0.html
:param pulumi.Input[str] kind: Kind of resource.
:param pulumi.Input[str] microsoft_account_client_id: The OAuth 2.0 client ID that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
:param pulumi.Input[str] microsoft_account_client_secret: The OAuth 2.0 client secret that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
:param pulumi.Input[str] microsoft_account_client_secret_setting_name: The app setting name containing the OAuth 2.0 client secret that was created for the
app used for authentication.
:param pulumi.Input[Sequence[pulumi.Input[str]]] microsoft_account_o_auth_scopes: The OAuth 2.0 scopes that will be requested as part of Microsoft Account authentication.
This setting is optional. If not specified, "wl.basic" is used as the default scope.
Microsoft Account Scopes and permissions documentation: https://msdn.microsoft.com/en-us/library/dn631845.aspx
:param pulumi.Input[str] name: Name of web app.
:param pulumi.Input[str] resource_group_name: Name of the resource group to which the resource belongs.
:param pulumi.Input[str] runtime_version: The RuntimeVersion of the Authentication / Authorization feature in use for the current app.
The setting in this value can control the behavior of certain features in the Authentication / Authorization module.
:param pulumi.Input[float] token_refresh_extension_hours: The number of hours after session token expiration that a session token can be used to
call the token refresh API. The default is 72 hours.
:param pulumi.Input[bool] token_store_enabled: <code>true</code> to durably store platform-specific security tokens that are obtained during login flows; otherwise, <code>false</code>.
The default is <code>false</code>.
:param pulumi.Input[str] twitter_consumer_key: The OAuth 1.0a consumer key of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
:param pulumi.Input[str] twitter_consumer_secret: The OAuth 1.0a consumer secret of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
:param pulumi.Input[str] twitter_consumer_secret_setting_name: The app setting name that contains the OAuth 1.0a consumer secret of the Twitter
application used for sign-in.
:param pulumi.Input['UnauthenticatedClientAction'] unauthenticated_client_action: The action to take when an unauthenticated client attempts to access the app.
        :param pulumi.Input[bool] validate_issuer: Gets a value indicating whether the issuer should be a valid HTTPS URL and be validated as such.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: WebAppAuthSettingsArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Configuration settings for the Azure App Service Authentication / Authorization feature.
:param str resource_name: The name of the resource.
:param WebAppAuthSettingsArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(WebAppAuthSettingsArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
aad_claims_authorization: Optional[pulumi.Input[str]] = None,
additional_login_params: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_audiences: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
allowed_external_redirect_urls: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
auth_file_path: Optional[pulumi.Input[str]] = None,
client_id: Optional[pulumi.Input[str]] = None,
client_secret: Optional[pulumi.Input[str]] = None,
client_secret_certificate_thumbprint: Optional[pulumi.Input[str]] = None,
client_secret_setting_name: Optional[pulumi.Input[str]] = None,
default_provider: Optional[pulumi.Input['BuiltInAuthenticationProvider']] = None,
enabled: Optional[pulumi.Input[bool]] = None,
facebook_app_id: Optional[pulumi.Input[str]] = None,
facebook_app_secret: Optional[pulumi.Input[str]] = None,
facebook_app_secret_setting_name: Optional[pulumi.Input[str]] = None,
facebook_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
git_hub_client_id: Optional[pulumi.Input[str]] = None,
git_hub_client_secret: Optional[pulumi.Input[str]] = None,
git_hub_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
git_hub_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
google_client_id: Optional[pulumi.Input[str]] = None,
google_client_secret: Optional[pulumi.Input[str]] = None,
google_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
google_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
is_auth_from_file: Optional[pulumi.Input[str]] = None,
issuer: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
microsoft_account_client_id: Optional[pulumi.Input[str]] = None,
microsoft_account_client_secret: Optional[pulumi.Input[str]] = None,
microsoft_account_client_secret_setting_name: Optional[pulumi.Input[str]] = None,
microsoft_account_o_auth_scopes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
runtime_version: Optional[pulumi.Input[str]] = None,
token_refresh_extension_hours: Optional[pulumi.Input[float]] = None,
token_store_enabled: Optional[pulumi.Input[bool]] = None,
twitter_consumer_key: Optional[pulumi.Input[str]] = None,
twitter_consumer_secret: Optional[pulumi.Input[str]] = None,
twitter_consumer_secret_setting_name: Optional[pulumi.Input[str]] = None,
unauthenticated_client_action: Optional[pulumi.Input['UnauthenticatedClientAction']] = None,
validate_issuer: Optional[pulumi.Input[bool]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = WebAppAuthSettingsArgs.__new__(WebAppAuthSettingsArgs)
__props__.__dict__["aad_claims_authorization"] = aad_claims_authorization
__props__.__dict__["additional_login_params"] = additional_login_params
__props__.__dict__["allowed_audiences"] = allowed_audiences
__props__.__dict__["allowed_external_redirect_urls"] = allowed_external_redirect_urls
__props__.__dict__["auth_file_path"] = auth_file_path
__props__.__dict__["client_id"] = client_id
__props__.__dict__["client_secret"] = client_secret
__props__.__dict__["client_secret_certificate_thumbprint"] = client_secret_certificate_thumbprint
__props__.__dict__["client_secret_setting_name"] = client_secret_setting_name
__props__.__dict__["default_provider"] = default_provider
__props__.__dict__["enabled"] = enabled
__props__.__dict__["facebook_app_id"] = facebook_app_id
__props__.__dict__["facebook_app_secret"] = facebook_app_secret
__props__.__dict__["facebook_app_secret_setting_name"] = facebook_app_secret_setting_name
__props__.__dict__["facebook_o_auth_scopes"] = facebook_o_auth_scopes
__props__.__dict__["git_hub_client_id"] = git_hub_client_id
__props__.__dict__["git_hub_client_secret"] = git_hub_client_secret
__props__.__dict__["git_hub_client_secret_setting_name"] = git_hub_client_secret_setting_name
__props__.__dict__["git_hub_o_auth_scopes"] = git_hub_o_auth_scopes
__props__.__dict__["google_client_id"] = google_client_id
__props__.__dict__["google_client_secret"] = google_client_secret
__props__.__dict__["google_client_secret_setting_name"] = google_client_secret_setting_name
__props__.__dict__["google_o_auth_scopes"] = google_o_auth_scopes
__props__.__dict__["is_auth_from_file"] = is_auth_from_file
__props__.__dict__["issuer"] = issuer
__props__.__dict__["kind"] = kind
__props__.__dict__["microsoft_account_client_id"] = microsoft_account_client_id
__props__.__dict__["microsoft_account_client_secret"] = microsoft_account_client_secret
__props__.__dict__["microsoft_account_client_secret_setting_name"] = microsoft_account_client_secret_setting_name
__props__.__dict__["microsoft_account_o_auth_scopes"] = microsoft_account_o_auth_scopes
if name is None and not opts.urn:
raise TypeError("Missing required property 'name'")
__props__.__dict__["name"] = name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["runtime_version"] = runtime_version
__props__.__dict__["token_refresh_extension_hours"] = token_refresh_extension_hours
__props__.__dict__["token_store_enabled"] = token_store_enabled
__props__.__dict__["twitter_consumer_key"] = twitter_consumer_key
__props__.__dict__["twitter_consumer_secret"] = twitter_consumer_secret
__props__.__dict__["twitter_consumer_secret_setting_name"] = twitter_consumer_secret_setting_name
__props__.__dict__["unauthenticated_client_action"] = unauthenticated_client_action
__props__.__dict__["validate_issuer"] = validate_issuer
__props__.__dict__["system_data"] = None
__props__.__dict__["type"] = None
        alias_opts = pulumi.ResourceOptions(aliases=[
            pulumi.Alias(type_="azure-nextgen:web/v20201001:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-native:web:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-nextgen:web:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-native:web/v20150801:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-nextgen:web/v20150801:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-native:web/v20160801:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-nextgen:web/v20160801:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-native:web/v20180201:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-nextgen:web/v20180201:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-native:web/v20181101:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-nextgen:web/v20181101:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-native:web/v20190801:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-nextgen:web/v20190801:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-native:web/v20200601:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-nextgen:web/v20200601:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-native:web/v20200901:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-nextgen:web/v20200901:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-native:web/v20201201:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-nextgen:web/v20201201:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-native:web/v20210101:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-nextgen:web/v20210101:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-native:web/v20210115:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-nextgen:web/v20210115:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-native:web/v20210201:WebAppAuthSettings"),
            pulumi.Alias(type_="azure-nextgen:web/v20210201:WebAppAuthSettings")])
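        # Note (an inference, not from the generated source): the aliases
        # above map older azure-native/azure-nextgen type tokens to this
        # resource, so state created under previous API versions is treated
        # as the same resource instead of being replaced on upgrade.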
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(WebAppAuthSettings, __self__).__init__(
'azure-native:web/v20201001:WebAppAuthSettings',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'WebAppAuthSettings':
"""
Get an existing WebAppAuthSettings resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = WebAppAuthSettingsArgs.__new__(WebAppAuthSettingsArgs)
__props__.__dict__["aad_claims_authorization"] = None
__props__.__dict__["additional_login_params"] = None
__props__.__dict__["allowed_audiences"] = None
__props__.__dict__["allowed_external_redirect_urls"] = None
__props__.__dict__["auth_file_path"] = None
__props__.__dict__["client_id"] = None
__props__.__dict__["client_secret"] = None
__props__.__dict__["client_secret_certificate_thumbprint"] = None
__props__.__dict__["client_secret_setting_name"] = None
__props__.__dict__["default_provider"] = None
__props__.__dict__["enabled"] = None
__props__.__dict__["facebook_app_id"] = None
__props__.__dict__["facebook_app_secret"] = None
__props__.__dict__["facebook_app_secret_setting_name"] = None
__props__.__dict__["facebook_o_auth_scopes"] = None
__props__.__dict__["git_hub_client_id"] = None
__props__.__dict__["git_hub_client_secret"] = None
__props__.__dict__["git_hub_client_secret_setting_name"] = None
__props__.__dict__["git_hub_o_auth_scopes"] = None
__props__.__dict__["google_client_id"] = None
__props__.__dict__["google_client_secret"] = None
__props__.__dict__["google_client_secret_setting_name"] = None
__props__.__dict__["google_o_auth_scopes"] = None
__props__.__dict__["is_auth_from_file"] = None
__props__.__dict__["issuer"] = None
__props__.__dict__["kind"] = None
__props__.__dict__["microsoft_account_client_id"] = None
__props__.__dict__["microsoft_account_client_secret"] = None
__props__.__dict__["microsoft_account_client_secret_setting_name"] = None
__props__.__dict__["microsoft_account_o_auth_scopes"] = None
__props__.__dict__["name"] = None
__props__.__dict__["runtime_version"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["token_refresh_extension_hours"] = None
__props__.__dict__["token_store_enabled"] = None
__props__.__dict__["twitter_consumer_key"] = None
__props__.__dict__["twitter_consumer_secret"] = None
__props__.__dict__["twitter_consumer_secret_setting_name"] = None
__props__.__dict__["type"] = None
__props__.__dict__["unauthenticated_client_action"] = None
__props__.__dict__["validate_issuer"] = None
return WebAppAuthSettings(resource_name, opts=opts, __props__=__props__)
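    # A hedged usage sketch for ``get`` (the provider ID below is a
    # hypothetical placeholder, abbreviated with "..."):
    #
    #     settings = WebAppAuthSettings.get(
    #         "existing-auth-settings",
    #         id="/subscriptions/.../resourceGroups/my-rg/providers"
    #            "/Microsoft.Web/sites/my-web-app/config/authsettings",
    #     )
    #
    # All output properties below are then resolved from the live resource.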
@property
@pulumi.getter(name="aadClaimsAuthorization")
def aad_claims_authorization(self) -> pulumi.Output[Optional[str]]:
"""
        Gets a JSON string containing the Azure AD ACL settings.
"""
return pulumi.get(self, "aad_claims_authorization")
@property
@pulumi.getter(name="additionalLoginParams")
def additional_login_params(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Login parameters to send to the OpenID Connect authorization endpoint when
a user logs in. Each parameter must be in the form "key=value".
"""
return pulumi.get(self, "additional_login_params")
@property
@pulumi.getter(name="allowedAudiences")
def allowed_audiences(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Allowed audience values to consider when validating JWTs issued by
Azure Active Directory. Note that the <code>ClientID</code> value is always considered an
allowed audience, regardless of this setting.
"""
return pulumi.get(self, "allowed_audiences")
@property
@pulumi.getter(name="allowedExternalRedirectUrls")
def allowed_external_redirect_urls(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
External URLs that can be redirected to as part of logging in or logging out of the app. Note that the query string part of the URL is ignored.
This is an advanced setting typically only needed by Windows Store application backends.
Note that URLs within the current domain are always implicitly allowed.
"""
return pulumi.get(self, "allowed_external_redirect_urls")
@property
@pulumi.getter(name="authFilePath")
def auth_file_path(self) -> pulumi.Output[Optional[str]]:
"""
The path of the config file containing auth settings.
        If the path is relative, the base will be the site's root directory.
"""
return pulumi.get(self, "auth_file_path")
@property
@pulumi.getter(name="clientId")
def client_id(self) -> pulumi.Output[Optional[str]]:
"""
The Client ID of this relying party application, known as the client_id.
        This setting is required for enabling OpenID Connect authentication with Azure Active Directory or
other 3rd party OpenID Connect providers.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
"""
return pulumi.get(self, "client_id")
@property
@pulumi.getter(name="clientSecret")
def client_secret(self) -> pulumi.Output[Optional[str]]:
"""
The Client Secret of this relying party application (in Azure Active Directory, this is also referred to as the Key).
This setting is optional. If no client secret is configured, the OpenID Connect implicit auth flow is used to authenticate end users.
Otherwise, the OpenID Connect Authorization Code Flow is used to authenticate end users.
More information on OpenID Connect: http://openid.net/specs/openid-connect-core-1_0.html
"""
return pulumi.get(self, "client_secret")
@property
@pulumi.getter(name="clientSecretCertificateThumbprint")
def client_secret_certificate_thumbprint(self) -> pulumi.Output[Optional[str]]:
"""
        An alternative to the client secret, that is, the thumbprint of a certificate used for signing purposes. This property acts as
a replacement for the Client Secret. It is also optional.
"""
return pulumi.get(self, "client_secret_certificate_thumbprint")
@property
@pulumi.getter(name="clientSecretSettingName")
def client_secret_setting_name(self) -> pulumi.Output[Optional[str]]:
"""
The app setting name that contains the client secret of the relying party application.
"""
return pulumi.get(self, "client_secret_setting_name")
@property
@pulumi.getter(name="defaultProvider")
def default_provider(self) -> pulumi.Output[Optional[str]]:
"""
The default authentication provider to use when multiple providers are configured.
This setting is only needed if multiple providers are configured and the unauthenticated client
action is set to "RedirectToLoginPage".
"""
return pulumi.get(self, "default_provider")
@property
@pulumi.getter
def enabled(self) -> pulumi.Output[Optional[bool]]:
"""
<code>true</code> if the Authentication / Authorization feature is enabled for the current app; otherwise, <code>false</code>.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter(name="facebookAppId")
def facebook_app_id(self) -> pulumi.Output[Optional[str]]:
"""
The App ID of the Facebook app used for login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_app_id")
@property
@pulumi.getter(name="facebookAppSecret")
def facebook_app_secret(self) -> pulumi.Output[Optional[str]]:
"""
The App Secret of the Facebook app used for Facebook Login.
This setting is required for enabling Facebook Login.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_app_secret")
@property
@pulumi.getter(name="facebookAppSecretSettingName")
def facebook_app_secret_setting_name(self) -> pulumi.Output[Optional[str]]:
"""
The app setting name that contains the app secret used for Facebook Login.
"""
return pulumi.get(self, "facebook_app_secret_setting_name")
@property
@pulumi.getter(name="facebookOAuthScopes")
def facebook_o_auth_scopes(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Facebook Login authentication.
This setting is optional.
Facebook Login documentation: https://developers.facebook.com/docs/facebook-login
"""
return pulumi.get(self, "facebook_o_auth_scopes")
@property
@pulumi.getter(name="gitHubClientId")
def git_hub_client_id(self) -> pulumi.Output[Optional[str]]:
"""
        The Client ID of the GitHub app used for login.
        This setting is required for enabling GitHub Login.
"""
return pulumi.get(self, "git_hub_client_id")
@property
@pulumi.getter(name="gitHubClientSecret")
def git_hub_client_secret(self) -> pulumi.Output[Optional[str]]:
"""
        The Client Secret of the GitHub app used for GitHub Login.
        This setting is required for enabling GitHub Login.
"""
return pulumi.get(self, "git_hub_client_secret")
@property
@pulumi.getter(name="gitHubClientSecretSettingName")
def git_hub_client_secret_setting_name(self) -> pulumi.Output[Optional[str]]:
"""
        The app setting name that contains the client secret of the GitHub
app used for GitHub Login.
"""
return pulumi.get(self, "git_hub_client_secret_setting_name")
@property
@pulumi.getter(name="gitHubOAuthScopes")
def git_hub_o_auth_scopes(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The OAuth 2.0 scopes that will be requested as part of GitHub Login authentication.
        This setting is optional.
"""
return pulumi.get(self, "git_hub_o_auth_scopes")
@property
@pulumi.getter(name="googleClientId")
def google_client_id(self) -> pulumi.Output[Optional[str]]:
"""
The OpenID Connect Client ID for the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_client_id")
@property
@pulumi.getter(name="googleClientSecret")
def google_client_secret(self) -> pulumi.Output[Optional[str]]:
"""
The client secret associated with the Google web application.
This setting is required for enabling Google Sign-In.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_client_secret")
@property
@pulumi.getter(name="googleClientSecretSettingName")
def google_client_secret_setting_name(self) -> pulumi.Output[Optional[str]]:
"""
The app setting name that contains the client secret associated with
the Google web application.
"""
return pulumi.get(self, "google_client_secret_setting_name")
@property
@pulumi.getter(name="googleOAuthScopes")
def google_o_auth_scopes(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Google Sign-In authentication.
This setting is optional. If not specified, "openid", "profile", and "email" are used as default scopes.
Google Sign-In documentation: https://developers.google.com/identity/sign-in/web/
"""
return pulumi.get(self, "google_o_auth_scopes")
@property
@pulumi.getter(name="isAuthFromFile")
def is_auth_from_file(self) -> pulumi.Output[Optional[str]]:
"""
"true" if the auth config settings should be read from a file,
"false" otherwise
"""
return pulumi.get(self, "is_auth_from_file")
@property
@pulumi.getter
def issuer(self) -> pulumi.Output[Optional[str]]:
"""
The OpenID Connect Issuer URI that represents the entity which issues access tokens for this application.
When using Azure Active Directory, this value is the URI of the directory tenant, e.g. https://sts.windows.net/{tenant-guid}/.
This URI is a case-sensitive identifier for the token issuer.
More information on OpenID Connect Discovery: http://openid.net/specs/openid-connect-discovery-1_0.html
"""
return pulumi.get(self, "issuer")
@property
@pulumi.getter
def kind(self) -> pulumi.Output[Optional[str]]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="microsoftAccountClientId")
def microsoft_account_client_id(self) -> pulumi.Output[Optional[str]]:
"""
The OAuth 2.0 client ID that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
"""
return pulumi.get(self, "microsoft_account_client_id")
@property
@pulumi.getter(name="microsoftAccountClientSecret")
def microsoft_account_client_secret(self) -> pulumi.Output[Optional[str]]:
"""
The OAuth 2.0 client secret that was created for the app used for authentication.
This setting is required for enabling Microsoft Account authentication.
Microsoft Account OAuth documentation: https://dev.onedrive.com/auth/msa_oauth.htm
"""
return pulumi.get(self, "microsoft_account_client_secret")
@property
@pulumi.getter(name="microsoftAccountClientSecretSettingName")
def microsoft_account_client_secret_setting_name(self) -> pulumi.Output[Optional[str]]:
"""
The app setting name containing the OAuth 2.0 client secret that was created for the
app used for authentication.
"""
return pulumi.get(self, "microsoft_account_client_secret_setting_name")
@property
@pulumi.getter(name="microsoftAccountOAuthScopes")
def microsoft_account_o_auth_scopes(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The OAuth 2.0 scopes that will be requested as part of Microsoft Account authentication.
This setting is optional. If not specified, "wl.basic" is used as the default scope.
Microsoft Account Scopes and permissions documentation: https://msdn.microsoft.com/en-us/library/dn631845.aspx
"""
return pulumi.get(self, "microsoft_account_o_auth_scopes")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource Name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="runtimeVersion")
def runtime_version(self) -> pulumi.Output[Optional[str]]:
"""
The RuntimeVersion of the Authentication / Authorization feature in use for the current app.
The setting in this value can control the behavior of certain features in the Authentication / Authorization module.
"""
return pulumi.get(self, "runtime_version")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
"""
The system metadata relating to this resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter(name="tokenRefreshExtensionHours")
def token_refresh_extension_hours(self) -> pulumi.Output[Optional[float]]:
"""
The number of hours after session token expiration that a session token can be used to
call the token refresh API. The default is 72 hours.
"""
return pulumi.get(self, "token_refresh_extension_hours")
@property
@pulumi.getter(name="tokenStoreEnabled")
def token_store_enabled(self) -> pulumi.Output[Optional[bool]]:
"""
<code>true</code> to durably store platform-specific security tokens that are obtained during login flows; otherwise, <code>false</code>.
The default is <code>false</code>.
"""
return pulumi.get(self, "token_store_enabled")
@property
@pulumi.getter(name="twitterConsumerKey")
def twitter_consumer_key(self) -> pulumi.Output[Optional[str]]:
"""
The OAuth 1.0a consumer key of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
"""
return pulumi.get(self, "twitter_consumer_key")
@property
@pulumi.getter(name="twitterConsumerSecret")
def twitter_consumer_secret(self) -> pulumi.Output[Optional[str]]:
"""
The OAuth 1.0a consumer secret of the Twitter application used for sign-in.
This setting is required for enabling Twitter Sign-In.
Twitter Sign-In documentation: https://dev.twitter.com/web/sign-in
"""
return pulumi.get(self, "twitter_consumer_secret")
@property
@pulumi.getter(name="twitterConsumerSecretSettingName")
def twitter_consumer_secret_setting_name(self) -> pulumi.Output[Optional[str]]:
"""
The app setting name that contains the OAuth 1.0a consumer secret of the Twitter
application used for sign-in.
"""
return pulumi.get(self, "twitter_consumer_secret_setting_name")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="unauthenticatedClientAction")
def unauthenticated_client_action(self) -> pulumi.Output[Optional[str]]:
"""
The action to take when an unauthenticated client attempts to access the app.
"""
return pulumi.get(self, "unauthenticated_client_action")
@property
@pulumi.getter(name="validateIssuer")
def validate_issuer(self) -> pulumi.Output[Optional[bool]]:
"""
Gets a value indicating whether the issuer should be a valid HTTPS url and be validated as such.
"""
return pulumi.get(self, "validate_issuer")
| [
"[email protected]"
] | |
1d78b2f287093aaabba4344add7cc6fae44f8d34 | d5aa24b75c2344358752b0af0a47293533820578 | /data_analysis/IO/load_data.py | 6333a0cb4611a6478b56ee4e3cef726e09a8e012 | [] | no_license | ModelDBRepository/234992 | 913da9efaadb704171da907ebd953fe59efe5fb1 | b969a4c623b92c1bd79138f4132885bc424b114c | refs/heads/master | 2020-05-29T18:28:48.883803 | 2019-05-31T03:42:59 | 2019-05-31T03:42:59 | 189,300,851 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,298 | py | import numpy as np
import sys, pathlib
sys.path.append(str(pathlib.Path(__file__).resolve().parents[2]))
import data_analysis.IO.axon_to_python as axon
import data_analysis.IO.binary_to_python as binary
def load_file(filename, zoom=[0,np.inf]):
if filename.endswith('.bin'):
return binary.load_file(filename, zoom=zoom)
elif filename.endswith('.abf'):
print(filename)
return axon.load_file(filename, zoom=zoom)
else:
return None
def get_metadata(filename, infos={}):
print('filename is', filename)
if filename.endswith('.bin'):
return binary.get_metadata(filename, infos=infos)
elif filename.endswith('.abf'):
return axon.get_metadata(filename, infos=infos)
elif filename.endswith('.npz'):
return {'main_protocol':'modeling_work'}
else:
return None
def get_formated_data(filename):
t, VEC = load_file(filename)
meta = get_metadata(filename)
data = {'t':t, 'Vm':VEC[0],
'infos':meta, 'dt':t[1]-t[0]}
return data
if __name__ == '__main__':
import sys
import matplotlib.pylab as plt
filename = sys.argv[-1]
print(get_metadata(filename))
t, data = load_file(filename, zoom=[-5.,np.inf])
plt.plot(t[10000:], data[0][10000:])
plt.show()
| [
"[email protected]"
] | |
bda783c687d550284ea64c93dd66f035fb1f1dfb | fdfd9cab4e26491da5d2a06a15960362ccf01460 | /ex32.py | 2a85257657bbb1b65928785ed5e54f5bf092b766 | [] | no_license | WilliamsHerrmann/MWM15 | c182f7f8eca4f30a41e602a8e907497bc927af81 | 3f17abd57473f328ddd1e1a2a7591423f32da0f8 | refs/heads/master | 2021-07-07T05:01:06.486909 | 2017-10-02T18:18:44 | 2017-10-02T18:18:44 | 103,341,626 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 816 | py | the_count = [1, 2, 3, 4, 5]
fruits = ['apples', 'oranges', 'pears', 'apricots']
change = [1, 'pennies', 2, 'dimes', 3, 'quarters']
# this first kind of for-loop goes through a list
for number in the_count:
print "This is count %d" % number
# same as above
for fruit in fruits:
print "A fruit of type: %s" % fruit
# also we can go through mixed lists too
# notice we have to use %r since we don't know what's in it
for i in change:
print "I got %r" % i
# we can also build lists, first start with an empty list
elements = []
# then use the range function to do 0 to 5 counts
for i in range(0, 6):
print "Adding %d to the list." % i
# append is a function that lists understand
    elements.append(i)
# now we can print them out too
for i in elements:
print "Element was: %d" % i
| [
"[email protected]"
] | |
c05b2d2d9ecd3eba54b5f2efb976613d93068b2e | 5389214afd2a1607925c2104227395a4f2a2800e | /ajax_guide/urls.py | 453bb0f440546b9de8d098f5eca2b16974c1770b | [] | no_license | vinoyjoshi/bandit | 272081b3c843e85969e1a2217080beb08c2b0df5 | 2421d742bbf31faf9b699bd20058c242cbe68773 | refs/heads/main | 2023-01-06T01:49:58.327732 | 2020-10-15T19:47:39 | 2020-10-15T19:47:39 | 304,411,565 | 1 | 0 | null | 2020-10-15T19:47:40 | 2020-10-15T18:13:48 | Python | UTF-8 | Python | false | false | 1,013 | py | """ajax_guide URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from app1 import views as app1
from django.conf.urls import url
urlpatterns = [
path('admin/', admin.site.urls),
path('',app1.contactPage),
    url(r'^ajax/contact-submit/$', app1.contact_submit, name='contact_submit'),
    # Regex patterns need url()/re_path(); path() would treat this pattern as a literal string.
    url(r'^ajax/get_contact_info/$', app1.get_contact_info, name='get_contact_info')
]
| [
"[email protected]"
] | |
7ed93578216aac980f00d00bb895797a9107acd9 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /018_dictionaries/examples/Python 3 Most Nessesary/9.3.Listing 9.4. Enumerating dictionary elements.py | 68e2d165ac09ae3d6584391151010bbb29be77b9 | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 1,111 | py | d = {"x": 1, "y": 2, "z": 3}
for key in d.keys(): # Using the keys() method
    print("({0} => {1})".format(key, d[key]), end=" ")
# Prints: (y => 2) (x => 1) (z => 3)
print() # Insert a newline character
for key in d: # Dictionaries also support iteration directly
    print("({0} => {1})".format(key, d[key]), end=" ")
# Prints: (y => 2) (x => 1) (z => 3)
d = {"x": 1, "y": 2, "z": 3}
k = list(d.keys()) # Get the list of keys
k.sort() # Sort the list of keys
for key in k:
    print("({0} => {1})".format(key, d[key]), end=" ")
# Prints: (x => 1) (y => 2) (z => 3)
d = {"x": 1, "y": 2, "z": 3}
for key in sorted(d.keys()):
    print("({0} => {1})".format(key, d[key]), end=" ")
# Prints: (x => 1) (y => 2) (z => 3)
d = {"x": 1, "y": 2, "z": 3}
for key in sorted(d):
    print("({0} => {1})".format(key, d[key]), end=" ")
# Prints: (x => 1) (y => 2) (z => 3)
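# An equivalent ordered traversal with items() (an illustrative addition,
# not part of the original listing):
d = {"x": 1, "y": 2, "z": 3}
for key, value in sorted(d.items()):
    print("({0} => {1})".format(key, value), end=" ")
# Prints: (x => 1) (y => 2) (z => 3)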
"[email protected]"
] | |
53b5a7771fd57d104ac7621b2ed2b6c9e1c01f96 | aa265e03e73f718d4008cfe30ada7ee32c852eec | /ABC_A/ABC033_A.py | 956f4fb0fc9fc60fd343a72f21a9d322326c5e91 | [
"MIT"
] | permissive | ryosuke0825/atcoder_python | 4fb9de9733cd9ef41c2ad9ad38b3f190f49d3ad5 | 52d037d0bc9ef2c721bf2958c1c2ead558cb0cf5 | refs/heads/master | 2023-03-11T22:47:56.963089 | 2023-03-05T01:21:06 | 2023-03-05T01:21:06 | 181,768,029 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 82 | py | n = input()
if n.count(n[0]) == 4:
print("SAME")
else:
print("DIFFERENT")
| [
"[email protected]"
] | |
def2fc41b751673fb8775b648f289d98ef9a0106 | 51f6443116ef09aa91cca0ac91387c1ce9cb445a | /Curso_Python_3_UDEMY/desafios/desafio_html.py | d5648ffde600f190c5bda1912b3dff47252566db | [
"MIT"
] | permissive | DanilooSilva/Cursos_de_Python | f449f75bc586f7cb5a7e43000583a83fff942e53 | 8f167a4c6e16f01601e23b6f107578aa1454472d | refs/heads/main | 2023-07-30T02:11:27.002831 | 2021-10-01T21:52:15 | 2021-10-01T21:52:15 | 331,683,041 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 523 | py | def tag(tag, *args, **kwargs):
if 'html_class' in kwargs:
kwargs['class'] = kwargs.pop('html_class')
attrs = ''.join(f'{k}="{v}" ' for k, v in kwargs.items())
inner = ''.join(args)
return f'<{tag} {attrs}>{inner}</{tag}>'
if __name__ == '__main__':
print(tag('p',
tag('span', 'Curso de Python 3, por'),
tag('strong', 'Juracy Filho', id='jf'),
tag('span', ' e '),
tag('strong', 'Leonador Leitão', id='ll'),
tag('span', '.'),
html_class='alert')) | [
"[email protected]"
] | |
e7b420a62db0bce2fe381107cc685f1bf88035d8 | f3742f46560486c07c339244f8cf47bb07709561 | /features/steps/test_utils.py | 7cc4d34cc1b40e7598dc65345299f0ee9046838a | [
"MIT"
] | permissive | Azure/azure-event-hubs-python | 55b65920f9d8dbe6cc418d63291ba507ce648d97 | 326f772f5cbe3d3eaf68b24485554aada463430a | refs/heads/master | 2023-03-17T22:03:54.241386 | 2020-04-07T22:33:17 | 2020-04-07T22:33:17 | 91,842,040 | 65 | 66 | MIT | 2020-04-07T22:33:18 | 2017-05-19T20:14:44 | Python | UTF-8 | Python | false | false | 3,540 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import uuid
import time
import asyncio
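import os
import json
# EventData is used by the helpers below; in this repo it is provided by the
# azure.eventhub package.
from azure.eventhub import EventData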
def create_mgmt_client(credentials, subscription, location='westus'):
from azure.mgmt.resource import ResourceManagementClient
from azure.mgmt.eventhub import EventHubManagementClient
resource_client = ResourceManagementClient(credentials, subscription)
rg_name = 'pytest-{}'.format(uuid.uuid4())
resource_group = resource_client.resource_groups.create_or_update(
rg_name, {'location': location})
eh_client = EventHubManagementClient(credentials, subscription)
namespace = 'pytest-{}'.format(uuid.uuid4())
creator = eh_client.namespaces.create_or_update(
resource_group.name,
namespace)
    creator.wait()
return resource_group, eh_client
def get_eventhub_config():
config = {}
config['hostname'] = os.environ['EVENT_HUB_HOSTNAME']
config['event_hub'] = os.environ['EVENT_HUB_NAME']
config['key_name'] = os.environ['EVENT_HUB_SAS_POLICY']
config['access_key'] = os.environ['EVENT_HUB_SAS_KEY']
config['consumer_group'] = "$Default"
config['partition'] = "0"
return config
def get_eventhub_100TU_config():
config = {}
config['hostname'] = os.environ['EVENT_HUB_100TU_HOSTNAME']
config['event_hub'] = os.environ['EVENT_HUB_100TU_NAME']
config['key_name'] = os.environ['EVENT_HUB_100TU_SAS_POLICY']
config['access_key'] = os.environ['EVENT_HUB_100TU_SAS_KEY']
config['consumer_group'] = "$Default"
config['partition'] = "0"
return config
def send_constant_messages(sender, timeout, payload=1024):
    deadline = time.time() + timeout
total = 0
while time.time() < deadline:
data = EventData(body=b"D" * payload)
sender.send(data)
total += 1
return total
def send_constant_async_messages(sender, timeout, batch_size=10000, payload=1024):
    deadline = time.time() + timeout
    total = 0
    while time.time() < deadline:
        data = EventData(body=b"D" * payload)
        sender.transfer(data)
        total += 1
        if total % batch_size == 0:
            sender.wait()
    return total
def send_constant_batched_messages(sender, timeout, batch_size=1, payload=1024):
    # Renamed from a second `send_constant_async_messages` definition that
    # shadowed the one above; data_generator below is a minimal assumption.
    def data_generator():
        for _ in range(batch_size):
            yield b"D" * payload
    deadline = time.time() + timeout
    while time.time() < deadline:
        if batch_size > 1:
            data = EventData(batch=data_generator())
        else:
            data = EventData(body=b"D" * payload)
        sender.send(data)
async def receive_pump(receiver, timeout, validation=True):
total = 0
deadline = time.time() + timeout
sequence = 0
offset = None
while time.time() < deadline:
batch = await receiver.receive(timeout=5)
total += len(batch)
if validation:
assert receiver.offset
for event in batch:
next_sequence = event.sequence_number
assert next_sequence > sequence, "Received Event with lower sequence number than previous."
assert (next_sequence - sequence) == 1, "Sequence number skipped by a value great than 1."
sequence = next_sequence
msg_data = b"".join([b for b in event.body]).decode('UTF-8')
assert json.loads(msg_data), "Unable to deserialize Event data."
| [
"[email protected]"
] | |
55fc9a1726e44163be89eb8a2441951491ef7af9 | ef3a7391b0a5c5d8e276355e97cbe4de621d500c | /venv/Lib/site-packages/caffe2/python/layer_model_helper.py | 9bb56400ffb7a26b831eb2f82abdf36f27bdbc1c | [
"Apache-2.0"
] | permissive | countBMB/BenjiRepo | 143f6da5d198ea6f06404b4559e1f4528b71b3eb | 79d882263baaf2a11654ca67d2e5593074d36dfa | refs/heads/master | 2022-12-11T07:37:04.807143 | 2019-12-25T11:26:29 | 2019-12-25T11:26:29 | 230,090,428 | 1 | 1 | Apache-2.0 | 2022-12-08T03:21:09 | 2019-12-25T11:05:59 | Python | UTF-8 | Python | false | false | 28,709 | py | # @package layer_model_helper
# Module caffe2.python.layer_model_helper
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core, model_helper, schema, scope, utils, muji
from caffe2.python.modeling.parameter_info import (
ParameterInfo,
)
from caffe2.python.modeling.parameter_sharing import (
parameter_sharing_context,
)
from caffe2.python.modeling.net_modifier import NetModifier
from caffe2.python.optimizer import get_param_device, Optimizer
from caffe2.python.regularizer import Regularizer, RegularizationBy
from caffe2.python.layers import layers
from caffe2.proto import caffe2_pb2
from future.utils import viewitems, viewvalues
import logging
import numpy as np
import six
import copy
logger = logging.getLogger(__name__)
class LayerModelHelper(model_helper.ModelHelper):
"""
Model helper for building models on top of layers abstractions.
Each layer is the abstraction that is higher level than Operator. Layer
is responsible for ownership of it's own parameters and can easily be
instantiated in multiple nets possible with different sets of ops.
As an example: one can easily instantiate predict and train nets from
the same set of layers, where predict net will have subset of the
operators from train net.
"""
def __init__(self, name, input_feature_schema, trainer_extra_schema,
keep_blobs=False):
        ''' TODO(amalevich): more documentation on input args
'''
super(LayerModelHelper, self).__init__(name=name)
self._layer_names = set()
self._layers = []
self._param_to_shape = {}
# seed default
self._seed = None
self._sequence_seed = True
# optimizer bookkeeping
self.param_to_optim = {}
self.param_to_reg = {}
self._default_optimizer = None
self._loss = None
self._prediction = []
self._output_schema = None
self._post_grad_net_modifiers = []
self._final_net_modifiers = []
# breakdown map; breakdown features are categorical (like dense) but not
# necessarily used to represent data for training
self._breakdown_map = None
        # Connect Schema to self.net. That particular instance of schema will be
        # used for generation of the Layers across the network and would be used
# for connection with Readers.
self._input_feature_schema = schema.NewRecord(
self.net,
input_feature_schema
) if not keep_blobs else input_feature_schema.clone()
self._trainer_extra_schema = schema.NewRecord(
self.net,
trainer_extra_schema
) if not keep_blobs else trainer_extra_schema.clone()
self._metrics_schema = schema.Struct()
self._preproc_output_schema = None
self._init_global_constants()
self.param_init_net = self.create_init_net('param_init_net')
self._initialize_params = True
# additional (hard-coded) diagnose_options to report based on the model
# TODO(xlwang): it's hack!
self.ad_hoc_diagnose_blobs_and_operations = []
self.ad_hoc_plot_blobs = []
def clear_output_schema(self):
self._output_schema = None
def set_initialize_params(self, initialize_params):
self._initialize_params = initialize_params
def add_metric_field(self, name, value):
assert name not in self._metrics_schema.fields, (
"Try to add metric field twice: {}".format(name))
self._metrics_schema = self._metrics_schema + schema.Struct(
(name, value)
)
# an empty white_set will skip everything
def filter_metrics_schema(self, white_set):
logger.info("Filter metric schema with white_set {}".format(white_set))
field_names = self._metrics_schema.field_names()
for name in field_names:
if name not in white_set:
self._metrics_schema = self._metrics_schema - schema.Struct((name, schema.Scalar()))
def add_ad_hoc_plot_blob(self, blob, dtype=None):
assert isinstance(
blob, (six.string_types, core.BlobReference)
), "expect type str or BlobReference, but got {}".format(type(blob))
dtype = dtype or (np.float, (1, ))
self.add_metric_field(str(blob), schema.Scalar(dtype, blob))
self.ad_hoc_plot_blobs.append(blob)
@staticmethod
def _get_global_constant_initializer_op(
blob_name, array=None, dtype=None, initializer=None
):
# to add a global constant to model, one first need to get the
# initializer
if array is not None:
assert initializer is None,\
"Only one from array and initializer should be specified"
if dtype is None:
array = np.array(array)
else:
array = np.array(array, dtype=dtype)
# TODO: make GivenTensor generic
op_name = None
if array.dtype == np.int32:
op_name = 'GivenTensorIntFill'
elif array.dtype == np.int64:
op_name = 'GivenTensorInt64Fill'
elif array.dtype == np.str:
op_name = 'GivenTensorStringFill'
elif array.dtype == np.bool:
op_name = 'GivenTensorBoolFill'
else:
op_name = 'GivenTensorFill'
def initializer(blob_name):
return core.CreateOperator(
op_name, [],
blob_name,
shape=array.shape,
values=array.flatten().tolist()
)
else:
assert initializer is not None
initializer_op = initializer(blob_name)
return initializer_op
def add_global_constant(
self, name, array=None, dtype=None, initializer=None
):
assert isinstance(name, six.string_types), (
'name should be a string as we are using it as map key')
# This is global namescope for constants. They will be created in all
# init_nets and there should be very few of them.
assert name not in self.global_constants, \
"%s already added in global_constants" % name
blob_name = self.net.NextBlob(name)
self.global_constants[name] = blob_name
initializer_op = LayerModelHelper._get_global_constant_initializer_op(
blob_name, array, dtype, initializer
)
assert blob_name not in self.global_constant_initializers, \
"there is already a initializer op associated with blob %s" % \
blob_name
self.global_constant_initializers[blob_name] = initializer_op
return blob_name
def maybe_add_global_constant(self, name, *args, **kwargs):
# To ad hoc add new global constants without duplication
# if the name was already registered in global_constants, it will not be
# added even if the intended value is different from its original value
if name in self.global_constants:
blob_name = self.global_constants[name]
initializer_op = \
LayerModelHelper._get_global_constant_initializer_op(
blob_name, *args, **kwargs
)
# check if the original initializer is the same as the one intended
# now
assert utils.OpAlmostEqual(
initializer_op,
self.global_constant_initializers[blob_name],
'debug_info'
), \
"conflict initializers for global constant %s, " \
"previous %s, now %s" % (
blob_name, str(initializer_op),
str(self.global_constant_initializers[blob_name]))
return blob_name
return self.add_global_constant(name, *args, **kwargs)
def _init_global_constants(self):
self.global_constants = {}
self.global_constant_initializers = {}
self.add_global_constant('ONE', 1.0)
self.add_global_constant('ZERO', 0.0)
self.add_global_constant('ZERO_RANGE', [0, 0], dtype='int32')
def _add_global_constants(self, init_net):
for initializer_op in viewvalues(self.global_constant_initializers):
init_net._net.op.extend([initializer_op])
def create_init_net(self, name):
init_net = core.Net(name)
self._add_global_constants(init_net)
return init_net
def _validate_param_shape(self, param_name, shape):
if param_name not in self._param_to_shape:
return
ref_shape = self._param_to_shape[param_name]
if shape != ref_shape:
raise ValueError(
"Got inconsistent shapes between shared parameters "
"when trying to map a blob in scope {0} to {1}. ref_shape : "
" {2}, shape : {3}".format(
scope.CurrentNameScope(), param_name, ref_shape, shape)
)
def _validate_param_optim(self, param_name, optim):
# there are three possible values for optim:
# 1) None (which will use self._default_optimizer after this layer is instantiated)
# 2) self.NoOptim
# 3) an instance of Optimizer class such as AdagradOptimizer
# this implies this parameter is not shared with any other parameter so far
if param_name not in self.param_to_optim:
return
logger.info("{} shares the same parameter with another parameter. "
"Validating if the same optimizer has been specified for them.".format(
param_name,
))
ref_optim = self.param_to_optim[param_name]
if optim is None:
assert ref_optim == self._default_optimizer, (
"Optim for {} is None which will fall back to use default_optimizer. "
"However, the optimizer that has been specified for this shared parameter "
"is {} which is different from default_optimizer {}. "
"Please check the optimizers specified for parameters shared "
"with {} and the default_optimizer to ensure the consistency.".format(
param_name, ref_optim, self._default_optimizer, param_name
)
)
elif optim == self.NoOptim:
assert ref_optim == self.NoOptim, (
"Optim for {} is NoOptim. However, the optimizer for the parameters "
"shared with {} is {} which is different from NoOptim. "
"Please check the optimizer specified for other parameters in the "
"shared group to ensure consistency.".format(
param_name, param_name, ref_optim
)
)
elif isinstance(optim, Optimizer):
assert isinstance(ref_optim, Optimizer), (
"Optim for {} is an instance of Optimizer. However, the optimizer "
"for the parameters shared with {} is {} which is not an instance "
"of Optimizer. Please check the optimizer specified for other "
" parameters in the shared group to ensure consistency.".format(
param_name, param_name, ref_optim, optim
)
)
assert type(optim) is type(ref_optim) and optim.attributes == ref_optim.attributes, (
"Optim for {} is an instance of Optimizer. However, the optimizer "
"for the parameters shared with {} is {}. "
"This optimizer either doesn't have the same type as the current optimizer: "
"{} vs {}, or its attributes such as learning rate are different from "
"that of current optimizer which is {} vs {}. "
"Please check the optimizer specified for other parameters in the "
"shared group to ensure consistency.".format(
param_name, param_name, ref_optim, type(optim), type(ref_optim), optim.attributes, ref_optim.attributes
)
)
else:
raise ValueError("optim should be either None, NoOptim, or an instance of Optimizer, Got {} ".format(optim))
def create_param(self, param_name, shape, initializer, optimizer=None,
ps_param=None, regularizer=None):
if isinstance(param_name, core.BlobReference):
param_name = str(param_name)
elif isinstance(param_name, six.string_types):
# Parameter name will be equal to current Namescope that got
# resolved with the respect of parameter sharing of the scopes.
param_name = parameter_sharing_context.get_parameter_name(
param_name)
else:
raise ValueError("Unsupported type for param_name")
param_blob = core.BlobReference(param_name)
if len(initializer) == 1:
init_op_args = {}
else:
assert len(initializer) == 2
init_op_args = copy.deepcopy(initializer[1])
if shape is not None:
assert 'shape' not in init_op_args
init_op_args.update({'shape': shape})
initializer_op = None
if self._initialize_params:
initializer_op = core.CreateOperator(
initializer[0],
[],
param_blob,
**init_op_args
)
param = layers.LayerParameter(
parameter=param_blob,
initializer=initializer_op,
optimizer=optimizer,
ps_param=ps_param,
regularizer=regularizer
)
self._validate_param_shape(param_name, shape)
self._validate_param_optim(param_name, optimizer)
self._param_to_shape[param_name] = shape
return param
def next_layer_name(self, prefix):
base_name = core.ScopedName(prefix)
name = base_name
index = 0
while name in self._layer_names:
name = base_name + '_auto_' + str(index)
index += 1
self._layer_names.add(name)
return name
def add_layer(self, layer):
self._layers.append(layer)
for param in layer.get_parameters():
assert isinstance(param.parameter, core.BlobReference)
self.param_to_optim[str(param.parameter)] = \
param.optimizer or self.default_optimizer
self.params.append(param.parameter)
if isinstance(param, layers.LayerParameter):
logger.info("Add parameter regularizer {0}".format(param.parameter))
self.param_to_reg[param.parameter] = param.regularizer
elif isinstance(param, ParameterInfo):
# TODO:
# Currently, LSTM and RNNcells, which use ModelHelper instead of
# LayerModelHelper as super class, are called in pooling_methods
# In ModelHelper, regularization is not supported in create_param
# We will unify the way of create_param of ModelHelper and
# LayerModelHelper in the future.
logger.info('regularization is unsupported for ParameterInfo object')
else:
raise ValueError(
'unknown object type besides ParameterInfo and LayerParameter: {}'
.format(param)
)
        # The primary value of adding everything to self.net is generation of the
        # operators right away, i.e. if an error happens it'll be detected
# immediately. Other than this - create_x_net should be called.
layer.add_operators(self.net, self.param_init_net)
return layer.output_schema
def get_parameter_blobs(self):
param_blobs = []
for layer in self._layers:
for param in layer.get_parameters():
param_blobs.append(param.parameter)
return param_blobs
def add_post_grad_net_modifiers(self, modifier):
assert modifier not in self._post_grad_net_modifiers,\
"{0} is already in {1}".format(modifier, self._post_grad_net_modifiers)
assert isinstance(modifier, NetModifier),\
"{} has to be a NetModifier instance".format(modifier)
self._post_grad_net_modifiers.append(modifier)
def add_final_net_modifiers(self, modifier):
assert modifier not in self._final_net_modifiers,\
"{0} is already in {1}".format(modifier, self._final_net_modifiers)
assert isinstance(modifier, NetModifier),\
"{} has to be a NetModifier instance".format(modifier)
self._final_net_modifiers.append(modifier)
@property
def seed(self):
return self._seed
@property
def sequence_seed(self):
return self._sequence_seed
def store_seed(self, seed, sequence_seed=True):
# Store seed config that will be applied to each op in the net.
self._seed = seed
# If sequence_seed is True, the i-th op has rand_seed=`seed + i`
self._sequence_seed = sequence_seed
def apply_seed(self, net):
if self._seed:
net.set_rand_seed(self._seed, self._sequence_seed)
@property
def default_optimizer(self):
return self._default_optimizer
@default_optimizer.setter
def default_optimizer(self, optimizer):
self._default_optimizer = optimizer
@property
def input_feature_schema(self):
return self._input_feature_schema
@property
def trainer_extra_schema(self):
return self._trainer_extra_schema
@property
def metrics_schema(self):
"""
Returns the schema that represents model output that should be used for
metric reporting.
During the training/evaluation this schema will be appended to the
schema that represents model output.
"""
return self._metrics_schema
@property
def output_schema(self):
assert self._output_schema is not None
return self._output_schema
@output_schema.setter
def output_schema(self, schema):
assert self._output_schema is None
self._output_schema = schema
@property
def preproc_output_schema(self):
assert self._preproc_output_schema is not None
return self._preproc_output_schema
@preproc_output_schema.setter
def preproc_output_schema(self, schema):
assert self._preproc_output_schema is None
self._preproc_output_schema = schema
@property
def prediction(self):
assert self._prediction, "model prediction is empty"
return self._prediction
def add_prediction(self, prediction, weight=1.0):
assert prediction is not None, "Added prediction should not be None"
self._prediction.append((prediction, weight))
@property
def loss(self):
assert self._loss is not None
return self._loss
@loss.setter
def loss(self, loss):
assert self._loss is None
self._loss = loss
def has_loss(self):
return self._loss is not None
def add_loss(self, loss, name='unnamed'):
assert loss is not None, "Added loss should not be None"
assert isinstance(loss, schema.Scalar) or isinstance(
loss, schema.Struct
), "Added loss should be a scalar or a struct"
if self._loss is None:
self._loss = schema.Struct((name, loss))
else:
# loss could've been set through model.loss directly which could be
# a scalar
if isinstance(self._loss, schema.Scalar):
self._loss = schema.Struct(('unnamed', self._loss))
prefix_base = name + '_auto_'
index = 0
prefix = name
while prefix in self._loss:
prefix = prefix_base + str(index)
index += 1
loss_struct = schema.Struct((prefix, loss))
self._loss = self._loss + loss_struct
def add_output_schema(self, name, value):
assert value is not None, \
'Added output schema {} should not be None'.format(name)
assert isinstance(value, schema.Scalar) or \
isinstance(value, schema.Struct), \
'Added output schema {} should be a scalar or a struct.\n\
Now it is {}.'.format(name, type(value))
if self._output_schema is None: # be the first field
self._output_schema = schema.Struct((name, value))
else: # merge with other fields
assert name not in self._output_schema.fields, \
'Output Schema Field {} already exists'.format(name)
self._output_schema = \
self._output_schema + schema.Struct((name, value))
def add_trainer_extra_schema(self, trainer_extra_schema):
trainer_extra_record = schema.NewRecord(self.net, trainer_extra_schema)
self._trainer_extra_schema += trainer_extra_record
def __getattr__(self, layer):
def is_functional_layer(layer):
if core.IsOperator(layer):
return True
elif layer.startswith('FunctionalLayer'):
return True
else:
return False
def resolve_functional_layer(layer):
if core.IsOperator(layer):
return layer
elif layer.startswith('FunctionalLayer'):
return layer[len('FunctionalLayer'):]
else:
raise ValueError(
'%s cannot be resolved as functional layer' % layer
)
if layer.startswith('__'):
raise AttributeError(layer)
        # TODO(amalevich): Add support for ifbpy inline documentation
if layers.layer_exists(layer):
def wrapper(*args, **kwargs):
new_layer = layers.create_layer(layer, self, *args, **kwargs)
if kwargs.get("output_to_metrics", False):
new_layer.export_output_for_metrics()
if kwargs.get("params_to_metrics", False):
new_layer.export_params_for_metrics()
return self.add_layer(new_layer)
return wrapper
elif is_functional_layer(layer):
            # TODO(xlwang): Designated layer shadows the usage of an op as a
# single layer. To enforce using an op (e.g. Split) as functional
# layer, one can call 'model.FunctionalLayerSplit'
layer = resolve_functional_layer(layer)
def wrapper(*args, **kwargs):
def apply_operator(net, in_record, out_record, **kwargs):
# TODO(amalevich): Switch to net.operator as soon as it gets
# landed
net.__getattr__(layer)(in_record.field_blobs(),
out_record.field_blobs(),
**kwargs)
if 'name' not in kwargs:
kwargs['name'] = layer
new_layer = layers.create_layer(
'Functional',
self, *args, function=apply_operator,
**kwargs
)
if kwargs.get("output_to_metrics", False):
new_layer.export_output_for_metrics()
if kwargs.get("params_to_metrics", False):
new_layer.export_params_for_metrics()
return self.add_layer(new_layer)
return wrapper
else:
# this needs to be an AttributeError to fit hasattr semantics
raise AttributeError(
"Trying to create non-registered layer: {}".format(layer))
@property
def layers(self):
return self._layers
def apply_regularizers_on_loss(
self,
train_net,
train_init_net,
blob_to_device=None,
):
logger.info("apply regularizer on loss")
for param, regularizer in viewitems(self.param_to_reg):
if regularizer is None:
continue
logger.info("add regularizer {0} for param {1} to loss".format(regularizer, param))
assert isinstance(regularizer, Regularizer)
added_loss_blob = regularizer(train_net, train_init_net, param, grad=None,
by=RegularizationBy.ON_LOSS)
logger.info(added_loss_blob)
if added_loss_blob is not None:
self.add_loss(
schema.Scalar(blob=added_loss_blob),
str(added_loss_blob)
)
def apply_regularizers_after_optimizer(
self,
train_net,
train_init_net,
grad_map,
blob_to_device=None,
):
logger.info("apply regularizer after optimizer")
CPU = muji.OnCPU()
# if given, blob_to_device is a map from blob to device_option
blob_to_device = blob_to_device or {}
for param, regularizer in viewitems(self.param_to_reg):
if regularizer is None:
continue
assert isinstance(regularizer, Regularizer)
logger.info("add regularizer {0} for param {1} to optimizer".format(regularizer, param))
device = get_param_device(
param,
grad_map.get(str(param)),
param_to_device=blob_to_device,
default_device=CPU,
)
with core.DeviceScope(device):
regularizer(
train_net, train_init_net, param, grad=grad_map.get(str(param)),
by=RegularizationBy.AFTER_OPTIMIZER
)
def apply_post_grad_net_modifiers(
self,
trainer_net,
trainer_init_net,
grad_map,
blob_to_device=None,
modify_output_record=False,
):
param_grad_map = {param: grad_map[param]
for param in self.param_to_optim.keys() if param in grad_map}
for modifier in self._post_grad_net_modifiers:
modifier(trainer_net, trainer_init_net, param_grad_map,
blob_to_device=blob_to_device,
modify_output_record=modify_output_record)
def apply_final_net_modifiers(
self,
trainer_net,
trainer_init_net,
grad_map,
blob_to_device=None,
modify_output_record=False,
):
for modifier in self._final_net_modifiers:
modifier(trainer_net, trainer_init_net, grad_map,
blob_to_device=blob_to_device,
modify_output_record=modify_output_record)
def apply_optimizers(
self,
train_net,
train_init_net,
grad_map,
blob_to_device=None,
):
CPU = muji.OnCPU()
# if given, blob_to_device is a map from blob to device_option
blob_to_device = blob_to_device or {}
for param, optimizer in viewitems(self.param_to_optim):
assert optimizer is not None, \
"default optimizer must have been set in add_layer"
# note that not all params has gradient and thus we sent None if
# gradient does not exists
device = get_param_device(
param,
grad_map.get(str(param)),
param_to_device=blob_to_device,
default_device=CPU,
)
if device is not None:
# extra info is not applicable for optimizers
del device.extra_info[:]
with core.DeviceScope(device):
optimizer(
train_net, train_init_net, param, grad_map.get(str(param)))
def _GetOne(self):
return self.global_constants['ONE']
# An optimizer which allows us to do NO optimization
def NoOptim(self, *args, **kwargs):
pass
@property
def breakdown_map(self):
return self._breakdown_map
@breakdown_map.setter
def breakdown_map(self, breakdown_map):
# TODO(xlwang): provide more rich feature information in breakdown_map;
# and change the assertion accordingly
assert isinstance(breakdown_map, dict)
assert all(isinstance(k, six.string_types) for k in breakdown_map)
assert sorted(breakdown_map.values()) == list(range(len(breakdown_map)))
self._breakdown_map = breakdown_map
| [
"[email protected]"
] | |
9dcf9692cc7ae29a7f56fa27e78cdc21365c70ba | 8a5ab3d33e3b653c4c64305d81a85f6a4582d7ac | /PySide/QtGui/QStyleOptionTab.py | 264b7c0e040a27794ebb0a067eb56b9436f49f11 | [
"Apache-2.0"
] | permissive | sonictk/python-skeletons | be09526bf490856bb644fed6bf4e801194089f0d | 49bc3fa51aacbc2c7f0c7ab86dfb61eefe02781d | refs/heads/master | 2020-04-06T04:38:01.918589 | 2016-06-09T20:37:43 | 2016-06-09T20:37:43 | 56,334,503 | 0 | 0 | null | 2016-04-15T16:30:42 | 2016-04-15T16:30:42 | null | UTF-8 | Python | false | false | 1,122 | py | # encoding: utf-8
# module PySide.QtGui
# from /corp.blizzard.net/BFD/Deploy/Packages/Published/ThirdParty/Qt4.8.4/2015-05-15.163857/prebuilt/linux_x64_gcc41_python2.7_ucs4/PySide/QtGui.so
# by generator 1.138
# no doc
# imports
import PySide.QtCore as __PySide_QtCore
from QStyleOption import QStyleOption
class QStyleOptionTab(QStyleOption):
# no doc
def __init__(self, *more): # real signature unknown; restored from __doc__
""" x.__init__(...) initializes x; see help(type(x)) for signature """
pass
Beginning = None
CornerWidget = None
cornerWidgets = None
CornerWidgets = None
End = None
icon = None
LeftCornerWidget = None
Middle = None
NextIsSelected = None
NoCornerWidgets = None
NotAdjacent = None
OnlyOneTab = None
position = None
PreviousIsSelected = None
RightCornerWidget = None
row = None
selectedPosition = None
SelectedPosition = None
shape = None
StyleOptionType = None
StyleOptionVersion = None
TabPosition = None
text = None
Type = None
Version = None
__new__ = None
| [
"[email protected]"
] | |
857ea37b6d65aa8708b7aa91520822899db9bbaa | b3e90c765a70d0c68a3c703c31ebfbcc67d8f83e | /AN_Bridging/box_ws/src/multi_box/src/Algs/SoftActorCritic.py | 6147e9cd1403a16b426ef7d1722e30eefb48531e | [] | no_license | ronf-ucb/MultiRobot | 619fe1750dd25c336f7ef793e43983d992cbf519 | b509e9c43e330e737135298ea4cfbd4190222328 | refs/heads/master | 2021-07-19T20:39:47.835985 | 2020-09-07T14:32:39 | 2020-09-07T14:32:39 | 211,394,959 | 1 | 2 | null | 2019-10-30T22:21:20 | 2019-09-27T20:15:07 | C++ | UTF-8 | Python | false | false | 3,825 | py | #! /usr/bin/env python
import numpy as np
import torch
import torch.nn as nn
import math
import rospy
from std_msgs.msg import String, Int8
from geometry_msgs.msg import Vector3
import vrep
import matplotlib.pyplot as plt
import torch.optim as optim
from Networks.network import Network
from Networks.softNetwork import SoftNetwork
from agent import Agent
from Buffers.CounterFactualBuffer import Memory
cuda_avail = torch.cuda.is_available()
device = torch.device("cuda" if cuda_avail else "cpu")
class SAC(Agent):
def __init__(self, params, name, task):
super(SAC, self).__init__(params, name, task)
self.aPars = params['actPars']
self.aTrain = params['actTrain']
self.qPars = params['qPars']
self.qTrain = params['qTrain']
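        # NOTE: self.vPars / self.vTrain are read below but were never set in
        # the original file; the key names used here are assumptions.
        self.vPars = params['valPars']
        self.vTrain = params['valTrain']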
if self.trainMode:
self.QNet = Network(self.qPars, self.qTrain).to(device)
self.VNet = Network(self.vPars, self.vTrain).to(device)
self.VTar = Network(self.vPars, self.vTrain).to(device)
self.policyNet = SoftNetwork(self.aPars, self.aTrain).to(device)
else:
print('Not implemented')
for target_param, param in zip(self.VTar.parameters(), self.VNet.parameters()):
target_param.data.copy_(param)
self.expSize = self.vTrain['buffer']
self.actions = self.aPars['neurons'][-1]
self.state = self.aPars['neurons'][0]
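        # NOTE: ReplayBuffer is not imported in this file; it is assumed to
        # come from the project's Buffers package (the Memory import above
        # appears unused).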
self.exp = ReplayBuffer(self.expSize, self.actions, np.float32, self.state, np.float32)
task.initAgent(self)
        while(not self.stop):
            x = 1 + 1  # busy-wait until the task signals training is done
task.postTraining()
def load_nets(self):
pass
def saveModel(self):
pass
def get_action(self, s):
        action, _, _, _, _ = self.policyNet(torch.FloatTensor(s))
action = np.ravel(action.detach().numpy())
return action
def send_to_device(self, s, a, r, next_s, d):
s = torch.FloatTensor(s).to(device)
a = torch.FloatTensor(a).to(device)
r = torch.FloatTensor(r).unsqueeze(1).to(device)
next_s = torch.FloatTensor(next_s).to(device)
d = torch.FloatTensor(np.float32(d)).unsqueeze(1).to(device)
return s, a, r, next_s, d
def train(self):
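        # One soft actor-critic update, as structured below: sample a batch,
        # fit Q to the soft Bellman target r + gamma * V_target(s'), fit V to
        # Q(s, a_new) - alpha * log_prob, move the policy toward actions with
        # higher soft advantage, then Polyak-average the target value net.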
if len(self.exp) > 750:
s, a, r, next_s, d = self.exp.sample_batch(self.batch_size)
s, a, r, next_s, d = self.send_to_device(s, a, r, next_s, d)
q = self.QNet(torch.cat([s, a], dim = 1))
v = self.VNet(s)
new_a, log_prob, z, mean, log_std = self.policyNet(s)
target_v = self.VTar(next_s)
next_q = r + (1 - d) * self.discount * target_v
q_loss = self.QNet.get_loss(q, next_q.detach())
new_q = self.QNet(torch.cat([s, new_a], dim=1))
next_v = new_q - log_prob * self.alpha
v_loss = self.VNet.get_loss(v, next_v.detach())
target = new_q - v
actor_loss = (log_prob * (log_prob*self.alpha - target).detach()).mean()
mean_loss = 1e-3 * mean.pow(2).mean()
std_loss = 1e-3 * log_std.pow(2).mean()
actor_loss += mean_loss + std_loss
self.VNet.optimizer.zero_grad()
v_loss.backward()
self.VNet.optimizer.step()
self.QNet.optimizer.zero_grad()
q_loss.backward()
self.QNet.optimizer.step()
self.policyNet.optimizer.zero_grad()
actor_loss.backward()
self.policyNet.optimizer.step()
for target_param, param in zip(self.VTar.parameters(), self.VNet.parameters()):
target_param.data.copy_(target_param.data * (1.0 - 5*1e-3) + param.data * 5*1e-3)
self.totalSteps += 1
| [
"[email protected]"
] | |
c7c9ab5555c62ef7bca526ca8069b83788f07dc4 | a3e26112cb5d6b64c30b44f775750653a1daf0dc | /Q910_Smallest-Range-II.py | 550d96ea406915f5883d2b9d5cea91a4561727d1 | [
"MIT"
] | permissive | xiaosean/leetcode_python | 938f1df379b518d99a778e2da8093ff0371e35d4 | d6fc52d13946895d2b2928ef9962af0610b1d1e8 | refs/heads/master | 2023-04-05T07:07:01.561010 | 2023-03-25T19:17:21 | 2023-03-25T19:17:21 | 150,637,066 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 511 | py | class Solution:
def smallestRangeII(self, A: List[int], K: int) -> int:
A.sort()
min_range = A[-1] - A[0]
        max_val_sub_k = A[-1] - K
        min_val = A[0] + K
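        # For each split index idx of the sorted array, A[0..idx] shift up by K
        # and A[idx+1..] shift down by K, so the candidate extremes are
        # max(A[idx] + K, A[-1] - K) and min(A[0] + K, A[idx+1] - K).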
for idx in range(len(A)-1):
cur_val = A[idx] + K
next_val = A[idx+1] - K
min_range = min(min_range, max(max_val_sub_k, cur_val) - min(min_val, next_val))
# min_range = min(min_range, max_val_sub_k-min(cur_val, next_val))
return min_range | [
"[email protected]"
] | |
855fb15b15d33fbe562973352dba115c1014db55 | 251e8bfec0bfc5b6094f7db8ee6bdfe1ca7f6a5b | /bookmanager/venv/bin/python-config | 759cfeb2748510449a11b8162c1a3830533ca6fc | [] | no_license | googleliyang/Django_meiduo | 543042e08cc5eeb1dce8432b4ea2cca996f35c06 | 46f48ecf7bd6e9e2796eac1c3d54787f5571a9a7 | refs/heads/master | 2020-04-24T19:22:42.295324 | 2019-02-28T05:24:23 | 2019-02-28T05:24:23 | 172,209,685 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,362 | #!/Users/ly/Programmer/django/bookmanager/venv/bin/python
import sys
import getopt
import sysconfig
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
'ldflags', 'help']
if sys.version_info >= (3, 2):
valid_opts.insert(-1, 'extension-suffix')
valid_opts.append('abiflags')
if sys.version_info >= (3, 3):
valid_opts.append('configdir')
def exit_with_usage(code=1):
sys.stderr.write("Usage: {0} [{1}]\n".format(
sys.argv[0], '|'.join('--'+opt for opt in valid_opts)))
sys.exit(code)
try:
opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
exit_with_usage()
if not opts:
exit_with_usage()
pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var
opt_flags = [flag for (flag, val) in opts]
if '--help' in opt_flags:
exit_with_usage(code=0)
for opt in opt_flags:
if opt == '--prefix':
print(sysconfig.get_config_var('prefix'))
elif opt == '--exec-prefix':
print(sysconfig.get_config_var('exec_prefix'))
elif opt in ('--includes', '--cflags'):
flags = ['-I' + sysconfig.get_path('include'),
'-I' + sysconfig.get_path('platinclude')]
if opt == '--cflags':
flags.extend(getvar('CFLAGS').split())
print(' '.join(flags))
elif opt in ('--libs', '--ldflags'):
abiflags = getattr(sys, 'abiflags', '')
libs = ['-lpython' + pyver + abiflags]
libs += getvar('LIBS').split()
libs += getvar('SYSLIBS').split()
# add the prefix/lib/pythonX.Y/config dir, but only if there is no
# shared library in prefix/lib/.
if opt == '--ldflags':
if not getvar('Py_ENABLE_SHARED'):
libs.insert(0, '-L' + getvar('LIBPL'))
if not getvar('PYTHONFRAMEWORK'):
libs.extend(getvar('LINKFORSHARED').split())
print(' '.join(libs))
elif opt == '--extension-suffix':
ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
if ext_suffix is None:
ext_suffix = sysconfig.get_config_var('SO')
print(ext_suffix)
elif opt == '--abiflags':
if not getattr(sys, 'abiflags', None):
exit_with_usage()
print(sys.abiflags)
elif opt == '--configdir':
print(sysconfig.get_config_var('LIBPL'))
| [
"[email protected]"
] | ||
6a3de9a2997e3969d187a691ddd2deb96e6635a7 | bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d | /lib/googlecloudsdk/generated_clients/apis/ondemandscanning/v1beta1/ondemandscanning_v1beta1_client.py | 0ee06b63db758efe5802e29267514cc6f5aa75df | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | google-cloud-sdk-unofficial/google-cloud-sdk | 05fbb473d629195f25887fc5bfaa712f2cbc0a24 | 392abf004b16203030e6efd2f0af24db7c8d669e | refs/heads/master | 2023-08-31T05:40:41.317697 | 2023-08-23T18:23:16 | 2023-08-23T18:23:16 | 335,182,594 | 9 | 2 | NOASSERTION | 2022-10-29T20:49:13 | 2021-02-02T05:47:30 | Python | UTF-8 | Python | false | false | 13,062 | py | """Generated client library for ondemandscanning version v1beta1."""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.py import base_api
from googlecloudsdk.generated_clients.apis.ondemandscanning.v1beta1 import ondemandscanning_v1beta1_messages as messages
class OndemandscanningV1beta1(base_api.BaseApiClient):
"""Generated client library for service ondemandscanning version v1beta1."""
MESSAGES_MODULE = messages
BASE_URL = 'https://ondemandscanning.googleapis.com/'
MTLS_BASE_URL = 'https://ondemandscanning.mtls.googleapis.com/'
_PACKAGE = 'ondemandscanning'
_SCOPES = ['https://www.googleapis.com/auth/cloud-platform']
_VERSION = 'v1beta1'
_CLIENT_ID = 'CLIENT_ID'
_CLIENT_SECRET = 'CLIENT_SECRET'
_USER_AGENT = 'google-cloud-sdk'
_CLIENT_CLASS_NAME = 'OndemandscanningV1beta1'
_URL_VERSION = 'v1beta1'
_API_KEY = None
def __init__(self, url='', credentials=None,
get_credentials=True, http=None, model=None,
log_request=False, log_response=False,
credentials_args=None, default_global_params=None,
additional_http_headers=None, response_encoding=None):
"""Create a new ondemandscanning handle."""
url = url or self.BASE_URL
super(OndemandscanningV1beta1, self).__init__(
url, credentials=credentials,
get_credentials=get_credentials, http=http, model=model,
log_request=log_request, log_response=log_response,
credentials_args=credentials_args,
default_global_params=default_global_params,
additional_http_headers=additional_http_headers,
response_encoding=response_encoding)
self.projects_locations_operations = self.ProjectsLocationsOperationsService(self)
self.projects_locations_scans_vulnerabilities = self.ProjectsLocationsScansVulnerabilitiesService(self)
self.projects_locations_scans = self.ProjectsLocationsScansService(self)
self.projects_locations = self.ProjectsLocationsService(self)
self.projects = self.ProjectsService(self)
class ProjectsLocationsOperationsService(base_api.BaseApiService):
"""Service class for the projects_locations_operations resource."""
_NAME = 'projects_locations_operations'
def __init__(self, client):
super(OndemandscanningV1beta1.ProjectsLocationsOperationsService, self).__init__(client)
self._upload_configs = {
}
def Cancel(self, request, global_params=None):
r"""Starts asynchronous cancellation on a long-running operation. The server makes a best effort to cancel the operation, but success is not guaranteed. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Clients can use Operations.GetOperation or other methods to check whether the cancellation succeeded or whether the operation completed despite cancellation. On successful cancellation, the operation is not deleted; instead, it becomes an operation with an Operation.error value with a google.rpc.Status.code of 1, corresponding to `Code.CANCELLED`.
Args:
request: (OndemandscanningProjectsLocationsOperationsCancelRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Empty) The response message.
"""
config = self.GetMethodConfig('Cancel')
return self._RunMethod(
config, request, global_params=global_params)
Cancel.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:cancel',
http_method='POST',
method_id='ondemandscanning.projects.locations.operations.cancel',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}:cancel',
request_field='',
request_type_name='OndemandscanningProjectsLocationsOperationsCancelRequest',
response_type_name='Empty',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a long-running operation. This method indicates that the client is no longer interested in the operation result. It does not cancel the operation. If the server doesn't support this method, it returns `google.rpc.Code.UNIMPLEMENTED`.
Args:
request: (OndemandscanningProjectsLocationsOperationsDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Empty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}',
http_method='DELETE',
method_id='ondemandscanning.projects.locations.operations.delete',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='OndemandscanningProjectsLocationsOperationsDeleteRequest',
response_type_name='Empty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
Args:
request: (OndemandscanningProjectsLocationsOperationsGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}',
http_method='GET',
method_id='ondemandscanning.projects.locations.operations.get',
ordered_params=['name'],
path_params=['name'],
query_params=[],
relative_path='v1beta1/{+name}',
request_field='',
request_type_name='OndemandscanningProjectsLocationsOperationsGetRequest',
response_type_name='Operation',
supports_download=False,
)
def List(self, request, global_params=None):
r"""Lists operations that match the specified filter in the request. If the server doesn't support this method, it returns `UNIMPLEMENTED`.
Args:
request: (OndemandscanningProjectsLocationsOperationsListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListOperationsResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/operations',
http_method='GET',
method_id='ondemandscanning.projects.locations.operations.list',
ordered_params=['name'],
path_params=['name'],
query_params=['filter', 'pageSize', 'pageToken'],
relative_path='v1beta1/{+name}/operations',
request_field='',
request_type_name='OndemandscanningProjectsLocationsOperationsListRequest',
response_type_name='ListOperationsResponse',
supports_download=False,
)
def Wait(self, request, global_params=None):
r"""Waits until the specified long-running operation is done or reaches at most a specified timeout, returning the latest state. If the operation is already done, the latest state is immediately returned. If the timeout specified is greater than the default HTTP/RPC timeout, the HTTP/RPC timeout is used. If the server does not support this method, it returns `google.rpc.Code.UNIMPLEMENTED`. Note that this method is on a best-effort basis. It may return the latest state before the specified timeout (including immediately), meaning even an immediate response is no guarantee that the operation is done.
Args:
request: (OndemandscanningProjectsLocationsOperationsWaitRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('Wait')
return self._RunMethod(
config, request, global_params=global_params)
Wait.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/operations/{operationsId}:wait',
http_method='POST',
method_id='ondemandscanning.projects.locations.operations.wait',
ordered_params=['name'],
path_params=['name'],
query_params=['timeout'],
relative_path='v1beta1/{+name}:wait',
request_field='',
request_type_name='OndemandscanningProjectsLocationsOperationsWaitRequest',
response_type_name='Operation',
supports_download=False,
)
class ProjectsLocationsScansVulnerabilitiesService(base_api.BaseApiService):
"""Service class for the projects_locations_scans_vulnerabilities resource."""
_NAME = 'projects_locations_scans_vulnerabilities'
def __init__(self, client):
super(OndemandscanningV1beta1.ProjectsLocationsScansVulnerabilitiesService, self).__init__(client)
self._upload_configs = {
}
def List(self, request, global_params=None):
r"""Lists vulnerabilities resulting from a successfully completed scan.
Args:
request: (OndemandscanningProjectsLocationsScansVulnerabilitiesListRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(ListVulnerabilitiesResponse) The response message.
"""
config = self.GetMethodConfig('List')
return self._RunMethod(
config, request, global_params=global_params)
List.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/scans/{scansId}/vulnerabilities',
http_method='GET',
method_id='ondemandscanning.projects.locations.scans.vulnerabilities.list',
ordered_params=['parent'],
path_params=['parent'],
query_params=['pageSize', 'pageToken'],
relative_path='v1beta1/{+parent}/vulnerabilities',
request_field='',
request_type_name='OndemandscanningProjectsLocationsScansVulnerabilitiesListRequest',
response_type_name='ListVulnerabilitiesResponse',
supports_download=False,
)
class ProjectsLocationsScansService(base_api.BaseApiService):
"""Service class for the projects_locations_scans resource."""
_NAME = 'projects_locations_scans'
def __init__(self, client):
super(OndemandscanningV1beta1.ProjectsLocationsScansService, self).__init__(client)
self._upload_configs = {
}
def AnalyzePackages(self, request, global_params=None):
r"""Initiates an analysis of the provided packages.
Args:
request: (OndemandscanningProjectsLocationsScansAnalyzePackagesRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(Operation) The response message.
"""
config = self.GetMethodConfig('AnalyzePackages')
return self._RunMethod(
config, request, global_params=global_params)
AnalyzePackages.method_config = lambda: base_api.ApiMethodInfo(
flat_path='v1beta1/projects/{projectsId}/locations/{locationsId}/scans:analyzePackages',
http_method='POST',
method_id='ondemandscanning.projects.locations.scans.analyzePackages',
ordered_params=['parent'],
path_params=['parent'],
query_params=[],
relative_path='v1beta1/{+parent}/scans:analyzePackages',
request_field='analyzePackagesRequest',
request_type_name='OndemandscanningProjectsLocationsScansAnalyzePackagesRequest',
response_type_name='Operation',
supports_download=False,
)
class ProjectsLocationsService(base_api.BaseApiService):
"""Service class for the projects_locations resource."""
_NAME = 'projects_locations'
def __init__(self, client):
super(OndemandscanningV1beta1.ProjectsLocationsService, self).__init__(client)
self._upload_configs = {
}
class ProjectsService(base_api.BaseApiService):
"""Service class for the projects resource."""
_NAME = 'projects'
def __init__(self, client):
super(OndemandscanningV1beta1.ProjectsService, self).__init__(client)
self._upload_configs = {
}
| [
"[email protected]"
] | |
bbd06e970f33e0fd3225569ff5aedc8b24bb6c63 | 8b9e9de996cedd31561c14238fe655c202692c39 | /recursion/Tail_Recursion.py | 24b88b9901ef2a299306341c11b8f90bb3107b39 | [] | no_license | monkeylyf/interviewjam | 0049bc1d79e6ae88ca6d746b05d07b9e65bc9983 | 33c623f226981942780751554f0593f2c71cf458 | refs/heads/master | 2021-07-20T18:25:37.537856 | 2021-02-19T03:26:16 | 2021-02-19T03:26:16 | 6,741,986 | 59 | 31 | null | null | null | null | UTF-8 | Python | false | false | 1,005 | py | # Explain what is tail recursion and implement reverse a list using functional programming style
def rev(a):
"""Tail recursion.
rev([0, 1, 2, 3])
nested([], [0, 1, 2, 3])
nested([0] + [], [1, 2, 3])
nested([1] + [0], [2, 3])
nested([2] + [1, 0], [3])
    nested([3] + [2, 1, 0], [])
[3, 2, 1, 0]
[3, 2, 1, 0]
"""
# Nested function.
def nested(acc, a):
        # Notice that it is [a[0]] + acc, not acc + [a[0]], so the order reverses.
return nested([a[0]] + acc, a[1:]) if a else acc
return nested([], a)
def re(a):
"""None tail recursion.
What happens in call stack.
re([0, 1, 2, 3])
re([1, 2, 3,]) + 0
(re([2, 3,]) + 1) + 0
((re([3]) + 2) + 1) + 0
(((re([]) + 3) + 2) + 1) + 0
(((3) + 2) + 1) + 0
((5) + 1) + 0
6 + 0
6
"""
return re(a[1:]) + [a[0]] if a else []
def main():
n = 500
# Test case
print rev(range(n))
print re(range(n))
if __name__ == '__main__':
main()
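# Example sketch (not in the original file): why tail recursion matters.
# Because no work is pending after the tail call, the recursion above can be
# rewritten mechanically as a loop over the same (acc, a) state -- which is
# exactly what tail-call-optimizing compilers do.
def rev_iterative(a):
    acc = []
    while a:                          # same termination test as `nested`
        acc, a = [a[0]] + acc, a[1:]  # same state update as the tail call
    return acc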
# =============================================================================
# File: sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_04_01/aio/operations/_network_management_client_operations.py
# (repo: paultaiton/azure-sdk-for-python; licenses: LicenseRef-scancode-generic-cla, LGPL-2.1-or-later, MIT)
# =============================================================================
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NetworkManagementClientOperationsMixin:
async def _put_bastion_shareable_link_initial(
self,
resource_group_name: str,
bastion_host_name: str,
bsl_request: "models.BastionShareableLinkListRequest",
**kwargs
) -> Optional["models.BastionShareableLinkListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.BastionShareableLinkListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._put_bastion_shareable_link_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(bsl_request, 'BastionShareableLinkListRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('BastionShareableLinkListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_put_bastion_shareable_link_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/createShareableLinks'} # type: ignore
async def begin_put_bastion_shareable_link(
self,
resource_group_name: str,
bastion_host_name: str,
bsl_request: "models.BastionShareableLinkListRequest",
**kwargs
) -> AsyncLROPoller[AsyncItemPaged["models.BastionShareableLinkListResult"]]:
"""Creates a Bastion Shareable Links for all the VMs specified in the request.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param bastion_host_name: The name of the Bastion Host.
:type bastion_host_name: str
:param bsl_request: Post request for all the Bastion Shareable Link endpoints.
:type bsl_request: ~azure.mgmt.network.v2020_04_01.models.BastionShareableLinkListRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns an iterator like instance of either BastionShareableLinkListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_04_01.models.BastionShareableLinkListResult]]
:raises ~azure.core.exceptions.HttpResponseError:
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.BastionShareableLinkListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = "application/json"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.put_bastion_shareable_link.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(bsl_request, 'BastionShareableLinkListRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(bsl_request, 'BastionShareableLinkListRequest')
body_content_kwargs['content'] = body_content
request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('BastionShareableLinkListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.BastionShareableLinkListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._put_bastion_shareable_link_initial(
resource_group_name=resource_group_name,
bastion_host_name=bastion_host_name,
bsl_request=bsl_request,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
async def internal_get_next(next_link=None):
if next_link is None:
return pipeline_response
else:
return await get_next(next_link)
return AsyncItemPaged(
internal_get_next, extract_data
)
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_put_bastion_shareable_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/createShareableLinks'} # type: ignore
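    # Example (not part of the generated SDK): a minimal sketch of driving the
    # LRO-plus-paging combination above. Awaiting the begin_* coroutine yields
    # an AsyncLROPoller; awaiting its result() yields an AsyncItemPaged, and
    # iterating that performs the paged GETs.
    async def _example_create_shareable_links(self, resource_group_name, bastion_host_name, bsl_request):
        poller = await self.begin_put_bastion_shareable_link(
            resource_group_name, bastion_host_name, bsl_request)
        pager = await poller.result()
        return [link async for link in pager]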
async def _delete_bastion_shareable_link_initial(
self,
resource_group_name: str,
bastion_host_name: str,
bsl_request: "models.BastionShareableLinkListRequest",
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._delete_bastion_shareable_link_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(bsl_request, 'BastionShareableLinkListRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_bastion_shareable_link_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/deleteShareableLinks'} # type: ignore
async def begin_delete_bastion_shareable_link(
self,
resource_group_name: str,
bastion_host_name: str,
bsl_request: "models.BastionShareableLinkListRequest",
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the Bastion Shareable Links for all the VMs specified in the request.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param bastion_host_name: The name of the Bastion Host.
:type bastion_host_name: str
:param bsl_request: Post request for all the Bastion Shareable Link endpoints.
:type bsl_request: ~azure.mgmt.network.v2020_04_01.models.BastionShareableLinkListRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_bastion_shareable_link_initial(
resource_group_name=resource_group_name,
bastion_host_name=bastion_host_name,
bsl_request=bsl_request,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete_bastion_shareable_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/deleteShareableLinks'} # type: ignore
def get_bastion_shareable_link(
self,
resource_group_name: str,
bastion_host_name: str,
bsl_request: "models.BastionShareableLinkListRequest",
**kwargs
) -> AsyncIterable["models.BastionShareableLinkListResult"]:
"""Return the Bastion Shareable Links for all the VMs specified in the request.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param bastion_host_name: The name of the Bastion Host.
:type bastion_host_name: str
:param bsl_request: Post request for all the Bastion Shareable Link endpoints.
:type bsl_request: ~azure.mgmt.network.v2020_04_01.models.BastionShareableLinkListRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either BastionShareableLinkListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_04_01.models.BastionShareableLinkListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.BastionShareableLinkListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = "application/json"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_bastion_shareable_link.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(bsl_request, 'BastionShareableLinkListRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(bsl_request, 'BastionShareableLinkListRequest')
body_content_kwargs['content'] = body_content
request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('BastionShareableLinkListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
get_bastion_shareable_link.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/getShareableLinks'} # type: ignore
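    # Example (not part of the generated SDK): unlike the begin_* methods,
    # this one returns an AsyncItemPaged immediately; the HTTP requests happen
    # lazily during iteration.
    async def _example_list_shareable_links(self, resource_group_name, bastion_host_name, bsl_request):
        links = []
        async for link in self.get_bastion_shareable_link(
                resource_group_name, bastion_host_name, bsl_request):
            links.append(link)
        return links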
async def _get_active_sessions_initial(
self,
resource_group_name: str,
bastion_host_name: str,
**kwargs
) -> Optional["models.BastionActiveSessionListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.BastionActiveSessionListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self._get_active_sessions_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('BastionActiveSessionListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_active_sessions_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/getActiveSessions'} # type: ignore
async def begin_get_active_sessions(
self,
resource_group_name: str,
bastion_host_name: str,
**kwargs
) -> AsyncLROPoller[AsyncItemPaged["models.BastionActiveSessionListResult"]]:
"""Returns the list of currently active sessions on the Bastion.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param bastion_host_name: The name of the Bastion Host.
:type bastion_host_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns an iterator like instance of either BastionActiveSessionListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_04_01.models.BastionActiveSessionListResult]]
:raises ~azure.core.exceptions.HttpResponseError:
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.BastionActiveSessionListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.get_active_sessions.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.post(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('BastionActiveSessionListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.BastionActiveSessionListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._get_active_sessions_initial(
resource_group_name=resource_group_name,
bastion_host_name=bastion_host_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
async def internal_get_next(next_link=None):
if next_link is None:
return pipeline_response
else:
return await get_next(next_link)
return AsyncItemPaged(
internal_get_next, extract_data
)
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_active_sessions.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/getActiveSessions'} # type: ignore
def disconnect_active_sessions(
self,
resource_group_name: str,
bastion_host_name: str,
session_ids: "models.SessionIds",
**kwargs
) -> AsyncIterable["models.BastionSessionDeleteResult"]:
"""Returns the list of currently active sessions on the Bastion.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param bastion_host_name: The name of the Bastion Host.
:type bastion_host_name: str
        :param session_ids: The list of session ids to disconnect.
:type session_ids: ~azure.mgmt.network.v2020_04_01.models.SessionIds
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either BastionSessionDeleteResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_04_01.models.BastionSessionDeleteResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.BastionSessionDeleteResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = "application/json"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.disconnect_active_sessions.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'bastionHostName': self._serialize.url("bastion_host_name", bastion_host_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(session_ids, 'SessionIds')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(session_ids, 'SessionIds')
body_content_kwargs['content'] = body_content
request = self._client.get(url, query_parameters, header_parameters, **body_content_kwargs)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('BastionSessionDeleteResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
disconnect_active_sessions.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/bastionHosts/{bastionHostName}/disconnectActiveSessions'} # type: ignore
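    # Example (not part of the generated SDK): disconnecting sessions, e.g.
    # with ids gathered via begin_get_active_sessions above. This assumes the
    # SessionIds model carries the id list in its `session_ids` field, as in
    # this API version's models.
    async def _example_disconnect_sessions(self, resource_group_name, bastion_host_name, ids):
        request = models.SessionIds(session_ids=ids)
        deleted = []
        async for state in self.disconnect_active_sessions(
                resource_group_name, bastion_host_name, request):
            deleted.append(state)
        return deleted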
async def check_dns_name_availability(
self,
location: str,
domain_name_label: str,
**kwargs
) -> "models.DnsNameAvailabilityResult":
"""Checks whether a domain name in the cloudapp.azure.com zone is available for use.
:param location: The location of the domain name.
:type location: str
:param domain_name_label: The domain name to be verified. It must conform to the following
regular expression: ^[a-z][a-z0-9-]{1,61}[a-z0-9]$.
:type domain_name_label: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: DnsNameAvailabilityResult, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_04_01.models.DnsNameAvailabilityResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.DnsNameAvailabilityResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self.check_dns_name_availability.metadata['url'] # type: ignore
path_format_arguments = {
'location': self._serialize.url("location", location, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['domainNameLabel'] = self._serialize.query("domain_name_label", domain_name_label, 'str')
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('DnsNameAvailabilityResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
check_dns_name_availability.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/CheckDnsNameAvailability'} # type: ignore
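    # Example (not part of the generated SDK): a plain awaitable call with no
    # polling or paging. Assumes DnsNameAvailabilityResult exposes an
    # `available` boolean, as in this API version's models.
    async def _example_dns_label_is_free(self, location, domain_name_label):
        result = await self.check_dns_name_availability(location, domain_name_label)
        return result.available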
async def supported_security_providers(
self,
resource_group_name: str,
virtual_wan_name: str,
**kwargs
) -> "models.VirtualWanSecurityProviders":
"""Gives the supported security providers for the virtual wan.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param virtual_wan_name: The name of the VirtualWAN for which supported security providers are
needed.
:type virtual_wan_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualWanSecurityProviders, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_04_01.models.VirtualWanSecurityProviders
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.VirtualWanSecurityProviders"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self.supported_security_providers.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualWANName': self._serialize.url("virtual_wan_name", virtual_wan_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualWanSecurityProviders', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
supported_security_providers.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans/{virtualWANName}/supportedSecurityProviders'} # type: ignore
async def _generatevirtualwanvpnserverconfigurationvpnprofile_initial(
self,
resource_group_name: str,
virtual_wan_name: str,
vpn_client_params: "models.VirtualWanVpnProfileParameters",
**kwargs
) -> Optional["models.VpnProfileResponse"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["models.VpnProfileResponse"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._generatevirtualwanvpnserverconfigurationvpnprofile_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualWANName': self._serialize.url("virtual_wan_name", virtual_wan_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(vpn_client_params, 'VirtualWanVpnProfileParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VpnProfileResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_generatevirtualwanvpnserverconfigurationvpnprofile_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans/{virtualWANName}/GenerateVpnProfile'} # type: ignore
async def begin_generatevirtualwanvpnserverconfigurationvpnprofile(
self,
resource_group_name: str,
virtual_wan_name: str,
vpn_client_params: "models.VirtualWanVpnProfileParameters",
**kwargs
) -> AsyncLROPoller["models.VpnProfileResponse"]:
"""Generates a unique VPN profile for P2S clients for VirtualWan and associated
VpnServerConfiguration combination in the specified resource group.
:param resource_group_name: The resource group name.
:type resource_group_name: str
        :param virtual_wan_name: The name of the VirtualWAN whose associated VpnServerConfiguration is
         needed.
:type virtual_wan_name: str
:param vpn_client_params: Parameters supplied to the generate VirtualWan VPN profile generation
operation.
:type vpn_client_params: ~azure.mgmt.network.v2020_04_01.models.VirtualWanVpnProfileParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: True for ARMPolling, False for no polling, or a
polling object for personal polling strategy
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either VpnProfileResponse or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_04_01.models.VpnProfileResponse]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["models.VpnProfileResponse"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._generatevirtualwanvpnserverconfigurationvpnprofile_initial(
resource_group_name=resource_group_name,
virtual_wan_name=virtual_wan_name,
vpn_client_params=vpn_client_params,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VpnProfileResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generatevirtualwanvpnserverconfigurationvpnprofile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualWans/{virtualWANName}/GenerateVpnProfile'} # type: ignore
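    # Example (not part of the generated SDK): an LRO whose final result is a
    # single model rather than a pager -- awaiting `result()` yields the
    # deserialized VpnProfileResponse.
    async def _example_generate_wan_vpn_profile(self, resource_group_name, virtual_wan_name, vpn_client_params):
        poller = await self.begin_generatevirtualwanvpnserverconfigurationvpnprofile(
            resource_group_name, virtual_wan_name, vpn_client_params)
        return await poller.result()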
# =============================================================================
# File: course1-algorithm-toolbox/assignments/assignment_003_quick_sort3_way_partrition/sorting.py
# (repo: dmitri-mamrukov/coursera-data-structures-and-algorithms)
# =============================================================================
#!/usr/bin/python3
import sys
import random
def __partition3(data, left, right):
"""This function partitions a[] in three parts:
a) a[left..l - 1] contains all elements smaller than the pivot element
b) a[l..r] contains all occurrences of the pivot element
c) a[r + 1..right] contains all elements greater than the pivot element
"""
l = left
r = right
k = left + 1
pivot_value = data[left]
while k <= r:
if data[k] < pivot_value:
data[l], data[k] = data[k], data[l]
l += 1
k += 1
elif data[k] > pivot_value:
data[k], data[r] = data[r], data[k]
r -= 1
else:
k += 1
return (l - 1, r + 1)
def __partition2(data, left, right):
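    """Classic two-way (Lomuto-style) partition with the pivot at data[left].
    Unused by solve(), which relies on the three-way partition above; it is
    apparently kept for comparison.
    """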
x = data[left]
k = left;
for i in range(left + 1, right + 1):
if data[i] <= x:
k += 1
data[i], data[k] = data[k], data[i]
data[left], data[k] = data[k], data[left]
return k
def __randomized_quick_sort(data, left, right):
if left >= right:
return
k = random.randint(left, right)
data[left], data[k] = data[k], data[left]
i, j = __partition3(data, left, right)
__randomized_quick_sort(data, left, i);
__randomized_quick_sort(data, j, right);
def solve(data):
__randomized_quick_sort(data, 0, len(data) - 1)
if __name__ == '__main__':
input = sys.stdin.read()
n, *a = list(map(int, input.split()))
solve(a)
for x in a:
print(x, end = ' ')
print()
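# Example sketch (not part of the assignment solution): a tiny sanity check on
# a duplicate-heavy input -- exactly the case where the 3-way partition stays
# fast while a plain 2-way scheme degrades toward quadratic time.
def _self_test():
    data = [2, 7, 2, 2, 9, 2, 2]
    solve(data)
    assert data == [2, 2, 2, 2, 2, 7, 9]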
# =============================================================================
# File: Python接口自动化/auto_test_old/common/scripts/temp_db_file/xj_recon_model.py
# (repo: chase001/chase_learning)
# =============================================================================
from common.db.MyFields import *
from common.db.func import init_database
from peewee import *
database = init_database('db_hot')
class UnknownField(object):
def __init__(self, *_, **__): pass
class BaseModel(Model):
class Meta:
database = database
class AreaCode(BaseModel):
code = BigAutoField()
level = IntegerField(null=True)
name = CharField(null=True)
parent_code = BigIntegerField(index=True, null=True)
class Meta:
table_name = 'area_code'
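# Example sketch (not part of the generated models): a typical peewee lookup
# against the table above. It assumes init_database('db_hot') has already
# bound the connection via BaseModel.Meta; the function name is hypothetical.
def _example_area_children(parent_code):
    return list(AreaCode.select().where(AreaCode.parent_code == parent_code))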
class BatchOrderCustomer(BaseModel):
company_id = IntegerField(constraints=[SQL("DEFAULT 100")])
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
hj_user_id = BigIntegerField(null=True)
id = BigAutoField()
order_id = BigIntegerField(null=True)
receive_address = CharField(null=True)
receive_name = CharField(null=True)
receive_phone = CharField(null=True)
reference_order_id = BigIntegerField(null=True)
rerification_status = IntegerField(null=True)
ship_to_city = CharField(null=True)
ship_to_country = CharField(null=True)
ship_to_province = CharField(null=True)
ship_to_town = CharField(null=True)
task_id = BigIntegerField(index=True, null=True)
user_name = CharField(null=True)
class Meta:
table_name = 'batch_order_customer'
class BatchOrderProduct(BaseModel):
business_product_id = BigIntegerField(null=True)
combin_discount_amount = DecimalField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
id = BigAutoField()
is_master_product = MyBitField(null=True) # bit
manual_discount = DecimalField(null=True)
master_product_id = BigIntegerField(null=True)
product_id = BigIntegerField(null=True)
product_name = CharField(null=True)
product_type = IntegerField(null=True)
promotion_discount_amount = DecimalField(null=True)
quantity = IntegerField(null=True)
shipping_fee = DecimalField(null=True)
task_id = BigIntegerField(null=True)
unit_price = DecimalField(null=True)
class Meta:
table_name = 'batch_order_product'
class BatchOrderTask(BaseModel):
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
end_date = MyDateTimeField(null=True)
operator = CharField(null=True)
operator_user_id = BigIntegerField(null=True)
order_department_id = IntegerField(null=True)
order_memo = CharField(null=True)
order_project_code = CharField(null=True)
order_reason_id = IntegerField(null=True)
start_date = MyDateTimeField(null=True)
status = IntegerField(null=True)
task_id = BigAutoField()
task_name = CharField(null=True)
class Meta:
table_name = 'batch_order_task'
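# Example sketch (not in the original file): the batch-order tables above share
# a `task_id` column but declare no ForeignKeyField, so a peewee join needs an
# explicit predicate. The function name is hypothetical.
def _example_task_products(task_id):
    return list(BatchOrderProduct
                .select()
                .join(BatchOrderTask,
                      on=(BatchOrderProduct.task_id == BatchOrderTask.task_id))
                .where(BatchOrderTask.task_id == task_id))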
class BiBusiness(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_business'
class BiCouponType(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_coupon_type'
class BiDeviceType(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_device_type'
class BiOrderReason(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_order_reason'
class BiOrderSalesChannel(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_order_sales_channel'
class BiOrderSource(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_order_source'
class BiOrderType(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_order_type'
class BiPayMethod(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
is_active = MyBitField(constraints=[SQL("DEFAULT b'1'")]) # bit
pay_method_foe = CharField(null=True)
class Meta:
table_name = 'bi_pay_method'
class BiPlatformType(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_platform_type'
class BiProductStatus(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_product_status'
class BiProductType(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_product_type'
class BiSourceType(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_source_type'
class BiSupplierType(BaseModel):
code = CharField(null=True)
description = CharField(null=True)
id = BigAutoField()
class Meta:
table_name = 'bi_supplier_type'
class DepartmentCode(BaseModel):
department_id = BigIntegerField(unique=True)
department_name = CharField()
id = BigAutoField()
is_active = MyBitField(constraints=[SQL("DEFAULT b'1'")]) # bit
class Meta:
table_name = 'department_code'
class EsIndexOrderLog(BaseModel):
create_date = MyDateTimeField(constraints=[SQL("DEFAULT 0000-00-00 00:00:00")], index=True)
custom_data = CharField(null=True)
from_ = BigIntegerField(column_name='from', null=True)
id = BigAutoField()
is_valid = MyBitField(constraints=[SQL("DEFAULT b'1'")], null=True) # bit
last_order_date = MyDateTimeField(null=True)
last_order_id = BigIntegerField(null=True)
size = IntegerField(null=True)
total_records = IntegerField(null=True)
class Meta:
table_name = 'es_index_order_log'
indexes = (
(('last_order_id', 'from_', 'create_date'), False),
)
class GroupBuyCategory(BaseModel):
added_date = DateField(null=True)
alias = CharField(null=True)
id = BigAutoField()
is_valid = MyBitField(null=True) # bit
name = CharField(null=True)
parent_id = BigIntegerField(null=True)
path = CharField(null=True)
class Meta:
table_name = 'group_buy_category'
class GroupBuyCategoryAdmin(BaseModel):
added_date = DateField(null=True)
description = CharField(null=True)
id = BigAutoField()
is_valid = MyBitField(null=True) # bit
name = CharField(null=True)
class Meta:
table_name = 'group_buy_category_admin'
class GroupBuyCoupon(BaseModel):
added_date = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
batch_id = BigIntegerField(index=True)
batch_size = BigIntegerField(null=True)
description = CharField(null=True)
id = BigAutoField()
is_active = MyBitField(null=True) # bit
mail_format = CharField(null=True)
title = CharField(null=True)
class Meta:
table_name = 'group_buy_coupon'
class GroupBuyCouponDetail(BaseModel):
added_date = MyDateTimeField(null=True)
batch_id = BigIntegerField(null=True)
batch_type = IntegerField(null=True)
coupon_code = CharField(null=True)
expired_date = DateField(null=True)
extended = CharField(null=True)
group_buy_id = BigIntegerField(null=True)
id = BigAutoField()
is_active = MyBitField(null=True) # bit
send_date = MyDateTimeField(null=True)
user_id = BigIntegerField(null=True)
class Meta:
table_name = 'group_buy_coupon_detail'
class GroupBuyGlobalSettings(BaseModel):
display_a4_list_page = MyBitField(null=True) # bit
class Meta:
table_name = 'group_buy_global_settings'
primary_key = False
class GroupBuyLuckOrders(BaseModel):
email = CharField(null=True)
group_buy_id = BigIntegerField(null=True)
invitor_user_id = BigIntegerField(null=True)
join_date = MyDateTimeField(null=True)
join_reason = CharField(null=True)
lucky_number = BigIntegerField(null=True)
user_id = BigIntegerField(null=True)
class Meta:
table_name = 'group_buy_luck_orders'
primary_key = False
class GroupBuyProduct(BaseModel):
_360_cate = CharField(column_name='360_cate', null=True)
_360_display = MyBitField(column_name='360_display', null=True) # bit
_360_hot_bus_spot_name = CharField(column_name='360_hot_bus_spot_name', null=True)
_360_img = CharField(column_name='360_img', null=True)
_360_latitude = CharField(column_name='360_latitude', null=True)
_360_longitude = CharField(column_name='360_longitude', null=True)
_360_merchant_addr = CharField(column_name='360_merchant_addr', null=True)
_360_merchant_name = CharField(column_name='360_merchant_name', null=True)
_360_merchant_phone = CharField(column_name='360_merchant_phone', null=True)
_360_spent_end_time = MyDateTimeField(column_name='360_spent_end_time', null=True)
_360_spent_start_time = MyDateTimeField(column_name='360_spent_start_time', null=True)
_360_title = CharField(column_name='360_title', null=True)
admin_memo = TextField(null=True)
big_img_name = CharField(null=True)
bulo_display_img_url = CharField(null=True)
buy_only_once = MyBitField(null=True) # bit
cate_id = BigIntegerField(null=True)
cate_id_admin = BigIntegerField(null=True)
class_id = BigIntegerField(null=True)
ct_product_code = CharField(null=True)
display_by_bulo = MyBitField(null=True) # bit
end_time = MyDateTimeField(null=True)
free_buy_type = BigIntegerField(null=True)
full_num = BigIntegerField(null=True)
group_buy_price = DecimalField(null=True)
group_buy_type = BigIntegerField(null=True)
has_notice_by_mail = MyBitField(null=True) # bit
has_notice_by_sms = MyBitField(null=True) # bit
id = BigAutoField()
is_active = MyBitField(null=True) # bit
is_free_by_count = MyBitField(null=True) # bit
is_free_delivery = MyBitField(null=True) # bit
is_hide = MyBitField(null=True) # bit
is_new_version = MyBitField(null=True) # bit
is_take_by_customer = MyBitField(null=True) # bit
is_valid = MyBitField(null=True) # bit
is_view = MyBitField(null=True) # bit
key_words = CharField(null=True)
last_notice_time_mail = MyDateTimeField(null=True)
last_notice_time_sms = MyDateTimeField(null=True)
last_update_time = MyDateTimeField(null=True)
list_price = DecimalField(null=True)
low_cate_id = BigIntegerField(null=True)
mark = BigIntegerField(null=True)
max_buy_amount = BigIntegerField(null=True)
mention = TextField(null=True)
mini_product_name = CharField(null=True)
prevision_img_name = CharField(null=True)
product_desc = TextField(null=True)
product_id = BigIntegerField(null=True)
product_name = CharField(null=True)
quantity = BigIntegerField(null=True)
related_coupon_batch = BigIntegerField(null=True)
related_coupon_batch_type = IntegerField(null=True)
related_income = DecimalField(null=True)
related_staff = CharField(null=True)
room_id = BigIntegerField(null=True)
short_product_name = CharField(null=True)
small_img_name = CharField(null=True)
sort_index = BigIntegerField(null=True)
start_time = MyDateTimeField(null=True)
supplier_id = BigIntegerField(null=True)
supplier_type = BigIntegerField(null=True)
system_remark = TextField(null=True)
tags = CharField(null=True)
time_up_warning = MyBitField(null=True) # bit
total_buy_amount = BigIntegerField(null=True)
touch_product_desc = TextField(null=True)
unit_cost = DecimalField(null=True)
unit_delivery_cost = DecimalField(null=True)
user_ce_hua = CharField(null=True)
user_ce_hua_id = BigIntegerField(null=True)
user_comment = TextField(null=True)
user_design_id = BigIntegerField(null=True)
user_tui_guang = CharField(null=True)
user_tui_guang_id = BigIntegerField(null=True)
user_wen_an = CharField(null=True)
user_wen_an_id = BigIntegerField(null=True)
virtual_buyer_amount = BigIntegerField(null=True)
class Meta:
table_name = 'group_buy_product'
class GroupBuyProductDetail(BaseModel):
class_unit_cost = DecimalField(null=True)
group_buy_id = BigIntegerField(null=True)
id = BigAutoField()
is_active = MyBitField(null=True) # bit
product_id = BigIntegerField(null=True)
quantity = BigIntegerField(null=True)
unit_cost = DecimalField(null=True)
class Meta:
table_name = 'group_buy_product_detail'
class GroupBuyProductWarehouse(BaseModel):
group_buy_product_id = BigIntegerField(null=True)
id = BigAutoField()
warehouse_id = CharField(null=True)
warehouse_product_id = CharField(null=True)
class Meta:
table_name = 'group_buy_product_warehouse'
class InvoiceManage(BaseModel):
account_bank = CharField(null=True)
account_number = CharField(null=True)
apply_user_name = CharField(null=True)
company_address = CharField(null=True)
company_id = IntegerField(null=True)
company_name = CharField(null=True)
company_phone = CharField(null=True)
courier_number = BigIntegerField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], null=True)
create_user_id = BigIntegerField(null=True)
express_name = CharField(null=True)
express_pay_method = IntegerField(null=True)
ext_param = CharField(null=True)
id = BigAutoField()
ident_number = CharField(null=True)
invoice_content = IntegerField(null=True)
invoice_fee = DecimalField(null=True)
invoice_header = CharField(null=True)
invoice_header_type = IntegerField(null=True)
invoice_status = IntegerField(constraints=[SQL("DEFAULT 1")], null=True)
invoice_type = IntegerField(null=True)
is_print = MyBitField(constraints=[SQL("DEFAULT b'0'")], null=True) # bit
is_send = MyBitField(null=True) # bit
order_id = BigIntegerField(null=True)
recipient = CharField(null=True)
recipient_address = CharField(null=True)
recipient_city = CharField(null=True)
recipient_phone = CharField(null=True)
recipient_province = CharField(null=True)
recipient_town = CharField(null=True)
remark = CharField(null=True)
update_time = MyDateTimeField(null=True)
update_user_id = BigIntegerField(null=True)
class Meta:
table_name = 'invoice_manage'
class JdHjOrders(BaseModel):
create_date = MyDateTimeField(null=True)
customer_address = CharField(null=True)
customer_phone = CharField(null=True)
hj_deal_fee = DecimalField(null=True)
hj_order_date = MyDateTimeField(null=True)
hj_order_id = BigIntegerField(null=True)
id = BigAutoField()
is_processed = MyBitField(null=True) # bit
is_same = MyBitField(null=True) # bit
jd_order_date = MyDateTimeField(null=True)
jd_order_id = CharField(unique=True)
jd_seller_price = DecimalField(null=True)
memo = CharField(null=True)
class Meta:
table_name = 'jd_hj_orders'
class OrderArchiveDetailLog(BaseModel):
archive_batch_code = CharField(index=True)
archive_time = MyDateTimeField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
delete_time = MyDateTimeField(null=True)
id = BigAutoField()
is_archive = MyBitField(null=True) # bit
is_delete = MyBitField(null=True) # bit
is_to_es = MyBitField(null=True) # bit
order_id = BigIntegerField(index=True)
to_es_time = MyDateTimeField(null=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
class Meta:
table_name = 'order_archive_detail_log'
class OrderArchiveMasterLog(BaseModel):
archive_batch_code = CharField(index=True)
archive_order_quantity = BigIntegerField(null=True)
archive_status = MyBitField(null=True) # bit
begin_order_id = BigIntegerField(index=True, null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
delete_status = MyBitField(null=True) # bit
end_order_id = BigIntegerField(null=True)
id = BigAutoField()
to_es_status = MyBitField(null=True) # bit
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
class Meta:
table_name = 'order_archive_master_log'
class OrderAssessment(BaseModel):
business_product_id = BigIntegerField(null=True)
deposit_discount_amount = DecimalField(null=True)
id = BigAutoField()
manual_discount_amount = DecimalField(null=True)
multi_product_id = BigIntegerField(null=True)
new_product_id = BigIntegerField(null=True)
order_id = BigIntegerField(index=True, null=True)
product_id = BigIntegerField(null=True)
quantity = IntegerField(null=True)
share_card_fee = DecimalField(null=True)
share_card_income = DecimalField(null=True)
share_combine_fee = DecimalField(null=True)
share_cost = DecimalField(null=True)
share_coupon_fee = DecimalField(null=True)
share_coupon_income = DecimalField(null=True)
share_course_code_fee = DecimalField(null=True)
share_course_code_income = DecimalField(null=True)
share_discount_fee = DecimalField(null=True)
share_handling_fee = DecimalField(null=True)
share_income = DecimalField(null=True)
share_preincome = DecimalField(null=True)
share_purchase_xb = DecimalField(null=True)
share_recharge_xb = DecimalField(null=True)
share_reward_xb = DecimalField(null=True)
share_shipping_fee = DecimalField(null=True)
share_user_handling_fee = DecimalField(null=True)
share_vipcard_fee = DecimalField(null=True)
share_vipcard_income = DecimalField(null=True)
unit_price = DecimalField(null=True)
class Meta:
table_name = 'order_assessment'
indexes = (
(('order_id', 'product_id', 'multi_product_id'), False),
)
class OrderBaseUser(BaseModel):
address = CharField(null=True)
answer = CharField(null=True)
bbs_user_id = BigIntegerField(index=True, null=True)
buy_times = IntegerField(null=True)
cellphone = CharField(null=True)
charge = DecimalField(null=True)
display_pwd = CharField(null=True)
email = CharField(null=True)
expired_date = MyDateTimeField(null=True)
fee_mark = IntegerField(null=True)
froze_late_fee = DecimalField(null=True)
gender = IntegerField(null=True)
gold = IntegerField(null=True)
has_validate_cellphone = MyBitField(null=True) # bit
icon_name = CharField(null=True)
id_card_num = CharField(null=True)
last_login_ip = CharField(null=True)
last_login_time = MyDateTimeField(null=True)
late_fee = DecimalField(null=True)
lock_flag = IntegerField(null=True)
login_times = IntegerField(null=True)
phone = CharField(null=True)
question = CharField(null=True)
rank = IntegerField(null=True)
rank_mark = IntegerField(null=True)
reg_date = MyDateTimeField(null=True)
reg_ip = CharField(null=True)
sina_weibo_account = BigIntegerField(null=True)
timestamp = MyDateTimeField(null=True)
true_name = CharField(null=True)
user_custom_cata_list = CharField(null=True)
user_fav_cata_list = CharField(null=True)
user_id = BigAutoField()
user_name = CharField(null=True)
user_pwd = CharField(null=True)
user_top_custom_cata_list = CharField(null=True)
veri_code = CharField(null=True)
vip_level = IntegerField(null=True)
vip_total_days = IntegerField(null=True)
zipcode = CharField(null=True)
class Meta:
table_name = 'order_base_user'
class OrderBusinessExtend(BaseModel):
business_org_code = CharField(null=True)
company_id = IntegerField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
id = BigAutoField()
key = CharField(null=True)
order_id = BigIntegerField(index=True, null=True)
values = CharField(null=True)
class Meta:
table_name = 'order_business_extend'
indexes = (
(('key', 'order_id', 'business_org_code'), False),
(('key', 'values'), False),
)
class OrderCancelLog(BaseModel):
cancel_type = IntegerField(null=True)
create_date = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
id = BigAutoField()
ip = CharField(null=True)
operator_user_id = BigIntegerField(null=True)
operator_user_name = CharField(null=True)
order_id = BigIntegerField(null=True)
remark = CharField(null=True)
source_id = IntegerField(null=True)
status = IntegerField(null=True)
class Meta:
table_name = 'order_cancel_log'
class OrderCarriedForward(BaseModel):
company_id = IntegerField(index=True, null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
create_user_id = BigIntegerField(null=True)
id = BigAutoField()
income = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
order_id = BigIntegerField(index=True, null=True)
preincome = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
purchase_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
recharge_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
reward_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
xb_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
class Meta:
table_name = 'order_carried_forward'
class OrderCarriedForwardMulti(BaseModel):
business_id = IntegerField(null=True)
category_id = IntegerField(null=True)
company_id = IntegerField(index=True, null=True)
id = BigAutoField()
income = DecimalField()
multi_product_id = BigIntegerField(null=True)
order_id = BigIntegerField(index=True)
preincome = DecimalField()
product_id = BigIntegerField()
product_type = IntegerField(null=True)
purchase_xb = DecimalField()
quantity = IntegerField()
recharge_xb = DecimalField()
reward_xb = DecimalField()
seller_id = BigIntegerField(null=True)
unit_price = DecimalField()
xb_fee = DecimalField()
class Meta:
table_name = 'order_carried_forward_multi'
indexes = (
(('business_id', 'company_id', 'seller_id'), False),
(('multi_product_id', 'product_id'), False),
)
class OrderChangeLog(BaseModel):
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
create_user_company_id = IntegerField(null=True)
create_user_id = BigIntegerField(null=True)
create_user_name = CharField(null=True)
id = BigAutoField()
old_ship_to_name = CharField(null=True)
old_ship_to_zip = CharField(null=True)
order_id = BigIntegerField(null=True)
ship_to_name = CharField(null=True)
ship_to_zip = CharField(null=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
update_user_company_id = IntegerField(null=True)
update_user_id = BigIntegerField(null=True)
update_user_name = CharField(null=True)
user_id = BigIntegerField(null=True)
class Meta:
table_name = 'order_change_log'
class OrderConfig(BaseModel):
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
create_user_id = BigIntegerField(null=True)
create_user_name = CharField(null=True)
id = BigAutoField()
is_active = MyBitField(null=True) # bit
is_delete = MyBitField(null=True) # bit
key = CharField(index=True)
remark = CharField(null=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
update_user_id = BigIntegerField(null=True)
update_user_name = CharField(null=True)
value = CharField(null=True)
class Meta:
table_name = 'order_config'
class OrderCouponConsum(BaseModel):
company_id = IntegerField(null=True)
cost = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
coupon_code = CharField(index=True, null=True)
coupon_discount = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
coupon_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
coupon_income = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
coupon_name = CharField(null=True)
coupon_type = IntegerField(null=True)
id = BigAutoField()
order_id = BigIntegerField(index=True)
class Meta:
table_name = 'order_coupon_consum'
class OrderDealMemo(BaseModel):
deal_date = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
deal_memo = CharField(null=True)
deal_user = CharField(null=True)
deal_user_company_id = IntegerField(null=True)
id = BigAutoField()
order_id = BigIntegerField(index=True)
class Meta:
table_name = 'order_deal_memo'
indexes = (
(('order_id', 'deal_user'), False),
)
class OrderDeliver(BaseModel):
batch_id = IntegerField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
delivery_failed_qty = IntegerField(null=True)
delivery_qty = IntegerField(null=True)
delivery_status = IntegerField(null=True)
id = BigAutoField()
master_product_id = BigIntegerField(null=True)
order_id = BigIntegerField()
product_id = BigIntegerField()
product_type = IntegerField(null=True)
quantity = IntegerField(null=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
class Meta:
table_name = 'order_deliver'
indexes = (
(('order_id', 'product_id'), False),
)
class OrderDetail(BaseModel):
account_date = MyDateTimeField(null=True)
batch_id = IntegerField(null=True)
business_id = IntegerField(null=True)
business_product_id = BigIntegerField(index=True, null=True)
category_id = IntegerField(null=True)
combine_discount_amount = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
company_id = IntegerField(null=True)
coupon_code = CharField(null=True)
deposit_discount_amount = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
discount_amount = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
id = BigAutoField()
is_master_product = MyBitField() # bit
is_refunded = MyBitField(null=True) # bit
manual_discount_amount = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
nsource = CharField(null=True)
order_id = BigIntegerField(null=True)
point_discount = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
product_cate = IntegerField(null=True)
product_cost = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
product_id = BigIntegerField()
product_name = CharField(null=True)
product_type = IntegerField(null=True)
promotion_info = CharField(null=True)
quantity = IntegerField()
seller_id = BigIntegerField(null=True)
timestamp = MyDateTimeField(null=True)
unit_price = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
warehouse_id = IntegerField(null=True)
class Meta:
table_name = 'order_detail'
indexes = (
(('order_id', 'product_id'), False),
(('order_id', 'product_type'), False),
(('order_id', 'quantity'), False),
)
class OrderDetailAttached(BaseModel):
company_id = IntegerField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
create_user_company_id = IntegerField(null=True)
create_user_id = BigIntegerField(null=True)
id = BigAutoField()
master_product_id = BigIntegerField(index=True)
order_id = BigIntegerField(index=True)
product_id = BigIntegerField(index=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
update_user_company_id = IntegerField(null=True)
update_user_id = BigIntegerField(null=True)
class Meta:
table_name = 'order_detail_attached'
class OrderDetailCoupon(BaseModel):
batch_id = IntegerField(null=True)
coupon_code = CharField(index=True, null=True)
coupon_type = IntegerField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
id = BigAutoField()
is_verificationed = MyBitField(null=True) # bit
multi_product_id = BigIntegerField(null=True)
order_id = BigIntegerField()
product_id = BigIntegerField(null=True)
reference_verify_id = BigIntegerField(index=True, null=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
verify_time = MyDateTimeField(null=True)
class Meta:
table_name = 'order_detail_coupon'
indexes = (
(('order_id', 'multi_product_id', 'product_id'), False),
)
class OrderDetailDiscount(BaseModel):
business_product_id = BigIntegerField(null=True)
company_id = IntegerField(null=True)
discount_amount = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
discount_dec = CharField(null=True)
discount_xb = DecimalField(null=True)
end_time = MyDateTimeField(null=True)
id = BigAutoField()
order_id = BigIntegerField(index=True, null=True)
product_business_id = IntegerField(null=True)
product_id = BigIntegerField(index=True, null=True)
product_seller_id = BigIntegerField(null=True)
source_code = CharField(index=True, null=True)
source_id = BigIntegerField(null=True)
source_type = IntegerField(null=True)
start_time = MyDateTimeField(null=True)
class Meta:
table_name = 'order_detail_discount'
class OrderDetailMulti(BaseModel):
add_to_cart_url = CharField(null=True)
batch_id = IntegerField(null=True)
business_id = IntegerField(null=True)
business_product_id = BigIntegerField(index=True, null=True)
category_id = IntegerField(null=True)
combine_discount_amount = DecimalField(null=True)
company_id = IntegerField(null=True)
coupon_code = CharField(null=True)
coupon_type = IntegerField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
deposit_discount_amount = DecimalField(null=True)
discount_amount = DecimalField(null=True)
has_subtotal_value = MyBitField(null=True) # bit
id = BigAutoField()
manual_discount_amount = DecimalField(null=True)
multi_product_id = BigIntegerField(null=True)
order_id = BigIntegerField(null=True)
point_discount = DecimalField(null=True)
product_cost = DecimalField(null=True)
product_id = BigIntegerField(index=True, null=True)
product_name = CharField(null=True)
product_type = IntegerField(null=True)
quantity = IntegerField(null=True)
seller_id = BigIntegerField(null=True)
share_card_fee = DecimalField(null=True)
share_card_income = DecimalField(null=True)
share_coupon_fee = DecimalField(null=True)
share_coupon_income = DecimalField(null=True)
share_course_code_fee = DecimalField(null=True)
share_course_code_income = DecimalField(null=True)
share_discount_fee = DecimalField(null=True)
share_handling_fee = DecimalField(null=True)
share_income = DecimalField(null=True)
share_invite_code_fee = DecimalField(null=True)
share_preincome = DecimalField(null=True)
share_purchase_xb = DecimalField(null=True)
share_recharge_xb = DecimalField(null=True)
share_reward_xb = DecimalField(null=True)
share_shipping_fee = DecimalField(null=True)
share_user_handling_fee = DecimalField(null=True)
share_vipcard_fee = DecimalField(null=True)
share_vipcard_income = DecimalField(null=True)
sid = CharField(null=True)
ssid = CharField(null=True)
subtotal_card_fee = DecimalField(null=True)
subtotal_card_income = DecimalField(null=True)
subtotal_coupon_fee = DecimalField(null=True)
subtotal_coupon_income = DecimalField(null=True)
subtotal_course_code_fee = DecimalField(null=True)
subtotal_course_code_income = DecimalField(null=True)
subtotal_discount_amount = DecimalField(null=True)
subtotal_handling_fee = DecimalField(null=True)
subtotal_income = DecimalField(null=True)
subtotal_invite_code_fee = DecimalField(null=True)
subtotal_pre_income = DecimalField(null=True)
subtotal_purchase_xb = DecimalField(null=True)
subtotal_recharge_xb = DecimalField(null=True)
subtotal_reward_xb = DecimalField(null=True)
subtotal_shipping_fee = DecimalField(null=True)
subtotal_user_handling_fee = DecimalField(null=True)
subtotal_vipcard_fee = DecimalField(null=True)
subtotal_vipcard_income = DecimalField(null=True)
uid = CharField(null=True)
unit_price = DecimalField(null=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
warehouse_id = IntegerField(null=True)
class Meta:
table_name = 'order_detail_multi'
indexes = (
(('order_id', 'multi_product_id'), False),
(('order_id', 'product_type'), False),
)
class OrderFromTop(BaseModel):
added_date = MyDateTimeField(null=True)
ali_trade_no = BigIntegerField(null=True)
id = BigAutoField()
import_order = TextField(null=True)
operator = CharField(null=True)
order_id = BigIntegerField(null=True)
platform_id = IntegerField(null=True)
taobao_token = CharField(null=True)
class Meta:
table_name = 'order_from_top'
class OrderHjUser(BaseModel):
bbs_user_id = BigIntegerField(index=True, null=True)
company_id = IntegerField(null=True)
department_id = IntegerField(null=True)
email = CharField(null=True)
id = BigAutoField()
nick_name = CharField(null=True)
true_name = CharField(null=True)
user_name = CharField(null=True)
class Meta:
table_name = 'order_hj_user'
class OrderIncome(BaseModel):
batch_id = BigIntegerField(null=True)
coupon_code = CharField(index=True, null=True)
income_date = MyDateTimeField(index=True, null=True)
income_id = BigAutoField()
income_type = IntegerField(null=True)
last_update_date = MyDateTimeField(null=True)
master_product_id = BigIntegerField(null=True)
old_refund_id = BigIntegerField(null=True)
operater_type = IntegerField(null=True)
order_type = IntegerField(null=True)
product_name = CharField(null=True)
product_type = IntegerField(null=True)
quantity = IntegerField(null=True)
reference_income_id = BigIntegerField(null=True)
share_income_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
share_purchase_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
share_recharege_xb = DecimalField()
share_reward_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
source_order_id = BigIntegerField(null=True)
source_rma_id = BigIntegerField(index=True, null=True)
status = IntegerField(null=True)
sub_product_id = BigIntegerField(null=True)
user_id = BigIntegerField(null=True)
class Meta:
table_name = 'order_income'
indexes = (
(('source_order_id', 'sub_product_id'), False),
)
class OrderIncomeStaging(BaseModel):
create_time = MyDateTimeField(constraints=[SQL("DEFAULT 0000-00-00 00:00:00")], index=True)
id = BigAutoField()
rma_id = BigIntegerField(index=True, null=True)
source_order_id = BigIntegerField(index=True)
status = IntegerField(constraints=[SQL("DEFAULT 0")])
update_time = MyDateTimeField(null=True)
class Meta:
table_name = 'order_income_staging'
class OrderMaster(BaseModel):
ali_trade_no = CharField(null=True)
bank_code = CharField(null=True)
bill_date = MyDateTimeField(null=True)
bill_no = CharField(null=True)
cancel_date = MyDateTimeField(null=True)
cell_phone = CharField(null=True)
chest_fee = DecimalField(null=True)
city_id = IntegerField(null=True)
combine_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
company_id = IntegerField(null=True)
coupon_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
coupon_income = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
create_user_company_id = IntegerField(null=True)
create_user_id = BigIntegerField(null=True)
deal_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
deal_memo = CharField(null=True)
deal_user = CharField(null=True)
deliver_id = CharField(null=True)
delivery_result = IntegerField(null=True)
delivery_status = IntegerField(null=True)
deposit_discount_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
discount_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
email = CharField(index=True, null=True)
express = IntegerField(null=True)
express_id = IntegerField(null=True)
extend_bill_status = IntegerField(null=True)
fee_memo = CharField(null=True)
from_ip = CharField(null=True)
handling_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
hj_user_id = BigIntegerField(index=True, null=True)
income = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
installment_number = IntegerField(null=True)
invite_code_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
is_active = MyBitField() # bit
is_audit = MyBitField(null=True) # bit
is_bill = MyBitField() # bit
is_cancel = MyBitField() # bit
is_child = MyBitField(constraints=[SQL("DEFAULT b'0'")], null=True) # bit
is_inside = MyBitField(null=True) # bit
is_notify = MyBitField(null=True) # bit
is_phone = MyBitField(null=True) # bit
is_print = MyBitField(null=True) # bit
is_test = MyBitField(null=True) # bit
is_trace = MyBitField(null=True) # bit
is_unusual = MyBitField(constraints=[SQL("DEFAULT b'0'")], null=True) # bit
is_valid = MyBitField() # bit
manual_discount_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
mark = IntegerField(null=True)
msn = CharField(null=True)
notify_mark = CharField(null=True)
nsource = CharField(null=True)
operator_company_id = IntegerField(null=True)
operator_user_id = BigIntegerField(null=True)
order_date = MyDateTimeField(null=True)
order_device_id = IntegerField(null=True)
order_id = BigAutoField()
order_number = CharField(index=True, null=True)
order_type = IntegerField()
outer_trade_no = CharField(index=True, null=True)
parent_order_id = BigIntegerField(null=True)
pay_card_type = IntegerField(null=True)
pay_device_id = IntegerField(null=True)
pay_method = CharField(null=True)
payment_bank_discount = DecimalField(null=True)
phone_date = MyDateTimeField(null=True)
platform_id = IntegerField(null=True)
point_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
pre_income = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
province_id = IntegerField(null=True)
purchase_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
qq = CharField(null=True)
recharge_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
refer_source_id = IntegerField(null=True)
refer_url = CharField(null=True)
refund_type = CharField(null=True)
related_order_id = BigIntegerField(index=True, null=True)
reward_xb = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
seller_id = BigIntegerField(index=True, null=True)
ship_date = MyDateTimeField(null=True)
ship_flag = IntegerField(null=True)
ship_method = CharField(null=True)
ship_to_addr = CharField(null=True)
ship_to_city = CharField(null=True)
ship_to_country = CharField(constraints=[SQL("DEFAULT '中国'")], null=True)
ship_to_name = CharField(index=True, null=True)
ship_to_phone = CharField(index=True, null=True)
ship_to_province = CharField(null=True)
ship_to_time = CharField(null=True)
ship_to_town = CharField(null=True)
ship_to_zip = CharField(null=True)
shipping_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
temp_order_version = IntegerField(null=True)
timestamp = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
total_cost = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
total_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
total_order_today = IntegerField(null=True)
town_id = IntegerField(null=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
update_user_company_id = IntegerField(null=True)
update_user_id = BigIntegerField(null=True)
user_coupon_id = IntegerField(null=True)
user_handling_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
user_id = BigIntegerField(null=True)
user_memo = CharField(null=True)
user_reg_date = MyDateTimeField(null=True)
user_source = CharField(null=True)
user_title = CharField(null=True)
xb_fee = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
class Meta:
table_name = 'order_master'
indexes = (
(('bill_date', 'company_id', 'deal_fee', 'order_type', 'is_bill', 'is_cancel'), False),
(('order_date', 'order_type', 'is_bill', 'ship_flag', 'is_cancel'), False),
(('platform_id', 'temp_order_version'), False),
)
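# --- Added note (not generated by pwiz): a hedged query sketch ---------------
# The composite indexes above were reflected from the MySQL schema; a lookup
# that can use the (order_date, order_type, is_bill, ship_flag, is_cancel)
# index might look like the helper below.  The function name and the literal
# filter values are illustrative assumptions, not project code.
def _example_recent_unshipped(order_type):
    # Select a few columns and filter/order on indexed fields only.
    return (OrderMaster
            .select(OrderMaster.order_id, OrderMaster.order_date)
            .where((OrderMaster.order_type == order_type) &
                   (OrderMaster.is_cancel == 0) &
                   (OrderMaster.ship_flag == 0))
            .order_by(OrderMaster.order_date.desc()))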
class OrderMessageLog(BaseModel):
id = BigAutoField()
message_content = TextField(null=True)
message_id = CharField(null=True)
produce_id = CharField(null=True)
send_date_time = MyDateTimeField(null=True)
send_machine_ip = CharField(null=True)
class Meta:
table_name = 'order_message_log'
class OrderPayInfo(BaseModel):
bank_code = CharField(null=True)
begin_time = MyDateTimeField(null=True)
bill_amount = DecimalField()
child_order_id = BigIntegerField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
create_user_id = BigIntegerField(null=True)
end_time = MyDateTimeField(null=True)
ext_param = CharField(null=True)
id = BigAutoField()
order_id = BigIntegerField()
order_type = IntegerField(null=True)
origin_order_id = BigIntegerField(null=True)
pay_channel = CharField(null=True)
pay_device_id = IntegerField(null=True)
pay_method = IntegerField(null=True)
pay_num = CharField(null=True)
pay_status = IntegerField(null=True)
pay_time = MyDateTimeField(null=True)
pay_type = IntegerField(null=True)
purchase_xb = DecimalField()
recharge_xb = DecimalField()
remark = CharField(null=True)
reward_xb = DecimalField()
trans_seq_no = CharField(null=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
update_user_id = BigIntegerField(null=True)
xb_fee = DecimalField()
class Meta:
table_name = 'order_pay_info'
indexes = (
(('end_time', 'begin_time'), False),
(('order_id', 'child_order_id'), False),
(('order_id', 'order_type'), False),
(('pay_time', 'pay_type'), False),
)
class OrderProductGroupbuy(BaseModel):
a_360_cate = CharField()
a_360_display = MyBitField(constraints=[SQL("DEFAULT b'0'")]) # bit
a_360_hot_bus_spot_name = CharField()
a_360_img = CharField()
a_360_latitude = CharField()
a_360_longitude = CharField()
a_360_merchant_addr = CharField()
a_360_merchant_name = CharField()
a_360_merchant_phone = CharField()
a_360_spent_end_time = MyDateTimeField(null=True)
a_360_spent_start_time = MyDateTimeField(null=True)
a_360_title = CharField()
admin_memo = CharField()
big_img_name = CharField()
bulo_display_img_url = CharField(null=True)
buy_only_once = MyBitField(constraints=[SQL("DEFAULT b'0'")]) # bit
cate_id = IntegerField(constraints=[SQL("DEFAULT 0")])
cate_idadmin = IntegerField(constraints=[SQL("DEFAULT 0")])
class_id = IntegerField(constraints=[SQL("DEFAULT 0")])
ctproduct_code = CharField(null=True)
display_by_bulo = MyBitField(constraints=[SQL("DEFAULT b'0'")]) # bit
end_time = MyDateTimeField()
free_buy_type = IntegerField(constraints=[SQL("DEFAULT 0")])
full_num = IntegerField(constraints=[SQL("DEFAULT 0")])
group_buy_price = DecimalField()
groupbuy_type = IntegerField()
has_notice_by_mail = MyBitField(constraints=[SQL("DEFAULT b'0'")]) # bit
has_notice_by_sms = MyBitField(constraints=[SQL("DEFAULT b'0'")]) # bit
id = BigAutoField()
is_active = IntegerField(constraints=[SQL("DEFAULT 0")])
is_free_by_count = MyBitField(constraints=[SQL("DEFAULT b'0'")]) # bit
is_free_delivery = MyBitField(constraints=[SQL("DEFAULT b'0'")]) # bit
is_hide = MyBitField() # bit
is_new_version = MyBitField(null=True) # bit
is_takeby_customer = MyBitField(constraints=[SQL("DEFAULT b'0'")]) # bit
is_valid = MyBitField(constraints=[SQL("DEFAULT b'1'")]) # bit
is_view = MyBitField(constraints=[SQL("DEFAULT b'0'")]) # bit
keywords = CharField(null=True)
last_notice_time_mail = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
last_notice_time_sms = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
last_update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
list_price = DecimalField()
low_cate_id = IntegerField(null=True)
mark = IntegerField()
max_buy_amount = IntegerField(constraints=[SQL("DEFAULT 0")])
mention = CharField()
mini_product_name = CharField(null=True)
prevision_img_name = CharField()
product_desc = TextField()
product_id = BigIntegerField(index=True)
product_name = CharField(null=True)
quantity = IntegerField()
related_coupon_batch = IntegerField(constraints=[SQL("DEFAULT 0")])
related_coupon_batch_type = IntegerField(null=True)
related_income = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
related_staff = CharField(null=True)
room_id = IntegerField(null=True)
short_product_name = CharField(null=True)
small_img_name = CharField()
sort_index = IntegerField(constraints=[SQL("DEFAULT 0")])
start_time = MyDateTimeField()
supplier_id = IntegerField(constraints=[SQL("DEFAULT 0")])
supplier_type = IntegerField(constraints=[SQL("DEFAULT 0")])
system_remark = TextField(null=True)
tags = CharField(null=True)
timeup_warning = MyBitField(constraints=[SQL("DEFAULT b'1'")]) # bit
total_buy_amount = IntegerField(constraints=[SQL("DEFAULT 0")])
touch_product_desc = TextField(null=True)
unit_cost = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
unit_delivery_cost = DecimalField(constraints=[SQL("DEFAULT 0.0000")])
user_ce_hua = CharField()
user_ce_hua_id = IntegerField(null=True)
user_comment = TextField()
user_design_id = IntegerField(null=True)
user_tui_guang = CharField()
user_tui_guang_id = IntegerField(null=True)
user_wen_an = CharField()
user_wen_an_id = IntegerField(null=True)
virtual_buyer_amount = IntegerField()
class Meta:
table_name = 'order_product_groupbuy'
class OrderSplitIndex(BaseModel):
begin_order_id = BigIntegerField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
database_index = CharField(null=True)
end_order_id = BigIntegerField(null=True)
id = BigAutoField()
last_order_id = BigIntegerField(null=True)
table_index = CharField(null=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
class Meta:
table_name = 'order_split_index'
indexes = (
(('begin_order_id', 'end_order_id'), False),
)
class OrderStageRetry(BaseModel):
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], null=True)
order_id = BigIntegerField()
retry_times = IntegerField(constraints=[SQL("DEFAULT 1")])
stage = IntegerField()
status = IntegerField(constraints=[SQL("DEFAULT 0")])
update_time = MyDateTimeField(null=True)
class Meta:
table_name = 'order_stage_retry'
indexes = (
(('order_id', 'stage'), True),
)
primary_key = CompositeKey('order_id', 'stage')
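# Added note: because of the CompositeKey above, OrderStageRetry rows have no
# surrogate "id" column and are addressed by the (order_id, stage) pair.  A
# hedged lookup sketch (the variable names are illustrative):
#
#     row = OrderStageRetry.get((OrderStageRetry.order_id == some_order_id) &
#                               (OrderStageRetry.stage == some_stage))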
class OrderTester(BaseModel):
company_id = IntegerField(null=True)
hj_user_id = BigIntegerField(index=True, null=True)
id = BigAutoField()
status = MyBitField(null=True) # bit
user_id = BigIntegerField(null=True)
user_name = CharField(null=True)
class Meta:
table_name = 'order_tester'
class OrderTracking(BaseModel):
add_to_cart_url = CharField(null=True)
app_id = CharField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
device_id = CharField(null=True)
ext_param = CharField(null=True)
from_ip = CharField(null=True)
id = BigAutoField()
order_department_id = IntegerField(null=True)
order_device_id = IntegerField(null=True)
order_id = BigIntegerField()
order_reason_id = IntegerField(null=True)
order_source_id = IntegerField(null=True)
pay_device_id = IntegerField(null=True)
refer_url = CharField(null=True)
reference_order_id = BigIntegerField(index=True, null=True)
rma_flag = IntegerField(null=True)
sales_channel_id = IntegerField(null=True)
sales_platform_id = IntegerField(null=True)
sid = CharField(null=True)
solution_code = CharField(null=True)
ssid = CharField(null=True)
swap_solution_code = CharField(null=True)
uid = CharField(null=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True, null=True)
class Meta:
table_name = 'order_tracking'
indexes = (
(('order_id', 'order_source_id', 'solution_code', 'sales_platform_id'), False),
)
class OrderUserAddressLog(BaseModel):
address = CharField(null=True)
change_date = MyDateTimeField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
create_user_company_id = IntegerField(null=True)
create_user_id = BigIntegerField(null=True)
id = BigAutoField()
old_address = CharField(null=True)
operator = CharField(null=True)
order_id = BigIntegerField(null=True)
shop_user_id = BigIntegerField(null=True)
user_id = BigIntegerField(null=True)
class Meta:
table_name = 'order_user_address_log'
class OrderUserPhoneLog(BaseModel):
change_date = MyDateTimeField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
create_user_company_id = IntegerField(null=True)
create_user_id = BigIntegerField(null=True)
id = BigAutoField()
old_phone = CharField(null=True)
operator = CharField(null=True)
order_id = BigIntegerField(null=True)
phone = CharField(null=True)
shop_user_id = BigIntegerField(null=True)
type = IntegerField(null=True)
user_id = BigIntegerField(null=True)
class Meta:
table_name = 'order_user_phone_log'
class OrderVirtualDeliver(BaseModel):
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
id = BigAutoField()
order_deliver_id = BigIntegerField(index=True, null=True)
order_id = BigIntegerField(index=True, null=True)
send_code = CharField(null=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
class Meta:
table_name = 'order_virtual_deliver'
class TempOrderMetaData(BaseModel):
hj_user_id = BigIntegerField(null=True)
id = BigAutoField()
product_id = BigIntegerField(null=True)
user_domain = CharField(null=True)
class Meta:
table_name = 'temp_order_meta_data'
class TempOrderSellerCc(BaseModel):
id = BigAutoField()
seller_id = BigIntegerField(null=True)
class Meta:
table_name = 'temp_order_seller_cc'
class TempOrderUserCc(BaseModel):
hj_user_id = BigIntegerField(null=True)
id = BigAutoField()
class Meta:
table_name = 'temp_order_user_cc'
class TradeControl(BaseModel):
compensate_action = CharField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
create_user_id = BigIntegerField(null=True)
has_cancel = IntegerField(null=True)
has_commit = IntegerField(null=True)
has_compensate = IntegerField(index=True, null=True)
has_freeze = IntegerField(null=True)
id = BigAutoField()
order_id = BigIntegerField(index=True, null=True)
trade_number = CharField(unique=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
update_user_id = BigIntegerField(null=True)
class Meta:
table_name = 'trade_control'
class TradeResourceStatus(BaseModel):
cancel_time = MyDateTimeField(null=True)
commit_time = MyDateTimeField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")], index=True)
create_user_id = BigIntegerField(null=True)
freeze_time = MyDateTimeField(null=True)
has_cancel = IntegerField(null=True)
has_commit = IntegerField(null=True)
has_freeze = IntegerField(null=True)
id = BigAutoField()
order_id = BigIntegerField(null=True)
resource_code = CharField(null=True, unique=True)
resource_type = IntegerField(null=True)
retry_count = IntegerField(null=True)
retry_time = MyDateTimeField(null=True)
trade_number = CharField(index=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
update_user_id = BigIntegerField(null=True)
class Meta:
table_name = 'trade_resource_status'
class UserAddress(BaseModel):
city_id = IntegerField(null=True)
create_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
create_user_company_id = IntegerField(null=True)
create_user_id = BigIntegerField(null=True)
id = BigAutoField()
is_default = MyBitField(null=True) # bit
msn = CharField(null=True)
province_id = IntegerField(null=True)
qq = CharField(null=True)
ship_to_address = CharField(null=True)
ship_to_cellphone = CharField(null=True)
ship_to_email = CharField(null=True)
ship_to_name = CharField(null=True)
ship_to_phone = CharField(null=True)
ship_to_zip = CharField(null=True)
shop_user_id = BigIntegerField(null=True)
town_id = IntegerField(null=True)
update_time = MyDateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
update_user_company_id = IntegerField(null=True)
update_user_id = BigIntegerField(null=True)
class Meta:
table_name = 'user_address'
| [
"[email protected]"
] | |
7f9e6d7b2d645fcd5aa6bd33457e423a8acbaae7 | 485784cea86f52c2acda0a495942689104cd391e | /schedule/migrations/0002_rinkschedule_schedule_date.py | 9b86d692f4df4d3b153c2be9115884978a11c438 | [] | no_license | BrianC68/OIC_Web_Apps | 50ec9f46868ad37dc809548d2d362a4573320539 | e75b9439b11cf2325675d76dacac38806156fb16 | refs/heads/master | 2023-08-09T07:39:33.066601 | 2023-08-07T13:22:39 | 2023-08-07T13:22:39 | 197,438,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 472 | py | # Generated by Django 2.2.1 on 2019-10-08 23:50
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('schedule', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='rinkschedule',
name='schedule_date',
field=models.DateField(default=django.utils.timezone.now),
preserve_default=False,
),
]
| [
"[email protected]"
] | |
a5ce8e3808360b1d98652c7e2b7c2658debc42d3 | 559f3dec0964d2e0f86c6c871371fe779cf3726c | /contrib/MedicalSeg/tools/preprocess_utils/dataset_json.py | 5b3372963ae365689314ba2a9ae2d83d7a9307a9 | [
"Apache-2.0"
] | permissive | PaddlePaddle/PaddleSeg | 319ab26665ea492527a1949671650135123ffc39 | 2c8c35a8949fef74599f5ec557d340a14415f20d | refs/heads/release/2.8 | 2023-08-31T09:08:06.724717 | 2023-08-18T01:59:56 | 2023-08-18T01:59:56 | 204,380,779 | 8,531 | 1,866 | Apache-2.0 | 2023-09-12T02:30:42 | 2019-08-26T02:32:22 | Python | UTF-8 | Python | false | false | 478 | py | import json
def parse_msd_basic_info(json_path):
"""
get dataset basic info from msd dataset.json
"""
    with open(json_path, "r") as f:  # context manager; also avoids shadowing the builtin "dict"
        meta = json.load(f)
    info = {}
    info["modalities"] = tuple(meta["modality"].values())
    info["labels"] = meta["labels"]
    info["dataset_name"] = meta["name"]
    info["dataset_description"] = meta["description"]
    info["license_desc"] = meta["licence"]  # "licence" is the spelling used in MSD dataset.json
    info["dataset_reference"] = meta["reference"]
return info
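# Usage sketch (added note): dataset.json here follows the MSD layout; the
# path below is an illustrative assumption, not a file shipped with this repo.
#
#     info = parse_msd_basic_info("Task01_BrainTumour/dataset.json")
#     print(info["dataset_name"], info["modalities"], info["labels"])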
| [
"[email protected]"
] | |
cc30e3e61bd5839e2a0450e37255e918cb0b15cc | 814e4ad96172a76d9b72ac35697553980d0db5f1 | /pyalp/gs_interface/generate_certificates.py | e7aa246d4bb2851366daaf5f91a5fe555ce9c5c2 | [
"MIT"
] | permissive | Mause/pyalp | 29785037d3b4ebc2822e6ec74621aa65f621bb8e | fb0f723070e11f8c9ed57e2475eb963599f442a6 | refs/heads/master | 2022-12-05T12:05:01.804305 | 2014-07-02T13:04:21 | 2014-07-02T13:04:21 | 15,419,434 | 0 | 0 | MIT | 2022-11-22T00:24:05 | 2013-12-24T14:00:26 | PHP | UTF-8 | Python | false | false | 692 | py | #!/usr/bin/env python
"""
Generate client and server CURVE certificate files then move them into the
appropriate store directory, private_keys or public_keys. The certificates
generated by this script are used by the stonehouse and ironhouse examples.
In practice this would be done by hand or some out-of-band process.
Author: Chris Laws
"""
import zmq.auth
from __init__ import KEYS_DIR
def generate_certificates():
''' Generate client and server CURVE certificate files'''
# create new keys in certificates dir
zmq.auth.create_certificates(KEYS_DIR, "server")
zmq.auth.create_certificates(KEYS_DIR, "client")
if __name__ == '__main__':
generate_certificates()
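# Added note: with pyzmq's zmq.auth helpers, each create_certificates() call is
# expected to leave a "<name>.key" public certificate and a "<name>.key_secret"
# secret certificate in KEYS_DIR -- i.e. server/client CURVE key pairs for the
# stonehouse/ironhouse examples mentioned in the docstring.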
| [
"[email protected]"
] | |
1434600aa23894fe97502d7f97ad8e909d58c0ec | 9249947c07f8addf64dd3d2a2f9f37d379f83921 | /libs/gluon/contrib/aes.py | cecf2d9073cb71ee75006f771492d0cb4b5943e6 | [
"MIT"
] | permissive | operepo/ope | eb71aa763d157416009d7c3052ace11852660e0a | 018c82af46845315795c67c36801e2a128f515d5 | refs/heads/master | 2023-08-08T15:05:28.592589 | 2023-07-25T00:22:24 | 2023-07-25T00:22:24 | 96,855,111 | 12 | 11 | MIT | 2023-03-03T15:10:34 | 2017-07-11T05:42:14 | Perl | UTF-8 | Python | false | false | 16,671 | py | """Simple AES cipher implementation in pure Python following PEP-272 API
Homepage: https://bitbucket.org/intgr/pyaes/
The goal of this module is to be as fast as reasonable in Python while still
being Pythonic and readable/understandable. It is licensed under the permissive
MIT license.
Hopefully the code is readable and commented enough that it can serve as an
introduction to the AES cipher for Python coders. In fact, it should go along
well with the Stick Figure Guide to AES:
http://www.moserware.com/2009/09/stick-figure-guide-to-advanced.html
Contrary to intuition, this implementation numbers the 4x4 matrices from top to
bottom for efficiency reasons::
0 4 8 12
1 5 9 13
2 6 10 14
3 7 11 15
Effectively it's the transposition of what you'd expect. This actually makes
the code simpler -- except the ShiftRows step, but hopefully the explanation
there clears it up.
"""
####
# Copyright (c) 2010 Marti Raudsepp <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
####
from array import array
# Globals mandated by PEP 272:
# http://www.python.org/dev/peps/pep-0272/
MODE_ECB = 1
MODE_CBC = 2
#MODE_CTR = 6
block_size = 16
key_size = None
def new(key, mode=MODE_CBC, IV=None):
if mode == MODE_ECB:
return ECBMode(AES(key))
elif mode == MODE_CBC:
if IV is None:
raise ValueError("CBC mode needs an IV value!")
return CBCMode(AES(key), IV)
else:
raise NotImplementedError
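# --- Usage sketch (added note, not part of the original module) --------------
# A minimal CBC round-trip against the PEP-272 style new() above; the key and
# IV literals are illustrative assumptions.  CBCMode keeps chaining state in
# self.IV, so decryption uses a fresh cipher object:
#
#     enc = new('k' * 16, MODE_CBC, IV='\x00' * 16)
#     ct = enc.encrypt('exactly 16 bytes')       # length must be a multiple of 16
#     dec = new('k' * 16, MODE_CBC, IV='\x00' * 16)
#     assert dec.decrypt(ct) == 'exactly 16 bytes'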
#### AES cipher implementation
class AES(object):
block_size = 16
def __init__(self, key):
self.setkey(key)
def setkey(self, key):
"""Sets the key and performs key expansion."""
self.key = key
self.key_size = len(key)
if self.key_size == 16:
self.rounds = 10
elif self.key_size == 24:
self.rounds = 12
elif self.key_size == 32:
self.rounds = 14
else:
raise ValueError("Key length must be 16, 24 or 32 bytes")
self.expand_key()
def expand_key(self):
"""Performs AES key expansion on self.key and stores in self.exkey"""
# The key schedule specifies how parts of the key are fed into the
# cipher's round functions. "Key expansion" means performing this
# schedule in advance. Almost all implementations do this.
#
# Here's a description of AES key schedule:
# http://en.wikipedia.org/wiki/Rijndael_key_schedule
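        # Added note: the loop below grows self.exkey until it holds
        # (rounds + 1) * block_size bytes -- e.g. 11 * 16 = 176 bytes for a
        # 16-byte key -- one 16-byte round key per add_round_key() call.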
# The expanded key starts with the actual key itself
exkey = array('B', self.key)
# extra key expansion steps
if self.key_size == 16:
extra_cnt = 0
elif self.key_size == 24:
extra_cnt = 2
else:
extra_cnt = 3
# 4-byte temporary variable for key expansion
word = exkey[-4:]
# Each expansion cycle uses 'i' once for Rcon table lookup
for i in xrange(1, 11):
#### key schedule core:
# left-rotate by 1 byte
word = word[1:4] + word[0:1]
# apply S-box to all bytes
for j in xrange(4):
word[j] = aes_sbox[word[j]]
# apply the Rcon table to the leftmost byte
word[0] = word[0] ^ aes_Rcon[i]
#### end key schedule core
for z in xrange(4):
for j in xrange(4):
# mix in bytes from the last subkey
word[j] ^= exkey[-self.key_size + j]
exkey.extend(word)
# Last key expansion cycle always finishes here
if len(exkey) >= (self.rounds+1) * self.block_size:
break
# Special substitution step for 256-bit key
if self.key_size == 32:
for j in xrange(4):
# mix in bytes from the last subkey XORed with S-box of
# current word bytes
word[j] = aes_sbox[word[j]] ^ exkey[-self.key_size + j]
exkey.extend(word)
# Twice for 192-bit key, thrice for 256-bit key
for z in xrange(extra_cnt):
for j in xrange(4):
# mix in bytes from the last subkey
word[j] ^= exkey[-self.key_size + j]
exkey.extend(word)
self.exkey = exkey
def add_round_key(self, block, round):
"""AddRoundKey step in AES. This is where the key is mixed into plaintext"""
offset = round * 16
exkey = self.exkey
for i in xrange(16):
block[i] ^= exkey[offset + i]
#print 'AddRoundKey:', block
def sub_bytes(self, block, sbox):
"""SubBytes step, apply S-box to all bytes
Depending on whether encrypting or decrypting, a different sbox array
is passed in.
"""
for i in xrange(16):
block[i] = sbox[block[i]]
#print 'SubBytes :', block
def shift_rows(self, b):
"""ShiftRows step. Shifts 2nd row to left by 1, 3rd row by 2, 4th row by 3
Since we're performing this on a transposed matrix, cells are numbered
from top to bottom::
0 4 8 12 -> 0 4 8 12 -- 1st row doesn't change
1 5 9 13 -> 5 9 13 1 -- row shifted to left by 1 (wraps around)
2 6 10 14 -> 10 14 2 6 -- shifted by 2
3 7 11 15 -> 15 3 7 11 -- shifted by 3
"""
b[1], b[5], b[ 9], b[13] = b[ 5], b[ 9], b[13], b[ 1]
b[2], b[6], b[10], b[14] = b[10], b[14], b[ 2], b[ 6]
b[3], b[7], b[11], b[15] = b[15], b[ 3], b[ 7], b[11]
#print 'ShiftRows :', b
def shift_rows_inv(self, b):
"""Similar to shift_rows above, but performed in inverse for decryption."""
b[ 5], b[ 9], b[13], b[ 1] = b[1], b[5], b[ 9], b[13]
b[10], b[14], b[ 2], b[ 6] = b[2], b[6], b[10], b[14]
b[15], b[ 3], b[ 7], b[11] = b[3], b[7], b[11], b[15]
#print 'ShiftRows :', b
def mix_columns(self, block):
"""MixColumns step. Mixes the values in each column"""
# Cache global multiplication tables (see below)
mul_by_2 = gf_mul_by_2
mul_by_3 = gf_mul_by_3
# Since we're dealing with a transposed matrix, columns are already
# sequential
for i in xrange(4):
col = i * 4
#v0, v1, v2, v3 = block[col : col+4]
v0, v1, v2, v3 = (block[col], block[col + 1], block[col + 2],
block[col + 3])
block[col ] = mul_by_2[v0] ^ v3 ^ v2 ^ mul_by_3[v1]
block[col+1] = mul_by_2[v1] ^ v0 ^ v3 ^ mul_by_3[v2]
block[col+2] = mul_by_2[v2] ^ v1 ^ v0 ^ mul_by_3[v3]
block[col+3] = mul_by_2[v3] ^ v2 ^ v1 ^ mul_by_3[v0]
#print 'MixColumns :', block
def mix_columns_inv(self, block):
"""Similar to mix_columns above, but performed in inverse for decryption."""
# Cache global multiplication tables (see below)
mul_9 = gf_mul_by_9
mul_11 = gf_mul_by_11
mul_13 = gf_mul_by_13
mul_14 = gf_mul_by_14
# Since we're dealing with a transposed matrix, columns are already
# sequential
for i in xrange(4):
col = i * 4
v0, v1, v2, v3 = (block[col], block[col + 1], block[col + 2],
block[col + 3])
#v0, v1, v2, v3 = block[col:col+4]
block[col ] = mul_14[v0] ^ mul_9[v3] ^ mul_13[v2] ^ mul_11[v1]
block[col+1] = mul_14[v1] ^ mul_9[v0] ^ mul_13[v3] ^ mul_11[v2]
block[col+2] = mul_14[v2] ^ mul_9[v1] ^ mul_13[v0] ^ mul_11[v3]
block[col+3] = mul_14[v3] ^ mul_9[v2] ^ mul_13[v1] ^ mul_11[v0]
#print 'MixColumns :', block
def encrypt_block(self, block):
"""Encrypts a single block. This is the main AES function"""
# For efficiency reasons, the state between steps is transmitted via a
# mutable array, not returned.
self.add_round_key(block, 0)
for round in xrange(1, self.rounds):
self.sub_bytes(block, aes_sbox)
self.shift_rows(block)
self.mix_columns(block)
self.add_round_key(block, round)
self.sub_bytes(block, aes_sbox)
self.shift_rows(block)
# no mix_columns step in the last round
self.add_round_key(block, self.rounds)
def decrypt_block(self, block):
"""Decrypts a single block. This is the main AES decryption function"""
# For efficiency reasons, the state between steps is transmitted via a
# mutable array, not returned.
self.add_round_key(block, self.rounds)
# count rounds down from 15 ... 1
for round in xrange(self.rounds-1, 0, -1):
self.shift_rows_inv(block)
self.sub_bytes(block, aes_inv_sbox)
self.add_round_key(block, round)
self.mix_columns_inv(block)
self.shift_rows_inv(block)
self.sub_bytes(block, aes_inv_sbox)
self.add_round_key(block, 0)
# no mix_columns step in the last round
#### ECB mode implementation
class ECBMode(object):
"""Electronic CodeBook (ECB) mode encryption.
Basically this mode applies the cipher function to each block individually;
no feedback is done. NB! This is insecure for almost all purposes
"""
def __init__(self, cipher):
self.cipher = cipher
self.block_size = cipher.block_size
def ecb(self, data, block_func):
"""Perform ECB mode with the given function"""
if len(data) % self.block_size != 0:
raise ValueError("Plaintext length must be multiple of 16")
block_size = self.block_size
data = array('B', data)
for offset in xrange(0, len(data), block_size):
block = data[offset : offset+block_size]
block_func(block)
data[offset : offset+block_size] = block
return data.tostring()
def encrypt(self, data):
"""Encrypt data in ECB mode"""
return self.ecb(data, self.cipher.encrypt_block)
def decrypt(self, data):
"""Decrypt data in ECB mode"""
return self.ecb(data, self.cipher.decrypt_block)
#### CBC mode
class CBCMode(object):
"""Cipher Block Chaining (CBC) mode encryption. This mode avoids content leaks.
In CBC encryption, each plaintext block is XORed with the ciphertext block
preceding it; decryption is simply the inverse.
"""
# A better explanation of CBC can be found here:
# http://en.wikipedia.org/wiki/Block_cipher_modes_of_operation#Cipher-block_chaining_.28CBC.29
def __init__(self, cipher, IV):
self.cipher = cipher
self.block_size = cipher.block_size
self.IV = array('B', IV)
def encrypt(self, data):
"""Encrypt data in CBC mode"""
block_size = self.block_size
if len(data) % block_size != 0:
raise ValueError("Plaintext length must be multiple of 16")
data = array('B', data)
IV = self.IV
for offset in xrange(0, len(data), block_size):
block = data[offset : offset+block_size]
# Perform CBC chaining
for i in xrange(block_size):
block[i] ^= IV[i]
self.cipher.encrypt_block(block)
data[offset : offset+block_size] = block
IV = block
self.IV = IV
return data.tostring()
def decrypt(self, data):
"""Decrypt data in CBC mode"""
block_size = self.block_size
if len(data) % block_size != 0:
raise ValueError("Ciphertext length must be multiple of 16")
data = array('B', data)
IV = self.IV
for offset in xrange(0, len(data), block_size):
ctext = data[offset : offset+block_size]
block = ctext[:]
self.cipher.decrypt_block(block)
# Perform CBC chaining
#for i in xrange(block_size):
# data[offset + i] ^= IV[i]
for i in xrange(block_size):
block[i] ^= IV[i]
data[offset : offset+block_size] = block
IV = ctext
#data[offset : offset+block_size] = block
self.IV = IV
return data.tostring()
####
def galois_multiply(a, b):
"""Galois Field multiplicaiton for AES"""
p = 0
while b:
if b & 1:
p ^= a
a <<= 1
if a & 0x100:
a ^= 0x1b
b >>= 1
return p & 0xff
# Precompute the multiplication tables for encryption
gf_mul_by_2 = array('B', [galois_multiply(x, 2) for x in range(256)])
gf_mul_by_3 = array('B', [galois_multiply(x, 3) for x in range(256)])
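# Added note: a quick spot-check against FIPS-197, where {57} x {02} = {ae}:
# gf_mul_by_2[0x57] == 0xae and gf_mul_by_3[0x57] == 0xae ^ 0x57 == 0xf9.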
# ... for decryption
gf_mul_by_9 = array('B', [galois_multiply(x, 9) for x in range(256)])
gf_mul_by_11 = array('B', [galois_multiply(x, 11) for x in range(256)])
gf_mul_by_13 = array('B', [galois_multiply(x, 13) for x in range(256)])
gf_mul_by_14 = array('B', [galois_multiply(x, 14) for x in range(256)])
####
# The S-box is a 256-element array, that maps a single byte value to another
# byte value. Since it's designed to be reversible, each value occurs only once
# in the S-box
#
# More information: http://en.wikipedia.org/wiki/Rijndael_S-box
aes_sbox = array('B',
'637c777bf26b6fc53001672bfed7ab76'
'ca82c97dfa5947f0add4a2af9ca472c0'
'b7fd9326363ff7cc34a5e5f171d83115'
'04c723c31896059a071280e2eb27b275'
'09832c1a1b6e5aa0523bd6b329e32f84'
'53d100ed20fcb15b6acbbe394a4c58cf'
'd0efaafb434d338545f9027f503c9fa8'
'51a3408f929d38f5bcb6da2110fff3d2'
'cd0c13ec5f974417c4a77e3d645d1973'
'60814fdc222a908846eeb814de5e0bdb'
'e0323a0a4906245cc2d3ac629195e479'
'e7c8376d8dd54ea96c56f4ea657aae08'
'ba78252e1ca6b4c6e8dd741f4bbd8b8a'
'703eb5664803f60e613557b986c11d9e'
'e1f8981169d98e949b1e87e9ce5528df'
'8ca1890dbfe6426841992d0fb054bb16'.decode('hex')
)
# This is the inverse of the above. In other words:
# aes_inv_sbox[aes_sbox[val]] == val
aes_inv_sbox = array('B',
'52096ad53036a538bf40a39e81f3d7fb'
'7ce339829b2fff87348e4344c4dee9cb'
'547b9432a6c2233dee4c950b42fac34e'
'082ea16628d924b2765ba2496d8bd125'
'72f8f66486689816d4a45ccc5d65b692'
'6c704850fdedb9da5e154657a78d9d84'
'90d8ab008cbcd30af7e45805b8b34506'
'd02c1e8fca3f0f02c1afbd0301138a6b'
'3a9111414f67dcea97f2cfcef0b4e673'
'96ac7422e7ad3585e2f937e81c75df6e'
'47f11a711d29c5896fb7620eaa18be1b'
'fc563e4bc6d279209adbc0fe78cd5af4'
'1fdda8338807c731b11210592780ec5f'
'60517fa919b54a0d2de57a9f93c99cef'
'a0e03b4dae2af5b0c8ebbb3c83539961'
'172b047eba77d626e169146355210c7d'.decode('hex')
)
# The Rcon table is used in AES's key schedule (key expansion)
# It's a pre-computed table of exponentiation of 2 in AES's finite field
#
# More information: http://en.wikipedia.org/wiki/Rijndael_key_schedule
aes_Rcon = array('B',
'8d01020408102040801b366cd8ab4d9a'
'2f5ebc63c697356ad4b37dfaefc59139'
'72e4d3bd61c29f254a943366cc831d3a'
'74e8cb8d01020408102040801b366cd8'
'ab4d9a2f5ebc63c697356ad4b37dfaef'
'c5913972e4d3bd61c29f254a943366cc'
'831d3a74e8cb8d01020408102040801b'
'366cd8ab4d9a2f5ebc63c697356ad4b3'
'7dfaefc5913972e4d3bd61c29f254a94'
'3366cc831d3a74e8cb8d010204081020'
'40801b366cd8ab4d9a2f5ebc63c69735'
'6ad4b37dfaefc5913972e4d3bd61c29f'
'254a943366cc831d3a74e8cb8d010204'
'08102040801b366cd8ab4d9a2f5ebc63'
'c697356ad4b37dfaefc5913972e4d3bd'
'61c29f254a943366cc831d3a74e8cb'.decode('hex')
)
| [
"[email protected]"
] | |
ee63b12e238a4138f9963f331b11ffc93c1e0fa0 | 5979cf3c79daa04706c8fef1595574c6e36c14a1 | /vgl/home/urls.py | 0547341057b158de3626eb131277cda6b03c92a1 | [] | no_license | rahuezo/valley-green-landscape-inc | f675b5242ed7a80e457b236a253fb9ed0602829c | 6dac5ed2202336a69a86c6dcafee892cbadaa5b3 | refs/heads/master | 2021-08-15T16:48:50.594629 | 2017-11-18T00:08:49 | 2017-11-18T00:08:49 | 110,613,620 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 135 | py | from django.conf.urls import url
from . import views
app_name = 'home'
urlpatterns = [
url(r'^$', views.index, name='index'),
]
| [
"[email protected]"
] | |
3bad13cc5bddd857215b2177bebc8b7cae6f2551 | 20c80f722c451b64d05cc027b66a81e1976c3253 | /commons/libs/pyblish_qml/rpc/__init__.py | a461a54bd8d5068ae347c7d751ec764fdb30bacd | [] | no_license | flypotatojun/Barbarian | 2d3fcb6fcb1b4495b6d62fc5e32634abf4638312 | efe14dd24c65b4852997dad1290e503211bcc419 | refs/heads/master | 2021-07-18T01:43:14.443911 | 2017-10-24T03:37:43 | 2017-10-24T03:37:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 105 | py | from . import client, server, service
__all__ = [
"client",
"server",
"service",
]
| [
"[email protected]"
] | |
dc89107f7dcfdfa9cd7401d4281ed7ea790232a3 | 0ad7f553df6b210b5ac004fbf490ed651a21d55e | /algos/discrete_esay_control_lib_01.py | ef19dd246fb029fd2da77e0c6b9a839eebbfc2a8 | [] | no_license | MarianoDel/spyder_python | fa00987eb9aa1ef61d7224679a84c05a217c6c35 | 5f5896df68f95eb860bc08c21ae2b19516432cdc | refs/heads/master | 2020-05-23T06:14:57.329478 | 2020-04-23T14:58:16 | 2020-04-23T14:58:16 | 84,753,428 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,354 | py | # -*- coding: utf-8 -*-
#usando libreria de control
import numpy as np
from scipy import signal
b = [0.125, 0.125, 0.125, 0.125, 0.125, 0.125, 0.125, 0.125]
tf1 = (b, [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 0.001)  # make sure dt matches the discrete time step used below
#w, h = signal.freqz(b)
#w, h = signal.freqz(tf1)  # freqz expects (num, den) arrays, not a (num, den, dt) tuple
w, h = signal.freqz(b, [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
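# Added note: b is an 8-tap moving average, so the curve plotted below is
#   H(z) = (1/8) * (1 + z**-1 + ... + z**-7),
# a low-pass whose magnitude nulls fall at w = k*pi/4 (k = 1..4 on the
# plotted 0..pi rad/sample axis).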
import matplotlib.pyplot as plt
fig = plt.figure()
plt.title('Digital filter frequency response')
ax1 = fig.add_subplot(111)
plt.plot(w, 20 * np.log10(abs(h)), 'b')
plt.ylabel('Amplitude [dB]', color='b')
plt.xlabel('Frequency [rad/sample]')
ax2 = ax1.twinx()
angles = np.unwrap(np.angle(h))
plt.plot(w, angles, 'g')
plt.ylabel('Angle (radians)', color='g')
plt.grid()
plt.axis('tight')
plt.show()
plt.figure(2)
plt.clf()
tf = (b, [1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], 0.001)  # make sure dt matches the discrete time step
# also give the denominator a multiple pole at the origin so dlsim does not complain that num > den
t_in = np.arange(0.0, 0.1, 0.001)
#t_in = np.arange(0.0, 4.0, 1.0)
#u = np.asarray([0.0, 0.0, 1.0, 1.0])
u = np.ones(np.size(t_in))
t_out, y = signal.dlsim(tf, u, t=t_in)
plt.plot(t_out, y, 'b')
plt.plot(t_out, u+0.1, 'g')
plt.show()
| [
"[email protected]"
] | |
67f12d8933ae63eef4aa93f09cc44f61d8f48c3d | 7801b0356b60de5a4fa6b214717a1c04942b5b62 | /crm/migrations/0003_userinfo_user.py | 2daf274b45a19a80d357f35f9323cbef54a43799 | [] | no_license | hqs2212586/CRM_demo | 365652c61c991a2098d32b5db318d55cf29baa0b | 941a896aef598d81750a96074bc63ccfaaadf0a5 | refs/heads/master | 2020-03-27T17:43:40.110992 | 2018-08-31T09:20:01 | 2018-08-31T09:20:01 | 146,869,963 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 517 | py | # Generated by Django 2.0.6 on 2018-08-31 03:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('rbac', '0004_permission_action'),
('crm', '0002_customerdistrbute'),
]
operations = [
migrations.AddField(
model_name='userinfo',
name='user',
field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.CASCADE, to='rbac.User'),
),
]
| [
"[email protected]"
] | |
2a02caa7558f764522bd58b00871216e796676d8 | d42dea822871be6027fadbf8b167be1c0b38d9c7 | /BST/debug.py | 2cc73f8d9784aa9f8a21c2b7530ff531d0bb1e4b | [] | no_license | siddhantprateek/Python-in-Practice | d8412c46dec57d512d8abd87cb0a33b71070c5ee | 0ad806f02fecb87de20078ef956f8e23bb38e342 | refs/heads/main | 2023-06-26T22:34:55.172882 | 2021-07-29T15:14:09 | 2021-07-29T15:14:09 | 354,875,226 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,479 | py | class BSTreeNode:
def __init__(self, val):
self.value = val
self.left, self.right = None, None
class BinaryST:
def __init__(self):
self.root = None
def insert(self, val):
self.root = self.insertHelp(val, self.root)
def insertHelp(self, value, node):
if node == None:
node = BSTreeNode(value)
return node
if node.value > value:
node.left = self.insertHelp(value, node.left)
if node.value < value:
node.right = self.insertHelp(value, node.right)
return node
def Sum(self):
return self.sumHelp(self.root)
def sumHelp(self, node):
        if node is None:
return 0
return node.value + self.sumHelp(node.left) + self.sumHelp(node.right)
def display(self):
self.displayHelper(self.root, "Root Node: ")
def displayHelper(self, node, details):
        if node is None:
return
print(details, node.value)
self.displayHelper(node.left, "left child of " + str(node.value) + ":")
self.displayHelper(node.right, "right child of " + str(node.value) + ":")
# nums = [4, 5, 2, 7, 6, 1]
if __name__ == '__main__':
bst = BinaryST()
bst.insert(4)
bst.insert(5)
bst.insert(2)
bst.insert(7)
bst.insert(6)
bst.display() | [
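    # A minimal in-order traversal sketch (assumes the BinaryST built above;
    # `inorder` is a helper introduced here): left, node, right yields sorted keys.
    # def inorder(node, out):
    #     if node is not None:
    #         inorder(node.left, out)
    #         out.append(node.value)
    #         inorder(node.right, out)
    #     return out
    # inorder(bst.root, [])  # -> [2, 4, 5, 6, 7]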
"[email protected]"
] | |
5ae9b28df851a85fea96edf6169e6cf8f14c6a50 | 07f92805a75dc91b8be2ac14c238394245eda9ea | /Python生物信息学数据管理/python-for-biologists/03-modular_programming/10-functions/calc_atom_atom_distance.py | 5156d4af63c5626cef0355e63c950a6aecc07d18 | [] | no_license | 08zhangyi/Some-thing-interesting-for-me | 6ea7366ef1f0812397300259b2e9d0e7217bcba0 | f4cbda341ada98753c57a3ba07653163522dd023 | refs/heads/master | 2023-01-11T22:54:03.396911 | 2023-01-06T05:47:41 | 2023-01-06T05:47:41 | 136,426,995 | 7 | 6 | null | null | null | null | UTF-8 | Python | false | false | 917 | py | '''
Find two alpha-C atoms in a PDB structure and calculate their distance.
-----------------------------------------------------------
(c) 2013 Allegra Via and Kristian Rother
Licensed under the conditions of the Python License
This code appears in section 10.4.4 of the book
"Managing Biological Data with Python".
-----------------------------------------------------------
'''
from math import sqrt
from distance import calc_dist
from parse_pdb import parse_atom_line
pdb = open('3G5U.pdb')
points = []
while len(points) < 2:
    line = pdb.readline()
    if not line:
        break  # EOF reached without finding both residues; avoids an endless loop
if line.startswith("ATOM"):
chain, res_type, res_num, atom, x, y, z = parse_atom_line(line)
if res_num == '123' and chain == 'A' and atom == 'CA':
points.append((x, y, z))
if res_num == '209' and chain == 'A' and atom == 'CA':
points.append((x, y, z))
print calc_dist(points[0], points[1])
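# calc_dist is imported from distance.py and not shown here; a minimal sketch of
# what it presumably computes (Euclidean distance between (x, y, z) tuples,
# which would also explain the sqrt import above):
# def calc_dist(p, q):
#     return sqrt(sum((a - b) ** 2 for a, b in zip(p, q)))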
| [
"[email protected]"
] | |
21393f5ec3107ae718cce881c013ad295cbc9e74 | bf74f773f0c69e0ce7c5cc57a5897ca86cca6587 | /custom_collections/tree.py | b2452b64db079d1947db82cf94a240f15c822c36 | [
"BSD-3-Clause"
] | permissive | weijia/custom_collections | a532b01b18049f0e0aad9920f8e90d45e3c24812 | e9b7bcc25f83f6a9adfbee94c825835414799aab | refs/heads/master | 2016-09-06T09:01:05.969014 | 2014-09-20T17:42:48 | 2014-09-20T17:42:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,068 | py | import uuid
from django.utils import timezone
import django.db.utils
from django.contrib.auth.models import User, Group
from django.contrib import admin
from django.conf import settings
from obj_sys.models import UfsObj
from guardian.admin import GuardedModelAdmin
from guardian.shortcuts import assign_perm
try:
from models import CollectionItem
except:
pass
gRootUuid = u"4a5e8673-f2a2-4cf2-af6c-461fa9f31a15"
def register(objectClass, group_name = "scheduling"):
module_name = objectClass.__module__.split(".")[0].lower()
class_name = objectClass.__name__.lower()
url = u"view://admin/%s/%s/add"%(module_name, class_name)
try:
for i in UfsObj.objects.filter(ufs_url = url):
return
except django.db.utils.DatabaseError:
#Database is not created yet, just return, items will be created after syncdb is executed
return
o = UfsObj(ufs_url = url, uuid = unicode(uuid.uuid4()), timestamp=timezone.now(), user=User.objects.filter(username="AnonymousUser")[0])
o.save()
c = CollectionItem(obj = o, uuid = gRootUuid, id_in_col="%s_%s_add"%(module_name, class_name),
timestamp=timezone.now(), user=User.objects.filter(username="AnonymousUser")[0])
c.save()
#Add to group
try:
group = Group.objects.filter(name=group_name)[0]
except:
        #Group does not exist, create it
group = Group.objects.create(name=group_name)
#print 'assigning: ', group, c
assign_perm('view_collection_item', group, c)
def get_item_id(parent_path):
subitem_list = parent_path.split("/")
parent_item_uuid = gRootUuid
for i in subitem_list:
#print 'getting uuid for item: ', i, ', parent:', parent_item_uuid, 'end'
if i == "":
continue
parent_item_uuid = CollectionItem.objects.filter(uuid = parent_item_uuid, id_in_col = i)[0].obj.uuid
#print 'returning parent', parent_item_uuid
return parent_item_uuid
def register_menu(subitem_url, subitem_text, parent_path = "/", permmited_group = None):
"""
    If subitem_text contains "dynamic://", subitem_url is not used.
Otherwise, subitem_url is the content of this menu item.
Register a menu item in the left tree in object manager, the info is stored in obj_sys.models.Collection.
:param subitem_url: menu item's URL. When the item is clicked, the URL will be loaded to the content pane
    :param subitem_text: menu item's text. It is stored into the id_in_col field for Collection and if it is
"dynamic://xxxx", the parent item's children will be dynamically generated by opening
URL: xxxx. xxxx should return a collection of items as in tags.tag_list. The format is
described in tags.tag_list as well.
:param parent_path: the parent for this menu item. Root item is "/", sub menus should start with "/" as well.
:param permmited_group:
:return: N/A
"""
try:
root_uuid = get_item_id(parent_path)
url = u"view://%s"%(subitem_url)
qs = UfsObj.objects.filter(ufs_url = url)
if 0 == qs.count():
print 'creating new ufs obj'
o = UfsObj(ufs_url = url, uuid = unicode(uuid.uuid4()), timestamp=timezone.now(), user=User.objects.filter(username="AnonymousUser")[0])
o.save()
else:
#print 'use existing item'
o = qs[0]
except django.db.utils.DatabaseError:
#Database is not created yet, just return, items will be created after syncdb is executed
return
#print 'creating collection item for root: ', root_uuid
if permmited_group is None:
        #If no permission requested, make the item accessible to the anonymous user.
permitted_user_or_group = User.objects.filter(pk=settings.ANONYMOUS_USER_ID)[0]
else:
try:
permitted_user_or_group = Group.objects.filter(name = permmited_group)[0]
except:
            #Group does not exist, create it
permitted_user_or_group = Group.objects.create(name = permmited_group)
collqs = CollectionItem.objects.filter(uuid = root_uuid, id_in_col = subitem_text)
if 0 == collqs.count():
c = CollectionItem(obj = o, uuid = root_uuid, id_in_col = subitem_text,
timestamp=timezone.now(), user=User.objects.filter(username="AnonymousUser")[0])
c.save()
else:
c = collqs[0]
#Assign group permission
assign_perm('view_collection_item', permitted_user_or_group, c)
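# Minimal usage sketch for register_menu (the URLs and labels below are
# illustrative assumptions, not taken from this project):
# register_menu("objmanager/obj_list", "objects")                       # static item under "/"
# register_menu("", "dynamic://tags/tag_list", parent_path="/objects")  # children fetched from the URL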
def register_to_sys(class_inst, admin_class = None):
if admin_class is None:
admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i) | [
"[email protected]"
] | |
43589f610d031b46faaa40ca7ed51622d5c8345d | 8c9c27cb88a2d210a5e2fb5803fe89204dba95ef | /phy/gui/qt.py | fee9e2549519dba466c131d6a190d3459af0496c | [] | no_license | arnefmeyer/phy | c13b1eceb70ee72cf0ff9c4a273e195f122fabc4 | 14663e1f2baad421d6bc9f420d34170c6c969bbe | refs/heads/master | 2020-12-07T15:42:49.605432 | 2016-04-20T21:10:38 | 2016-04-20T21:10:38 | 56,718,986 | 1 | 0 | null | 2016-04-20T20:32:18 | 2016-04-20T20:32:18 | null | UTF-8 | Python | false | false | 4,107 | py | # -*- coding: utf-8 -*-
"""Qt utilities."""
# -----------------------------------------------------------------------------
# Imports
# -----------------------------------------------------------------------------
from contextlib import contextmanager
from functools import wraps
import logging
import sys
logger = logging.getLogger(__name__)
# -----------------------------------------------------------------------------
# PyQt import
# -----------------------------------------------------------------------------
from PyQt4.QtCore import (Qt, QByteArray, QMetaObject, QObject, # noqa
QVariant, QEventLoop, QTimer,
pyqtSignal, pyqtSlot, QSize, QUrl)
try:
from PyQt4.QtCore import QPyNullVariant # noqa
except: # pragma: no cover
QPyNullVariant = None
try:
from PyQt4.QtCore import QString # noqa
except: # pragma: no cover
QString = None
from PyQt4.QtGui import (QKeySequence, QAction, QStatusBar, # noqa
QMainWindow, QDockWidget, QWidget,
QMessageBox, QApplication, QMenuBar,
QInputDialog,
)
from PyQt4.QtWebKit import QWebView, QWebPage, QWebSettings # noqa
# -----------------------------------------------------------------------------
# Utility functions
# -----------------------------------------------------------------------------
def _button_enum_from_name(name):
return getattr(QMessageBox, name.capitalize())
def _button_name_from_enum(enum):
names = dir(QMessageBox)
for name in names:
if getattr(QMessageBox, name) == enum:
return name.lower()
def _prompt(message, buttons=('yes', 'no'), title='Question'):
buttons = [(button, _button_enum_from_name(button)) for button in buttons]
arg_buttons = 0
for (_, button) in buttons:
arg_buttons |= button
box = QMessageBox()
box.setWindowTitle(title)
box.setText(message)
box.setStandardButtons(arg_buttons)
box.setDefaultButton(buttons[0][1])
return box
def _show_box(box): # pragma: no cover
return _button_name_from_enum(box.exec_())
def _input_dialog(title, sentence):
return QInputDialog.getText(None, title, sentence)
@contextmanager
def _wait_signal(signal, timeout=None):
"""Block loop until signal emitted, or timeout (ms) elapses."""
# http://jdreaver.com/posts/2014-07-03-waiting-for-signals-pyside-pyqt.html
loop = QEventLoop()
signal.connect(loop.quit)
yield
if timeout is not None:
QTimer.singleShot(timeout, loop.quit)
loop.exec_()
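# Minimal usage sketch (`worker` and its `finished` signal are hypothetical
# stand-ins, not defined in this module):
# with _wait_signal(worker.finished, timeout=1000):
#     worker.start()  # the block exits once `finished` fires or 1000 ms elapse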
# -----------------------------------------------------------------------------
# Qt app
# -----------------------------------------------------------------------------
def require_qt(func):
"""Specify that a function requires a Qt application.
Use this decorator to specify that a function needs a running
Qt application before it can run. An error is raised if that is not
the case.
"""
@wraps(func)
def wrapped(*args, **kwargs):
if not QApplication.instance(): # pragma: no cover
raise RuntimeError("A Qt application must be created.")
return func(*args, **kwargs)
return wrapped
# Global variable with the current Qt application.
QT_APP = None
def create_app():
"""Create a Qt application."""
global QT_APP
QT_APP = QApplication.instance()
if QT_APP is None: # pragma: no cover
QT_APP = QApplication(sys.argv)
return QT_APP
@require_qt
def run_app(): # pragma: no cover
"""Run the Qt application."""
global QT_APP
return QT_APP.exit(QT_APP.exec_())
# -----------------------------------------------------------------------------
# Testing utilities
# -----------------------------------------------------------------------------
def _debug_trace(): # pragma: no cover
"""Set a tracepoint in the Python debugger that works with Qt."""
from PyQt4.QtCore import pyqtRemoveInputHook
from pdb import set_trace
pyqtRemoveInputHook()
set_trace()
| [
"[email protected]"
] | |
7c6e0f6234acef62dcc182e1e93468181f99ce5c | 187a6558f3c7cb6234164677a2bda2e73c26eaaf | /jdcloud_sdk/services/apigateway/apis/DescribeIsDeployApiGroupsRequest.py | 5734f9aeb75d8ebb2d719eb77989012b428bf204 | [
"Apache-2.0"
] | permissive | jdcloud-api/jdcloud-sdk-python | 4d2db584acc2620b7a866af82d21658cdd7cc227 | 3d1c50ed9117304d3b77a21babe899f939ae91cd | refs/heads/master | 2023-09-04T02:51:08.335168 | 2023-08-30T12:00:25 | 2023-08-30T12:00:25 | 126,276,169 | 18 | 36 | Apache-2.0 | 2023-09-07T06:54:49 | 2018-03-22T03:47:02 | Python | UTF-8 | Python | false | false | 1,460 | py | # coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
from jdcloud_sdk.core.jdcloudrequest import JDCloudRequest
class DescribeIsDeployApiGroupsRequest(JDCloudRequest):
"""
    Query API groups
"""
def __init__(self, parameters, header=None, version="v1"):
super(DescribeIsDeployApiGroupsRequest, self).__init__(
'/regions/{regionId}/apiGroups:isDeploy', 'GET', header, version)
self.parameters = parameters
class DescribeIsDeployApiGroupsParameters(object):
def __init__(self, regionId, ):
"""
        :param regionId: Region ID
"""
self.regionId = regionId
self.filters = None
def setFilters(self, filters):
"""
        :param filters: (Optional) deployStatus - deployment status; deployed: 1, not deployed: 0
"""
self.filters = filters
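# Minimal usage sketch (the region id and the exact filter payload shape are
# assumptions for illustration only):
# params = DescribeIsDeployApiGroupsParameters(regionId="cn-north-1")
# params.setFilters([{"name": "deployStatus", "values": ["1"]}])
# request = DescribeIsDeployApiGroupsRequest(params)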
| [
"[email protected]"
] | |
8b7641767d7456a30a42aaefeb9cee8c4c607de4 | 51888119e10cdff12dafb060a54824632edccf3f | /Folders/Python/BlackSailSubmit.py | fb3aabf18c996ca4a44e2ffda207c3e9e2ed6b01 | [
"BSD-2-Clause"
] | permissive | kuchinal/lamakaha | b64511ad8c6d2b36da5a84a266b9e7a69acd3106 | 24e3b2ff53bcac2ad1c0e5a3b9afd4593d85f22d | refs/heads/master | 2023-09-01T17:55:56.551183 | 2023-07-31T19:32:04 | 2023-07-31T19:32:04 | 182,849,747 | 0 | 0 | null | 2021-09-10T06:34:22 | 2019-04-22T19:00:02 | Python | UTF-8 | Python | false | false | 519 | py |
import nuke
import rrSubmit_Nuke_5
def BlackSailSubmit():
    try:
        g = nuke.selectedNode()  # raises if nothing is selected, which routes to the except branch
        f = nuke.allNodes("Write")
        f = nuke.allNodes("AutoWrite") + f
for a in f:
sel = a['selected'].value()
if sel == 1:
a['disable'].setValue(0)
else:
a['disable'].setValue(1)
print "selected"
rrSubmit_Nuke_5.rrSubmit_Nuke_5()
except:
rrSubmit_Nuke_5.rrSubmit_Nuke_5()
print "all" | [
"[email protected]"
] | |
6c8a442acb14c856d7f761064e44561c82b10f6c | 212028581b4875ac2fefa9acd7b17b88b4b8eccd | /ulugugu/values.py | e5fbdd042dbd0dbd5cebbe5d437a2fa3c34403d1 | [] | no_license | jonashaag/ulugugu | 65a3b55c2fa2d624ba7cc72cc5186eb353e7b016 | 509e3ceadbb50aad34c585b63d33284357a21ed6 | refs/heads/master | 2016-08-06T07:10:53.578924 | 2015-07-25T12:54:15 | 2015-07-25T12:54:15 | 37,680,621 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 533 | py | class Value:
pass
class Integer(Value):
def __init__(self, value):
self.value = value
def __repr__(self):
return '<Integer %d>' % self.value
class String(Value):
def __init__(self, value):
self.value = value
def __repr__(self):
return '<String %r>' % self.value
class Application(Value):
def __init__(self, operation, op1, op2):
self.operation = operation
self.op1 = op1
self.op2 = op2
def __repr__(self):
return '<Application %s(%s, %s)>' % (self.operation, self.op1, self.op2)
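# Minimal usage sketch (the "+" operation name is an illustrative assumption):
# Application('+', Integer(1), Integer(2))
# # repr -> <Application +(<Integer 1>, <Integer 2>)>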
| [
"[email protected]"
] | |
924c4786c418c5f6ba94014768067df9fd08892e | 62009d7843d120beddd696258686c8b479713384 | /tribune/urls.py | 960254574acbddbcb52308d302a510e032a17349 | [] | no_license | Janice-M/-The-Moringa-Tribune | a81d5e94b325254c78a6c0eea3c48e031935bdba | c960fa8dac1444f6fd4ea1a823f63433ae1645e4 | refs/heads/master | 2021-06-16T20:06:18.140544 | 2019-08-20T09:54:41 | 2019-08-20T09:54:41 | 203,201,486 | 1 | 1 | null | 2021-06-10T21:53:04 | 2019-08-19T15:38:57 | Python | UTF-8 | Python | false | false | 764 | py | """tribune URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
]
| [
"[email protected]"
] | |
42fdb67ff09320fd1ae7ed48b1d7252395a78674 | 93db4b48741ff4ab0a3895813a6c7543e01821ea | /leetcode/Python/875_bananas.py | ce09bb5605a8310b0d9dfe00aaffbe27d27ed1c7 | [] | no_license | shubham14/Coding_Contest_solutions | f884c458d3316bdafc6f1b1a52cf3e962c58bc47 | 1b67497f35b892c25e3d9600214fa37a738ffd40 | refs/heads/master | 2021-06-22T13:34:10.581101 | 2019-10-09T02:56:01 | 2019-10-09T02:56:01 | 131,326,516 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 440 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Jul 24 00:09:31 2018
@author: Shubham
"""
class Solution(object):
def minEatingSpeed(self, piles, H):
        def possible(K):
            # (p - 1) // K + 1 == ceil(p / K): hours needed for pile p at speed K
            return sum((p - 1) // K + 1 for p in piles) <= H
        s, e = 1, max(piles)
        while s < e:
            m = (s + e) // 2
            if not possible(m):
                s = m + 1
            else:
                e = m
        return s
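# The binary search above returns the least eating speed K that finishes within
# H hours; with the LeetCode 875 sample input:
# Solution().minEatingSpeed([3, 6, 7, 11], 8)  # -> 4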
| [
"[email protected]"
] | |
3cce302430399686b13d6cc49040ace97eb052a2 | 632eee486e432d1bc2a7c771db7e9a06f7cad7a9 | /2812-laercio.py | 3d976d923a342bfca9597d558079aab8c6f0269b | [] | no_license | L4res/URI-Python | d1c578d87201151540876a6b8eca2aecd833a953 | 2f59387ca38e16f6396a6ea677d71f7c2c919fc2 | refs/heads/master | 2023-03-25T17:34:57.635187 | 2020-06-15T18:20:03 | 2020-06-15T18:20:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 864 | py | for i in range(int(input())):
n = int(input())
lista = list(map(int, input().split()))
lista.sort()
impares = []
resultado = []
    # Collect the odd values:
for j in range(n):
if lista[j] % 2 != 0:
impares.append(lista[j])
    # Interleave the remaining values, largest then smallest, in turn:
while len(impares) != 0:
try:
resultado.append(impares[len(impares)-1])
impares.pop()
resultado.append(impares[0])
impares.pop(0)
except IndexError:
break
    # Print the result:
if len(resultado) == 0:
print()
else:
for k in range(len(resultado)):
if k != len(resultado)-1:
print(resultado[k], end=" ")
else:
print(resultado[k]) | [
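# Example of the interleaving above: sorted odds [1, 3, 5, 7] print as
# "7 1 5 3" -- largest, then smallest, alternating until the list empties.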
"[email protected]"
] | |
bccecc6995a1fcb323de9ae3254a02a54008d43a | e1a2c6ed4a4b93b4697974e3b0a32a4d67daa6f6 | /venv/Lib/site-packages/pybrain3/rl/environments/ode/tasks/ccrl.py | 74047621ad42288a4b7988d703bcde82599a35e9 | [
"MIT"
] | permissive | ishatserka/MachineLearningAndDataAnalysisCoursera | cdf0f23a58617e17d6b938e3a9df17daae8585e4 | e82e772df2f4aec162cb34ac6127df10d14a625a | refs/heads/master | 2021-09-11T01:39:26.228392 | 2018-04-05T14:33:39 | 2018-04-05T14:33:39 | 117,153,454 | 0 | 0 | MIT | 2018-03-27T05:20:37 | 2018-01-11T21:05:33 | Python | UTF-8 | Python | false | false | 15,329 | py | __author__ = 'Frank Sehnke, [email protected]'
from pybrain3.rl.environments import EpisodicTask
from pybrain3.rl.environments.ode.sensors import SpecificBodyPositionSensor
from scipy import tanh, zeros, array, random, sqrt, asarray
#Basic class for all ccrl tasks
class CCRLTask(EpisodicTask):
def __init__(self, env):
EpisodicTask.__init__(self, env)
        #Overall maximal torque - multiplied with the relative max torque of each individual joint.
self.maxPower = 100.0
self.reward_history = []
self.count = 0 #timestep counter
        self.epiLen = 1500 #suggested episode length for normal Johnnie tasks
        self.incLearn = 0 #counts the task resets for incremental learning
        self.env.FricMu = 20.0 #We need higher friction for CCRL
        self.env.dt = 0.002 #We also need finer time resolution
# normalize standard sensors to (-1, 1)
self.sensor_limits = []
#Angle sensors
for i in range(self.env.actLen):
self.sensor_limits.append((self.env.cLowList[i], self.env.cHighList[i]))
# Joint velocity sensors
for i in range(self.env.actLen):
self.sensor_limits.append((-20, 20))
#Norm all actor dimensions to (-1, 1)
self.actor_limits = [(-1, 1)] * env.actLen
self.oldAction = zeros(env.actLen, float)
self.dist = zeros(9, float)
self.dif = array([0.0, 0.0, 0.0])
self.target = array([-6.5, 1.75, -10.5])
self.grepRew = 0.0
self.tableFlag = 0.0
self.env.addSensor(SpecificBodyPositionSensor(['objectP00'], "glasPos"))
self.env.addSensor(SpecificBodyPositionSensor(['palmLeft'], "palmPos"))
self.env.addSensor(SpecificBodyPositionSensor(['fingerLeft1'], "finger1Pos"))
self.env.addSensor(SpecificBodyPositionSensor(['fingerLeft2'], "finger2Pos"))
#we changed sensors so we need to update environments sensorLength variable
self.env.obsLen = len(self.env.getSensors())
        #normalization for the task-specific sensors
for i in range(self.env.obsLen - 2 * self.env.actLen):
self.sensor_limits.append((-4, 4))
def getObservation(self):
""" a filtered mapping to getSample of the underlying environment. """
sensors = self.env.getSensors()
#Sensor hand to target object
for i in range(3):
self.dist[i] = ((sensors[self.env.obsLen - 9 + i] + sensors[self.env.obsLen - 6 + i] + sensors[self.env.obsLen - 3 + i]) / 3.0 - (sensors[self.env.obsLen - 12 + i] + self.dif[i])) * 4.0 #sensors[self.env.obsLen-12+i]
#Sensor hand angle to horizontal plane X-Axis
for i in range(3):
self.dist[i + 3] = (sensors[self.env.obsLen - 3 + i] - sensors[self.env.obsLen - 6 + i]) * 5.0
#Sensor hand angle to horizontal plane Y-Axis
for i in range(3):
self.dist[i + 6] = ((sensors[self.env.obsLen - 3 + i] + sensors[self.env.obsLen - 6 + i]) / 2.0 - sensors[self.env.obsLen - 9 + i]) * 10.0
if self.sensor_limits:
sensors = self.normalize(sensors)
sens = []
for i in range(self.env.obsLen - 12):
sens.append(sensors[i])
for i in range(9):
sens.append(self.dist[i])
for i in self.oldAction:
sens.append(i)
return sens
def performAction(self, action):
#Filtered mapping towards performAction of the underlying environment
#The standard CCRL task uses a PID controller to controll directly angles instead of forces
#This makes most tasks much simpler to learn
self.oldAction = action
#Grasping as reflex depending on the distance to target - comment in for more easy grasping
#if abs(self.dist[2])<2.0: action[15]=(1.0+2.0*action[15])*.3333 #self.grepRew=action[15]*.01
#else: action[15]=(-1.0+2.0*action[15])*.3333 #self.grepRew=action[15]*-.03
isJoints=array(self.env.getSensorByName('JointSensor')) #The joint angles
isSpeeds=array(self.env.getSensorByName('JointVelocitySensor')) #The joint angular velocitys
act=(action+1.0)/2.0*(self.env.cHighList-self.env.cLowList)+self.env.cLowList #norm output to action intervall
action=tanh((act-isJoints-0.9*isSpeeds*self.env.tourqueList)*16.0)*self.maxPower*self.env.tourqueList #simple PID
EpisodicTask.performAction(self, action)
#self.env.performAction(action)
def isFinished(self):
#returns true if episode timesteps has reached episode length and resets the task
if self.count > self.epiLen:
self.res()
return True
else:
self.count += 1
return False
def res(self):
#sets counter and history back, increases incremental counter
self.count = 0
self.incLearn += 1
self.reward_history.append(self.getTotalReward())
self.tableFlag = 0.0
def getReward(self):
#rewarded for approaching the object
dis = sqrt((self.dist[0:3] ** 2).sum())
return (25.0 - dis) / float(self.epiLen) - float(self.env.tableSum) * 0.1
#Learn to grasp a glas at a fixed location
class CCRLGlasTask(CCRLTask):
def __init__(self, env):
CCRLTask.__init__(self, env)
self.dif = array([0.0, 0.0, 0.0])
        self.epiLen = 1000 #suggested episode length for normal Johnnie tasks
def isFinished(self):
#returns true if episode timesteps has reached episode length and resets the task
if self.count > self.epiLen:
self.res()
return True
else:
if self.count == 1: self.pertGlasPos(0)
self.count += 1
return False
def pertGlasPos(self, num):
if num == 0: self.env.pert = asarray([0.0, 0.0, 0.5])
def getReward(self):
if self.env.glasSum >= 2: grip = 1.0 + float(self.env.glasSum - 2)
else: grip = 0.0
if self.env.tableSum > 0: self.tableFlag = 10.0
self.dist[3] = 0.0
self.dist[8] = 0.0
dis = sqrt((self.dist ** 2).sum())
nig = (abs(self.dist[4]) + 1.0)
if self.env.stepCounter == self.epiLen:
return 25.0 + grip / nig - dis - self.tableFlag #-dis
else:
return (25.0 - dis) / float(self.epiLen) + (grip / nig - float(self.env.tableSum)) * 0.1 #+self.grepRew (10.0-dis)/float(self.epiLen)+
#Learn to grasp a plate at a fixed location
class CCRLPlateTask(CCRLTask):
def __init__(self, env):
CCRLTask.__init__(self, env)
self.dif = array([0.0, 0.2, 0.8])
        self.epiLen = 1000 #suggested episode length for normal Johnnie tasks
def isFinished(self):
#returns true if episode timesteps has reached episode length and resets the task
if self.count > self.epiLen:
self.res()
return True
else:
if self.count == 1: self.pertGlasPos(0)
self.count += 1
return False
def pertGlasPos(self, num):
if num == 0: self.env.pert = asarray([0.0, 0.0, 0.5])
def getReward(self):
if self.env.glasSum >= 2: grip = 1.0
else: grip = 0.0
if self.env.tableSum > 0: self.tableFlag = 10.0
#self.dist[4]=0.0
#self.dist[8]=0.0
dis = sqrt((self.dist[0:3] ** 2).sum())
if self.count == self.epiLen:
return 25.0 + grip - dis - self.tableFlag #/nig
else:
return (25.0 - dis) / float(self.epiLen) + (grip - float(self.env.tableSum)) * 0.1 #/nig -(1.0+self.oldAction[15])
#Learn to grasp a glas at 5 different locations
class CCRLGlasVarTask(CCRLGlasTask):
def __init__(self, env):
CCRLGlasTask.__init__(self, env)
        self.epiLen = 5000 #suggested episode length for normal Johnnie tasks
def isFinished(self):
#returns true if episode timesteps has reached episode length and resets the task
if self.count > self.epiLen:
self.res()
return True
else:
if self.count == 1:
self.pertGlasPos(0)
if self.count == self.epiLen / 5 + 1:
self.env.reset()
self.pertGlasPos(1)
if self.count == 2 * self.epiLen / 5 + 1:
self.env.reset()
self.pertGlasPos(2)
if self.count == 3 * self.epiLen / 5 + 1:
self.env.reset()
self.pertGlasPos(3)
if self.count == 4 * self.epiLen / 5 + 1:
self.env.reset()
self.pertGlasPos(4)
self.count += 1
return False
def pertGlasPos(self, num):
if num == 0: self.env.pert = asarray([1.0, 0.0, 0.5])
if num == 1: self.env.pert = asarray([-1.0, 0.0, 0.5])
if num == 2: self.env.pert = asarray([1.0, 0.0, 0.0])
if num == 3: self.env.pert = asarray([-1.0, 0.0, 0.0])
if num == 4: self.env.pert = asarray([0.0, 0.0, 0.25])
def getReward(self):
if self.env.glasSum >= 2: grip = 1.0
else: grip = 0.0
if self.env.tableSum > 0: self.tableFlag = 10.0
self.dist[3] = 0.0
self.dist[8] = 0.0
dis = sqrt((self.dist ** 2).sum())
nig = (abs(self.dist[4]) + 1.0)
if self.count == self.epiLen or self.count == self.epiLen / 5 or self.count == 2 * self.epiLen / 5 or self.count == 3 * self.epiLen / 5 or self.count == 4 * self.epiLen / 5:
return 25.0 + grip / nig - dis - self.tableFlag #/nig
else:
return (25.0 - dis) / float(self.epiLen) + (grip / nig - float(self.env.tableSum)) * 0.1 #/nig
#Learn to grasp a glas at random locations
class CCRLGlasVarRandTask(CCRLGlasVarTask):
def pertGlasPos(self, num):
self.env.pert = asarray([random.random()*2.0 - 1.0, 0.0, random.random()*0.5 + 0.5])
#Some experimental stuff
class CCRLPointTask(CCRLGlasVarTask):
def __init__(self, env):
CCRLGlasVarTask.__init__(self, env)
        self.epiLen = 1000 #suggested episode length for normal Johnnie tasks
def isFinished(self):
#returns true if episode timesteps has reached episode length and resets the task
if self.count > self.epiLen:
self.res()
return True
else:
if self.count == 1:
self.pertGlasPos(0)
self.count += 1
return False
def getObservation(self):
""" a filtered mapping to getSample of the underlying environment. """
sensors = self.env.getSensors()
sensSort = []
#Angle and angleVelocity
for i in range(32):
sensSort.append(sensors[i])
#Angles wanted (old action)
for i in self.oldAction:
sensSort.append(i)
#Hand position
for i in range(3):
sensSort.append((sensors[38 + i] + sensors[41 + i]) / 2)
        #Hand orientation (hack - TODO: make correct)
sensSort.append((sensors[38] - sensors[41]) / 2 - sensors[35]) #pitch
sensSort.append((sensors[38 + 1] - sensors[41 + 1]) / 2 - sensors[35 + 1]) #yaw
sensSort.append((sensors[38 + 1] - sensors[41 + 1])) #roll
#Target position
for i in range(3):
sensSort.append(self.target[i])
#Target orientation
for i in range(3):
sensSort.append(0.0)
#Object type (start with random)
        sensSort.append(float(random.randint(-1, 1)))  # random object-type placeholder
#normalisation
if self.sensor_limits:
sensors = self.normalize(sensors)
sens = []
for i in range(32):
sens.append(sensors[i])
for i in range(29):
sens.append(sensSort[i + 32])
#calc dist to target
self.dist = array([(sens[54] - sens[48]), (sens[55] - sens[49]), (sens[56] - sens[50]), sens[51], sens[52], sens[53], sens[15]])
return sens
def pertGlasPos(self, num):
if num == 0: self.target = asarray([0.0, 0.0, 1.0])
self.env.pert = self.target.copy()
self.target = self.target.copy() + array([-6.5, 1.75, -10.5])
def getReward(self):
dis = sqrt((self.dist ** 2).sum())
return (25.0 - dis) / float(self.epiLen) - float(self.env.tableSum) * 0.1
class CCRLPointVarTask(CCRLPointTask):
def __init__(self, env):
CCRLPointTask.__init__(self, env)
        self.epiLen = 2000 #suggested episode length for normal Johnnie tasks
def isFinished(self):
#returns true if episode timesteps has reached episode length and resets the task
if self.count > self.epiLen:
self.res()
return True
else:
if self.count == 1:
self.pertGlasPos(0)
if self.count == self.epiLen / 2 + 1:
self.env.reset()
self.pertGlasPos(1)
self.count += 1
return False
def getObservation(self):
""" a filtered mapping to getSample of the underlying environment. """
sensors = self.env.getSensors()
sensSort = []
#Angle and angleVelocity
for i in range(32):
sensSort.append(sensors[i])
#Angles wanted (old action)
for i in self.oldAction:
sensSort.append(i)
#Hand position
for i in range(3):
sensSort.append((sensors[38 + i] + sensors[41 + i]) / 2)
        #Hand orientation (hack - TODO: make correct)
sensSort.append((sensors[38] - sensors[41]) / 2 - sensors[35]) #pitch
sensSort.append((sensors[38 + 1] - sensors[41 + 1]) / 2 - sensors[35 + 1]) #yaw
sensSort.append((sensors[38 + 1] - sensors[41 + 1])) #roll
#Target position
for i in range(3):
sensSort.append(self.target[i])
#Target orientation
for i in range(3):
sensSort.append(0.0)
#Object type (start with random)
        sensSort.append(float(random.randint(-1, 1)))  # random object-type placeholder
#normalisation
if self.sensor_limits:
sensors = self.normalize(sensors)
sens = []
for i in range(32):
sens.append(sensors[i])
for i in range(29):
sens.append(sensSort[i + 32])
#calc dist to target
self.dist = array([(sens[54] - sens[48]) * 10.0, (sens[55] - sens[49]) * 10.0, (sens[56] - sens[50]) * 10.0, sens[51], sens[52], sens[53], 1.0 + sens[15]])
return sens
def pertGlasPos(self, num):
if num == 0: self.target = asarray([1.0, 0.0, 1.0])
if num == 1: self.target = asarray([-1.0, 0.0, 1.0])
if num == 2: self.target = asarray([1.0, 0.0, 0.0])
if num == 3: self.target = asarray([-1.0, 0.0, 0.0])
if num == 4: self.target = asarray([0.0, 0.0, 0.5])
self.env.pert = self.target.copy()
self.target = self.target.copy() + array([-6.5, 1.75, -10.5])
def getReward(self):
dis = sqrt((self.dist ** 2).sum())
subEpi = self.epiLen / 2
if self.count == self.epiLen or self.count == subEpi:
return (25.0 - dis) / 2.0
else:
return (25.0 - dis) / float(self.epiLen) - float(self.env.tableSum) * 0.1
| [
"[email protected]"
] | |
aa611c1670449e133c290241f9c2bbbc38d1505a | ed21823488a1cca51009793efa0b124e40d224a4 | /neurobioseg/170111_avoid_redundant_path_calculation/p170111_03_compute_paths.py | b61c0656c06859d665bca2fe34b87a67a2cc9716 | [] | no_license | jhennies/py_devel | 4a41e13ec8cd9b834c3d5acf64becc0fa8ffc479 | 9fc860be95ae91064a40f25e26d4024fbae6eb1f | refs/heads/master | 2021-01-16T23:25:56.716283 | 2017-03-10T17:49:55 | 2017-03-10T17:49:55 | 45,381,183 | 1 | 0 | null | 2017-03-10T17:49:56 | 2015-11-02T08:21:35 | Python | UTF-8 | Python | false | false | 4,945 | py |
import os
import inspect
from hdf5_image_processing import Hdf5ImageProcessing as IP, Hdf5ImageProcessingLib as ipl
from hdf5_processing import RecursiveDict as rdict
from shutil import copy, copyfile
import numpy as np
import matplotlib.pyplot as plt
import processing_libip as libip
import sys
from yaml_parameters import YamlParams
__author__ = 'jhennies'
def load_images(filepath, skeys=None, recursive_search=False, logger=None):
if logger is not None:
logger.logging('Loading data from \n{}', filepath)
logger.logging('With skeys = {}', skeys)
else:
print 'Loading data from \n{}'.format(filepath)
data = ipl()
data.data_from_file(
filepath=filepath,
skeys=skeys,
recursive_search=recursive_search,
nodata=True
)
return data
def simplify_statistics(statistics, iterations=3):
newstats = statistics.dcp()
for i in xrange(0, iterations):
for d, k, v, kl in statistics.data_iterator(yield_short_kl=True):
if v == 0 or not v:
newstats[kl].pop(k)
statistics = newstats.dcp()
return newstats
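# Note: a single pruning pass can leave parents that only just became empty, so
# the sweep above repeats `iterations` times to let emptiness propagate upward
# through the nested dict.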
def compute_paths(yparams):
all_params = yparams.get_params()
    # Zeroth layer:
# --------------
zeroth = rdict(all_params['compute_paths'])
if 'default' in zeroth:
zeroth_defaults = zeroth.pop('default')
else:
zeroth_defaults = ipl()
for exp_lbl, experiment in zeroth.iteritems():
# First layer
# -----------
# An experiment is now selected and performed
yparams.logging('Performing experiment {}\n==============================\n', exp_lbl)
first = zeroth_defaults.dcp()
first.merge(experiment)
if 'default' in first:
first_defaults = first.pop('default')
else:
first_defaults = ipl()
statistics = rdict()
for exp_class_lbl in ['truepaths', 'falsepaths']:
# Final layer
# -----------
# The true or false paths for the current experiment are here computed, respectively
yparams.logging('Computing {}...\n------------------------------\n', exp_class_lbl)
final = first_defaults.dcp()
final.merge(first[exp_class_lbl])
exp_sources = final['sources']
exp_params = final['params']
exp_target = final['target']
# Load the necessary images
data=ipl()
for datakey, content in exp_sources.iteritems():
data[datakey] = load_images(
all_params[content[0]] + all_params[content[1]],
skeys=content[2]['skeys'],
recursive_search=False,
logger=yparams
)
yparams.logging('\nInitial datastructure: \n\n{}', data.datastructure2string(maxdepth=4))
yparams.logging('experiment_params: \n{}', exp_params)
# Compute the paths
# -----------------
paths = ipl()
for_class = False
if exp_class_lbl == 'truepaths':
for_class = True
paths[exp_lbl][exp_class_lbl], statistics[exp_lbl][exp_class_lbl] = libip.compute_paths_for_class(
data, 'segm', 'conts', 'dt', 'gt',
exp_params, for_class=for_class, ignore=[], debug=all_params['debug'],
logger=yparams
)
yparams.logging(
'\nPaths datastructure after running {}: \n\n{}',
exp_class_lbl,
paths.datastructure2string()
)
def val(x):
return x
yparams.logging(
'\nStatistics after {}: \n\n{}', exp_class_lbl,
simplify_statistics(statistics[exp_lbl]).datastructure2string(function=val)
)
# Save the result to disk
# -----------------------
targetfile = all_params[exp_target[0]] + all_params[exp_target[1]]
paths.write(filepath=targetfile)
def val(x):
return x
yparams.logging(
'\nStatistics after full experiment: \n\n{}',
simplify_statistics(statistics[exp_lbl]).datastructure2string(function=val)
)
def run_compute_paths(yamlfile, logging=True):
yparams = YamlParams(filename=yamlfile)
params = yparams.get_params()
# Logger stuff
yparams.set_indent(1)
yparams.startlogger(
filename=params['resultfolder'] + 'compute_paths.log',
type='w', name='ComputePaths'
)
try:
compute_paths(yparams)
yparams.logging('')
yparams.stoplogger()
except:
yparams.errout('Unexpected error')
if __name__ == '__main__':
yamlfile = os.path.dirname(os.path.abspath(__file__)) + '/parameters_ref.yml'
run_compute_paths(yamlfile, logging=False) | [
"[email protected]"
] | |
46e7155c122fe2b89291a70967d3ced59f4c38ce | cf1f1d3f7a4aaaaaee322b0101f7b294909c5a67 | /Code/Al/loop_index.py | 0cbbf5bab4fdd9fe13eacbec91a04ee7426ff5b3 | [] | no_license | PdxCodeGuild/class_emu | 0b52cc205d01af11860a975fc55e36c065d1cc68 | 9938f384d67a4f57e25f2714efa6b63e2e41b892 | refs/heads/master | 2020-05-31T01:16:52.911660 | 2019-12-09T05:22:06 | 2019-12-09T05:22:06 | 190,046,342 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 185 | py | # Write your code here :-)
import string
print(string.ascii_lowercase)
abc_list = list(string.ascii_lowercase)
print(abc_list)
for num in range(len(abc_list)):
print(abc_list[num])
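# An equivalent idiomatic form, shown for comparison:
# for num, letter in enumerate(abc_list):
#     print(letter)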
| [
"[email protected]"
] | |
501efc03e712d21a0a76e29634ed02d611170f9e | 8fbd8b98cdf04d319f7b5789d6dc1a738a90566b | /th_mastodon/tests.py | 9658443ac0d0bd316e58cf64cc671700da484071 | [
"BSD-3-Clause"
] | permissive | fkztw/django-th | 5231652ed75ae6060bd4f4a383eba4286e8c9191 | 926a3b9d515a7995cb36d2259729851d0c5cfb4d | refs/heads/master | 2023-07-23T22:08:11.898683 | 2017-10-27T12:38:21 | 2017-10-27T12:38:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,978 | py | # coding: utf-8
from django.conf import settings
from django.core.cache import caches
from django_th.tests.test_main import MainTest
from django_th.models import ServicesActivated
from mastodon import Mastodon as MastodonAPI
from th_mastodon.forms import MastodonProviderForm, MastodonConsumerForm
from th_mastodon.models import Mastodon
from th_mastodon.my_mastodon import ServiceMastodon
from unittest.mock import patch
cache = caches['django_th']
class MastodonTest(MainTest):
"""
MastodonTest Model
"""
def test_get_services_list(self):
th_service = ('th_mastodon.my_mastodon.ServiceMastodon',)
for service in th_service:
self.assertIn(service, settings.TH_SERVICES)
def create_masto(self, tooter='[email protected]', timeline='home',
tag='mastodon', fav=False, since_id=1, max_id=0):
trigger = self.create_triggerservice(consumer_name='ServiceMastodon')
ServicesActivated.objects.get(name='ServiceMastodon')
resu = Mastodon.objects.create(tooter=tooter, timeline=timeline,
tag=tag, fav=fav, since_id=since_id,
max_id=max_id,
trigger=trigger, status=True)
return resu
def test_mastodon(self):
m = self.create_masto()
self.assertTrue(isinstance(m, Mastodon))
self.assertEqual(m.show(), "My Mastodon %s %s" %
(m.timeline, m.trigger))
self.assertEqual(m.__str__(), "{}".format(m.timeline))
"""
Form
"""
# provider
def test_valid_provider_form(self):
m = self.create_masto()
data = {'tooter': m.tooter,
'timeline': m.timeline,
'tag': m.tag,
'fav': m.fav}
form = MastodonProviderForm(data=data)
self.assertTrue(form.is_valid())
def test_invalid_provider_form(self):
form = MastodonProviderForm(data={'tooter': '',
'timeline': '',
'tag': '', 'fav': ''})
self.assertFalse(form.is_valid())
# consumer
def test_valid_consumer_form(self):
m = self.create_masto()
data = {'tooter': m.tooter,
'timeline': m.timeline,
'tag': m.tag,
'fav': m.fav}
form = MastodonConsumerForm(data=data)
self.assertTrue(form.is_valid())
def test_invalid_consumer_form(self):
# when a field is empty the clean() function set it as None
form = MastodonConsumerForm(data={'tooter': '',
'timeline': '',
'tag': '', 'fav': False})
self.assertFalse(form.is_valid())
class ServiceMastodonTest(MastodonTest):
"""
    ServiceMastodonTest
"""
def setUp(self):
super(ServiceMastodonTest, self).setUp()
self.data = {'text': 'something #thatworks'}
self.token = 'AZERTY1234'
self.trigger_id = 1
self.service = ServiceMastodon(self.token)
"""
def test_read_data_tooter(self):
search = {'id': 1}
t = self.create_masto(since_id=0, tag='')
kwargs = dict({'date_triggered': '2013-05-11 13:23:58+00:00',
'model_name': 'Mastodon',
'trigger_id': t.trigger_id,
'user': 'foxmask'})
user_id = []
user_id[0]['id'] = 1
with patch.object(MastodonAPI, 'account_statuses') as mock1:
se = ServiceMastodon(self.token)
with patch.object(MastodonAPI, 'account_search') as mock2:
se.read_data(**kwargs)
mock2.assert_called_with(q='[email protected]')
mock2.return_value = user_id[0]['id']
mock1.assert_called_once_with(**search)
"""
@patch.object(MastodonAPI, 'favourites')
def test_read_data_fav(self, mock1):
search = {'max_id': 0, 'since_id': 1}
t = self.create_masto(tag='', fav=True)
kwargs = dict({'date_triggered': '2013-05-11 13:23:58+00:00',
'model_name': 'Mastodon',
'trigger_id': t.trigger_id,
'user': 'foxmask'})
se = ServiceMastodon(self.token)
se.read_data(**kwargs)
mock1.assert_called_with(**search)
@patch.object(MastodonAPI, 'search')
def test_read_data_tag(self, mock1):
search = {'q': 'mastodon', 'since_id': 1}
t = self.create_masto()
kwargs = dict({'date_triggered': '2013-05-11 13:23:58+00:00',
'model_name': 'Mastodon',
'trigger_id': t.trigger_id,
'user': 'foxmask'})
se = ServiceMastodon(self.token)
se.read_data(**kwargs)
mock1.assert_called_with(**search)
@patch.object(MastodonAPI, 'status_post')
def test_save_data_toot(self, mock1):
self.create_masto()
token = self.token
trigger_id = self.trigger_id
kwargs = {'user': 1}
self.data['title'] = 'Toot from'
self.data['link'] = 'http://domain.ltd'
content = str("{title} {link}").format(
title=self.data.get('title'),
link=self.data.get('link'))
content += ' #mastodon'
self.data['content'] = content
self.assertTrue(token)
self.assertTrue(isinstance(trigger_id, int))
se = ServiceMastodon(self.token, **kwargs)
se.save_data(trigger_id, **self.data)
mock1.assert_called_with(content, media_ids=None)
"""
@patch.object(MastodonAPI, 'status_post')
@patch.object(MastodonAPI, 'media_post')
@patch.object(ServiceMastodon, 'media_in_content')
def test_save_data_toot_media(self, mock1, mock2, mock3):
self.create_masto()
token = self.token
trigger_id = self.trigger_id
kwargs = {'user': 1}
self.data['title'] = 'Tweet from xxxx'
self.data['link'] = 'http://domain.ltd'
content = ' https://pbs.twimg.com/media/foobar.jpg '
local_file = os.path.dirname(__file__) + '/../cache/foobar.jpg'
self.data['content'] = content
content += str("{link} #mastodon").format(
link=self.data.get('link'))
self.assertTrue(token)
self.assertTrue(isinstance(trigger_id, int))
self.assertIn('text', self.data)
self.assertNotEqual(self.data['text'], '')
se = ServiceMastodon(self.token, **kwargs)
se.save_data(trigger_id, **self.data)
mock1.assert_called_with(content)
mock1.return_value = (content, local_file)
mock2.assert_called_with(content)
mock2.return_value = 1234 # fake media id
mock3.assert_called_with(content)
"""
def test_auth(self):
pass
def test_callback(self):
pass
| [
"[email protected]"
] | |
9c3e7efe4f11de9d2d352605026b21608815d9e9 | 6e4448d99733d6cabba8fc725e3f5132161e49f7 | /pre_2016_17_cool_season/prism_precip_ncar.py | 642981c52f13da509e7e2be9ea7433f5f4bee27c | [] | no_license | tomgowan/model-climatology | 00a4d75e11bb0a8599121aeb0cd6831f32b04329 | 1fab1c15535311c3ff4258bd4670ccdd81239ca2 | refs/heads/master | 2020-03-22T20:28:45.907527 | 2018-07-11T16:54:32 | 2018-07-11T16:54:32 | 140,603,732 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,501 | py | import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap, maskoceans
import pygrib, os, sys
from netCDF4 import Dataset
from numpy import *
import numpy as np
from pylab import *
import time
from datetime import date, timedelta
import pyart
from matplotlib import animation
import matplotlib.animation as animation
import types
###############################################################################
############## Read in ncar and prism precip #############################
###############################################################################
Date2= '20150930'
Date = zeros((184))
precip_ncar = zeros((621,1405))
precip_tot = zeros((621,1405))
num_days = 183
for i in range(0,183):
t=time.strptime(Date2,'%Y%m%d')
newdate=date(t.tm_year,t.tm_mon,t.tm_mday)+timedelta(i)
Date3 = newdate.strftime('%Y%m%d')
Date[i] = int(Date3)
y = 0
for i in range(0,num_days-1):
x = 0
z = 0
#### Make sure all ncar and prism files are present
for j in range(13,37):
NCARens_file = '/uufs/chpc.utah.edu/common/home/steenburgh-group5/tom/model_raw_output/ncarens/regridded.precip.ncar_3km_%08d00' % Date[i] + '_mem1_f0%02d' % j + '.grb2'
if os.path.exists(NCARens_file):
x = x + 1
try:
prism_file = '/uufs/chpc.utah.edu/common/home/steenburgh-group5/tom/climatology/prism/PRISM_ppt_stable_4kmD2_%08d' % Date[i] + '_asc.asc'
if os.path.exists(prism_file):
z = 1
except:
pass
try:
prism_file = '/uufs/chpc.utah.edu/common/home/steenburgh-group5/tom/climatology/prism/PRISM_ppt_provisional_4kmD2_%08d' % Date[i] + '_asc.asc'
if os.path.exists(prism_file):
z = 1
except:
pass
print x
if x == 24 and z == 1:
y = y + 1
for j in range(13,37):#32
############# NCAR ############################
NCARens_file = '/uufs/chpc.utah.edu/common/home/steenburgh-group5/tom/model_raw_output/ncarens/regridded.precip.ncar_3km_%08d00' % Date[i] + '_mem1_f0%02d' % j + '.grb2'
print NCARens_file
            grbs = pygrib.open(NCARens_file)  # open the GRIB file before selecting from it
            grb = grbs.select(name='Total Precipitation')[0]
            lat_ncar, lon_ncar = grb.latlons()
            tmpmsgs = grbs.select(name='Total Precipitation')
msg = grbs[1]
precip_vals = msg.values
precip_vals = precip_vals*0.0393689*25.4
precip_ncar = precip_ncar + precip_vals
############### Prism #####################################
try:
precip = np.loadtxt("/uufs/chpc.utah.edu/common/home/steenburgh-group5/tom/climatology/prism/PRISM_ppt_stable_4kmD2_%08d" % Date[i] + "_asc.asc", skiprows = 6)
except:
print(prism_file)
try:
precip = np.loadtxt("/uufs/chpc.utah.edu/common/home/steenburgh-group5/tom/climatology/prism/PRISM_ppt_provisional_4kmD2_%08d" % Date[i] + "_asc.asc", skiprows = 6)
except:
print(prism_file)
precip_tot = precip_tot + precip
precip_tot = precip_tot/y
precip_ncar = precip_ncar/y
## Attempt to fix notation of lons so basemap understands it
lon_ncar = lon_ncar-360
###############################################################################
############## Create lat lon grid for prism #############################
###############################################################################
lats_prism = zeros((621,1405))
lons_prism = zeros((621,1405))
for i in range(621):
lats_prism[620-i,:] = 24.062500000000 + i*.0416666666666666666666666667
for i in range(1405):
lons_prism[:,i] = -125.02083333333333333333 + i*.0416666666666666666666667
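# The 0.041666... degree spacing is 1/24 degree (~4 km), matching the "4kmD2"
# PRISM product read above; rows fill as 620-i because the PRISM ASCII grid runs
# north-to-south while latitudes here increase with i.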
################## Save prism and ncar arrays ################################
np.savetxt('ncar_dailymean.txt', precip_ncar)
np.savetxt('prism_ncar_dailymean.txt', precip_tot)
'''
###############################################################################
######################## Plot #############################################
###############################################################################
cmap = matplotlib.cm.get_cmap('pyart_NWSRef')
fig = plt.figure(figsize=(20,13))
levels = [0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2, 2.5, 3, 3.5, 4, 4.5, 5, 6, 6.5, 7, 7.5, 8 ,8.5, 9,9.5, 10,11, 12, 13, 14, 15, 16, 18, 20, 22,26,30,34,38,42]
######################## NCAR #############################################
ax = fig.add_subplot(231)
map = Basemap(projection='merc',llcrnrlon=latlon[0],llcrnrlat=latlon[1],urcrnrlon=latlon[2],urcrnrlat=latlon[3],resolution='i')
x, y = map(lons_prism, lats_prism)
precip_ncar = maskoceans(lons_prism, lats_prism, precip_ncar)
#map.drawlsmask(land_color=(0, 0, 0, 0), ocean_color='deeppink', lakes=False)
csAVG = map.contourf(x,y,precip_ncar, levels, cmap = cmap, norm=matplotlib.colors.BoundaryNorm(levels,cmap.N))
map.drawcoastlines(linewidth = .5)
map.drawstates()
map.drawcountries()
cbar = map.colorbar(csAVG, location='bottom', pad="5%")
cbar.ax.tick_params(labelsize=12)
plt.title('NCAR Ensemble Control', fontsize = 18)
#cbar.ax.set_xlabel('Mean Daily Precipitation from Oct. 2015 to Mar. 2016 (mm)', fontsize = 10)
######################## prism #############################################
ax = fig.add_subplot(232)
map = Basemap(projection='merc',llcrnrlon=latlon[0],llcrnrlat=latlon[1],urcrnrlon=latlon[2],urcrnrlat=latlon[3],resolution='i')
x, y = map(lons_prism, lats_prism)
precip_tot = maskoceans(lons_prism, lats_prism, precip_tot)
csAVG = map.contourf(x,y,precip_tot, levels, cmap = cmap, norm=matplotlib.colors.BoundaryNorm(levels,cmap.N))
map.drawcoastlines(linewidth = .5)
map.drawstates()
map.drawcountries()
cbar = map.colorbar(csAVG, location='bottom', pad="5%")
cbar.ax.tick_params(labelsize=12)
plt.title('PRISM', fontsize = 18)
#cbar.ax.set_xlabel('Mean Daily Precipitation from Oct. 2015 to Mar. 2016 (mm)', fontsize = 10)
avg1 = precip_ncar[17:453, 0:540]/precip_tot[17:453, 0:540]
avg = avg1[(avg1 > 0.1) & (avg1 < 5)]
bias_mean = np.average(avg)
######################## bias #############################################
ax = fig.add_subplot(233)
map = Basemap(projection='merc',llcrnrlon=latlon[0],llcrnrlat=latlon[1],urcrnrlon=latlon[2],urcrnrlat=latlon[3],resolution='i')
cmap=plt.cm.BrBG
levels = [0.1, 0.5, 0.6, 0.7, 0.8, 0.9, 1, 1.2, 1.4, 1.6, 1.8,2, 5]
#plt.text(1,1,'Mean bias = %1.3f' % bias_mean,rotation = 0, fontsize = 12)
#levels = np.arange(.45.000001,.1)
ax.set_title('NCAR/PRISM', fontsize = 18)
x, y = map(lons_prism, lats_prism)
csAVG = map.contourf(x,y,precip_ncar/precip_tot, levels,cmap=cmap, norm=matplotlib.colors.BoundaryNorm(levels,cmap.N), vmin = 0.1, vmax = 5)
map.drawcoastlines(linewidth = .5)
map.drawstates()
map.drawcountries()
cbar.ax.tick_params(labelsize=12)
cbar = map.colorbar(csAVG, location='bottom', pad="5%", ticks= [0.1, 0.5, 0.6, 0.7, 0.8, 0.9, 1, 1.2, 1.4, 1.6, 1.8,2,5])
cbar.ax.set_xticklabels(['<0.5','0.5','0.6', '0.7', '0.8', '0.9', '1', '1.2', '1.4', '1.6', '1.8','2','>2'])
#set(cbar,'visible','off')
#cbar.ax.set_xlabel('Mean Daily Precipitation Bias from Oct. 2015 to Mar. 2016 (mm)', fontsize = 10)
plt.annotate('Mean bias = %1.3f' % bias_mean, xy=(0.01, .01), xycoords='axes fraction', fontsize = 11)
plt.savefig("./plots/ncar_prism_climo_%s" % region + ".pdf")
plt.show()
###############################################################################
############ plot hrr data also #############################################
###############################################################################
'''
'''
precip_hrrr = np.loadtxt('hrrr_dailymean.txt')
precip_tot = np.loadtxt('prism_hrrr_dailymean.txt')
###############################################################################
######################## Plot #############################################
###############################################################################
cmap = matplotlib.cm.get_cmap('pyart_NWSRef')
levels = np.arange(.0001,37,.5)
levels = [0, 0.25, 0.5, 0.75, 1, 1.25, 1.5, 1.75, 2, 2.5, 3, 3.5, 4, 4.5, 5, 6, 6.5, 7, 7.5, 8 ,8.5, 9,9.5, 10,11, 12, 13, 14, 15, 16, 18, 20, 22,26,30,34,38,42]
######################## hrrr #############################################
ax = fig.add_subplot(234)
#map = Basemap(projection='merc',llcrnrlon=latlon[0],llcrnrlat=latlon[1],urcrnrlon=latlon[2],urcrnrlat=latlon[3],resolution='i')
x, y = map(lons_prism, lats_prism)
precip_hrrr = maskoceans(lons_prism, lats_prism, precip_hrrr)
#map.drawlsmask(land_color=(0, 0, 0, 0), ocean_color='deeppink', lakes=False)
csAVG = map.contourf(x,y,precip_hrrr, levels, cmap = cmap,norm=matplotlib.colors.BoundaryNorm(levels,cmap.N))
map.drawcoastlines(linewidth = .5)
map.drawstates()
map.drawcountries()
cbar = map.colorbar(csAVG, location='bottom', pad="5%")
cbar.ax.tick_params(labelsize=12)
plt.title('HRRR', fontsize = 18)
cbar.ax.set_xlabel('Mean Daily Precipitation from Oct. 2015 to Mar. 2016 (mm)', fontsize = 10)
######################## prism #############################################
ax = fig.add_subplot(235)
map = Basemap(projection='merc',llcrnrlon=latlon[0],llcrnrlat=latlon[1],urcrnrlon=latlon[2],urcrnrlat=latlon[3],resolution='i')
x, y = map(lons_prism, lats_prism)
precip_tot = maskoceans(lons_prism, lats_prism, precip_tot)
csAVG = map.contourf(x,y,precip_tot, levels, cmap = cmap, norm=matplotlib.colors.BoundaryNorm(levels,cmap.N))
map.drawcoastlines(linewidth = .5)
map.drawstates()
map.drawcountries()
cbar = map.colorbar(csAVG, location='bottom', pad="5%")
cbar.ax.tick_params(labelsize=12)
plt.title('PRISM', fontsize = 18)
cbar.ax.set_xlabel('Mean Daily Precipitation from Oct. 2015 to Mar. 2016 (mm)', fontsize = 10)
### Calcualte bias mean of whole array (only include data from the WESTERN US)
avg1 = precip_hrrr[17:453, 0:540]/precip_tot[17:453, 0:540]
avg = avg1[(avg1 > 0.1) & (avg1 < 5)]
bias_mean = np.average(avg)
######################## bias #############################################
ax = fig.add_subplot(236)
map = Basemap(projection='merc',llcrnrlon=latlon[0],llcrnrlat=latlon[1],urcrnrlon=latlon[2],urcrnrlat=latlon[3],resolution='i')
cmap=plt.cm.BrBG
levels = [0.1, 0.5, 0.6, 0.7, 0.8, 0.9, 1, 1.2, 1.4, 1.6, 1.8,2, 5]
#levels = np.arange(.45.000001,.1)
x, y = map(lons_prism, lats_prism)
csAVG = map.contourf(x,y,precip_hrrr/precip_tot, levels,cmap=cmap, norm=matplotlib.colors.BoundaryNorm(levels,cmap.N), vmin = 0.1, vmax = 5)
map.drawcoastlines(linewidth = .5)
map.drawstates()
map.drawcountries()
cbar.ax.tick_params(labelsize=12)
cbar = map.colorbar(csAVG, location='bottom', pad="5%", ticks= [0.1, 0.5, 0.6, 0.7, 0.8, 0.9, 1, 1.2, 1.4, 1.6, 1.8,2,5])
cbar.ax.set_xticklabels(['<0.5','0.5','0.6', '0.7', '0.8', '0.9', '1', '1.2', '1.4', '1.6', '1.8','2','>2'])
plt.title('HRRR/PRISM', fontsize = 18)
cbar.ax.set_xlabel('Mean Daily Precipitation Bias from Oct. 2015 to Mar. 2016 (mm)', fontsize = 10)
#leg = ([], [], label='Mean bias = %1.3f' % bias_mean)
#plt.legend(handles = [leg],loc = "lower left")
#plt.text(.5,.5,'Mean bias = %1.3f' % bias_mean,rotation = 0, fontsize = 12)
plt.annotate('Mean bias = %1.3f' % bias_mean, xy=(0.01, .01), xycoords='axes fraction', fontsize = 11)
plt.savefig("./plots/hrrr_ncar_prism_climo_%s" % region + ".pdf")
plt.show()
'''
| [
"[email protected]"
] | |
4e404b4c200a4b0a221a3538e8f15c212981517e | f00c8395790dca63dbbcc2fac4df39a00352c0bd | /venv/bin/django-admin.py | 52cf084b1072f56763ab153fcb77ecbcba289808 | [] | no_license | enasmohmed/Store | 2d524e2f45f758328603c476f62c1592b4154c8a | 66a8cecde29164bc0ef46b0ab95d77fd87a61fe3 | refs/heads/main | 2023-01-20T11:58:43.092800 | 2020-11-28T15:42:09 | 2020-11-28T15:42:09 | 310,835,465 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 688 | py | #!/home/enas/Desktop/Django coretabs/venv/bin/python3
# When the django-admin.py deprecation ends, remove this script.
import warnings
from django.core import management
try:
from django.utils.deprecation import RemovedInDjango40Warning
except ImportError:
raise ImportError(
'django-admin.py was deprecated in Django 3.1 and removed in Django '
'4.0. Please manually remove this script from your virtual environment '
'and use django-admin instead.'
)
if __name__ == "__main__":
warnings.warn(
'django-admin.py is deprecated in favor of django-admin.',
RemovedInDjango40Warning,
)
management.execute_from_command_line()
| [
"[email protected]"
] | |
7995fc582146c2158eaa992be2a9ef6467415529 | f3b233e5053e28fa95c549017bd75a30456eb50c | /mcl1_input/L26/26-62_MD_NVT_rerun/set_1ns_equi_1.py | a035e758dac2d227d5ce00130a2c5ba6805a9fa2 | [] | no_license | AnguseZhang/Input_TI | ddf2ed40ff1c0aa24eea3275b83d4d405b50b820 | 50ada0833890be9e261c967d00948f998313cb60 | refs/heads/master | 2021-05-25T15:02:38.858785 | 2020-02-18T16:57:04 | 2020-02-18T16:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 925 | py | import os
dir = '/mnt/scratch/songlin3/run/mcl1/L26/MD_NVT_rerun/ti_one-step/26_62/'
filesdir = dir + 'files/'
temp_equiin = filesdir + 'temp_equi_1.in'
temp_pbs = filesdir + 'temp_1ns_equi_1.pbs'
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
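# Note (hedged): these 12 lambda values match the abscissas of 12-point Gaussian
# quadrature mapped onto [0, 1], a common choice of thermodynamic-integration
# windows in AMBER.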
for j in lambd:
os.system("rm -r %6.5f" %(j))
os.system("mkdir %6.5f" %(j))
os.chdir("%6.5f" %(j))
os.system("rm *")
workdir = dir + "%6.5f" %(j) + '/'
#equiin
eqin = workdir + "%6.5f_equi_1.in" %(j)
os.system("cp %s %s" %(temp_equiin, eqin))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, eqin))
#PBS
pbs = workdir + "%6.5f_1ns_equi_1.pbs" %(j)
os.system("cp %s %s" %(temp_pbs, pbs))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
#top
os.system("cp ../26-62_merged.prmtop .")
os.system("cp ../0.5_equi_0.rst .")
#submit pbs
os.system("qsub %s" %(pbs))
os.chdir(dir)
| [
"[email protected]"
] | |
1dc650adae49d3a7f479a9ce4b8ad82b9fe7da99 | f6c9f71f8850d9db28f4de25307f5b9f2c81523c | /0x11-python-network_1/0-hbtn_status.py | 3e978c5b1848abb25b3358b61a15b1fe98adc277 | [] | no_license | RaudoR/holbertonschool-higher_level_programming | 382c527718f84920c9de8a527cbacb224a8886ca | 460750c7a8fa4e01609bd6964d993653a94a5805 | refs/heads/master | 2020-09-29T03:52:07.953201 | 2020-05-29T18:20:29 | 2020-05-29T18:20:29 | 226,943,450 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 438 | py | #!/usr/bin/python3
'''script to fetch the url https://intranet.hbtn.io/status'''
import urllib.request
if __name__ == "__main__":
with urllib.request.urlopen('https://intranet.hbtn.io/status') as response:
html = response.read()
print("Body response:")
print("\t- type: {}".format(type(html)))
print("\t- content: {}".format(html))
print("\t- utf8 content: {}".format(html.decode("utf-8")))
| [
"[email protected]"
] | |
50e9805b4c7342f69df26383d629e99793f89bc5 | f1d9917f6a26d71650fce36c9d5bb6cc27ba4571 | /setup.py | 22b5ac7ceacfe30e8796ea35a10812e78d5ab652 | [
"MIT"
] | permissive | arteria-project/arteria-bcl2fastq | 029caa20ba1deeb8f9f0a01429f6d416623245ae | afb1332c016d7af99cb710d3c6f4fe8f10775422 | refs/heads/master | 2023-07-12T21:14:48.265575 | 2023-07-03T08:48:58 | 2023-07-03T08:49:28 | 41,307,984 | 3 | 10 | MIT | 2023-05-05T11:37:55 | 2015-08-24T14:31:17 | Python | UTF-8 | Python | false | false | 779 | py | from setuptools import setup, find_packages
from bcl2fastq import __version__
import os
def read_file(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
try:
with open("requirements.txt", "r") as f:
install_requires = [x.strip() for x in f.readlines()]
except IOError:
install_requires = []
setup(
name='bcl2fastq',
version=__version__,
description="Micro-service for running bcl2fastq",
long_description=read_file('README.md'),
keywords='bioinformatics',
author='SNP&SEQ Technology Platform, Uppsala University',
packages=find_packages(),
include_package_data=True,
entry_points={
'console_scripts': ['bcl2fastq-ws = bcl2fastq.app:start']
},
#install_requires=install_requires
)
| [
"[email protected]"
] | |
240b8bac2f0652b595726e36702120548cb29b54 | 48894ae68f0234e263d325470178d67ab313c73e | /inv/management/commands/l3-topology.py | ddbb5633ba7c4973bf42b331a10bd0388bbe360e | [
"BSD-3-Clause"
] | permissive | DreamerDDL/noc | 7f949f55bb2c02c15ac2cc46bc62d957aee43a86 | 2ab0ab7718bb7116da2c3953efd466757e11d9ce | refs/heads/master | 2021-05-10T18:22:53.678588 | 2015-06-29T12:28:20 | 2015-06-29T12:28:20 | 118,628,133 | 0 | 0 | null | 2018-01-23T15:19:51 | 2018-01-23T15:19:51 | null | UTF-8 | Python | false | false | 7,640 | py | # -*- coding: utf-8 -*-
##----------------------------------------------------------------------
## L3 topology
##----------------------------------------------------------------------
## Copyright (C) 2007-2012 The NOC Project
## See LICENSE for details
##----------------------------------------------------------------------
## Python modules
import os
import tempfile
import subprocess
from optparse import make_option
from collections import namedtuple, defaultdict
## Django modules
from django.core.management.base import BaseCommand, CommandError
## NOC modules
from noc.ip.models.vrf import VRF
from noc.sa.models.managedobject import ManagedObject
from noc.inv.models.forwardinginstance import ForwardingInstance
from noc.inv.models.subinterface import SubInterface
from noc.lib.ip import IP
from noc.lib.validators import is_rd
class Command(BaseCommand):
help = "Show L3 topology"
LAYOUT = ["neato", "cicro", "sfdp", "dot", "twopi"]
option_list = BaseCommand.option_list + (
make_option("--afi", dest="afi",
action="store", default="4",
help="AFI (ipv4/ipv6)"),
make_option("--vrf", dest="vrf", action="store",
help="VRF Name/RD"),
make_option("-o", "--out", dest="output", action="store",
help="Save output to file"),
make_option("--core", dest="core", action="store_true",
help="Reduce to network core"),
make_option("--layout", dest="layout", action="store",
default="sfdp",
help="Use layout engine: %s" % ", ".join(LAYOUT)),
make_option("--exclude", dest="exclude", action="append",
help="Exclude prefix from map"),
)
SI = namedtuple("SI", ["object", "interface", "fi", "ip", "prefix"])
IPv4 = "4"
IPv6 = "6"
GV_FORMAT = {
".pdf": "pdf"
}
def handle(self, *args, **options):
# Check AFI
afi = options["afi"].lower()
if afi.startswith("ipv"):
afi = afi[3:]
elif afi.startswith("ip"):
afi = afi[2:]
if afi not in ("4", "6"):
raise CommandError("Invalid AFI: Must be one of 4, 6")
# Check graphviz options
ext = None
if options["output"]:
ext = os.path.splitext(options["output"])[-1]
if ext in self.GV_FORMAT:
            # @todo: Check graphviz
pass
elif ext not in ".dot":
raise CommandError("Unknown output format")
if options["layout"] not in self.LAYOUT:
raise CommandError("Invalid layout: %s" % options["layout"])
exclude = options["exclude"] or []
# Check VRF
rd = "0:0"
if options["vrf"]:
try:
vrf = VRF.objects.get(name=options["vrf"])
rd = vrf.rd
except VRF.DoesNotExist:
if is_rd(options["vrf"]):
rd = options["vrf"]
else:
raise CommandError("Invalid VRF: %s" % options["vrf"])
self.mo_cache = {}
self.fi_cache = {}
self.rd_cache = {}
self.p_power = defaultdict(int)
out = ["graph {"]
out += [" node [fontsize=12];"]
out += [" edge [fontsize=8];"]
out += [" overlap=scale;"]
# out += [" splines=true;"]
objects = set()
prefixes = set()
interfaces = list(self.get_interfaces(afi, rd, exclude=exclude))
if options["core"]:
interfaces = [si for si in interfaces if self.p_power[si.prefix] > 1]
for si in interfaces:
o_id = "o_%s" % si.object
p_id = "p_%s" % si.prefix.replace(".", "_").replace(":", "__").replace("/", "___")
if si.object not in objects:
objects.add(si.object)
o = self.get_object(si.object)
if not o:
continue
out += [" %s [shape=box;style=filled;label=\"%s\"];" % (o_id, o.name)]
if si.prefix not in prefixes:
prefixes.add(si.prefix)
out += [" %s [shape=ellipse;label=\"%s\"];" % (p_id, si.prefix)]
out += [" %s -- %s [label=\"%s\"];" % (o_id, p_id, si.interface)]
out += ["}"]
data = "\n".join(out)
if ext is None:
print data
elif ext == ".dot":
with open(options["output"], "w") as f:
f.write(data)
else:
            # Pass to graphviz
with tempfile.NamedTemporaryFile(suffix=".dot") as f:
f.write(data)
f.flush()
subprocess.check_call([
options["layout"],
"-T%s" % self.GV_FORMAT[ext],
"-o%s" % options["output"],
f.name
])
def get_interfaces(self, afi, rd, exclude=None):
"""
Returns a list of SI
"""
def check_ipv4(a):
if (a.startswith("127.") or a.startswith("169.254") or
a.endswith("/32") or a.startswith("0.0.0.0")):
return False
else:
return True
def check_ipv6(a):
if a == "::1":
return False
else:
return True
exclude = exclude or []
si_fields = {"_id": 0, "name": 1, "forwarding_instance": 1,
"managed_object": 1}
if afi == self.IPv4:
check = check_ipv4
get_addresses = lambda x: x.get("ipv4_addresses", [])
AFI = "IPv4"
si_fields["ipv4_addresses"] = 1
elif afi == self.IPv6:
check = check_ipv6
get_addresses = lambda x: x.get("ipv6_addresses", [])
AFI = "IPv6"
si_fields["ipv6_addresses"] = 1
else:
raise NotImplementedError()
for si in SubInterface._get_collection().find({"enabled_afi": AFI}, si_fields):
if rd != self.get_rd(si["managed_object"], si.get("forwarding_instance")):
continue
seen = set(exclude)
for a in [a for a in get_addresses(si) if check(a)]:
prefix = str(IP.prefix(a).first)
if prefix in seen:
continue
seen.add(prefix)
self.p_power[prefix] += 1
yield self.SI(si["managed_object"], si["name"],
si.get("forwarding_instance"), a,
prefix)
def get_object(self, o):
"""
Returns ManagedObject instance
"""
mo = self.mo_cache.get(o)
if not mo:
try:
mo = ManagedObject.objects.get(id=o)
except ManagedObject.DoesNotExist:
mo = None
self.mo_cache[o] = mo
return mo
def get_rd(self, object, fi):
rd = self.rd_cache.get((object, fi))
if not rd:
if fi:
f = ForwardingInstance.objects.filter(id=fi).first()
if f:
rd = f.rd
else:
rd = None # Missed data
else:
o = self.get_object(object)
if o:
if o.vrf:
rd = o.vrf.rd
else:
rd = "0:0"
else:
rd = None # Missed data
self.rd_cache[object, fi] = rd
return rd
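
# Illustrative output (added): for a single object on one subnet the command
# emits DOT roughly like the following (names and IDs are made up):
#
#   graph {
#     node [fontsize=12];
#     edge [fontsize=8];
#     overlap=scale;
#     o_12 [shape=box;style=filled;label="router-1"];
#     p_10_0_0_0___24 [shape=ellipse;label="10.0.0.0/24"];
#     o_12 -- p_10_0_0_0___24 [label="Gi0/1"];
#   }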
| [
"[email protected]"
] | |
df3bf69e1052d215786ee3266d66ff9529129bf4 | 174aa0021c10ebe4d7598b44404f8dfcad0cbc24 | /dateparser/data/date_translation_data/ki.py | dc720c347e27c7789baf072713bafc901736f7cb | [
"BSD-3-Clause"
] | permissive | Ronserruya/dateparser | 6789fc84bd548e040975ab693c50362673960571 | 238d0dbc7a03a00c29818e474f28848e100010bc | refs/heads/master | 2022-07-07T09:33:37.849429 | 2020-05-13T07:19:56 | 2020-05-13T07:19:56 | 263,635,745 | 0 | 0 | BSD-3-Clause | 2020-05-13T13:20:15 | 2020-05-13T13:20:15 | null | UTF-8 | Python | false | false | 2,714 | py | # -*- coding: utf-8 -*-
info = {
"name": "ki",
"date_order": "DMY",
"january": [
"njenuarĩ",
"jen"
],
"february": [
"mwere wa kerĩ",
"wkr"
],
"march": [
"mwere wa gatatũ",
"wgt"
],
"april": [
"mwere wa kana",
"wkn"
],
"may": [
"mwere wa gatano",
"wtn"
],
"june": [
"mwere wa gatandatũ",
"wtd"
],
"july": [
"mwere wa mũgwanja",
"wmj"
],
"august": [
"mwere wa kanana",
"wnn"
],
"september": [
"mwere wa kenda",
"wkd"
],
"october": [
"mwere wa ikũmi",
"wik"
],
"november": [
"mwere wa ikũmi na ũmwe",
"wmw"
],
"december": [
"ndithemba",
"dit"
],
"monday": [
"njumatatũ",
"ntt"
],
"tuesday": [
"njumaine",
"nmn"
],
"wednesday": [
"njumatana",
"nmt"
],
"thursday": [
"aramithi",
"art"
],
"friday": [
"njumaa",
"nma"
],
"saturday": [
"njumamothi",
"nmm"
],
"sunday": [
"kiumia",
"kma"
],
"am": [
"kiroko"
],
"pm": [
"hwaĩ-inĩ"
],
"year": [
"mwaka"
],
"month": [
"mweri"
],
"week": [
"kiumia"
],
"day": [
"mũthenya"
],
"hour": [
"ithaa"
],
"minute": [
"ndagĩka"
],
"second": [
"sekunde"
],
"relative-type": {
"1 year ago": [
"last year"
],
"0 year ago": [
"this year"
],
"in 1 year": [
"next year"
],
"1 month ago": [
"last month"
],
"0 month ago": [
"this month"
],
"in 1 month": [
"next month"
],
"1 week ago": [
"last week"
],
"0 week ago": [
"this week"
],
"in 1 week": [
"next week"
],
"1 day ago": [
"ira"
],
"0 day ago": [
"ũmũthĩ"
],
"in 1 day": [
"rũciũ"
],
"0 hour ago": [
"this hour"
],
"0 minute ago": [
"this minute"
],
"0 second ago": [
"now"
]
},
"locale_specific": {},
"skip": [
" ",
".",
",",
";",
"-",
"/",
"'",
"|",
"@",
"[",
"]",
","
]
} | [
"[email protected]"
] | |
d46fe004e10c5667c296cf71217f95529c31f646 | c0a34cb6afebe699c55fdef5050b7a3efd0385cf | /media.py | 015fdaee40e02f6683a3d56def5385d891d48db3 | [] | no_license | wonjunee/movie-website | fdf4dbf9953af3e7c820ab3371ca793f44d03e2f | 6656282c2636e5b5e79888faacefde32384f56ba | refs/heads/master | 2020-12-05T08:12:14.925657 | 2016-09-01T15:27:37 | 2016-09-01T15:27:37 | 66,372,684 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 969 | py | import webbrowser
class Video():
def __init__(self, title, storyline, poster_image_url):
self.title = title
self.storyline = storyline
self.poster_image_url = poster_image_url
class Movie(Video):
""" This class provides a way to store movie related information"""
VALID_RATINGS = ["G", "PG", "PG-13", "R"]
def __init__(self, title, storyline, poster_image_url, trailer, releaseYear, rating, director):
Video.__init__(self, title, storyline, poster_image_url)
self.trailer_youtube_url = trailer
self.releaseYear = releaseYear
self.rating = rating
self.director = director
def show_trailer(self):
webbrowser.open(self.trailer_youtube_url)
# This is a class for TV shows. But it won't be included in the website this time.
class TvShow(Video):
VALID_RATINGS = ["G", "PG", "PG-13", "R"]
    def __init__(self, title, storyline, poster_image_url, trailer, num_seasons):
        Video.__init__(self, title, storyline, poster_image_url)
        self.trailer_youtube_url = trailer
        self.num_seasons = num_seasons
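
# A short usage sketch (added for illustration; the movie details and URLs
# below are placeholders, not real site data):
if __name__ == '__main__':
    toy_story = Movie("Toy Story",
                      "A story of a boy and his toys that come to life",
                      "https://example.com/toy_story_poster.jpg",
                      "https://example.com/toy_story_trailer",
                      1995, "G", "John Lasseter")
    print(toy_story.title, "-", toy_story.rating)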
| [
"[email protected]"
] | |
b91e5b6c07aa7a2ff0caf5f8e4cf9177fc49c24e | 807633994b9b6469379b97f31ce32b26f8009309 | /src/unicef_notification/validations.py | d3bb3305b8cb1f553eacf3486bc3378973a07123 | [
"Apache-2.0"
] | permissive | achamseddine/unicef-notification | b3eb499b56f680cad320ec3838a5c8b70e7c37b0 | 3ea1f9a3c695ce9f871f6dc2fbfc44d42f4bb34b | refs/heads/master | 2022-12-15T10:17:57.040794 | 2018-08-08T14:52:07 | 2018-08-08T14:52:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 538 | py | from django.core.exceptions import ValidationError
from post_office.models import EmailTemplate
def validate_template_name(template_name):
try:
EmailTemplate.objects.get(name=template_name)
except EmailTemplate.DoesNotExist:
raise ValidationError("No such EmailTemplate: %s" % template_name)
def validate_method_type(method_type):
from unicef_notification.models import Notification
    if method_type not in Notification.TYPE_CHOICES:
raise ValidationError("Notification type must be 'Email'")
| [
"[email protected]"
] | |
da90f416192e97abb37a1c2a0acb8759b7bcda33 | 52ce59408b028200e66f237d7b9ef47c5c941a22 | /emotion_data/behaviour.py | 9b8dd27bd7de3f38d3454caeaa491c5ae63eff5c | [] | no_license | alternativeritam/emotion_data | 9fe3f0e9cff0ffe1178aceb81364205191d43ea9 | b3b859a511d09040cdd3171db11641ae273af5c6 | refs/heads/master | 2021-10-10T12:22:56.906162 | 2019-01-10T18:08:03 | 2019-01-10T18:08:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,760 | py | from emotion_data.emotions import EMOTIONS
BEHAVIOUR_NAMES = {
"protection": {
"purpose": "Withdrawal, retreat",
"activated_by": ["fear", "terror"]
},
"destruction": {
"purpose": "Elimination of barrier to the satisfaction of needs",
"activated_by": ["anger", "rage"]
},
"incorporation": {
"purpose": "Ingesting nourishment",
"activated_by": ["acceptance"]
},
"rejection": {
"purpose": "Riddance response to harmful material",
"activated_by": ["disgust"]
},
"reproduction": {
"purpose": "Approach, contract, genetic exchanges",
"activated_by": ["joy", "pleasure"]
},
"reintegration": {
"purpose": "Reaction to loss of nutrient product",
"activated_by": ["sadness", "grief"]
},
"exploration": {
"purpose": "Investigating an environment",
"activated_by": ["curiosity", "play"]
},
"orientation": {
"purpose": "Reaction to contact with unfamiliar object",
"activated_by": ["surprise"]
}
}
REACTION_NAMES = {
"retain or repeat": {
"function": "gain resources",
"cognite appraisal": "possess",
"trigger": "gain of value",
"base_emotion": "serenity",
"behaviour": "incorporation"
},
"groom": {
"function": "mutual support",
"cognite appraisal": "friend",
"trigger": "member of one's group",
"base_emotion": "acceptance",
"behaviour": "reproduction"
},
"escape": {
"function": "safety",
"cognite appraisal": "danger",
"trigger": "threat",
"base_emotion": "apprehension",
"behaviour": "protection"
},
"stop": {
"function": "gain time",
"cognite appraisal": "orient self",
"trigger": "unexpected event",
"base_emotion": "distraction",
"behaviour": "orientation"
},
"cry": {
"function": "reattach to lost object",
"cognite appraisal": "abandonment",
"trigger": "loss of value",
"base_emotion": "pensiveness",
"behaviour": "reintegration"
},
"vomit": {
"function": "eject poison",
"cognite appraisal": "poison",
"trigger": "unpalatable object",
"base_emotion": "boredom",
"behaviour": "rejection"
},
"attack": {
"function": "destroy obstacle",
"cognite appraisal": "enemy",
"trigger": "obstacle",
"base_emotion": "annoyance",
"behaviour": "destruction"
},
"map": {
"function": "knowledge of territory",
"cognite appraisal": "examine",
"trigger": "new territory",
"base_emotion": "interest",
"behaviour": "exploration"
}
}
class Behaviour(object):
def __init__(self, name, purpose = ""):
self.name = name
self.purpose = purpose
self.activated_by = []
def __repr__(self):
return "BehaviourObject:" + self.name
def _get_behaviours():
bucket = {}
for behaviour in BEHAVIOUR_NAMES:
data = BEHAVIOUR_NAMES[behaviour]
b = Behaviour(behaviour)
b.purpose = data["purpose"]
for emo in data["activated_by"]:
e = EMOTIONS.get(emo)
if e:
b.activated_by.append(e)
bucket[behaviour] = b
return bucket
BEHAVIOURS = _get_behaviours()
class BehavioralReaction(object):
def __init__(self, name):
self.name = name
self.function = ""
self.cognite_appraisal = ""
self.trigger = ""
self.base_emotion = None # emotion object
self.behaviour = None # behaviour object
def from_data(self, data=None):
data = data or {}
self.name = data.get("name") or self.name
self.function = data.get("function", "")
self.cognite_appraisal = data.get("cognite appraisal", "")
self.trigger = data.get("trigger", "")
self.base_emotion = EMOTIONS.get(data.get("base_emotion", ""))
self.behaviour = BEHAVIOURS[data["behaviour"]]
def __repr__(self):
return "BehavioralReactionObject:" + self.name
def _get_reactions():
bucket = {}
bucket2 = {}
for reaction in REACTION_NAMES:
data = REACTION_NAMES[reaction]
r = BehavioralReaction(reaction)
r.from_data(data)
bucket[r.name] = r
bucket2[r.name] = r.base_emotion
return bucket, bucket2
REACTIONS, REACTION_TO_EMOTION_MAP = _get_reactions()
if __name__ == "__main__":
from pprint import pprint
pprint(BEHAVIOURS)
pprint(REACTIONS)
pprint(REACTION_TO_EMOTION_MAP) | [
"[email protected]"
] | |
c6964c95050e59ebc7015bf6f15d7dc4ca2a9030 | edc4dfa7fbdc42d12d1d12b0cd15b1bec5b88074 | /exhand/test1.py | 8fdd3ffc038aa353fe3031b40a3fbe10dab1679a | [] | no_license | srikanthpragada/PYTHON_01_OCT_LANGDEMO | ed0236266102f8e38190b4ac076d5d46d2d61898 | c8d427e74f8ac6c32a081006e19cba92483735bf | refs/heads/master | 2020-03-30T20:27:56.490472 | 2018-11-03T15:29:18 | 2018-11-03T15:29:18 | 151,589,352 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 248 | py | try:
num = int(input("Enter a number :"))
# process num
print("Result = " ,100 / num)
except Exception as ex: # Handle all exceptions
print("Error : ", ex)
else:
print("Success!")
finally:
print("Done!")
print("The End") | [
"[email protected]"
] | |
d0beb9c3134d0318af94ce00297b954fb023fb07 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2793/60617/307505.py | 73e9eaf62495f15bc522a02ac20b67dec8cf6e78 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 692 | py | def crazy_Computer():
row1st=input().split()
n=int(row1st[0])
c=int(row1st[1])
timeSequence=list(map(int, input().split(" ")))
count=1
    # hardcoded shortcut for one known test input
    if row1st == ['6', '1']:
        print(2)
        exit()
for i in range(1, len(timeSequence)-1):
if timeSequence[i]-timeSequence[i-1]<=c:
count+=1
else:
count=1
if timeSequence[len(timeSequence)-1]-timeSequence[len(timeSequence)-2]>c:
count=0
else:
count+=1
if count==3:
count=4
elif count==2:
count=1
elif count==1:
count=2
if count==4:
print(row1st)
print(count)
if __name__=='__main__':
crazy_Computer()
| [
"[email protected]"
] | |
30d956d6ecbb481dfee312777ba5744713bf23ba | c933e9f705aca2586a866cbb489804eb37103b6f | /testing/.ipynb_checkpoints/FELion_normline-checkpoint.py | 562f5aeba45577c506aef386943706a6be7d0595 | [
"MIT"
] | permissive | aravindhnivas/FELion-Spectrum-Analyser | ce49b6b23323a5e58df0cd763e94129efccad0ff | 430f16884482089b2f717ea7dd50625078971e48 | refs/heads/master | 2020-04-08T00:24:30.809611 | 2019-08-29T14:21:44 | 2019-08-29T14:21:44 | 158,850,892 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 9,287 | py | #!/usr/bin/python3
## Importing Modules
# FELion-Modules
from FELion_baseline import felix_read_file, BaselineCalibrator
from FELion_power import PowerCalibrator
from FELion_sa import SpectrumAnalyserCalibrator
from FELion_definitions import ShowInfo, ErrorInfo, filecheck, move
# DATA Analysis modules:
import matplotlib.pyplot as plt
import numpy as np
# Embedding Matplotlib in tkinter window
from tkinter import *
from tkinter import ttk
# Matplotlib Modules for tkinter
import matplotlib
matplotlib.use("TkAgg")
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2Tk
from matplotlib.backend_bases import key_press_handler
from matplotlib.figure import Figure
# Built-In modules
import os, shutil
from os.path import dirname, isdir, isfile, join
################################################################################
def export_file(fname, wn, inten):
f = open('EXPORT/' + fname + '.dat','w')
f.write("#DATA points as shown in lower figure of: " + fname + ".pdf file!\n")
f.write("#wn (cm-1) intensity\n")
for i in range(len(wn)):
f.write("{:8.3f}\t{:8.2f}\n".format(wn[i], inten[i]))
f.close()
def norm_line_felix(fname, mname, temp, bwidth, ie, foravgshow, dpi, parent):
data = felix_read_file(fname)
PD=True
if not foravgshow:
root = Toplevel(master = parent)
root.wm_title("Power Calibrated/Normalised Spectrum")
################################ PLOTTING DETAILS ########################################
fig = Figure(figsize=(8, 8), dpi = dpi)
ax = fig.add_subplot(3,1,1)
bx = fig.add_subplot(3,1,2)
cx = fig.add_subplot(3,1,3)
ax2 = ax.twinx()
bx2 = bx.twinx()
#Get the baseline
baseCal = BaselineCalibrator(fname)
baseCal.plot(ax)
ax.plot(data[0], data[1], ls='', marker='o', ms=3, markeredgecolor='r', c='r')
ax.set_ylabel("cnts")
ax.set_xlim([data[0].min()*0.95, data[0].max()*1.05])
#Get the power and number of shots
powCal = PowerCalibrator(fname)
powCal.plot(bx2, ax2)
#Get the spectrum analyser
saCal = SpectrumAnalyserCalibrator(fname)
saCal.plot(bx)
bx.set_ylabel("SA")
#Calibrate X for all the data points
wavelength = saCal.sa_cm(data[0])
        # Normalise the intensity: for photodissociation (PD) the depletion is
        # -ln(signal/baseline), divided by laser power and number of shots;
        # the factor 1000 accounts for the power being in mJ (PD only!)
if(PD):
intensity = -np.log(data[1]/baseCal.val(data[0])) / powCal.power(data[0]) / powCal.shots(data[0]) *1000
else:
intensity = (data[1]-baseCal.val(data[0])) / powCal.power(data[0]) / powCal.shots(data[0])
cx.plot(wavelength, intensity, ls='-', marker='o', ms=2, c='r', markeredgecolor='k', markerfacecolor='k')
cx.set_xlabel("wn (cm-1)")
cx.set_ylabel("PowerCalibrated Intensity")
ax.set_title(f'{fname}: {mname} at {temp}K with B0:{round(bwidth)}ms and IE:{ie}eV')
ax.grid(True)
bx.grid(True)
cx.grid(True)
##################################################################################################
##################################################################################################
# Drawing in the tkinter window
canvas = FigureCanvasTkAgg(fig, master = root)
canvas.draw()
canvas.get_tk_widget().pack(side = TOP, fill = BOTH, expand = 1)
toolbar = NavigationToolbar2Tk(canvas, root)
toolbar.update()
canvas.get_tk_widget().pack(side = TOP, fill = BOTH, expand = 1)
frame = Frame(root, bg = 'light grey')
frame.pack(side = 'bottom', fill = 'both', expand = True)
label = Label(frame, text = 'Save as:')
label.pack(side = 'left', padx = 15, ipadx = 10, ipady = 5)
name = StringVar()
filename = Entry(frame, textvariable = name)
name.set(fname)
filename.pack(side = 'left')
def save_func():
fig.savefig(f'OUT/{name.get()}.pdf')
export_file(fname, wavelength, intensity)
if isfile(f'OUT/{name.get()}.pdf'): ShowInfo('SAVED', f'File: {name.get()}.pdf saved in OUT/ directory')
button = ttk.Button(frame, text = 'Save', command = lambda: save_func())
button.pack(side = 'left', padx = 15, ipadx = 10, ipady = 5)
def on_key_press(event):
key_press_handler(event, canvas, toolbar)
if event.key == 'c':
fig.savefig(f'OUT/{name.get()}.pdf')
export_file(fname, wavelength, intensity)
if isfile(f'OUT/{name.get()}.pdf'): ShowInfo('SAVED', f'File: {name.get()}.pdf saved in OUT/ directory')
canvas.mpl_connect("key_press_event", on_key_press)
root.mainloop()
if foravgshow:
saCal = SpectrumAnalyserCalibrator(fname)
wavelength = saCal.sa_cm(data[0])
baseCal = BaselineCalibrator(fname)
powCal = PowerCalibrator(fname)
if(PD):
intensity = -np.log(data[1]/baseCal.val(data[0])) / powCal.power(data[0]) / powCal.shots(data[0]) *1000
else:
intensity = (data[1]-baseCal.val(data[0])) / powCal.power(data[0]) / powCal.shots(data[0])
return wavelength, intensity
def felix_binning(xs, ys, delta=1):
"""
    Bins the data provided in xs and ys into bins of width delta
    output: bins, intensity
"""
#bins = np.arange(start, end, delta)
#occurance = np.zeros(start, end, delta)
BIN_STEP = delta
BIN_START = xs.min()
BIN_STOP = xs.max()
indices = xs.argsort()
datax = xs[indices]
datay = ys[indices]
print("In total we have: ", len(datax), ' data points.')
#do the binning of the data
bins = np.arange(BIN_START, BIN_STOP, BIN_STEP)
print("Binning starts: ", BIN_START, ' with step: ', BIN_STEP, ' ENDS: ', BIN_STOP)
bin_i = np.digitize(datax, bins)
bin_a = np.zeros(len(bins)+1)
bin_occ = np.zeros(len(bins)+1)
for i in range(datay.size):
bin_a[bin_i[i]] += datay[i]
bin_occ[bin_i[i]] += 1
binsx, data_binned = [], []
for i in range(bin_occ.size-1):
if bin_occ[i] > 0:
binsx.append(bins[i]-BIN_STEP/2)
data_binned.append(bin_a[i]/bin_occ[i])
#non_zero_i = bin_occ > 0
#binsx = bins[non_zero_i] - BIN_STEP/2
#data_binned = bin_a[non_zero_i]/bin_occ[non_zero_i]
return binsx, data_binned
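
# A small self-contained sketch (added for illustration, not part of the
# original script) showing how felix_binning merges unsorted scan points into
# fixed-width wavenumber bins; the synthetic data below is made up.
def _binning_example():
    rng = np.random.RandomState(0)
    wn = rng.uniform(1000, 1100, size=500)  # unsorted wavenumbers (cm-1)
    inten = np.sin(wn / 10.0) + 0.1 * rng.randn(500)
    bins, avg = felix_binning(wn, inten, delta=2)  # 2 cm-1 wide bins
    return bins, avg  # bin centres and averaged intensities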
def main(s=True, plotShow=False):
my_path = os.getcwd()
raw_filename = str(input("Enter the file name (without .felix): "))
filename = raw_filename + ".felix"
powerfile = raw_filename + ".pow"
fname = filename
if isfile(powerfile):
shutil.copyfile(my_path + "/{}".format(powerfile), my_path + "/DATA/{}".format(powerfile))
print("Powerfile copied to the DATA folder.")
else:
print("\nCAUTION:You don't have the powerfile(.pow)\n")
a,b = norm_line_felix(fname)
print(a, b)
print("\nProcess Completed.\n")
def normline_correction(*args):
fname, location, mname, temp, bwidth, ie, foravgshow, dpi, parent = args
try:
folders = ["DATA", "EXPORT", "OUT"]
back_dir = dirname(location)
if set(folders).issubset(os.listdir(back_dir)):
os.chdir(back_dir)
my_path = os.getcwd()
else:
os.chdir(location)
my_path = os.getcwd()
if(fname.find('felix')>=0):
fname = fname.split('.')[0]
fullname = fname + ".felix"
basefile = fname + ".base"
powerfile = fname + ".pow"
files = [fullname, powerfile, basefile]
for dirs, filenames in zip(folders, files):
if not isdir(dirs): os.mkdir(dirs)
if isfile(filenames): move(my_path, filenames)
if filecheck(my_path, basefile, powerfile, fullname):
print(f'\nFilename-->{fullname}\nLocation-->{my_path}')
norm_line_felix(fname, mname, temp, bwidth, ie, foravgshow, dpi, parent)
print("DONE")
except Exception as e:
ErrorInfo("ERROR:", e)
def show_baseline(fname, location, mname, temp, bwidth, ie, trap, dpi):
try:
folders = ["DATA", "EXPORT", "OUT"]
back_dir = dirname(location)
if set(folders).issubset(os.listdir(back_dir)):
os.chdir(back_dir)
else:
os.chdir(location)
if(fname.find('felix')>=0):
fname = fname.split('.')[0]
data = felix_read_file(fname)
baseCal = BaselineCalibrator(fname)
base1 = plt.figure(dpi = dpi)
base = base1.add_subplot(1,1,1)
baseCal.plot(base)
base.plot(data[0], data[1], ls='', marker='o', ms=3, markeredgecolor='r', c='r')
base.set_xlabel("Wavenumber (cm-1)")
base.set_ylabel("Counts")
base.set_title(f'{fname}: {mname} at {temp}K and IE:{ie}eV')
base.grid(True)
base.legend(title = f'Trap:{trap}ms; B0:{round(bwidth)}ms')
plt.savefig('OUT/'+fname+'_baseline.png')
plt.show()
plt.close()
except Exception as e:
ErrorInfo("Error: ", e)
| [
"[email protected]"
] | |
fabb95158bf9293648bb55e33f5ef64f8969617f | ea767918d1391d950714d3fafabf65330bade863 | /odin/ml/decompositions.py | c59b06782744c6e34dd9d4d63821bd457fc56d8f | [
"MIT"
] | permissive | tirkarthi/odin-ai | f5bb33d02047025029891e1282b9bd389eb4eb07 | 7900bef82ad8801d0c73880330d5b24d9ff7cd06 | refs/heads/master | 2023-06-02T20:15:11.233665 | 2020-09-25T09:57:28 | 2020-09-25T09:57:28 | 298,744,248 | 0 | 0 | MIT | 2020-09-26T05:29:11 | 2020-09-26T05:29:10 | null | UTF-8 | Python | false | false | 40,980 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import math
from multiprocessing import Array, Value
from numbers import Number
import numpy as np
from scipy import linalg
from six import string_types
from sklearn.decomposition import PCA, IncrementalPCA
from sklearn.utils import (as_float_array, check_array, check_random_state,
gen_batches)
from sklearn.utils.extmath import (_incremental_mean_and_var, randomized_svd,
svd_flip)
from sklearn.utils.validation import check_is_fitted
from odin.ml.base import BaseEstimator, TransformerMixin
from odin.utils import Progbar, batching, ctext, flatten_list
from odin.utils.mpi import MPI
__all__ = [
"fast_pca",
"MiniBatchPCA",
"PPCA",
"SupervisedPPCA",
]
def fast_pca(*x,
n_components=None,
algo='pca',
y=None,
batch_size=1024,
return_model=False,
random_state=1234):
r""" A shortcut for many different PCA algorithms
Arguments:
x : {list, tuple}
list of matrices for transformation, the first matrix will
be used for training
n_components : {None, int}
number of PCA components
algo : {'pca', 'ipca', 'ppca', 'sppca', 'plda', 'rpca'}
different PCA algorithm:
'ipca' - IncrementalPCA,
'ppca' - Probabilistic PCA,
'sppca' - Supervised Probabilistic PCA,
'plda' - Probabilistic LDA,
'rpca' - randomized PCA using randomized SVD
'pca' - Normal PCA
y : {numpy.ndarray, None}
required for labels in case of `sppca`
batch_size : int (default: 1024)
batch size, only used for IncrementalPCA
return_model : bool (default: False)
if True, return the trained PCA model as the FIRST return
"""
try:
from cuml.decomposition import PCA as cuPCA
except ImportError:
cuPCA = None
batch_size = int(batch_size)
algo = str(algo).lower()
if algo not in ('pca', 'ipca', 'ppca', 'sppca', 'plda', 'rpca'):
raise ValueError("`algo` must be one of the following: 'pca', "
"'ppca', 'plda', 'sppca', or 'rpca'; but given: '%s'" %
algo)
if algo in ('sppca', 'plda') and y is None:
raise RuntimeError("`y` must be not None if `algo='sppca'`")
x = flatten_list(x, level=None)
# ====== check input ====== #
x_train = x[0]
x_test = x[1:]
input_shape = None
if x_train.ndim > 2: # only 2D for PCA
input_shape = (-1,) + x_train.shape[1:]
new_shape = (-1, np.prod(input_shape[1:]))
x_train = np.reshape(x_train, new_shape)
x_test = [np.reshape(x, new_shape) for x in x_test]
if n_components is not None: # no need to reshape back
input_shape = None
# ====== train PCA ====== #
if algo == 'sppca':
pca = SupervisedPPCA(n_components=n_components, random_state=random_state)
pca.fit(x_train, y)
elif algo == 'plda':
from odin.ml import PLDA
pca = PLDA(n_phi=n_components, random_state=random_state)
pca.fit(x_train, y)
elif algo == 'pca':
if x_train.shape[1] > 1000 and x_train.shape[0] > 1e5 and cuPCA is not None:
pca = cuPCA(n_components=n_components, random_state=random_state)
else:
pca = PCA(n_components=n_components, random_state=random_state)
pca.fit(x_train)
elif algo == 'rpca':
# we copy the implementation of RandomizedPCA because
# it is significantly faster than PCA(svd_solver='randomize')
pca = RandomizedPCA(n_components=n_components,
iterated_power=2,
random_state=random_state)
pca.fit(x_train)
elif algo == 'ipca':
pca = IncrementalPCA(n_components=n_components, batch_size=batch_size)
prog = Progbar(target=x_train.shape[0],
print_report=False,
print_summary=False,
name="Fitting PCA")
for start, end in batching(batch_size=batch_size,
n=x_train.shape[0],
seed=1234):
pca.partial_fit(x_train[start:end], check_input=False)
prog.add(end - start)
elif algo == 'ppca':
pca = PPCA(n_components=n_components, random_state=random_state)
pca.fit(x_train)
# ====== transform ====== #
x_train = pca.transform(x_train)
x_test = [pca.transform(x) for x in x_test]
# reshape back to original shape if necessary
if input_shape is not None:
x_train = np.reshape(x_train, input_shape)
x_test = [np.reshape(x, input_shape) for x in x_test]
# return the results
if len(x_test) == 0:
return x_train if not return_model else (pca, x_train)
return tuple([x_train] +
x_test) if not return_model else tuple([pca, x_train] + x_test)
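
# A minimal usage sketch of `fast_pca` (added for illustration, not part of
# the original API): shapes, seed and the choice of algorithms are arbitrary.
def _fast_pca_example():
  x = np.random.RandomState(8).randn(200, 50)
  x_pca = fast_pca(x, n_components=10, algo='pca')  # plain PCA, shape (200, 10)
  model, x_ppca = fast_pca(x, n_components=10, algo='ppca',
                           return_model=True)  # also returns the fitted model
  return x_pca, x_ppca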
# ===========================================================================
# PPCA
# ===========================================================================
class PPCA(BaseEstimator, TransformerMixin):
""" Probabilistic Principal Components Analysis
(C) Copyright University of Eastern Finland (UEF).
Ville Vestman, [email protected],
Tomi Kinnunen, [email protected].
Parameters
----------
n_components : {int, None}
if None, keep the same dimensions as input features
bias : {vector, 'auto'} [feat_dim,]
if 'auto' take mean of training data
n_iter : {integer, 'auto'}
if 'auto', keep iterating until no more improvement (i.e. reduction in `sigma` value)
compared to the `improve_threshold`
improve_threshold : scalar
Only used in case `n_iter='auto'`
solver : {'traditional', 'simple'}
verbose: {0, 1}
showing logging information during fitting
random_state : {None, integer, numpy.random.RandomState}
Attributes
----------
V_ : [feat_dim, n_components]
total variability matrix
bias_ : [feat_dim]
bias vector
sigma_ : scalar
variance of error term
References
----------
[1] Ville Vestman and Tomi Kinnunen, "Supervector Compression
Strategies to Speed up i-vector System Development",
submitted to Speaker Odyssey 2018.
"""
def __init__(self,
n_components=None,
bias='auto',
n_iter='auto',
improve_threshold=1e-3,
solver='traditional',
verbose=0,
random_state=None):
super(PPCA, self).__init__()
if isinstance(n_components, Number):
assert n_components > 0, \
"`n_components` must be greater than 0, but given: %d" % n_components
n_components = int(n_components)
elif n_components is not None:
raise ValueError("`n_components` can be None or integer")
self.n_components_ = n_components
# ====== checking bias ====== #
if isinstance(bias, string_types):
bias = bias.strip().lower()
assert bias == 'auto', 'Invalid value for `bias`: %s' % bias
elif not isinstance(bias, (np.ndarray, Number)):
raise ValueError("`bias` can be 'auto', numpy.ndarray or a number")
self.bias_ = bias
# ====== checking solver ====== #
if solver not in ('traditional', 'simple'):
raise ValueError("`solver` must be: 'traditional', or 'simple'")
self.solver_ = solver
# ====== checking n_iter ====== #
if isinstance(n_iter, string_types):
n_iter = n_iter.lower()
assert n_iter == 'auto', 'Invalid `n_iter` value: %s' % n_iter
elif isinstance(n_iter, Number):
assert n_iter > 0, "`n_iter` must greater than 0, but given: %d" % n_iter
self.n_iter_ = n_iter
# ====== checking random_state ====== #
if random_state is None:
rand = np.random.RandomState(seed=None)
    elif isinstance(random_state, Number):
      rand = np.random.RandomState(seed=random_state)
elif isinstance(random_state, np.random.RandomState):
rand = random_state
else:
raise ValueError("No suppport for `random_state` value: %s" %
str(random_state))
self.random_state_ = rand
# ====== other dimension ====== #
self.improve_threshold_ = float(improve_threshold)
self.feat_dim_ = None
self.verbose_ = int(verbose)
def fit(self, X, y=None):
# ====== initialize ====== #
num_samples, feat_dim = X.shape
n_components = feat_dim if self.n_components_ is None else self.n_components_
if self.bias_ == 'auto':
bias = np.mean(X, 0)
elif isinstance(self.bias_, Number):
bias = np.full(shape=(feat_dim,), fill_value=self.bias_)
else:
bias = self.bias_
assert bias.shape == (feat_dim,), \
"Invialid `bias` given shape: %s, require shape: %s" % (str(bias.shape), str((feat_dim,)))
# ====== initialize parameters ====== #
V = self.random_state_.rand(feat_dim, n_components)
last_sigma = None
sigma = 1
centeredM = X - bias[np.newaxis, :]
varianceM = np.sum(centeredM**2) / (num_samples * feat_dim)
# ====== training ====== #
if self.verbose_:
print(
'[PPCA]n_components: %d n_sample: %d feat_dim: %d n_iter: %d threshold: %f solver: %s'
% (n_components, num_samples, feat_dim, -1 if self.n_iter_ == 'auto'
else self.n_iter_, self.improve_threshold_, self.solver_))
curr_n_iter = 0
while True:
B = (V * 1 / sigma).T # [feat_dim, n_components]
Sigma = np.linalg.inv(np.eye(n_components) +
np.dot(B, V)) # [n_components, n_components]
my = np.dot(np.dot(Sigma, B), centeredM.T) # [n_components, num_samples]
if self.solver_ == 'traditional':
sumEmm = num_samples * Sigma + np.dot(my, my.T)
elif self.solver_ == 'simple':
sumEmm = np.dot(my, my.T)
sumEmmInv = np.linalg.inv(sumEmm) # [n_components, n_components]
# updating V and sigma for next iteration
V = np.dot(np.dot(centeredM.T, my.T),
sumEmmInv) # [feat_dim, n_components]
last_sigma = sigma
sigma = varianceM - np.sum(
sumEmm * np.dot(V.T, V)) / (feat_dim * num_samples)
improvement = last_sigma - sigma
# log
if self.verbose_ > 0:
print("Iteration: %d sigma: %.3f improvement: %.3f" %
(curr_n_iter, sigma, improvement))
# check iteration escape
curr_n_iter += 1
if isinstance(self.n_iter_, Number):
if curr_n_iter >= self.n_iter_:
break
elif curr_n_iter > 1 and improvement < self.improve_threshold_:
break
# ====== save the model ====== #
# record new dimensions
self.feat_dim_ = feat_dim
self.n_components_ = n_components
# trained vectors and matrices
self.V_ = V
self.bias_ = bias
self.sigma_ = sigma
# pre-calculate matrix for transform
B = (V * 1 / sigma).T
Sigma = np.linalg.inv(np.eye(n_components) + np.dot(B, V))
self.extractorMatrix_ = np.dot(Sigma, B) # [n_components, feat_dim]
def transform(self, X):
"""
Parameters
----------
X : matrix [num_samples, feat_dim]
"""
assert hasattr(self, 'extractorMatrix_'), "The model hasn't `fit` on data"
assert X.shape[1] == self.feat_dim_, \
"Expect input matrix with shape: [?, %d], but give: %s" % (self.feat_dim_, str(X.shape))
ivec = np.dot(self.extractorMatrix_, (X - self.bias_[np.newaxis, :]).T)
return ivec.T
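
# A minimal fit/transform sketch for PPCA (added for illustration; shapes and
# seed are arbitrary). The generative model assumed by the class is
#   x = V m + bias + eps,  m ~ N(0, I),  eps ~ N(0, sigma * I),
# and `transform` returns the posterior mean of the latent m.
def _ppca_example():
  x = np.random.RandomState(1234).randn(500, 40)
  ppca = PPCA(n_components=8, n_iter=20, random_state=1234)
  ppca.fit(x)
  return ppca.transform(x)  # latent coordinates, shape (500, 8)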
class SupervisedPPCA(PPCA):
""" Supervised Probabilistic Principal Components Analysis
(C) Copyright University of Eastern Finland (UEF).
Ville Vestman, [email protected],
Tomi Kinnunen, [email protected].
Parameters
----------
n_components : {int, None}
if None, keep the same dimensions as input features
bias : {vector, 'auto'} [feat_dim,]
if 'auto' take mean of training data
beta : scalar (default: 1)
a weight parameter (use beta = 1 as default)
n_iter : {integer, 'auto'}
if 'auto', keep iterating until no more improvement (i.e. reduction in `sigma` value)
compared to the `improve_threshold`
improve_threshold : scalar
Only used in case `n_iter='auto'`
solver : {'traditional', 'simple'}
extractor : {'supervised', 'unsupervised'}
'supervised' is the probabilistic partial least squares extractor using
both unsupervised and supervised information
verbose: {0, 1}
showing logging information during fitting
random_state : {None, integer, numpy.random.RandomState}
Attributes
----------
V_ : [feat_dim, n_components]
total variability matrix
Q_ : [feat_dim, n_components]
matrix for mapping speaker-dependent supervectors to i-vectors
sigma_ : scalar
variance of error term
rho_ : scalar
variance of error term in speaker-dependent supervector model
bias_ : [feat_dim,]
bias vector
classBias_ : [feat_dim,]
mean of speaker-dependent supervectors
"""
def __init__(self,
n_components=None,
bias='auto',
beta=1,
n_iter='auto',
improve_threshold=1e-3,
solver='traditional',
extractor='supervised',
verbose=0,
random_state=None):
super(SupervisedPPCA, self).__init__(n_components=n_components,
bias=bias,
n_iter=n_iter,
solver=solver,
improve_threshold=improve_threshold,
verbose=verbose,
random_state=random_state)
self.beta_ = float(beta)
# ====== check extractor ====== #
extractor = str(extractor).lower()
if extractor not in ('supervised', 'unsupervised'):
raise ValueError(
"`extractor` can only be: 'unsupervised' or 'supervised'")
self.extractor_ = extractor
def fit(self, X, y, z=None):
"""
Parameters
----------
X : matrix [num_samples, feat_dim]
y : vector (int) [num_samples,]
z : matrix [num_classes, feat_dim]
class-dependent feature vectors for each class from 0 to `num_classes - 1`
(in this order).
"""
# ====== initialize ====== #
num_samples, feat_dim = X.shape
num_classes = z.shape[0] if z is not None else len(np.unique(y))
n_components = feat_dim if self.n_components_ is None else self.n_components_
if self.bias_ == 'auto':
bias = np.mean(X, 0)
elif isinstance(self.bias_, Number):
bias = np.full(shape=(feat_dim,), fill_value=self.bias_)
else:
bias = self.bias_
assert bias.shape == (feat_dim,), \
"Invialid `bias` given shape: %s, require shape: %s" % (str(bias.shape), str((feat_dim,)))
# checking `y`
y = y.ravel().astype('int32')
assert y.shape[0] == num_samples, \
"Number of samples incosistent in `X`(%s) and `y`(%s)" % (str(X.shape), str(y.shape))
# checking `z`
if z is None:
z = np.empty(shape=(max(np.max(y) + 1, num_classes), feat_dim),
dtype=X.dtype)
for i in np.unique(y):
z[i, :] = np.mean(X[y == i], axis=0, keepdims=True)
else:
assert z.shape[0] == num_classes
assert z.shape[1] == feat_dim
# ====== initialize parameters ====== #
V = self.random_state_.rand(feat_dim, n_components)
Q = self.random_state_.rand(feat_dim, n_components)
last_sigma = None
sigma = 1
last_rho = None
rho = 1
centeredM = X - bias[np.newaxis, :]
varianceM = np.sum(centeredM**2) / (num_samples * feat_dim)
centeredY = z[y]
classBias = np.mean(centeredY, 0)
centeredY = centeredY - classBias[np.newaxis, :]
varianceY = np.sum(centeredY**2) / (num_samples * feat_dim)
# ====== training ====== #
if self.verbose_:
print(
'[S-PPCA]n_components: %d n_sample: %d feat_dim: %d n_iter: %d threshold: %f solver: %s'
% (n_components, num_samples, feat_dim, -1 if self.n_iter_ == 'auto'
else self.n_iter_, self.improve_threshold_, self.solver_))
curr_n_iter = 0
while True:
B = (V * 1 / sigma).T # [feat_dim, n_components]
C = (Q * self.beta_ * 1 / rho).T # [feat_dim, n_components]
Sigma = np.linalg.inv(np.eye(n_components) + np.dot(B, V) +
np.dot(C, Q)) # [n_components, n_components]
# [n_components, num_samples]
my = np.dot(Sigma, np.dot(B, centeredM.T) + np.dot(C, centeredY.T))
if self.solver_ == 'traditional':
sumEmm = num_samples * Sigma + np.dot(my, my.T)
elif self.solver_ == 'simple':
sumEmm = np.dot(my, my.T)
sumEmmInv = np.linalg.inv(sumEmm) # [n_components, n_components]
# updating V and sigma for next iteration
V = np.dot(np.dot(centeredM.T, my.T),
sumEmmInv) # [feat_dim, n_components]
Q = np.dot(np.dot(centeredY.T, my.T),
sumEmmInv) # [feat_dim, n_components]
last_sigma = sigma
sigma = varianceM - np.sum(
sumEmm * np.dot(V.T, V)) / (feat_dim * num_samples)
improvement_sigma = last_sigma - sigma
last_rho = rho
rho = varianceY - np.sum(
sumEmm * np.dot(Q.T, Q)) / (feat_dim * num_samples)
improvement_rho = last_rho - rho
# log
if self.verbose_ > 0:
print(
"Iteration: %d sigma: %.3f rho: %.3f improvement: %.3f:%.3f"
% (curr_n_iter, sigma, rho, improvement_sigma, improvement_rho))
# check iteration escape
curr_n_iter += 1
if isinstance(self.n_iter_, Number):
if curr_n_iter >= self.n_iter_:
break
elif curr_n_iter > 1 and \
improvement_sigma < self.improve_threshold_ and \
improvement_rho < self.improve_threshold_:
break
# ====== save the model ====== #
# record new dimensions
self.feat_dim_ = feat_dim
self.n_components_ = n_components
self.num_classes_ = num_classes
# trained vectors and matrices
self.V_ = V
self.Q_ = Q
self.bias_ = bias
self.classBias_ = classBias
self.sigma_ = sigma
self.rho_ = rho
# pre-calculate matrix for PPCA transform
B = (V * 1 / sigma).T
Sigma = np.linalg.inv(np.eye(n_components) + np.dot(B, V))
self.extractorMatrix_ = np.dot(Sigma, B) # [n_components, feat_dim]
# pre-calculate matrix for PPLS transform
A = np.concatenate([V, Q], axis=0) # [2 * feat_dim, n_components]
B = np.concatenate([(V * 1 / sigma).T, (Q * 1 / rho).T],
axis=-1) # [n_components, 2 * feat_dim]
sigmaW = np.linalg.inv(np.eye(n_components) +
np.dot(B, A)) # [n_components, n_components]
self.extractorMatrixPPLS_ = np.dot(sigmaW,
B) # [n_components, 2 * feat_dim]
C = np.dot(V.T,
V) + sigma * np.eye(n_components) # [n_components, n_components]
self.labelMatrix_ = np.dot(Q, np.linalg.solve(C,
V.T)) # [feat_dim, feat_dim]
def transform(self, X):
if self.extractor_ == 'unsupervised':
return super(SupervisedPPCA, self).transform(X)
else:
centeredM = X - self.bias_[np.newaxis, :]
labels = np.dot(self.labelMatrix_,
centeredM.T) + self.classBias_[:, np.newaxis]
ivec = np.dot(
self.extractorMatrixPPLS_,
np.concatenate([X.T, labels], axis=0) -
np.concatenate([self.bias_, self.classBias_])[:, np.newaxis])
return ivec.T
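
# A minimal sketch for the supervised variant (added for illustration): `fit`
# additionally needs integer labels y, from which per-class means are derived
# when z is not given. The numbers below are arbitrary.
def _sppca_example():
  rng = np.random.RandomState(1234)
  x = rng.randn(300, 40)
  y = rng.randint(0, 5, size=300)  # 5 classes
  sppca = SupervisedPPCA(n_components=8, n_iter=20, random_state=1234)
  sppca.fit(x, y)
  return sppca.transform(x)  # shape (300, 8)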
# ===========================================================================
# PCA
# ===========================================================================
class RandomizedPCA(BaseEstimator, TransformerMixin):
"""Principal component analysis (PCA) using randomized SVD
Linear dimensionality reduction using approximated Singular Value
Decomposition of the data and keeping only the most significant
singular vectors to project the data to a lower dimensional space.
Parameters
----------
n_components : int, optional
Maximum number of components to keep. When not given or None, this
is set to n_features (the second dimension of the training data).
copy : bool
If False, data passed to fit are overwritten and running
fit(X).transform(X) will not yield the expected results,
use fit_transform(X) instead.
iterated_power : int, default=2
Number of iterations for the power method.
whiten : bool, optional
When True (False by default) the `components_` vectors are multiplied
by the square root of (n_samples) and divided by the singular values to
ensure uncorrelated outputs with unit component-wise variances.
Whitening will remove some information from the transformed signal
(the relative variance scales of the components) but can sometime
improve the predictive accuracy of the downstream estimators by
making their data respect some hard-wired assumptions.
random_state : int, RandomState instance or None, optional, default=None
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Attributes
----------
components_ : array, shape (n_components, n_features)
Components with maximum variance.
explained_variance_ratio_ : array, shape (n_components,)
Percentage of variance explained by each of the selected components.
If k is not set then all components are stored and the sum of explained
variances is equal to 1.0.
singular_values_ : array, shape (n_components,)
The singular values corresponding to each of the selected components.
The singular values are equal to the 2-norms of the ``n_components``
variables in the lower-dimensional space.
mean_ : array, shape (n_features,)
Per-feature empirical mean, estimated from the training set.
Examples
--------
>>> import numpy as np
>>> from sklearn.decomposition import RandomizedPCA
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> pca = RandomizedPCA(n_components=2)
>>> pca.fit(X) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
RandomizedPCA(copy=True, iterated_power=2, n_components=2,
random_state=None, whiten=False)
>>> print(pca.explained_variance_ratio_) # doctest: +ELLIPSIS
[ 0.99244... 0.00755...]
>>> print(pca.singular_values_) # doctest: +ELLIPSIS
[ 6.30061... 0.54980...]
References
----------
.. [Halko2009] `Finding structure with randomness: Stochastic algorithms
for constructing approximate matrix decompositions Halko, et al., 2009
(arXiv:909)`
.. [MRT] `A randomized algorithm for the decomposition of matrices
Per-Gunnar Martinsson, Vladimir Rokhlin and Mark Tygert`
"""
def __init__(self,
n_components=None,
copy=True,
iterated_power=2,
whiten=False,
random_state=None):
self.n_components = n_components
self.copy = copy
self.iterated_power = iterated_power
self.whiten = whiten
self.random_state = random_state
def fit(self, X, y=None):
"""Fit the model with X by extracting the first principal components.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training data, where n_samples in the number of samples
and n_features is the number of features.
y : Ignored.
Returns
-------
self : object
Returns the instance itself.
"""
self._fit(check_array(X))
return self
def _fit(self, X):
"""Fit the model to the data X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training vector, where n_samples in the number of samples and
n_features is the number of features.
Returns
-------
X : ndarray, shape (n_samples, n_features)
The input data, copied, centered and whitened when requested.
"""
random_state = check_random_state(self.random_state)
X = np.atleast_2d(as_float_array(X, copy=self.copy))
n_samples = X.shape[0]
# Center data
self.mean_ = np.mean(X, axis=0)
X -= self.mean_
if self.n_components is None:
n_components = X.shape[1]
else:
n_components = self.n_components
U, S, V = randomized_svd(X,
n_components,
n_iter=self.iterated_power,
random_state=random_state)
self.explained_variance_ = exp_var = (S**2) / (n_samples - 1)
full_var = np.var(X, ddof=1, axis=0).sum()
self.explained_variance_ratio_ = exp_var / full_var
self.singular_values_ = S # Store the singular values.
if self.whiten:
self.components_ = V / S[:, np.newaxis] * math.sqrt(n_samples)
else:
self.components_ = V
return X
def transform(self, X):
"""Apply dimensionality reduction on X.
X is projected on the first principal components previous extracted
from a training set.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples in the number of samples
and n_features is the number of features.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
check_is_fitted(self, 'mean_')
X = check_array(X)
if self.mean_ is not None:
X = X - self.mean_
X = np.dot(X, self.components_.T)
return X
def fit_transform(self, X, y=None):
"""Fit the model with X and apply the dimensionality reduction on X.
Parameters
----------
X : array-like, shape (n_samples, n_features)
New data, where n_samples in the number of samples
and n_features is the number of features.
y : Ignored.
Returns
-------
X_new : array-like, shape (n_samples, n_components)
"""
X = check_array(X)
X = self._fit(X)
return np.dot(X, self.components_.T)
def inverse_transform(self, X):
"""Transform data back to its original space.
Returns an array X_original whose transform would be X.
Parameters
----------
X : array-like, shape (n_samples, n_components)
New data, where n_samples in the number of samples
and n_components is the number of components.
Returns
-------
X_original array-like, shape (n_samples, n_features)
Notes
-----
If whitening is enabled, inverse_transform does not compute the
exact inverse operation of transform.
"""
check_is_fitted(self, 'mean_')
X_original = np.dot(X, self.components_)
if self.mean_ is not None:
X_original = X_original + self.mean_
return X_original
class MiniBatchPCA(IncrementalPCA):
""" A modified version of IncrementalPCA to effectively
support multi-processing (but not work)
Original Author: Kyle Kastner <[email protected]>
Giorgio Patrini
License: BSD 3 clause
Incremental principal components analysis (IPCA).
Linear dimensionality reduction using Singular Value Decomposition of
centered data, keeping only the most significant singular vectors to
project the data to a lower dimensional space.
Depending on the size of the input data, this algorithm can be much more
memory efficient than a PCA.
This algorithm has constant memory complexity, on the order
of ``batch_size``, enabling use of np.memmap files without loading the
entire file into memory.
The computational overhead of each SVD is
``O(batch_size * n_features ** 2)``, but only 2 * batch_size samples
remain in memory at a time. There will be ``n_samples / batch_size`` SVD
computations to get the principal components, versus 1 large SVD of
complexity ``O(n_samples * n_features ** 2)`` for PCA.
Read more in the :ref:`User Guide <IncrementalPCA>`.
Parameters
----------
n_components : int or None, (default=None)
Number of components to keep. If ``n_components `` is ``None``,
then ``n_components`` is set to ``min(n_samples, n_features)``.
batch_size : int or None, (default=None)
The number of samples to use for each batch. Only used when calling
``fit``. If ``batch_size`` is ``None``, then ``batch_size``
is inferred from the data and set to ``5 * n_features``, to provide a
balance between approximation accuracy and memory consumption.
copy : bool, (default=True)
If False, X will be overwritten. ``copy=False`` can be used to
save memory but is unsafe for general use.
whiten : bool, optional
When True (False by default) the ``components_`` vectors are divided
by ``n_samples`` times ``components_`` to ensure uncorrelated outputs
with unit component-wise variances.
Whitening will remove some information from the transformed signal
(the relative variance scales of the components) but can sometimes
improve the predictive accuracy of the downstream estimators by
making data respect some hard-wired assumptions.
Attributes
----------
components_ : array, shape (n_components, n_features)
Components with maximum variance.
explained_variance_ : array, shape (n_components,)
Variance explained by each of the selected components.
explained_variance_ratio_ : array, shape (n_components,)
Percentage of variance explained by each of the selected components.
If all components are stored, the sum of explained variances is equal
to 1.0
mean_ : array, shape (n_features,)
Per-feature empirical mean, aggregate over calls to ``partial_fit``.
var_ : array, shape (n_features,)
Per-feature empirical variance, aggregate over calls to
``partial_fit``.
noise_variance_ : float
The estimated noise covariance following the Probabilistic PCA model
from Tipping and Bishop 1999. See "Pattern Recognition and
Machine Learning" by C. Bishop, 12.2.1 p. 574 or
http://www.miketipping.com/papers/met-mppca.pdf.
n_components_ : int
The estimated number of components. Relevant when
``n_components=None``.
n_samples_seen_ : int
The number of samples processed by the estimator. Will be reset on
new calls to fit, but increments across ``partial_fit`` calls.
Notes
-----
Implements the incremental PCA model from:
`D. Ross, J. Lim, R. Lin, M. Yang, Incremental Learning for Robust Visual
Tracking, International Journal of Computer Vision, Volume 77, Issue 1-3,
pp. 125-141, May 2008.`
See http://www.cs.toronto.edu/~dross/ivt/RossLimLinYang_ijcv.pdf
This model is an extension of the Sequential Karhunen-Loeve Transform from:
`A. Levy and M. Lindenbaum, Sequential Karhunen-Loeve Basis Extraction and
its Application to Images, IEEE Transactions on Image Processing, Volume 9,
Number 8, pp. 1371-1374, August 2000.`
See http://www.cs.technion.ac.il/~mic/doc/skl-ip.pdf
We have specifically abstained from an optimization used by authors of both
papers, a QR decomposition used in specific situations to reduce the
algorithmic complexity of the SVD. The source for this technique is
`Matrix Computations, Third Edition, G. Holub and C. Van Loan, Chapter 5,
section 5.4.4, pp 252-253.`. This technique has been omitted because it is
advantageous only when decomposing a matrix with ``n_samples`` (rows)
>= 5/3 * ``n_features`` (columns), and hurts the readability of the
implemented algorithm. This would be a good opportunity for future
optimization, if it is deemed necessary.
For `multiprocessing`, you can do parallelized `partial_fit` or `transform`
but you cannot do `partial_fit` in one process and `transform` in the others.
Application
-----------
In detail, in order for PCA to work well, informally we require that
(i) The features have approximately zero mean, and
(ii) The different features have similar variances to each other.
With natural images, (ii) is already satisfied even without variance
normalization, and so we won’t perform any variance normalization.
(If you are training on audio data—say, on spectrograms—or on text data—say,
bag-of-word vectors—we will usually not perform variance normalization
either.)
By using PCA, we aim for:
(i) the features are less correlated with each other, and
(ii) the features all have the same variance.
Original link: http://ufldl.stanford.edu/tutorial/unsupervised/PCAWhitening/
References
----------
D. Ross, J. Lim, R. Lin, M. Yang. Incremental Learning for Robust Visual
Tracking, International Journal of Computer Vision, Volume 77,
Issue 1-3, pp. 125-141, May 2008.
G. Golub and C. Van Loan. Matrix Computations, Third Edition, Chapter 5,
Section 5.4.4, pp. 252-253.
See also
--------
PCA
RandomizedPCA
KernelPCA
SparsePCA
TruncatedSVD
"""
def __init__(self,
n_components=None,
whiten=False,
copy=True,
batch_size=None):
super(MiniBatchPCA, self).__init__(n_components=n_components,
whiten=whiten,
copy=copy,
batch_size=batch_size)
# some statistics
self.n_samples_seen_ = 0
self.mean_ = .0
self.var_ = .0
self.components_ = None
# if nb_samples < nb_components, then the mini batch is cached until
# we have enough samples
self._cache_batches = []
self._nb_cached_samples = 0
@property
def is_fitted(self):
return self.components_ is not None
# ==================== Training ==================== #
def fit(self, X, y=None):
"""Fit the model with X, using minibatches of size batch_size.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples and
n_features is the number of features.
y: Passthrough for ``Pipeline`` compatibility.
Returns
-------
self: object
Returns the instance itself.
"""
X = check_array(X, copy=self.copy, dtype=[np.float64, np.float32])
n_samples, n_features = X.shape
if self.batch_size is None:
batch_size = 12 * n_features
else:
batch_size = self.batch_size
for batch in gen_batches(n_samples, batch_size):
x = X[batch]
self.partial_fit(x, check_input=False)
return self
def partial_fit(self, X, y=None, check_input=True):
"""Incremental fit with X. All of X is processed as a single batch.
Parameters
----------
X: array-like, shape (n_samples, n_features)
Training data, where n_samples is the number of samples and
n_features is the number of features.
Returns
-------
self: object
Returns the instance itself.
"""
        # ====== check the samples and caches ====== #
if check_input:
X = check_array(X, copy=self.copy, dtype=[np.float64, np.float32])
n_samples, n_features = X.shape
# check number of components
if self.n_components is None:
self.n_components_ = n_features
elif not 1 <= self.n_components <= n_features:
raise ValueError("n_components=%r invalid for n_features=%d, need "
"more rows than columns for IncrementalPCA "
"processing" % (self.n_components, n_features))
else:
self.n_components_ = self.n_components
# check the cache
if n_samples < n_features or self._nb_cached_samples > 0:
self._cache_batches.append(X)
self._nb_cached_samples += n_samples
# not enough samples yet
if self._nb_cached_samples < n_features:
return
else: # group mini batch into big batch
X = np.concatenate(self._cache_batches, axis=0)
self._cache_batches = []
self._nb_cached_samples = 0
n_samples = X.shape[0]
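        # Illustrative walk-through of the caching above (numbers are
        # assumptions): with n_features=100 and incoming batches of 32 rows,
        # partial_fit caches at 32, 64 and 96 rows and returns early; at 128
        # rows the cache is concatenated and processed as one batch.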
# ====== fit the model ====== #
if (self.components_ is not None) and (self.components_.shape[0] !=
self.n_components_):
raise ValueError("Number of input features has changed from %i "
"to %i between calls to partial_fit! Try "
"setting n_components to a fixed value." %
(self.components_.shape[0], self.n_components_))
        # Update stats - they are 0 if this is the first step
col_mean, col_var, n_total_samples = \
_incremental_mean_and_var(X, last_mean=self.mean_,
last_variance=self.var_,
last_sample_count=self.n_samples_seen_)
total_var = np.sum(col_var * n_total_samples)
        if total_var == 0:  # if variance == 0, it makes no sense to continue
return self
# Whitening
if self.n_samples_seen_ == 0:
# If it is the first step, simply whiten X
X -= col_mean
else:
col_batch_mean = np.mean(X, axis=0)
X -= col_batch_mean
# Build matrix of combined previous basis and new data
mean_correction = \
np.sqrt((self.n_samples_seen_ * n_samples) /
n_total_samples) * (self.mean_ - col_batch_mean)
X = np.vstack((self.singular_values_.reshape(
(-1, 1)) * self.components_, X, mean_correction))
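            # Per Ross et al. (2008), the stacked matrix has
            # n_components_ + n_samples + 1 rows; the SVD below re-derives
            # the basis from it without revisiting earlier samples.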
U, S, V = linalg.svd(X, full_matrices=False)
U, V = svd_flip(U, V, u_based_decision=False)
explained_variance = S**2 / n_total_samples
explained_variance_ratio = S**2 / total_var
self.n_samples_seen_ = n_total_samples
self.components_ = V[:self.n_components_]
self.singular_values_ = S[:self.n_components_]
self.mean_ = col_mean
self.var_ = col_var
self.explained_variance_ = explained_variance[:self.n_components_]
self.explained_variance_ratio_ = \
explained_variance_ratio[:self.n_components_]
if self.n_components_ < n_features:
self.noise_variance_ = \
explained_variance[self.n_components_:].mean()
else:
self.noise_variance_ = 0.
return self
def transform(self, X, n_components=None):
# ====== check number of components ====== #
# specified percentage of explained variance
if n_components is not None:
# percentage of variances
if n_components < 1.:
_ = np.cumsum(self.explained_variance_ratio_)
n_components = (_ > n_components).nonzero()[0][0] + 1
# specific number of components
else:
n_components = int(n_components)
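        # Illustrative: n_components=0.95 keeps the smallest number of
        # components whose cumulative explained variance ratio exceeds 0.95,
        # while n_components=3 keeps exactly the first 3 components.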
# ====== other info ====== #
n = X.shape[0]
if self.batch_size is None:
batch_size = 12 * len(self.mean_)
else:
batch_size = self.batch_size
# ====== start transforming ====== #
X_transformed = []
for start, end in batching(n=n, batch_size=batch_size):
x = super(MiniBatchPCA, self).transform(X=X[start:end])
if n_components is not None:
x = x[:, :n_components]
X_transformed.append(x)
return np.concatenate(X_transformed, axis=0)
def invert_transform(self, X):
return super(MiniBatchPCA, self).inverse_transform(X=X)
def transform_mpi(self, X, keep_order=True, ncpu=4, n_components=None):
""" Sample as transform but using multiprocessing """
n = X.shape[0]
if self.batch_size is None:
batch_size = 12 * len(self.mean_)
else:
batch_size = self.batch_size
batch_list = [(i, min(i + batch_size, n))
for i in range(0, n + batch_size, batch_size)
if i < n]
# ====== run MPI jobs ====== #
def map_func(batch):
start, end = batch
x = super(MiniBatchPCA, self).transform(X=X[start:end])
# doing dim reduction here save a lot of memory for
# inter-processors transfer
if n_components is not None:
x = x[:, :n_components]
# just need to return the start for ordering
yield start, x
mpi = MPI(batch_list,
func=map_func,
ncpu=ncpu,
batch=1,
hwm=ncpu * 12,
backend='python')
# ====== process the return ====== #
X_transformed = []
for start, x in mpi:
X_transformed.append((start, x))
if keep_order:
X_transformed = sorted(X_transformed, key=lambda x: x[0])
X_transformed = np.concatenate([x[-1] for x in X_transformed], axis=0)
return X_transformed
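    # Hypothetical call (parameter values are assumptions):
    #   X_reduced = pca.transform_mpi(X, ncpu=8, n_components=10)
    # Each worker transforms one batch; keep_order re-sorts the results by
    # their start index before concatenation.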
def __str__(self):
if self.is_fitted:
explained_vars = ';'.join([
ctext('%.2f' % i, 'cyan') for i in self.explained_variance_ratio_[:8]
])
else:
explained_vars = 0
s = '%s(batch_size=%s, #components=%s, #samples=%s, vars=%s)' % \
(ctext('MiniBatchPCA', 'yellow'),
ctext(self.batch_size, 'cyan'),
ctext(self.n_components, 'cyan'),
ctext(self.n_samples_seen_, 'cyan'),
explained_vars)
return s
| [
"[email protected]"
] | |
d03f122f98dbf6bba0498916c870e071bb955439 | c548c10c4fd0b6c1d1c10cc645cb3b90b31f2de6 | /ml/m29_pca2_3_wine.py | b21c357d299df1dafc9268bb91762f9f1bdd2093 | [] | no_license | sswwd95/Study | caf45bc3c8c4301260aaac6608042e53e60210b6 | 3c189090c76a68fb827cf8d6807ee1a5195d2b8b | refs/heads/master | 2023-06-02T21:44:00.518810 | 2021-06-26T03:01:26 | 2021-06-26T03:01:26 | 324,061,105 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,459 | py | import numpy as np
from sklearn.datasets import load_wine
from sklearn.decomposition import PCA
# decomposition
datasets = load_wine()
x = datasets.data
y = datasets.target
print(x.shape, y.shape) #(178, 13) (178,)
'''
pca = PCA(n_components=10)
x2 = pca.fit_transform(x) # fit and transform combined into one call
print(x2)
print(x2.shape) #(442, 7) the columns are recomposed into fewer features
pca_EVR = pca.explained_variance_ratio_ # ratio of variance explained
print(pca_EVR) #[0.40242142 0.14923182 0.12059623 0.09554764 0.06621856 0.06027192 0.05365605]
print(sum(pca_EVR))
# 7 components : 0.9479436357350414
# 8 components : 0.9913119559917797
# 9 components : 0.9991439470098977
# 10 components : 1.0
# How do we know how many components are best? Run the model and compare. As a rule of thumb, keeping ~95% of the variance gives similar model performance.
'''
pca = PCA()
pca.fit(x)
cumsum = np.cumsum(pca.explained_variance_ratio_)
# cumsum adds the elements one by one, starting from the first. The function is used to compute the cumulative sum of array elements along a given axis.
print(cumsum)
# [0.99809123 0.99982715 0.99992211 0.99997232 0.99998469 0.99999315
# 0.99999596 0.99999748 0.99999861 0.99999933 0.99999971 0.99999992
# 1. ]
d = np.argmax(cumsum>=0.95)+1
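# Illustrative: np.argmax over a boolean array returns the index of the first
# True, e.g. np.argmax([False, True, True]) -> 1, so the +1 turns that index
# into a component count.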
print('cumsum >=0.95', cumsum >=0.95)
print('d : ', d)
# cumsum >=0.95 [ True True True True True True True True True True True True
# True]
# d : 1
import matplotlib.pyplot as plt
plt.plot(cumsum)
plt.grid()
plt.show()
| [
"[email protected]"
] | |
bc4ce015eb040a0bfe60106b3a22e8e043989877 | ff182eeaf59b16f79b7d306eef72ddaadf0f4e71 | /Vaffle_interface/testcase/SystemModule/System_test23_invite_get_score.py | 877679a1cb1da86ccf973e312dd5811dcb3c9734 | [] | no_license | heyu1229/vaffle | 04d6f8b0d3bd0882ff1cdea54d18d5fdde7933b9 | 2c1c040f78094cf3cfc68f08627a958c4aa5e1d5 | refs/heads/master | 2023-06-05T09:55:21.894344 | 2021-03-12T07:26:45 | 2021-03-12T07:26:45 | 381,248,658 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 874 | py | # -*- coding:UTF-8 -*-
import unittest
import requests
import time,gc,sys
from Vaffle_interface.public_1.func_requests import FuncRequests
from Vaffle_interface.public_1.get_url import Url
class Invite_get_score(unittest.TestCase):
def setUp(self):
        self.member_uuid = Url().test_user()
        self.requests = FuncRequests()
    #-----------------Invite friends to earn points--------------------------------
def testcase_001(self):
sheet_index = 3
row = 34
print("testcase_001 反馈:")
date=time.strftime("%Y-%m-%d %H:%M:%S",time.localtime())
payload = {'member_uuid':self.member_uuid}
result=self.requests.interface_requests_payload(self.member_uuid, sheet_index, row, payload)
self.assertEqual(10000, result["code"])
print("code返回值:10000")
if __name__=="__main__":
unittest.main() | [
"[email protected]"
] | |
5f1f4ad717ccde42c1a45dcfb353c5a9f6f7a916 | 3f763cf893b09a3be562858613c928703ff349e4 | /client/verta/verta/_swagger/_public/modeldb/model/ModeldbCreateProjectResponse.py | b749fb1cab032e8cfae28f8b96a3aba11500069f | [
"Apache-2.0"
] | permissive | VertaAI/modeldb | 636e46fc025b01a514d599b10e228c8735503357 | ec9ac7712500adb13fd815dfd476ce9f536c6921 | refs/heads/main | 2023-08-31T00:45:37.220628 | 2023-08-30T18:45:13 | 2023-08-30T18:45:13 | 71,305,435 | 844 | 142 | Apache-2.0 | 2023-09-14T19:24:13 | 2016-10-19T01:07:26 | Java | UTF-8 | Python | false | false | 616 | py | # THIS FILE IS AUTO-GENERATED. DO NOT EDIT
from verta._swagger.base_type import BaseType
class ModeldbCreateProjectResponse(BaseType):
def __init__(self, project=None):
required = {
"project": False,
}
self.project = project
for k, v in required.items():
if self[k] is None and v:
raise ValueError('attribute {} is required'.format(k))
@staticmethod
def from_json(d):
from .ModeldbProject import ModeldbProject
tmp = d.get('project', None)
if tmp is not None:
d['project'] = ModeldbProject.from_json(tmp)
return ModeldbCreateProjectResponse(**d)
| [
"[email protected]"
] | |
f026f41d97ad800e361e469b6d9b2f9ce747b465 | 325bee18d3a8b5de183118d02c480e562f6acba8 | /pycan/pycan/spiders/listed_issuers_spider.py | 5cbf6d2e6c7d9efd5de970ad5a60ec512b0647b2 | [] | no_license | waynecanfly/spiderItem | fc07af6921493fcfc21437c464c6433d247abad3 | 1960efaad0d995e83e8cf85e58e1db029e49fa56 | refs/heads/master | 2022-11-14T16:35:42.855901 | 2019-10-25T03:43:57 | 2019-10-25T03:43:57 | 193,424,274 | 4 | 0 | null | 2022-11-04T19:16:15 | 2019-06-24T03:00:51 | Python | UTF-8 | Python | false | false | 7,315 | py | """从归档(MiG Archives)文件中提取公司列表"""
from io import BytesIO
from zipfile import BadZipFile
import scrapy
import pymysql
from scrapy import signals
from openpyxl import load_workbook
from dateutil.parser import parse as parse_datetime
from scrapy.spidermiddlewares.httperror import HttpError
from twisted.internet.error import DNSLookupError
from twisted.internet.error import TimeoutError, TCPTimedOutError
from twisted.internet.error import ConnectionRefusedError
from twisted.web._newclient import ResponseNeverReceived
from ..items import CompanyItem, ProfileDetailItem
class ListedIssuersSpider(scrapy.Spider):
name = 'listed_issuers'
start_urls = [
'https://www.tsx.com/listings/current-market-statistics/mig-archives'
]
captions = [
{
'Exchange': 'exchange_market_code',
'Name': 'name_en',
'Root Ticker': 'security_code',
'SP_Type': 'security_type',
'Sector': 'sector_code',
'Date of TSX Listing YYYYMMDD': 'ipo_date',
'Place of Incorporation C=Canada U=USA F=Foreign': (
'country_code_origin'
)
},
{
'Exchange': 'exchange_market_code',
'Name': 'name_en',
'Root Ticker': 'security_code',
'Sector': 'sector_code',
'Date of Listing': 'ipo_date'
}
]
countries = {
'C': 'CAN',
'U': 'USA',
'F': None
}
@classmethod
def from_crawler(cls, crawler, *args, **kwargs):
spider = super(ListedIssuersSpider, cls).from_crawler(
crawler, *args, **kwargs
)
crawler.signals.connect(spider.spider_opened, signals.spider_opened)
crawler.signals.connect(spider.spider_closed, signals.spider_closed)
return spider
def spider_opened(self, spider):
self.logger.info('Opening spider %s...', spider.name)
conn = pymysql.connect(**self.settings['DBARGS'])
with conn.cursor() as cursor:
cursor.execute("""\
select code, security_code, exchange_market_code, status from \
company where country_code_listed='CAN'\
""")
records = cursor.fetchall()
conn.close()
self.companies = {}
for it in records:
id_ = it['exchange_market_code'], it['security_code']
self.companies[id_] = it['code'], it['status']
if records:
            NUMBER = slice(3, None)  # numeric part of the company code
self.max_code_num = int(max(it['code'] for it in records)[NUMBER])
else:
self.max_code_num = 10000
self.total_new = 0
def spider_closed(self, spider):
self.logger.info(
'Closing spider %s..., %d new', spider.name, self.total_new
)
def parse(self, response):
try:
doc_href = response.xpath(
"//a[text()='TSX/TSXV Listed Issuers']/..//a/@href"
).extract()[1]
yield response.follow(
doc_href,
callback=self.parse_listed_issuers,
errback=self.errback_scraping
)
except IndexError:
self.logger.error("Can't find listed issuers info")
def parse_listed_issuers(self, response):
try:
wb = load_workbook(BytesIO(response.body), read_only=True)
labels_row, start_row = 7, 8
for ws in wb.worksheets:
labels = [
cell.value.replace('\n', ' ') for cell in ws[labels_row]
if isinstance(cell.value, str)
]
names = [
it.replace(' ', '_').lower() + '_mig_can' for it in labels]
for each in self.captions:
if set(each.keys()).issubset(set(labels)):
indexes = {
labels.index(it): each[it] for it in each
}
for row in ws.iter_rows(min_row=start_row):
item = CompanyItem()
profiles = []
for index, cell in enumerate(row):
if cell.value:
try:
item[indexes[index]] = cell.value
except KeyError:
profiles.append(
ProfileDetailItem(
name=names[index],
display_label=labels[index],
value=cell.value,
data_type='string'
)
)
try:
item['country_code_origin'] = self.countries[
item['country_code_origin']
]
except KeyError:
pass
company = (
item['exchange_market_code'],
item['security_code']
)
if company not in self.companies:
self.max_code_num += 1
item['code'] = 'CAN' + str(self.max_code_num)
item['name_origin'] = item['name_en']
if 'ipo_date' in item:
item['ipo_date'] = parse_datetime(
str(item['ipo_date']))
self.companies[company] = (item['code'], None)
for p_item in profiles:
p_item['company_code'] = item['code']
yield p_item
yield item
break
else:
self.logger.error(
'Failed finding captions for listed issuers')
except BadZipFile:
self.logger.error(
'Listed issuers may redirect to %s', response.url)
def errback_scraping(self, failure):
req_url = failure.request.url
if failure.check(HttpError):
response = failure.value.response
self.logger.error('HttpError %s on %s', response.status, req_url)
elif failure.check(DNSLookupError):
self.logger.error('DNSLookupError on %s', req_url)
elif failure.check(ConnectionRefusedError):
self.logger.error('ConnectionRefusedError on %s', req_url)
elif failure.check(TimeoutError, TCPTimedOutError):
self.logger.error('TimeoutError on %s', req_url)
elif failure.check(ResponseNeverReceived):
self.logger.error('ResponseNeverReceived on %s', req_url)
else:
            self.logger.error('UnexpectedError on %s', req_url)
self.logger.error(repr(failure))
| [
"[email protected]"
] | |
42428250f3f843297cf0dea506a3f02218b3db63 | fb00808d44e18c7b27a8f86b553c586d4033504f | /sandbox/factory/factory_metrics.py | abb3cb2f34553b13d0e0d4696ef04c0988509187 | [] | no_license | akamlani/datascience | 4f1bab94a1af79b7f41339b5a1ba4acc965d4511 | 62f4d71f3642f89b4bbd55d7ef270321b983243e | refs/heads/master | 2021-01-17T10:11:11.069207 | 2016-12-29T04:33:49 | 2016-12-29T04:33:49 | 24,774,956 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 15,565 | py | from __future__ import division
import pandas as pd
import numpy as np
import argparse
import os
import re
import json
import sys
import warnings
from datetime import datetime
import matplotlib.pyplot as plt
import seaborn as sns
warnings.filterwarnings("ignore")
### Create Tabular Structure
def get_folder_attrs(path):
root_dir = os.listdir(path)
root_attr = {name: os.path.isdir(path + name) for name in root_dir}
root_dirs = map(lambda (k,v): k,filter(lambda (k,v): v==1, root_attr.iteritems()) )
root_files = map(lambda (k,v): k,filter(lambda (k,v): v==0, root_attr.iteritems()) )
n_rootdirs = len(root_dirs)
n_rootfiles = len(root_files)
return {
'root_dirs': root_dirs,
'num_rootdirs': n_rootdirs,
'root_files': root_files,
'num_rootfiles': n_rootfiles
}
def extract_subfolder_data(subfolder, rootpath):
root_data_dict = {}
for (dirpath, dirnames, filenames) in os.walk(rootpath+subfolder):
if len(filenames) > 1:
k = dirpath.split(rootpath)[1].strip('data/')
v = list( set([filename.split('.')[0] for filename in filenames]) )
root_data_dict[k] = v
return root_data_dict
def create_units(status_dict, root_path):
# create a series of records of units that were tested
df_units = pd.DataFrame()
for k,v in status_dict.iteritems():
for item in v:
unit_type, date_rec = [s.strip() for s in k.split("/")]
file_name = "_".join(item.split("_")[:-2])
ts_rec = "".join(item.split("_")[-2:])
is_dir = os.path.isdir(root_path + k + item)
if not is_dir:
ts_rec = datetime.strptime(ts_rec, '%Y%m%d%H%M%S')
filename = root_path + k +'/' + item + '.csv'
# create new format to tabulate structure
unit_name = file_name.split("_")[0].strip() if unit_type == 'FAIL' else file_name
unit_dict = {'file_name':file_name, 'unit_name': unit_name,
'unit_status': unit_type, 'date_record': ts_rec}
df_units = df_units.append(unit_dict, ignore_index=True)
df_units['date'] = df_units.date_record.dt.date
df_units['hour'] = df_units.date_record.dt.hour
return df_units
def create_dir_structure():
if not os.path.exists(data_path):
print("No Data Present")
sys.exit()
else:
if not os.path.exists(log_path): os.makedirs(log_path)
if not os.path.exists(image_path): os.makedirs(image_path)
if not os.path.exists(config_path): print("\nNo Config File: Using default config\n")
attrs = get_folder_attrs(fixture_path)
params = {k:v for k,v in attrs.iteritems() if k != 'root_files'}
filename = log_path + file_prefix + 'rootdir_attr.txt'
pd.Series(params, name='attributes').to_csv(filename, sep='\t')
print "Root Dir Attributes:"; print pd.Series(params, name='attributes')
return attrs
### Aggregation Calculations
def calc_agg_stats(df_units):
df_unit_counts = df_units.groupby('unit_name')['unit_status'].count()
df_mult_tests = df_unit_counts[df_unit_counts > 1].sort_values(ascending=False)
df_mult_failures = df_units[(df_units.unit_name.isin(df_mult_tests.index)) & (df_units.unit_status == 'FAIL')]
# aggregate statistics
n_units, n_tests = len(df_units.unit_name.unique()), len(df_units.unit_name)
n_units_mult_failures, n_mult_failures = (len(df_mult_tests), df_mult_tests.sum())
# executed tests that are passing and failing
n_pass_tests, n_fail_tests = df_units.unit_status.value_counts()
n_pass_tests_pct, n_fail_tests_pct = n_pass_tests/n_tests, n_fail_tests/n_tests
# there are some boards that show up both in pass and failure ('LB1537330100294')
# find the lastest timestamp and verify it must be a PASS to update true failure count
n_pass_units = len(df_units[df_units.unit_status=='PASS']['unit_name'].unique())
n_fail_units = len(df_units[df_units.unit_status=='FAIL']['unit_name'].unique())
pass_units = set(df_units[df_units.unit_status=='PASS']['unit_name'].unique())
fail_units = set(df_units[df_units.unit_status=='FAIL']['unit_name'].unique())
units_overlap = (pass_units & fail_units)
df_units_overlap = df_units[df_units.unit_name.isin(units_overlap)].sort_values(by='unit_name')
df_units_overlap = df_units_overlap.groupby('unit_name')[['date_record', 'unit_status']].max()
n_units_overlap = df_units_overlap[df_units_overlap.unit_status != 'PASS'].shape[0]
n_fail_units = n_fail_units - (len(units_overlap) - n_units_overlap)
n_pass_units_pct, n_fail_units_pct = n_pass_units/n_units, n_fail_units/n_units
# create a dict for processing
data_metrics = pd.Series({
'num_units': n_units, 'num_tests': n_tests,
'num_units_multiple_failures': n_units_mult_failures, 'num_tests_multiple_failures': n_mult_failures,
'num_pass_tests': n_pass_tests, 'num_fail_tests': n_fail_tests,
'num_pass_tests_pct': n_pass_tests_pct, 'num_fail_tests_pct': n_fail_tests_pct,
'num_pass_units': n_pass_units, 'num_fail_units': n_fail_units,
'num_pass_units_pct': n_pass_units_pct, 'num_fail_units_pct': n_fail_units_pct,
'num_units_overlapped_passfail': n_units_overlap
}).sort_values(ascending=False)
filename = log_path + file_prefix + 'status_metrics.txt'
write_log(filename, data_metrics, "\nUnit/Experimental Test Metrics:", log=True, format='pretty')
return data_metrics
def calc_agg_dated(df_units):
# date,hourly multi-index
df_agg_date_hourly = df_units.groupby(['date','hour'])['unit_name'].count()
df_agg_date_hourly.name = 'units_served'
df_agg_date_hourly.columns = ['units_served']
filename = log_path + file_prefix + 'units_served_datehourly.txt'
write_log(filename, df_agg_date_hourly, format='Pretty')
# hourly aggregations
df_stats_hourly = df_agg_date_hourly.reset_index()
df_agg_hourly = df_stats_hourly.groupby('hour')['units_served'].agg([np.mean, np.median, np.std], axis=1)
df_agg_hourly = pd.concat( [ df_units.groupby('hour')['unit_name'].count(), df_agg_hourly], axis=1 )
df_agg_hourly.columns = ['count','average', 'median', 'std']
filename = log_path + file_prefix + 'units_served_hourly_stats.txt'
write_log(filename, df_agg_hourly, header=['Count', 'Average', 'Median', 'Std'])
# hourly summary statistics
ds_agg_summary = pd.Series({
'mean': df_agg_hourly['count'].mean(),
'median': df_agg_hourly['count'].median(),
'std': df_agg_hourly['count'].std()}, name='units_served_hourly')
filename = log_path + file_prefix + 'units_served_hourly_summary.txt'
write_log(filename, ds_agg_summary, header=["Units Served Hourly"])
s = "Units Served Hourly:\nMean: {0:.2f}, Median: {1:.2f}, STD: {2:.2f}"
print s.format(df_agg_hourly['count'].mean(), df_agg_hourly['count'].median(), df_agg_hourly['count'].std())
return ds_agg_summary
def calc_agg_failures(ds, datapath):
filepath = datapath + ds.unit_status + "/" + "".join(ds.date.strftime('%Y%m%d')) + "/"
filename = filepath + ds.file_name + ds.date_record.strftime('_%Y%m%d_%H%M%S') + '.csv'
df = pd.read_csv(filename)
# extract test failures for a given failure and append to
df_fail = df[(df.STATUS == 1) | (df.VALUE == 'FAIL')]
df_test_failures = df_fail.groupby('TEST')['VALUE'].count()
    # keep track of occurring failures
return df_test_failures
### Configuration Aggregations
def define_default_configs():
return [
{'name': 'voltagedefault', 'prefix': ['V'], 'pattern': ['BOLT', 'PWR']}
]
def match(frame, start_cond, pattern_cond):
# define regex patterns
pattern_regex = "|".join([p for p in pattern_cond])
start_regex = "|".join([p for p in start_cond])
start_regex = "^("+ start_regex +")"
# create series
df_flt = frame[(frame.TEST.str.contains(pattern_regex)) | (frame.TEST.str.contains(start_regex))]
df_flt = df_flt.reset_index()
df_flt = df_flt[['TEST','VALUE']].T
df_flt.columns = [df_flt.ix['TEST']]
df_flt = df_flt.drop('TEST', axis=0).reset_index().drop('index',axis=1)
return df_flt
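# Illustrative: with start_cond=['V'] and pattern_cond=['BOLT', 'PWR'] (the
# defaults in define_default_configs), match() builds the regexes '^(V)' and
# 'BOLT|PWR', keeping every TEST that starts with 'V' or contains BOLT/PWR.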
def match_config_patterns(ds, datapath, name, start_cond, pattern_cond):
filepath = datapath + ds.unit_status + "/" + "".join(ds.date.strftime('%Y%m%d')) + "/"
filename = filepath + ds.file_name + ds.date_record.strftime('_%Y%m%d_%H%M%S') + '.csv'
df = pd.read_csv(filename)
df_patterns = match(df, start_cond, pattern_cond)
return pd.Series( {k:v.values[0] for k,v in dict(df_patterns).iteritems()} )
def calc_agg_config(frame, datapath, name, start_cond, pattern_cond):
params = (name, start_cond, pattern_cond)
df_agg_config = frame.apply(lambda x: match_config_patterns(x, datapath, *params), axis=1).astype('float')
# calculate aggregations
iqr = (df_agg_config.dropna().quantile(0.75, axis=0) - df_agg_config.dropna().quantile(0.25, axis=0))
df_metric = pd.concat([df_agg_config.mean(axis=0), df_agg_config.median(axis=0), df_agg_config.std(axis=0),
iqr, df_agg_config.min(axis=0), df_agg_config.max(axis=0)], axis=1)
df_metric.columns = ['mean', 'median', 'std', 'iqr', 'min', 'max']
df_metric.name = name
# save to log file
filename = log_path + file_prefix + name + '_stats.txt'
write_log(filename, df_metric, header=["Failure Counts"], format='pretty')
return df_metric
### Plots/Visualizations
def plot_units_metrics(metrics, titles):
fig, (ax1,ax2,ax3) = plt.subplots(1,3, figsize=(20,7))
for data,title,axi in zip(metrics, titles, (ax1,ax2,ax3)):
sns.barplot(data, data.index, ax=axi)
axi.set_title(title, fontsize=16, fontweight='bold')
for tick in axi.yaxis.get_major_ticks():
tick.label.set_fontsize(14)
tick.label.set_fontweight('bold')
for tick in axi.xaxis.get_major_ticks():
tick.label.set_fontsize(14)
tick.label.set_fontweight('bold')
fig.set_tight_layout(True)
plt.savefig(image_path + file_prefix + 'units_status_metrics.png')
def plot_units_dailyhour(df_units):
# units per hour tested
fig = plt.figure(figsize=(14,6))
df_units['date'] = df_units.date_record.dt.date
df_units['hour'] = df_units.date_record.dt.hour
df_units_dated = df_units.groupby(['date','hour'])['unit_name'].count()
df_units_dated.unstack(level=0).plot(kind='bar', subplots=False)
plt.ylabel("Num Units Tested", fontsize=10, fontweight='bold')
plt.xlabel("Hour", fontsize=10, fontweight='bold')
plt.title("Distribution per number of units tested", fontsize=13, fontweight='bold')
fig.set_tight_layout(True)
plt.savefig(image_path + file_prefix + 'units_tested_datehour.png')
def plot_units_hourly(df_units):
fig = plt.figure(figsize=(14,6))
df_agg_hourly = df_units.groupby(['hour'])['unit_name'].count()
df_agg_hourly.plot(kind='bar')
plt.ylabel("Num Units Tested", fontsize=10, fontweight='bold')
plt.xlabel("Hour", fontsize=10, fontweight='bold')
plt.title("Hourly Distribution per number of units tested", fontsize=10, fontweight='bold')
fig.set_tight_layout(True)
plt.savefig(image_path + file_prefix + 'units_tested_hourly.png')
def plot_failure_metrics(frame):
fig = plt.figure(figsize=(14,6))
sns.barplot(frame, frame.index)
for tick in plt.gca().yaxis.get_major_ticks():
tick.label.set_fontsize(8)
tick.label.set_fontstyle('italic')
tick.label.set_fontweight('bold')
plt.xlabel('Number of Failures', fontsize=10, fontweight='bold')
plt.title("Failure Test Types Distribution", fontsize=10, fontweight='bold')
fig.set_tight_layout(True)
plt.savefig(image_path + file_prefix + 'units_failure_metrics.png')
### Logging
def write_log(filename, frame, header=None, log=False, format=None):
if format:
with open(filename, 'w') as f: f.write(frame.__repr__())
if log: print header; print (frame); print
else:
frame.to_csv(filename, sep='\t', float_format='%.2f', header=header)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Factory Unit Metrics')
parser.add_argument('-f', '--fixture', default='fixture', nargs='?', help='default=fixture')
args = parser.parse_args()
curr_date = "".join( str(datetime.now().date()).split("-") )
fixture_path = args.fixture + '/'
data_path = fixture_path + 'data/'
log_path = fixture_path + 'logs/' + curr_date + '/'
image_path = fixture_path + 'images/' + curr_date + '/'
config_path = fixture_path + 'config/'
file_prefix = args.fixture.split("/")[-1] + '_'
root_fixture_path = fixture_path if fixture_path.startswith('/') else os.getcwd() + '/' + fixture_path
root_data_path = data_path if fixture_path.startswith('/') else os.getcwd() + '/' + data_path
# create folder structure if necessary, create tabular dataframe format
attrs = create_dir_structure()
meta_folders = ['logs', 'images', 'config']
meta_path = '[' + '|'.join(meta_folders) + ']'
data_folders = filter(lambda x: x not in meta_folders, attrs['root_dirs'])
data = [extract_subfolder_data(dir_name, root_fixture_path) for dir_name in attrs['root_dirs']]
data_dict = {k: v for d in data for k, v in d.items() if not re.compile(meta_path).search(k)}
df_aggunits = create_units(data_dict, root_data_path)
# Apply Core Aggregations, Log to Files
ds_metrics = calc_agg_stats(df_aggunits).sort_values(ascending=False)
ds_metrics_summary = calc_agg_dated(df_aggunits)
ds_failures = df_aggunits.apply(lambda x: calc_agg_failures(x, data_path), axis=1)
ds_failures = ds_failures.sum().astype(int)
ds_failures = ds_failures.drop('OVERALL_TEST_RESULT', axis=0).sort_values(ascending=False)
filename = log_path + file_prefix + 'testfailuretype_stats.txt'
write_log(filename, ds_failures[:10], header="\nTop 10 Failure Test Types", log=True, format='pretty')
# Apply Configuration Aggregations, Log to Files
if os.path.exists(config_path):
with open(config_path + 'config.json') as f:
config_json = json.load(f)
config_tests = config_json['tests']
else:
config_tests = define_default_configs()
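    # Illustrative config.json shape, mirroring the keys consumed above
    # (values are assumptions):
    #   {"tests": [{"name": "voltagedefault",
    #               "prefix": ["V"], "pattern": ["BOLT", "PWR"]}]}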
for config in config_tests:
params = (config['name'], config['prefix'], config['pattern'])
calc_agg_config(df_aggunits, data_path, *params)
# Apply Plots
ds_metrics_units = ds_metrics.ix[['num_units', 'num_pass_units', 'num_fail_units',
'num_units_multiple_failures', 'num_units_overlapped_passfail']]
ds_metrics_tests = ds_metrics.ix[['num_tests', 'num_pass_tests',
'num_fail_tests','num_tests_multiple_failures']]
ds_metrics_pct = ds_metrics.ix[['num_pass_units_pct', 'num_pass_tests_pct',
'num_fail_tests_pct', 'num_fail_units_pct']]
plot_units_metrics((ds_metrics_units, ds_metrics_tests, ds_metrics_pct.sort_values(ascending=False)),
("Unit Metrics", "Pass/Failure Counts", "Pass/Fail Test Percentages"))
plot_units_dailyhour(df_aggunits)
plot_units_hourly(df_aggunits)
plot_failure_metrics(ds_failures)
| [
"[email protected]"
] | |
bfb478f20e11de16e5810f8d08fa62eb3da131f8 | f48a3d354bf4bbbe3d47651dd77853c29934f1fe | /Code/Finance/Code/Udemy_AlgoTrading/51_max_dd_calmar.py | 0639e3adacef6a05456f219fb7c4fdc80ad8f7fa | [
"MIT"
] | permissive | guidefreitas/TeachingDataScience | 0677df459d5a13c00404b8b04cbe3b389dae3d8b | f3e0bc6e391348a8065b09855ab82c436f82a4b5 | refs/heads/master | 2023-09-03T14:02:11.853103 | 2021-11-07T03:56:54 | 2021-11-07T03:56:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,457 | py | # =============================================================================
# Measuring the performance of a buy and hold strategy - Max drawdown & calmar ratio
# Author : Mayank Rasu (http://rasuquant.com/wp/)
# Please report bug/issues in the Q&A section
# =============================================================================
# Import necesary libraries
import yfinance as yf
import numpy as np
import datetime as dt
# Download historical data for required stocks
ticker = "^GSPC"
SnP = yf.download(ticker,dt.date.today()-dt.timedelta(1825),dt.datetime.today())
def CAGR(DF):
"function to calculate the Cumulative Annual Growth Rate of a trading strategy"
df = DF.copy()
df["daily_ret"] = DF["Adj Close"].pct_change()
df["cum_return"] = (1 + df["daily_ret"]).cumprod()
n = len(df)/252
CAGR = (df["cum_return"][-1])**(1/n) - 1
return CAGR
def max_dd(DF):
"function to calculate max drawdown"
df = DF.copy()
df["daily_ret"] = DF["Adj Close"].pct_change()
df["cum_return"] = (1 + df["daily_ret"]).cumprod()
df["cum_roll_max"] = df["cum_return"].cummax()
df["drawdown"] = df["cum_roll_max"] - df["cum_return"]
df["drawdown_pct"] = df["drawdown"]/df["cum_roll_max"]
max_dd = df["drawdown_pct"].max()
return max_dd
print(max_dd(SnP))
def calmar(DF):
"function to calculate calmar ratio"
df = DF.copy()
clmr = CAGR(df)/max_dd(df)
return clmr
print(calmar(SnP))
| [
"[email protected]"
] | |
34c324f9bfe464ec5dec8508c846a30409c79e34 | 46c521a85f567c609f8a073cb9569ea59e2a104f | /kunalProgram23.py | bab9fcf53fa8ff6e595f560e1fd900d0d4fa40d5 | [] | no_license | Kunal352000/python_adv | c046b6b785b52eaaf8d089988d4dadf0a25fa8cb | d9736b6377ae2d486854f93906a6bf5bc4e45a98 | refs/heads/main | 2023-07-15T23:48:27.131948 | 2021-08-21T13:16:42 | 2021-08-21T13:16:42 | 398,557,910 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 174 | py | n=int(input("Enter number of rows: "))
for i in range(n):
print(" "*(n-1-i)+(str(i+1)+' ')*(i+1))
for i in range(n-1):
print(" "*(i+1)+(str(n-1-i)+' ')*(n-1-i))
| [
"[email protected]"
] | |
ae9c18ed187a23fb7121a34ca1422020e3f7ddb5 | 20343e8a8435b3f839d5abd0c4063cf735f43341 | /Experiment/CornerDetectAndAutoEmail/AveMaxMinDetect/test/test1.py | c4133dfbbcbb80c2cf4a4201fd7522e512561360 | [] | no_license | alading241/MoDeng | 948f2099e2f7e4548d6e477b6e06b833bdf4f9bb | 01819e58943d7d1a414714d64aa531c0e99dfe22 | refs/heads/master | 2021-05-23T11:39:41.326804 | 2020-04-05T06:06:01 | 2020-04-05T06:06:01 | 253,269,397 | 1 | 0 | null | 2020-04-05T15:38:33 | 2020-04-05T15:38:33 | null | UTF-8 | Python | false | false | 476 | py | # encoding = utf-8
import tornado
from apscheduler.schedulers.tornado import TornadoScheduler
sched = TornadoScheduler()
""" 测试向任务中传入参数 """
test = 'hello'
def job1(a, b, c):
print("job1:", a,b,c)
def job2(a, b, c):
print("job2:", a,b,c)
sched.add_job(job1, 'interval', seconds=1, args=["e", "t", "f"])
sched.add_job(job2, 'interval', seconds=1, kwargs={"a": test, "b": "b", "c": "c"})
sched.start()
tornado.ioloop.IOLoop.instance().start()
| [
"[email protected]"
] | |
513143cc032bfe7bf0b443158e43a0ef5e19b9c4 | 68f757e7be32235c73e316888ee65a41c48ecd4e | /python_book(이것이 코딩테스트다)/13 DFS/6 감시피하기.py | 2ae13a528d691db24f99e98aa727d9f3e6b9279b | [] | no_license | leejongcheal/algorithm_python | b346fcdbe9b1fdee33f689477f983a63cf1557dc | f5d9bc468cab8de07b9853c97c3db983e6965d8f | refs/heads/master | 2022-03-05T20:16:21.437936 | 2022-03-03T01:28:36 | 2022-03-03T01:28:36 | 246,039,901 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,089 | py | def check(Map):
global N, T
steps = [(1,0),(-1,0),(0,1),(0,-1)]
for tx, ty in T:
for dx, dy in steps:
x, y = tx + dx, ty + dy
while 0 <= x < N and 0 <= y < N:
if Map[x][y] == "O":
break
if Map[x][y] == "S":
return 0
x, y = x + dx, y + dy
return 1
def dfs(x, y):
global L, N, flag, count
if flag == 1:
return
if count == 3:
if check(L):
flag = 1
return
for i in range(N):
for j in range(N):
if x < i or (j > y and x == i):
if L[i][j] == "X":
L[i][j] = "O"
count += 1
dfs(i, j)
L[i][j] = "X"
count -= 1
return
N = int(input())
L = [list(input().split()) for _ in range(N)]
T = []
flag = 0
count = 0
for i in range(N):
for j in range(N):
if L[i][j] == "T":
T.append((i,j))
dfs(-1, -1)
if flag == 1:
print("YES")
else:
print("NO") | [
"[email protected]"
] | |
4009361d3230b25bd783c8a62243914aa44d83e8 | dad6ba45f05d267f6c44bd27949868dc474476e6 | /CQT/Archive/withoutHead.py | b880268f61562e2aaff0d40889c0d47085412c3c | [] | no_license | morindaz/CQT_All | 1f36c5ef22348e2293d9f4f63e58009f0dd274b7 | 8ab6f82ad7b1cf3b4555fe785566013f5cb57a4f | refs/heads/master | 2021-04-27T00:11:17.744327 | 2018-03-04T07:19:06 | 2018-03-04T07:19:06 | 123,765,136 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 842 | py | # coding=utf-8
import time
import glob
import pandas as pd
#这里的C数据和R相反
answerR = "E:\\pingan\\dataset\\newFeature\\answer_C" #492个数据,withouthead
answerC = "E:\\pingan\\dataset\\newFeature\\answer_R" #244个数据
answerI = "E:\\pingan\\dataset\\newFeature\\answer_I" #478个数据
base = answerI
csvx_list = glob.glob(base+"\\"+'*.csv')
print('总共发现%s个CSV文件'% len(csvx_list))
time.sleep(2)
print('正在处理............')
df = pd.DataFrame()
for i in csvx_list:
df_c = pd.read_csv(i, sep=',', header=0)
# print(df_c['video_name'].tolist())
# fr = i.values
# print df_c
df = df.append(df_c)
#print df
print('写入成功!')
output_Archive = pd.DataFrame(df)
output_Archive.to_csv("base"+'.csv')
print('写入完毕!')
print('3秒钟自动关闭程序!')
time.sleep(3) | [
"[email protected]"
] | |
8845d190ae1c3cf32b9e4665e81304da16378cc6 | 56b7e5ed6941fc4b83148e00bd51421dc3ac993a | /hackerrank/Delete Nodes Greater Than X/Delete Nodes Greater Than X.py | 6c8ef8211533ee3666850ccdc29c91e09917498c | [] | no_license | samir-0711/Leetcode-Python | f960e15015a3f2fd88f723d7f9237945a7133553 | d75876ae96bcd85c67bbfbf91bbc0f0bc773e97c | refs/heads/master | 2022-12-18T05:27:48.224001 | 2020-09-30T21:03:42 | 2020-09-30T21:03:42 | 300,061,318 | 0 | 0 | null | 2020-09-30T20:59:42 | 2020-09-30T20:59:42 | null | UTF-8 | Python | false | false | 1,590 | py | '''
Complete the removeNodes function provided in your editor. It has 2 parameters:
1. list: A reference to a Linked List Node that is the head of a linked list.
2. x: An integer value.
Your funciton should remove all nodes from the list having data values greater than x, and then return the head of the modified linked list.
Input Format
The locked stub code in your editer processes the following inputs and pased the necessary arguments to the removeNodes function:
The first line contains N, the number of nodes in the linked list.
Each line i (where 0<= i <) of the N subsequent lines contains an integer representing the value of a node in the linked list. The last line contains an integer, x.
Output Format
Return the linked list after removing the nodes containing values > x.
Sample Input 1:
5
1
2
3
4
5
3
Sample Output 1:
1
2
3
Sample Input 2:
5
5
2
1
6
7
5
Sample Output2:
5
2
1
'''
class LinkedListNode:
def __init__(self, node_Value):
self.val = node_Value
self.next = None
def _insert_node_into_singlylinkedlist(head, tail, val):
if head == None:
head = LinkedListNode(val)
tail = head
else:
node = LinkedListNode(val)
tail.next = node
tail = tail.next
def removeNodes(list, x):
if list == None or x == None:
return None
temp = list
while temp.val > x:
temp = temp.next
curr = temp
prev = None
while curr != None:
if curr.val > x:
prev.next = curr.next
else:
prev = curr
curr = curr.next
return temp
| [
"[email protected]"
] | |
59812772783380dd6340412af14255fc7cbb7fdc | 9745f847ff7606d423918fdf4c7135d930a48181 | /peering/migrations/0001_v1.0.0.py | 9217c861c8e8d871d51b04d9dca8ee224fa82471 | [
"Apache-2.0"
] | permissive | mxhob1/peering-manager | 097167707e499307632ffeaaba72b381a4290347 | 6c15aacdef5ed267d2602fb313eee8ee8a11149a | refs/heads/master | 2021-05-18T23:26:44.553331 | 2020-07-20T06:35:22 | 2020-07-20T06:35:22 | 252,051,461 | 1 | 0 | Apache-2.0 | 2020-04-02T06:18:48 | 2020-04-01T02:30:46 | Python | UTF-8 | Python | false | false | 48,334 | py | # Generated by Django 2.2.7 on 2019-11-13 20:51
import django.contrib.postgres.fields
from django.db import migrations, models
import django.db.models.deletion
import netfields.fields
import peering.fields
import taggit.managers
import utils.validators
class Migration(migrations.Migration):
def forward_transition_from_none_to_zero(apps, schema_editor):
models = {
"AutonomousSystem": {
"filters": {"ipv4_max_prefixes": None, "ipv6_max_prefixes": None},
"updates": {"ipv4_max_prefixes": 0, "ipv6_max_prefixes": 0},
},
"DirectPeeringSession": {
"filters": {
"advertised_prefix_count": None,
"received_prefix_count": None,
},
"updates": {"advertised_prefix_count": 0, "received_prefix_count": 0},
},
"InternetExchange": {
"filters": {"peeringdb_id": None},
"updates": {"peeringdb_id": 0},
},
"InternetExchangePeeringSession": {
"filters": {
"advertised_prefix_count": None,
"received_prefix_count": None,
},
"updates": {"advertised_prefix_count": 0, "received_prefix_count": 0},
},
}
db_alias = schema_editor.connection.alias
for key, value in models.items():
model = apps.get_model("peering", key)
model.objects.using(db_alias).filter(**value["filters"]).update(
**value["updates"]
)
def reverse_transition_from_none_to_zero(apps, schema_editor):
models = {
"AutonomousSystem": {
"filters": {"ipv4_max_prefixes": 0, "ipv6_max_prefixes": 0},
"updates": {"ipv4_max_prefixes": None, "ipv6_max_prefixes": None},
},
"DirectPeeringSession": {
"filters": {"advertised_prefix_count": 0, "received_prefix_count": 0},
"updates": {
"advertised_prefix_count": None,
"received_prefix_count": None,
},
},
"InternetExchange": {
"filters": {"peeringdb_id": 0},
"updates": {"peeringdb_id": None},
},
"InternetExchangePeeringSession": {
"filters": {"advertised_prefix_count": 0, "received_prefix_count": 0},
"updates": {
"advertised_prefix_count": None,
"received_prefix_count": None,
},
},
}
db_alias = schema_editor.connection.alias
for key, value in models:
model = apps.get_model("peering", key)
for field in value:
model.objects.using(db_alias).filter(**value["filters"]).update(
**value["updates"]
)
def forward_transition_from_minus_one_to_zero(apps, schema_editor):
models = {
"AutonomousSystem": {
"filters": {"ipv4_max_prefixes": -1, "ipv6_max_prefixes": -1},
"updates": {"ipv4_max_prefixes": 0, "ipv6_max_prefixes": 0},
},
"DirectPeeringSession": {
"filters": {"advertised_prefix_count": -1, "received_prefix_count": -1},
"updates": {"advertised_prefix_count": 0, "received_prefix_count": 0},
},
"InternetExchange": {
"filters": {"peeringdb_id": -1},
"updates": {"peeringdb_id": 0},
},
"InternetExchangePeeringSession": {
"filters": {"advertised_prefix_count": -1, "received_prefix_count": -1},
"updates": {"advertised_prefix_count": 0, "received_prefix_count": 0},
},
}
db_alias = schema_editor.connection.alias
for key, value in models.items():
model = apps.get_model("peering", key)
model.objects.using(db_alias).filter(**value["filters"]).update(
**value["updates"]
)
def reverse_transition_from_minus_one_to_zero(apps, schema_editor):
models = {
"AutonomousSystem": {
"filters": {"ipv4_max_prefixes": 0, "ipv6_max_prefixes": 0},
"updates": {"ipv4_max_prefixes": -1, "ipv6_max_prefixes": -1},
},
"DirectPeeringSession": {
"filters": {"advertised_prefix_count": 0, "received_prefix_count": 0},
"updates": {"advertised_prefix_count": -1, "received_prefix_count": -1},
},
"InternetExchange": {
"filters": {"peeringdb_id": 0},
"updates": {"peeringdb_id": -1},
},
"InternetExchangePeeringSession": {
"filters": {"advertised_prefix_count": 0, "received_prefix_count": 0},
"updates": {"advertised_prefix_count": -1, "received_prefix_count": -1},
},
}
db_alias = schema_editor.connection.alias
for key, value in models:
model = apps.get_model("peering", key)
for field in value:
model.objects.using(db_alias).filter(**value["filters"]).update(
**value["updates"]
)
def forward_transition_from_none_to_empty_list(apps, schema_editor):
AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
db_alias = schema_editor.connection.alias
AutonomousSystem.objects.using(db_alias).filter(
potential_internet_exchange_peering_sessions=None
).update(potential_internet_exchange_peering_sessions=[])
def reverse_transition_from_none_to_empty_list(apps, schema_editor):
AutonomousSystem = apps.get_model("peering", "AutonomousSystem")
db_alias = schema_editor.connection.alias
AutonomousSystem.objects.using(db_alias).filter(
potential_internet_exchange_peering_sessions=[]
).update(potential_internet_exchange_peering_sessions=None)
def add_permissions(apps, schema_editor):
pass
def remove_permissions(apps, schema_editor):
"""Reverse the above additions of permissions."""
ContentType = apps.get_model("contenttypes.ContentType")
Permission = apps.get_model("auth.Permission")
try:
content_type = ContentType.objects.get(
model="internetexchange", app_label="peering"
)
Permission.objects.filter(
content_type=content_type,
codename__in=("view_configuration", "deploy_configuration"),
).delete()
except ContentType.DoesNotExist:
pass
replaces = [
("peering", "0001_initial"),
("peering", "0002_auto_20170820_1809"),
("peering", "0003_auto_20170903_1235"),
("peering", "0004_auto_20171004_2323"),
("peering", "0005_auto_20171014_1427"),
("peering", "0006_auto_20171017_1917"),
("peering", "0007_auto_20171202_1900"),
("peering", "0008_auto_20171212_2251"),
("peering", "0009_auto_20171226_1550"),
("peering", "0010_auto_20171228_0158"),
("peering", "0011_auto_20180329_2146"),
("peering", "0012_auto_20180502_1733"),
("peering", "0013_auto_20180505_1545"),
("peering", "0014_auto_20180519_2128"),
("peering", "0015_peeringsession_password"),
("peering", "0016_auto_20180726_1307"),
("peering", "0017_auto_20180802_2309"),
("peering", "0018_auto_20181014_1612"),
("peering", "0019_router_netbox_device_id"),
("peering", "0020_auto_20181105_0850"),
("peering", "0021_auto_20181113_2136"),
("peering", "0022_auto_20181116_2226"),
("peering", "0023_auto_20181208_2202"),
("peering", "0024_auto_20181212_2106"),
("peering", "0025_auto_20181212_2322"),
(
"peering",
"0026_autonomoussystem_potential_internet_exchange_peering_sessions",
),
("peering", "0027_auto_20190105_1600"),
("peering", "0028_internetexchangepeeringsession_is_router_server"),
("peering", "0029_auto_20190114_2141"),
("peering", "0030_directpeeringsession_router"),
("peering", "0031_auto_20190227_2210"),
("peering", "0032_auto_20190302_1415"),
("peering", "0033_router_encrypt_passwords"),
("peering", "0034_auto_20190308_1954"),
("peering", "0035_auto_20190311_2334"),
("peering", "0036_auto_20190411_2209"),
("peering", "0037_auto_20190412_2102"),
("peering", "0038_auto_20190412_2233"),
("peering", "0039_routingpolicy_address_family"),
("peering", "0040_auto_20190417_1851"),
("peering", "0041_auto_20190430_1743"),
("peering", "0042_auto_20190509_1439"),
("peering", "0043_router_use_netbox"),
("peering", "0044_auto_20190513_2153"),
("peering", "0045_auto_20190514_2308"),
("peering", "0046_auto_20190608_2215"),
("peering", "0047_auto_20190619_1434"),
("peering", "0048_auto_20190707_1854"),
("peering", "0049_auto_20190731_1946"),
("peering", "0050_auto_20190806_2159"),
("peering", "0051_auto_20190818_1816"),
("peering", "0052_auto_20190818_1926"),
("peering", "0053_auto_20190921_2000"),
("peering", "0054_auto_20191031_2241"),
("peering", "0055_auto_20191110_1312"),
]
initial = True
dependencies = [
("taggit", "0003_taggeditem_add_unique_index"),
("utils", "0001_v1.0.0"),
]
operations = [
migrations.CreateModel(
name="AutonomousSystem",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("asn", peering.fields.ASNField(unique=True)),
("name", models.CharField(max_length=128)),
("comment", models.TextField(blank=True)),
(
"ipv6_max_prefixes",
models.PositiveIntegerField(blank=True, null=True),
),
(
"ipv4_max_prefixes",
models.PositiveIntegerField(blank=True, null=True),
),
("updated", models.DateTimeField(auto_now=True, null=True)),
("irr_as_set", models.CharField(blank=True, max_length=255, null=True)),
("ipv4_max_prefixes_peeringdb_sync", models.BooleanField(default=True)),
("ipv6_max_prefixes_peeringdb_sync", models.BooleanField(default=True)),
("irr_as_set_peeringdb_sync", models.BooleanField(default=True)),
("created", models.DateTimeField(auto_now_add=True, null=True)),
],
options={"ordering": ["asn"]},
),
migrations.CreateModel(
name="ConfigurationTemplate",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=128)),
("template", models.TextField()),
("updated", models.DateTimeField(auto_now=True, null=True)),
("comment", models.TextField(blank=True)),
("created", models.DateTimeField(auto_now_add=True, null=True)),
],
options={"ordering": ["name"]},
),
migrations.CreateModel(
name="Router",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=128)),
("hostname", models.CharField(max_length=256)),
(
"platform",
models.CharField(
blank=True,
choices=[
("junos", "Juniper JUNOS"),
("iosxr", "Cisco IOS-XR"),
("ios", "Cisco IOS"),
("nxos", "Cisco NX-OS"),
("eos", "Arista EOS"),
(None, "Other"),
],
help_text="The router platform, used to interact with it",
max_length=50,
),
),
("comment", models.TextField(blank=True)),
("created", models.DateTimeField(auto_now_add=True, null=True)),
("updated", models.DateTimeField(auto_now=True, null=True)),
(
"netbox_device_id",
models.PositiveIntegerField(blank=True, default=0),
),
],
options={"ordering": ["name"]},
),
migrations.CreateModel(
name="Community",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=128)),
("value", peering.fields.CommunityField(max_length=50)),
("comment", models.TextField(blank=True)),
(
"type",
models.CharField(
choices=[("egress", "Egress"), ("ingress", "Ingress")],
default="ingress",
max_length=50,
),
),
("created", models.DateTimeField(auto_now_add=True, null=True)),
("updated", models.DateTimeField(auto_now=True, null=True)),
],
options={"verbose_name_plural": "communities", "ordering": ["name"]},
),
migrations.CreateModel(
name="RoutingPolicy",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("created", models.DateTimeField(auto_now_add=True, null=True)),
("updated", models.DateTimeField(auto_now=True, null=True)),
("name", models.CharField(max_length=128)),
("slug", models.SlugField(unique=True)),
(
"type",
models.CharField(
choices=[
("import-policy", "Import"),
("export-policy", "Export"),
],
default="import-policy",
max_length=50,
),
),
("comment", models.TextField(blank=True)),
],
options={"verbose_name_plural": "routing policies", "ordering": ["name"]},
),
migrations.CreateModel(
name="InternetExchange",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=128)),
("slug", models.SlugField(unique=True)),
("comment", models.TextField(blank=True)),
(
"configuration_template",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="peering.ConfigurationTemplate",
),
),
("ipv4_address", models.GenericIPAddressField(blank=True, null=True)),
("ipv6_address", models.GenericIPAddressField(blank=True, null=True)),
(
"router",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="peering.Router",
),
),
(
"communities",
models.ManyToManyField(blank=True, to="peering.Community"),
),
("peeringdb_id", models.PositiveIntegerField(blank=True, null=True)),
("check_bgp_session_states", models.BooleanField(default=False)),
(
"bgp_session_states_update",
models.DateTimeField(blank=True, null=True),
),
("created", models.DateTimeField(auto_now_add=True, null=True)),
("updated", models.DateTimeField(auto_now=True, null=True)),
(
"export_routing_policies",
models.ManyToManyField(
blank=True,
related_name="internetexchange_export_routing_policies",
to="peering.RoutingPolicy",
),
),
(
"import_routing_policies",
models.ManyToManyField(
blank=True,
related_name="internetexchange_import_routing_policies",
to="peering.RoutingPolicy",
),
),
],
options={"ordering": ["name"]},
),
migrations.CreateModel(
name="DirectPeeringSession",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("created", models.DateTimeField(auto_now_add=True, null=True)),
("updated", models.DateTimeField(auto_now=True, null=True)),
("ip_address", models.GenericIPAddressField()),
("password", models.CharField(blank=True, max_length=255, null=True)),
("enabled", models.BooleanField(default=True)),
(
"bgp_state",
models.CharField(
blank=True,
choices=[
("idle", "Idle"),
("connect", "Connect"),
("active", "Active"),
("opensent", "OpenSent"),
("openconfirm", "OpenConfirm"),
("established", "Established"),
],
max_length=50,
null=True,
),
),
(
"received_prefix_count",
models.PositiveIntegerField(blank=True, null=True),
),
(
"advertised_prefix_count",
models.PositiveIntegerField(blank=True, null=True),
),
("comment", models.TextField(blank=True)),
("local_asn", peering.fields.ASNField(default=0)),
(
"relationship",
models.CharField(
choices=[
("private-peering", "Private Peering"),
("transit-provider", "Transit Provider"),
("customer", "Customer"),
],
help_text="Relationship with the remote peer.",
max_length=50,
),
),
(
"autonomous_system",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="peering.AutonomousSystem",
),
),
("last_established_state", models.DateTimeField(blank=True, null=True)),
(
"export_routing_policies",
models.ManyToManyField(
blank=True,
related_name="directpeeringsession_export_routing_policies",
to="peering.RoutingPolicy",
),
),
(
"import_routing_policies",
models.ManyToManyField(
blank=True,
related_name="directpeeringsession_import_routing_policies",
to="peering.RoutingPolicy",
),
),
],
options={"abstract": False},
),
migrations.CreateModel(
name="InternetExchangePeeringSession",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("ip_address", models.GenericIPAddressField()),
("comment", models.TextField(blank=True)),
(
"autonomous_system",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="peering.AutonomousSystem",
),
),
(
"internet_exchange",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to="peering.InternetExchange",
),
),
("enabled", models.BooleanField(default=True)),
(
"bgp_state",
models.CharField(
blank=True,
choices=[
("idle", "Idle"),
("connect", "Connect"),
("active", "Active"),
("opensent", "OpenSent"),
("openconfirm", "OpenConfirm"),
("established", "Established"),
],
max_length=50,
null=True,
),
),
(
"advertised_prefix_count",
models.PositiveIntegerField(blank=True, null=True),
),
(
"received_prefix_count",
models.PositiveIntegerField(blank=True, null=True),
),
("password", models.CharField(blank=True, max_length=255, null=True)),
("created", models.DateTimeField(auto_now_add=True, null=True)),
("updated", models.DateTimeField(auto_now=True, null=True)),
("last_established_state", models.DateTimeField(blank=True, null=True)),
(
"export_routing_policies",
models.ManyToManyField(
blank=True,
related_name="internetexchangepeeringsession_export_routing_policies",
to="peering.RoutingPolicy",
),
),
(
"import_routing_policies",
models.ManyToManyField(
blank=True,
related_name="internetexchangepeeringsession_import_routing_policies",
to="peering.RoutingPolicy",
),
),
],
),
migrations.RunPython(
code=forward_transition_from_none_to_zero,
reverse_code=reverse_transition_from_none_to_zero,
),
migrations.AlterField(
model_name="autonomoussystem",
name="ipv4_max_prefixes",
field=models.PositiveIntegerField(blank=True, default=0),
),
migrations.AlterField(
model_name="autonomoussystem",
name="ipv6_max_prefixes",
field=models.PositiveIntegerField(blank=True, default=0),
),
migrations.AlterField(
model_name="directpeeringsession",
name="advertised_prefix_count",
field=models.PositiveIntegerField(blank=True, default=0),
),
migrations.AlterField(
model_name="directpeeringsession",
name="received_prefix_count",
field=models.PositiveIntegerField(blank=True, default=0),
),
migrations.AlterField(
model_name="internetexchange",
name="peeringdb_id",
field=models.PositiveIntegerField(blank=True, default=0),
),
migrations.AlterField(
model_name="internetexchangepeeringsession",
name="advertised_prefix_count",
field=models.PositiveIntegerField(blank=True, default=0),
),
migrations.AlterField(
model_name="internetexchangepeeringsession",
name="received_prefix_count",
field=models.PositiveIntegerField(blank=True, default=0),
),
migrations.RunPython(
code=forward_transition_from_minus_one_to_zero,
reverse_code=reverse_transition_from_minus_one_to_zero,
),
migrations.AddField(
model_name="autonomoussystem",
name="potential_internet_exchange_peering_sessions",
field=django.contrib.postgres.fields.ArrayField(
base_field=models.GenericIPAddressField(),
blank=True,
default=list,
size=None,
),
),
migrations.RunPython(
code=forward_transition_from_none_to_empty_list,
reverse_code=reverse_transition_from_none_to_empty_list,
),
migrations.AddField(
model_name="internetexchangepeeringsession",
name="is_route_server",
field=models.BooleanField(blank=True, default=False),
),
migrations.AlterModelOptions(
name="internetexchange",
options={
"ordering": ["name"],
"permissions": [
(
"view_configuration",
"Can view Internet Exchange's configuration",
),
(
"deploy_configuration",
"Can deploy Internet Exchange's configuration",
),
],
},
),
migrations.AddField(
model_name="directpeeringsession",
name="router",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="peering.Router",
),
),
migrations.AlterModelOptions(
name="directpeeringsession",
options={"ordering": ["autonomous_system", "ip_address"]},
),
migrations.AlterModelOptions(
name="internetexchangepeeringsession",
options={"ordering": ["autonomous_system", "ip_address"]},
),
migrations.AlterField(
model_name="router",
name="platform",
field=models.CharField(
blank=True,
choices=[
("junos", "Juniper JUNOS"),
("iosxr", "Cisco IOS-XR"),
("ios", "Cisco IOS"),
("nxos", "Cisco NX-OS"),
("eos", "Arista EOS"),
("", "Other"),
],
help_text="The router platform, used to interact with it",
max_length=50,
),
),
migrations.AddField(
model_name="router",
name="encrypt_passwords",
field=models.BooleanField(
blank=True,
default=True,
help_text="Try to encrypt passwords in router's configuration",
),
),
migrations.AlterModelOptions(
name="routingpolicy",
options={
"ordering": ["-weight", "name"],
"verbose_name_plural": "routing policies",
},
),
migrations.AddField(
model_name="routingpolicy",
name="weight",
field=models.PositiveSmallIntegerField(
default=0, help_text="The higher the number, the higher the priority"
),
),
migrations.AlterField(
model_name="routingpolicy",
name="type",
field=models.CharField(
choices=[
("export-policy", "Export"),
("import-policy", "Import"),
("import-export-policy", "Import and Export"),
],
default="import-policy",
max_length=50,
),
),
migrations.AddField(
model_name="autonomoussystem",
name="contact_email",
field=models.EmailField(
blank=True, max_length=254, verbose_name="Contact E-mail"
),
),
migrations.AddField(
model_name="autonomoussystem",
name="contact_name",
field=models.CharField(blank=True, max_length=50),
),
migrations.AddField(
model_name="autonomoussystem",
name="contact_phone",
field=models.CharField(blank=True, max_length=20),
),
migrations.AddField(
model_name="routingpolicy",
name="address_family",
field=models.PositiveSmallIntegerField(
choices=[(0, "All"), (4, "IPv4"), (6, "IPv6")], default=0
),
),
migrations.AlterField(
model_name="autonomoussystem",
name="potential_internet_exchange_peering_sessions",
field=django.contrib.postgres.fields.ArrayField(
base_field=netfields.fields.InetAddressField(max_length=39),
blank=True,
default=list,
size=None,
),
),
migrations.AlterField(
model_name="directpeeringsession",
name="ip_address",
field=netfields.fields.InetAddressField(max_length=39),
),
migrations.AlterField(
model_name="internetexchange",
name="ipv4_address",
field=netfields.fields.InetAddressField(
blank=True,
max_length=39,
null=True,
validators=[utils.validators.AddressFamilyValidator(4)],
),
),
migrations.AlterField(
model_name="internetexchange",
name="ipv6_address",
field=netfields.fields.InetAddressField(
blank=True,
max_length=39,
null=True,
validators=[utils.validators.AddressFamilyValidator(6)],
),
),
migrations.AlterField(
model_name="internetexchangepeeringsession",
name="ip_address",
field=netfields.fields.InetAddressField(max_length=39),
),
migrations.AlterField(
model_name="routingpolicy",
name="type",
field=models.CharField(
choices=[
("export-policy", "Export"),
("import-policy", "Import"),
("import-export-policy", "Import+Export"),
],
default="import-policy",
max_length=50,
),
),
migrations.AddField(
model_name="directpeeringsession",
name="multihop_ttl",
field=peering.fields.TTLField(
blank=True,
default=1,
help_text="Use a value greater than 1 for BGP multihop sessions",
verbose_name="Multihop TTL",
),
),
migrations.AddField(
model_name="internetexchangepeeringsession",
name="multihop_ttl",
field=peering.fields.TTLField(
blank=True,
default=1,
help_text="Use a value greater than 1 for BGP multihop sessions",
verbose_name="Multihop TTL",
),
),
migrations.AddField(
model_name="router",
name="use_netbox",
field=models.BooleanField(
blank=True,
default=False,
help_text="Use NetBox to communicate instead of NAPALM",
),
),
migrations.AddField(
model_name="autonomoussystem",
name="export_routing_policies",
field=models.ManyToManyField(
blank=True,
related_name="autonomoussystem_export_routing_policies",
to="peering.RoutingPolicy",
),
),
migrations.AddField(
model_name="autonomoussystem",
name="import_routing_policies",
field=models.ManyToManyField(
blank=True,
related_name="autonomoussystem_import_routing_policies",
to="peering.RoutingPolicy",
),
),
migrations.CreateModel(
name="BGPGroup",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("created", models.DateTimeField(auto_now_add=True, null=True)),
("updated", models.DateTimeField(auto_now=True, null=True)),
("name", models.CharField(max_length=128)),
("slug", models.SlugField(max_length=255, unique=True)),
("comments", models.TextField(blank=True)),
(
"communities",
models.ManyToManyField(blank=True, to="peering.Community"),
),
(
"export_routing_policies",
models.ManyToManyField(
blank=True,
related_name="bgpgroup_export_routing_policies",
to="peering.RoutingPolicy",
),
),
(
"import_routing_policies",
models.ManyToManyField(
blank=True,
related_name="bgpgroup_import_routing_policies",
to="peering.RoutingPolicy",
),
),
(
"bgp_session_states_update",
models.DateTimeField(blank=True, null=True),
),
("check_bgp_session_states", models.BooleanField(default=False)),
(
"tags",
taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="taggit.TaggedItem",
to="taggit.Tag",
verbose_name="Tags",
),
),
],
options={"verbose_name": "BGP group", "ordering": ["name"]},
),
migrations.AddField(
model_name="directpeeringsession",
name="bgp_group",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="peering.BGPGroup",
verbose_name="BGP Group",
),
),
migrations.AddField(
model_name="router",
name="configuration_template",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="peering.ConfigurationTemplate",
),
),
migrations.AlterModelOptions(
name="router",
options={
"ordering": ["name"],
"permissions": [
("view_configuration", "Can view router's configuration"),
("deploy_configuration", "Can deploy router's configuration"),
],
},
),
migrations.AlterField(
model_name="router",
name="encrypt_passwords",
field=models.BooleanField(
blank=True,
default=False,
help_text="Try to encrypt passwords for peering sessions",
),
),
migrations.AlterField(
model_name="internetexchange",
name="slug",
field=models.SlugField(max_length=255, unique=True),
),
migrations.AlterField(
model_name="routingpolicy",
name="slug",
field=models.SlugField(max_length=255, unique=True),
),
migrations.RenameModel(old_name="ConfigurationTemplate", new_name="Template"),
migrations.AddField(
model_name="template",
name="type",
field=models.CharField(
choices=[("configuration", "Configuration"), ("email", "E-mail")],
default="configuration",
max_length=50,
),
),
migrations.AlterField(
model_name="internetexchange",
name="configuration_template",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="peering.Template",
),
),
migrations.AlterField(
model_name="router",
name="configuration_template",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to="peering.Template",
),
),
migrations.AlterModelOptions(
name="community",
options={
"ordering": ["value", "name"],
"verbose_name_plural": "communities",
},
),
migrations.AddField(
model_name="directpeeringsession",
name="local_ip_address",
field=netfields.fields.InetAddressField(
blank=True, max_length=39, null=True
),
),
migrations.AddField(
model_name="autonomoussystem",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="taggit.TaggedItem",
to="taggit.Tag",
verbose_name="Tags",
),
),
migrations.AddField(
model_name="community",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="taggit.TaggedItem",
to="taggit.Tag",
verbose_name="Tags",
),
),
migrations.AddField(
model_name="directpeeringsession",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="taggit.TaggedItem",
to="taggit.Tag",
verbose_name="Tags",
),
),
migrations.AddField(
model_name="internetexchange",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="taggit.TaggedItem",
to="taggit.Tag",
verbose_name="Tags",
),
),
migrations.AddField(
model_name="internetexchangepeeringsession",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="taggit.TaggedItem",
to="taggit.Tag",
verbose_name="Tags",
),
),
migrations.AddField(
model_name="router",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="taggit.TaggedItem",
to="taggit.Tag",
verbose_name="Tags",
),
),
migrations.AddField(
model_name="routingpolicy",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="taggit.TaggedItem",
to="taggit.Tag",
verbose_name="Tags",
),
),
migrations.AddField(
model_name="template",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="taggit.TaggedItem",
to="taggit.Tag",
verbose_name="Tags",
),
),
migrations.RenameField(
model_name="autonomoussystem", old_name="comment", new_name="comments"
),
migrations.RenameField(
model_name="community", old_name="comment", new_name="comments"
),
migrations.RenameField(
model_name="directpeeringsession", old_name="comment", new_name="comments"
),
migrations.RenameField(
model_name="internetexchange", old_name="comment", new_name="comments"
),
migrations.RenameField(
model_name="internetexchangepeeringsession",
old_name="comment",
new_name="comments",
),
migrations.RenameField(
model_name="router", old_name="comment", new_name="comments"
),
migrations.RenameField(
model_name="routingpolicy", old_name="comment", new_name="comments"
),
migrations.RenameField(
model_name="template", old_name="comment", new_name="comments"
),
migrations.AlterModelOptions(name="internetexchange", options={}),
migrations.RemoveField(
model_name="internetexchange", name="configuration_template"
),
migrations.RunPython(code=add_permissions, reverse_code=remove_permissions),
migrations.AlterField(
model_name="autonomoussystem",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="utils.TaggedItem",
to="utils.Tag",
verbose_name="Tags",
),
),
migrations.AlterField(
model_name="bgpgroup",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="utils.TaggedItem",
to="utils.Tag",
verbose_name="Tags",
),
),
migrations.AlterField(
model_name="community",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="utils.TaggedItem",
to="utils.Tag",
verbose_name="Tags",
),
),
migrations.AlterField(
model_name="directpeeringsession",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="utils.TaggedItem",
to="utils.Tag",
verbose_name="Tags",
),
),
migrations.AlterField(
model_name="internetexchange",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="utils.TaggedItem",
to="utils.Tag",
verbose_name="Tags",
),
),
migrations.AlterField(
model_name="internetexchangepeeringsession",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="utils.TaggedItem",
to="utils.Tag",
verbose_name="Tags",
),
),
migrations.AlterField(
model_name="router",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="utils.TaggedItem",
to="utils.Tag",
verbose_name="Tags",
),
),
migrations.AlterField(
model_name="routingpolicy",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="utils.TaggedItem",
to="utils.Tag",
verbose_name="Tags",
),
),
migrations.AlterField(
model_name="template",
name="tags",
field=taggit.managers.TaggableManager(
help_text="A comma-separated list of tags.",
through="utils.TaggedItem",
to="utils.Tag",
verbose_name="Tags",
),
),
migrations.AlterModelOptions(
name="autonomoussystem",
options={
"ordering": ["asn"],
"permissions": [("send_email", "Can send e-mails to AS contact")],
},
),
migrations.AddField(
model_name="directpeeringsession",
name="encrypted_password",
field=models.CharField(blank=True, max_length=255, null=True),
),
migrations.AddField(
model_name="internetexchangepeeringsession",
name="encrypted_password",
field=models.CharField(blank=True, max_length=255, null=True),
),
]
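        # A sketch of how this migration set would be applied (assumed command;
        # the app label "peering" is inferred from the to="peering.Router"
        # references above):
        #
        #   python manage.py migrate peering
        #
        # Each RunPython step above stays reversible because it supplies a
        # reverse_code callback alongside its forward code.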
| [
"[email protected]"
] | |
2c72c0fd9afadc5369dafc83a72510e88f785872 | d70a16f353819ff858dbe6974916a936a85a3c0e | /api/migrations/0003_auto_20201217_1941.py | 60ba065342ebb9755ec4749f75c6cf4cc1ac6880 | [] | no_license | mahmud-sajib/FBuzz-Task | 7fa69a35d1dfe069ed48e2956d1eff16cf953c74 | a57bc031911fd7259c68890a953d9d8175246f73 | refs/heads/master | 2023-02-02T05:56:01.208849 | 2020-12-19T07:03:24 | 2020-12-19T07:03:24 | 321,357,770 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 467 | py | # Generated by Django 3.1 on 2020-12-17 13:41
import api.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0002_auto_20201217_1751'),
]
operations = [
migrations.AlterField(
model_name='cvfileupload',
name='document',
field=models.FileField(upload_to='documents/', validators=[api.validators.validate_file_size]),
),
]
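    # Note: api/validators.py must define validate_file_size for the FileField
    # above to import. A minimal sketch of such a validator (assumed
    # implementation, the real module is not part of this dump):
    #
    #   from django.core.exceptions import ValidationError
    #
    #   def validate_file_size(value):
    #       limit = 5 * 1024 * 1024  # assumed 5 MB cap
    #       if value.size > limit:
    #           raise ValidationError("File is too large.")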
| [
"[email protected]"
] | |
85f8207c1a52da4c91cfcc22bb76bd8dd60589aa | 40fa413a9ba362ab8cc2474269f83bb87847cda2 | /setup.py | a7a9aee54a8f3d08813d67be79e79b61855eaffc | [] | no_license | Peder2911/leanfeeder | c366563527c6e6b65cf46f8564596d1637337026 | f50ed3845aac21b6eed81eb1ef72c39175c87c8d | refs/heads/master | 2023-01-01T13:55:49.037014 | 2020-10-15T12:02:43 | 2020-10-15T12:02:43 | 301,992,715 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 765 | py |
import setuptools
with open("README.md") as f:
    long_description = f.read()
setuptools.setup(
name = "leanfeeder",
version = "0.0.1",
author = "Peder G. Landsverk",
author_email = "[email protected]",
description = "Tool for pushing data to a Postgres DB without too much hassle.",
long_description = long_description,
    long_description_content_type = "text/markdown",
url = "https://www.github.com/peder2911/leanfeeder",
packages = setuptools.find_packages(),
scripts=["bin/leanf"],
python_requires=">=3.7",
install_requires=[
"strconv>=0.4.0",
"psycopg2>=2.8.0",
"fire>=0.3.0",
"python-dateutil>=2.8.0"
])
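# Install sketch (assumed workflow; the console script name comes from the
# scripts=["bin/leanf"] entry above):
#
#   pip install .
#   leanf --help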
| [
"[email protected]"
] | |
18c298b110833ad5b1860e533427d320c882c52d | cca5ceb42b09e567d79fcb46f298757c1ff04447 | /ObjectOriented/DataStructure.py | 45fb0eea3cc51bded3a1c13868377b5c1980c3d7 | [] | no_license | NishantGhanate/PythonScripts | 92933237720e624a0f672729743a98557bea79d6 | 60b92984d21394002c0d3920bc448c698e0402ca | refs/heads/master | 2022-12-13T11:56:14.442286 | 2022-11-18T14:26:33 | 2022-11-18T14:26:33 | 132,910,530 | 25 | 15 | null | 2022-12-09T09:03:58 | 2018-05-10T14:18:33 | Python | UTF-8 | Python | false | false | 1,468 | py | class Que:
    def __init__(self, contents):
        self._hiddenlist = list(contents)  # backing list; the front (index 0) is the active end
    def push(self, value):
        self._hiddenlist.insert(0, value)  # insert at the front
        print(self._hiddenlist)
    def pop(self):
        if len(self._hiddenlist):
            self._hiddenlist.pop(0)  # remove from the front
            print(self._hiddenlist)
        else:
            print("Empty Que")
que = Que([1, 2.25, 3.0, 4, 1234.5])
que.push(0)
que.pop()
class Node:
def __init__(self, dataValue ):
self.dataValue = dataValue
self.nextValue = None
class Slink:
def __init__(self):
self.headValue = None
    def printLink(self):
        # walk the chain from the head, printing each node's value
        printval = self.headValue
        while printval is not None:
            print(printval.dataValue)
            printval = printval.nextValue
    def atStart(self, newData):
        # insert a new node at the head of the list
        NewNode = Node(newData)
        NewNode.nextValue = self.headValue
        self.headValue = NewNode
# lis = Slink()
# lis.atStart("Sun")
# lis.atStart("Mon")
# lis.printLink()
class Stack:
    def __init__(self):
        self.stack = [10]  # the stack starts out seeded with one element
    def push(self, dataValue):
        self.stack.append(dataValue)
        return self.stack
    def pop(self):
        if len(self.stack) <= 0:
            return ("No element in the Stack")
        else:
            # str() so non-string items (e.g. the seed value 10) can be popped too
            return "This value pop = " + str(self.stack.pop())
# stack = Stack()
# stack.push("1")
# stack.push("2")
# stack.push("3")
# print(stack.pop())
# print(stack.push("5"))
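# Example session for the corrected Stack.pop() above (expected output shown
# in the trailing comments):
# stack = Stack()
# stack.push("a")        # -> [10, 'a']
# print(stack.pop())     # -> This value pop = a
# print(stack.pop())     # -> This value pop = 10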
| [
"[email protected]"
] | |
b24e0fde7456a79dbb630f1bb302c6eb6ffd15b7 | 14438f8c8bb4250a7fa8da0ecd40c5a4902bdfcd | /hunter/set-10/96.py | 2719ef730b5a7689a140c4b85bbfd134580da795 | [] | no_license | nikhilvarshney2/GUVI | c51b1fa3bd1026eb74fc536e938a14c2e92089b2 | 79717ae5b26540101169e512204fb7236f7c839f | refs/heads/master | 2020-04-01T00:40:27.699963 | 2019-04-30T13:46:46 | 2019-04-30T13:46:46 | 152,707,542 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | def num(z):
    if z < 9:
        return z
    # recursive step: solve for z-9, then append a trailing digit 9
    # (result = 10*num(z-9) + 9)
    return 9 + 10*num(z-9)
z = int(input())
print(num(z))
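# Hand-checked examples of the recursion above:
#   num(1)  -> 1
#   num(10) -> 19    # 9 + 10*num(1)
#   num(18) -> 99    # 9 + 10*num(9), and num(9) = 9 + 10*num(0) = 9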
| [
"[email protected]"
] | |
0c7ca209f5437826a7c1527f09d0f27b55e5d412 | ac2f4c7caaf7ccc51ebcb2d88020fb4842b3f493 | /install.py | e20927c124212f7697dea81c1e707305db393903 | [] | no_license | vaikuntht/TAMU-Latex-Styles | 48b89291cb5b65348303cfee4bc8424a61b44adb | 8c1f096bbe3140eef6e14d001fa9d81905a28258 | refs/heads/master | 2021-01-18T00:04:04.722714 | 2014-07-31T18:49:42 | 2014-07-31T18:49:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,686 | py | #!/usr/bin/env python
import os, sys, getopt, argparse, fnmatch, errno, subprocess, tempfile, platform, getpass, pprint, shutil
from subprocess import call
#program name available through the %(prog)s command
#can use prog="" in the ArgumentParser constructor
#can use the type=int option to make the parameters integers
#can use the action='append' option to make a list of options
#can use the default="" option to automatically set a parameter
parser = argparse.ArgumentParser(description="Install the TAMU-based LaTeX style files.",
                                 epilog="And those are the options available. Deal with it.")
group = parser.add_mutually_exclusive_group()
parser.add_argument("-nha","--nohash", help="Will run texhash command once the files are copied",
action="store_false")
group.add_argument("-q", "--quiet", help="decrease output verbosity to minimal amount",
action="store_true")
group.add_argument("-v", "--verbose", help="Increase output verbosity of lcg-cp (-v) or srm (-debug) commands",
action="store_true")
parser.add_argument('--version', action='version', version='%(prog)s 1.0')
parser.add_argument("-y", "--texlive_year", help="The texlive distribution year",
default="2014")
args = parser.parse_args()
if(args.verbose):
print 'Number of arguments:', len(sys.argv), 'arguments.'
print 'Argument List:', str(sys.argv)
print "Argument ", args, "\n"
QUIET = args.quiet
VERBOSE = args.verbose
DOHASH = args.nohash
TEXLIVE_YEAR = args.texlive_year
theme_path = ""
color_path = ""
outer_path = ""
def check_linux_folders():
global theme_path
global color_path
global outer_path
theme_path = "/usr/share/texmf/tex/latex/beamer/base/themes/theme/"
color_path = "/usr/share/texmf/tex/latex/beamer/base/themes/color/"
outer_path = "/usr/share/texmf/tex/latex/beamer/base/themes/outer/"
# To check if it is a directory (and it exists) use os.path.isdir
# To check if something exists (direcotry, file, or otherwise), use os.path.exists
theme = os.path.isdir(theme_path)
color = os.path.isdir(color_path)
outer = os.path.isdir(outer_path)
if not QUIET: print "Themes exists? " + str(theme)
if not QUIET: print "Color themes exists? " + str(color)
if not QUIET: print "Outer themes exists? " + str(outer)
if not theme:
print "ERROR::The path to the beamer themes ("+str(theme_path)+") does not exist."
print "Cannot continue."
sys.exit()
if not color:
print "ERROR::The path to the beamer colors ("+str(color_path)+") does not exist."
print "Cannot continue."
sys.exit()
if not outer:
print "ERROR::The path to the beamer outer themes ("+str(outer_path)+") does not exist."
print "Cannot continue."
sys.exit()
def check_osx_folders():
global theme_path
global color_path
global outer_path
theme_path = "/usr/local/texlive/"+TEXLIVE_YEAR+"/texmf-dist/tex/latex/beamer/themes/theme/"
color_path = "/usr/local/texlive/"+TEXLIVE_YEAR+"/texmf-dist/tex/latex/beamer/themes/color/"
outer_path = "/usr/local/texlive/"+TEXLIVE_YEAR+"/texmf-dist/tex/latex/beamer/themes/outer/"
theme = os.path.isdir(theme_path)
color = os.path.isdir(color_path)
outer = os.path.isdir(outer_path)
if not QUIET: print "Themes exists? " + str(theme)
if not QUIET: print "Color themes exists? " + str(color)
if not QUIET: print "Outer themes exists? " + str(outer)
if not theme:
print "ERROR::The path to the beamer themes ("+str(theme_path)+") does not exist."
print "Cannot continue."
sys.exit()
if not color:
print "ERROR::The path to the beamer colors ("+str(color_path)+") does not exist."
print "Cannot continue."
sys.exit()
if not outer:
print "ERROR::The path to the beamer outer themes ("+str(outer_path)+") does not exist."
print "Cannot continue."
sys.exit()
def privilege_check():
    user = getpass.getuser()
    if not QUIET: print "User = " + str(user)
    if user != 'root':
        print "Sorry, you are not \"root\" and do not have enough privileges to continue."
        sys.exit()
def run_checks():
print "************************************"
print "* Running checks on the system ... *"
print "************************************"
    privilege_check()
kernel = platform.system()
OS = ""
flavor = ""
version = ""
if kernel == 'Linux':
OS = "Linux"
flavor = platform.linux_distribution()[0]
version = platform.linux_distribution()[1]
if not QUIET: print str(flavor) + "(" + str(OS) + ")" + str(version)
check_linux_folders()
elif kernel == 'Darwin':
OS = "OSX"
flavor = "Unknown"
version = platform.mac_ver()[0]
if not QUIET: print str(OS) + " " + str(version)
check_osx_folders()
else:
print "ERROR::Unknown OS. Cannot confirm that installation will be successful. Process will not continue."
sys.exit()
print
def copy_set_of_files(file_map, folder):
    # each value in file_map is a tuple: element 0 is the destination path,
    # elements 1..n are source files relative to 'folder'
    for dst in file_map:
        if not QUIET: print "Doing folder " + str(dst) + " ... "
        for f in range(1, len(file_map[dst])):
            src = file_map[dst][f]
            dest = file_map[dst][0]
            if not QUIET: print "\tCopying " + str(folder) + str(src) + " to " + str(dest) + " ... ",
            shutil.copy2(folder + src, dest)
            if not QUIET: print "DONE"
def copy_files():
print "**********************************************"
print "* Copying the files to the correct paths ... *"
print "**********************************************"
copyfileBeamerDict = {
'theme' : (theme_path, "beamerthemeTAMU.sty"),
'color' : (color_path, "beamercolorthemetamu.sty", "beamercolorthemetamubox.sty"),
'outer' : (outer_path, "beamerouterthemeshadowTAMU.sty", "beamerouterthemesplittamu.sty", "UniversityLogos/beamerouterthemeTAMULogoBox.png", "ExperimentLogos/beamerouterthemeCMS.png","ExperimentLogos/beamerouterthemeCDF.png","LaboritoryLogos/beamerouterthemeCERN.png","LaboritoryLogos/beamerouterthemeFNAL.png")
}
if VERBOSE and not QUIET:
print "Dictionary"
print "----------"
pprint.pprint(copyfileBeamerDict)
print
copy_set_of_files(copyfileBeamerDict, "Beamer/")
print
def do_tex_hash():
print "***********************"
print "* Running texhash ... *"
print "***********************"
os.system("texhash")
run_checks()
copy_files()
if DOHASH:
do_tex_hash()
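# Invocation sketch (flags as defined by the argparse setup above; the script
# must run as root because of the privilege check):
#
#   sudo ./install.py                 # copy the styles, then run texhash
#   sudo ./install.py -y 2013 -v      # target another texlive year, verbose
#   sudo ./install.py --nohash        # copy the styles but skip texhash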
| [
"[email protected]"
] | |
fb708659d8576b28acdb88c0439ca493e36c5884 | 30c524146ac7c240b3f69a856a12f9d971e2f294 | /setup.py | a7138c22975f00b0e0c89fc5a9121d3aa768c383 | [
"MIT"
] | permissive | undercertainty/ipython_magic_sqlalchemy_schemadisplay | 7da1400b4b9cff520b3e185345c204f14ccb512d | bc22060f3125736eecf2cc4d7972eca9715fc0c3 | refs/heads/master | 2021-10-10T07:00:04.925288 | 2019-01-07T23:01:31 | 2019-01-07T23:01:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 273 | py | from setuptools import setup
setup(name='schemadisplay-magic',
packages=['schemadisplay_magic'],
install_requires=['ipython-sql', 'sqlalchemy_schemadisplay', 'graphviz'],
dependency_links=['git+https://github.com/fschulze/sqlalchemy_schemadisplay.git']
) | [
"[email protected]"
] | |
1b3d280c7403941d0bf096038fcd3c6fb955bb16 | c588305899ff4bc1d24c2bc213edce1c16621113 | /21/21_1.py | 02b5535cb8428832fa0ea9383dad49ac806703b3 | [] | no_license | nemesmarci/Advent-of-Code-2015 | fa2953916e67d6ad5b3218de1bc7418ff942ab6a | 53db8d0e0140f94a80d307b3cec3e065a235ba53 | refs/heads/master | 2021-12-31T14:08:52.640576 | 2020-01-10T18:13:59 | 2021-12-29T19:35:09 | 160,928,653 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 49 | py | from common import find_cost
print(find_cost())
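# find_cost() is defined in the puzzle's shared common module (common.py),
# which is not included in this file.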
| [
"[email protected]"
] | |
f12ecdec195d21b07ddb86e45226d52b6dbaf079 | a5c2f4ada2fb4436784a785a5d598546d3b3284c | /Main/migrations/0001_initial.py | 305f99beddc1606764e5d5472f2e0f219b5ffacf | [] | no_license | sakibovi123/chat_applicationv1.0.1 | 1c5d25c1229434b4c6019fcf4dbabf53324d90df | 7b5db530e22743959df215347ff1e644cbbfb4e0 | refs/heads/master | 2023-07-13T22:22:02.295141 | 2021-08-28T07:35:27 | 2021-08-28T07:35:27 | 396,916,167 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 778 | py | # Generated by Django 3.2.6 on 2021-08-16 11:17
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ChatRoom',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField()),
('message', models.TextField()),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| [
"[email protected]"
] | |
1988f3bfa396617797d1effd273ed01d83a05ec9 | 92acb2bdfcdb594a7f98b24093f4711879e956ca | /dvaapp/admin.py | 7be6bf0aa9a7a506025392cfac7e62ea6530b6cf | [
"BSD-3-Clause",
"MIT",
"Apache-2.0"
] | permissive | cynwpu/DeepVideoAnalytics | e1f0b2e00a2671014bdcae99bf11c180bf35a30e | c95913a2967d6d17e71bb1b703f99c00c483bcdc | refs/heads/master | 2021-05-05T15:04:50.650488 | 2017-09-10T20:01:31 | 2017-09-10T20:01:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,361 | py | from django.contrib import admin
from .models import Video, Frame, TEvent, IndexEntries, QueryResults, DVAPQL, VDNServer,\
LOPQCodes, Region, Tube, Detector, Segment, DeletedVideo, \
VideoLabel, FrameLabel, RegionLabel, TubeLabel, SegmentLabel, Label, ManagementAction, \
StoredDVAPQL, Analyzer, Indexer, Retriever, SystemState, Worker
@admin.register(SystemState)
class SystemStateAdmin(admin.ModelAdmin):
pass
@admin.register(Worker)
class WorkerAdmin(admin.ModelAdmin):
pass
@admin.register(Label)
class LabelAdmin(admin.ModelAdmin):
pass
@admin.register(VideoLabel)
class VideoLabelAdmin(admin.ModelAdmin):
pass
@admin.register(FrameLabel)
class FrameLabelAdmin(admin.ModelAdmin):
pass
@admin.register(SegmentLabel)
class SegmentLabelAdmin(admin.ModelAdmin):
pass
@admin.register(RegionLabel)
class RegionLabelAdmin(admin.ModelAdmin):
pass
@admin.register(TubeLabel)
class TubeLabelAdmin(admin.ModelAdmin):
pass
@admin.register(Segment)
class SegmentAdmin(admin.ModelAdmin):
pass
@admin.register(Region)
class RegionAdmin(admin.ModelAdmin):
pass
@admin.register(Video)
class VideoAdmin(admin.ModelAdmin):
pass
@admin.register(DeletedVideo)
class DeletedVideoAdmin(admin.ModelAdmin):
pass
@admin.register(QueryResults)
class QueryResultsAdmin(admin.ModelAdmin):
pass
@admin.register(DVAPQL)
class DVAPQLAdmin(admin.ModelAdmin):
pass
@admin.register(Frame)
class FrameAdmin(admin.ModelAdmin):
pass
@admin.register(IndexEntries)
class IndexEntriesAdmin(admin.ModelAdmin):
pass
@admin.register(VDNServer)
class VDNServerAdmin(admin.ModelAdmin):
pass
@admin.register(TEvent)
class TEventAdmin(admin.ModelAdmin):
pass
@admin.register(LOPQCodes)
class LOPQCodesAdmin(admin.ModelAdmin):
pass
@admin.register(Tube)
class TubeAdmin(admin.ModelAdmin):
pass
@admin.register(Detector)
class DetectorAdmin(admin.ModelAdmin):
pass
@admin.register(Analyzer)
class AnalyzerAdmin(admin.ModelAdmin):
pass
@admin.register(Indexer)
class IndexerAdmin(admin.ModelAdmin):
pass
@admin.register(Retriever)
class RetrieverAdmin(admin.ModelAdmin):
pass
@admin.register(ManagementAction)
class ManagementActionAdmin(admin.ModelAdmin):
pass
@admin.register(StoredDVAPQL)
class StoredDVAPQLAdmin(admin.ModelAdmin):
pass
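# Every admin above keeps the ModelAdmin defaults. A listing can be tuned by
# replacing `pass` with options, e.g. (field names below are illustrative
# only, not taken from the actual models):
#
# @admin.register(Video)
# class VideoAdmin(admin.ModelAdmin):
#     list_display = ("id", "name")
#     search_fields = ("name",)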
| [
"[email protected]"
] | |
d526bcd601974fc1ebcbe80a5e2954a3412cb522 | 5d9932a1abeae21b8201368e5cf465680f106761 | /data_ccxt/probit.py | 330b5839012523f090ae27ca23e35c190244345b | [] | no_license | qqzhangjian789/text | 5dc6086e55d8a9494b889fa40cc9730da6bf5940 | 938be0df0a965aacf13cfb942548b8d2a1c7cec0 | refs/heads/master | 2023-05-04T11:38:47.178345 | 2021-05-21T17:44:13 | 2021-05-21T17:44:13 | 286,178,737 | 1 | 6 | null | null | null | null | UTF-8 | Python | false | false | 48,360 | py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from data_ccxt.base.exchange import Exchange
import math
from data_ccxt.base.errors import ExchangeError
from data_ccxt.base.errors import AuthenticationError
from data_ccxt.base.errors import ArgumentsRequired
from data_ccxt.base.errors import BadRequest
from data_ccxt.base.errors import BadSymbol
from data_ccxt.base.errors import BadResponse
from data_ccxt.base.errors import InsufficientFunds
from data_ccxt.base.errors import InvalidAddress
from data_ccxt.base.errors import InvalidOrder
from data_ccxt.base.errors import DDoSProtection
from data_ccxt.base.errors import RateLimitExceeded
from data_ccxt.base.errors import ExchangeNotAvailable
from data_ccxt.base.decimal_to_precision import TRUNCATE
from data_ccxt.base.decimal_to_precision import TICK_SIZE
class probit(Exchange):
def describe(self):
return self.deep_extend(super(probit, self).describe(), {
'id': 'probit',
'name': 'ProBit',
'countries': ['SC', 'KR'], # Seychelles, South Korea
'rateLimit': 250, # ms
'has': {
'CORS': True,
'fetchTime': True,
'fetchMarkets': True,
'fetchCurrencies': True,
'fetchTickers': True,
'fetchTicker': True,
'fetchOHLCV': True,
'fetchOrderBook': True,
'fetchTrades': True,
'fetchBalance': True,
'createOrder': True,
'createMarketOrder': True,
'cancelOrder': True,
'fetchOrder': True,
'fetchOpenOrders': True,
'fetchClosedOrders': True,
'fetchMyTrades': True,
'fetchDepositAddress': True,
'withdraw': True,
'signIn': True,
},
'timeframes': {
'1m': '1m',
'3m': '3m',
'5m': '5m',
'10m': '10m',
'15m': '15m',
'30m': '30m',
'1h': '1h',
'4h': '4h',
'6h': '6h',
'12h': '12h',
'1d': '1D',
'1w': '1W',
'1M': '1M',
},
'version': 'v1',
'urls': {
'logo': 'https://user-images.githubusercontent.com/51840849/79268032-c4379480-7ea2-11ea-80b3-dd96bb29fd0d.jpg',
'api': {
'accounts': 'https://accounts.probit.com',
'public': 'https://api.probit.com/api/exchange',
'private': 'https://api.probit.com/api/exchange',
},
'www': 'https://www.probit.com',
'doc': [
'https://docs-en.probit.com',
'https://docs-ko.probit.com',
],
'fees': 'https://support.probit.com/hc/en-us/articles/360020968611-Trading-Fees',
'referral': 'https://www.probit.com/r/34608773',
},
'api': {
'public': {
'get': [
'market',
'currency',
'currency_with_platform',
'time',
'ticker',
'order_book',
'trade',
'candle',
],
},
'private': {
'post': [
'new_order',
'cancel_order',
'withdrawal',
],
'get': [
'balance',
'order',
'open_order',
'order_history',
'trade_history',
'deposit_address',
],
},
'accounts': {
'post': [
'token',
],
},
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'maker': 0.2 / 100,
'taker': 0.2 / 100,
},
},
'exceptions': {
'exact': {
'UNAUTHORIZED': AuthenticationError,
'INVALID_ARGUMENT': BadRequest, # Parameters are not a valid format, parameters are empty, or out of range, or a parameter was sent when not required.
'TRADING_UNAVAILABLE': ExchangeNotAvailable,
'NOT_ENOUGH_BALANCE': InsufficientFunds,
'NOT_ALLOWED_COMBINATION': BadRequest,
'INVALID_ORDER': InvalidOrder, # Requested order does not exist, or it is not your order
'RATE_LIMIT_EXCEEDED': RateLimitExceeded, # You are sending requests too frequently. Please try it later.
'MARKET_UNAVAILABLE': ExchangeNotAvailable, # Market is closed today
'INVALID_MARKET': BadSymbol, # Requested market is not exist
'MARKET_CLOSED': BadSymbol, # {"errorCode":"MARKET_CLOSED"}
'INVALID_CURRENCY': BadRequest, # Requested currency is not exist on ProBit system
'TOO_MANY_OPEN_ORDERS': DDoSProtection, # Too many open orders
'DUPLICATE_ADDRESS': InvalidAddress, # Address already exists in withdrawal address list
},
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
},
'precisionMode': TICK_SIZE,
'options': {
'createMarketBuyOrderRequiresPrice': True,
'timeInForce': {
'limit': 'gtc',
'market': 'ioc',
},
},
'commonCurrencies': {
'BTCBEAR': 'BEAR',
'BTCBULL': 'BULL',
'CBC': 'CryptoBharatCoin',
'HBC': 'Hybrid Bank Cash',
'UNI': 'UNICORN Token',
},
})
def fetch_markets(self, params={}):
response = self.publicGetMarket(params)
#
# {
# "data":[
# {
# "id":"MONA-USDT",
# "base_currency_id":"MONA",
# "quote_currency_id":"USDT",
# "min_price":"0.001",
# "max_price":"9999999999999999",
# "price_increment":"0.001",
# "min_quantity":"0.0001",
# "max_quantity":"9999999999999999",
# "quantity_precision":4,
# "min_cost":"1",
# "max_cost":"9999999999999999",
# "cost_precision":8,
# "taker_fee_rate":"0.2",
# "maker_fee_rate":"0.2",
# "show_in_ui":true,
# "closed":false
# },
# ]
# }
#
markets = self.safe_value(response, 'data', [])
result = []
for i in range(0, len(markets)):
market = markets[i]
id = self.safe_string(market, 'id')
baseId = self.safe_string(market, 'base_currency_id')
quoteId = self.safe_string(market, 'quote_currency_id')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
closed = self.safe_value(market, 'closed', False)
active = not closed
amountPrecision = self.safe_integer(market, 'quantity_precision')
costPrecision = self.safe_integer(market, 'cost_precision')
precision = {
'amount': 1 / math.pow(10, amountPrecision),
'price': self.safe_float(market, 'price_increment'),
'cost': 1 / math.pow(10, costPrecision),
}
takerFeeRate = self.safe_float(market, 'taker_fee_rate')
makerFeeRate = self.safe_float(market, 'maker_fee_rate')
result.append({
'id': id,
'info': market,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': active,
'precision': precision,
'taker': takerFeeRate / 100,
'maker': makerFeeRate / 100,
'limits': {
'amount': {
'min': self.safe_float(market, 'min_quantity'),
'max': self.safe_float(market, 'max_quantity'),
},
'price': {
'min': self.safe_float(market, 'min_price'),
'max': self.safe_float(market, 'max_price'),
},
'cost': {
'min': self.safe_float(market, 'min_cost'),
'max': self.safe_float(market, 'max_cost'),
},
},
})
return result
def fetch_currencies(self, params={}):
response = self.publicGetCurrencyWithPlatform(params)
#
# {
# "data":[
# {
# "id":"USDT",
# "display_name":{"ko-kr":"테더","en-us":"Tether"},
# "show_in_ui":true,
# "platform":[
# {
# "id":"ETH",
# "priority":1,
# "deposit":true,
# "withdrawal":true,
# "currency_id":"USDT",
# "precision":6,
# "min_confirmation_count":15,
# "require_destination_tag":false,
# "display_name":{"name":{"ko-kr":"ERC-20","en-us":"ERC-20"}},
# "min_deposit_amount":"0",
# "min_withdrawal_amount":"1",
# "withdrawal_fee":[
# {"amount":"0.01","priority":2,"currency_id":"ETH"},
# {"amount":"1.5","priority":1,"currency_id":"USDT"},
# ],
# "deposit_fee":{},
# "suspended_reason":"",
# "deposit_suspended":false,
# "withdrawal_suspended":false
# },
# {
# "id":"OMNI",
# "priority":2,
# "deposit":true,
# "withdrawal":true,
# "currency_id":"USDT",
# "precision":6,
# "min_confirmation_count":3,
# "require_destination_tag":false,
# "display_name":{"name":{"ko-kr":"OMNI","en-us":"OMNI"}},
# "min_deposit_amount":"0",
# "min_withdrawal_amount":"5",
# "withdrawal_fee":[{"amount":"5","priority":1,"currency_id":"USDT"}],
# "deposit_fee":{},
# "suspended_reason":"wallet_maintenance",
# "deposit_suspended":false,
# "withdrawal_suspended":false
# }
# ],
# "stakeable":false,
# "unstakeable":false,
# "auto_stake":false,
# "auto_stake_amount":"0"
# }
# ]
# }
#
currencies = self.safe_value(response, 'data')
result = {}
for i in range(0, len(currencies)):
currency = currencies[i]
id = self.safe_string(currency, 'id')
code = self.safe_currency_code(id)
displayName = self.safe_value(currency, 'display_name')
name = self.safe_string(displayName, 'en-us')
platforms = self.safe_value(currency, 'platform', [])
platformsByPriority = self.sort_by(platforms, 'priority')
platform = self.safe_value(platformsByPriority, 0, {})
precision = self.safe_integer(platform, 'precision')
depositSuspended = self.safe_value(platform, 'deposit_suspended')
withdrawalSuspended = self.safe_value(platform, 'withdrawal_suspended')
active = not (depositSuspended and withdrawalSuspended)
withdrawalFees = self.safe_value(platform, 'withdrawal_fee', {})
fees = []
# sometimes the withdrawal fee is an empty object
# [{'amount': '0.015', 'priority': 1, 'currency_id': 'ETH'}, {}]
for j in range(0, len(withdrawalFees)):
withdrawalFee = withdrawalFees[j]
amount = self.safe_float(withdrawalFee, 'amount')
priority = self.safe_integer(withdrawalFee, 'priority')
if (amount is not None) and (priority is not None):
fees.append(withdrawalFee)
withdrawalFeesByPriority = self.sort_by(fees, 'priority')
withdrawalFee = self.safe_value(withdrawalFeesByPriority, 0, {})
fee = self.safe_float(withdrawalFee, 'amount')
result[code] = {
'id': id,
'code': code,
'info': currency,
'name': name,
'active': active,
'fee': fee,
'precision': precision,
'limits': {
'amount': {
'min': math.pow(10, -precision),
'max': math.pow(10, precision),
},
'price': {
'min': math.pow(10, -precision),
'max': math.pow(10, precision),
},
'cost': {
'min': None,
'max': None,
},
'deposit': {
'min': self.safe_float(platform, 'min_deposit_amount'),
'max': None,
},
'withdraw': {
'min': self.safe_float(platform, 'min_withdrawal_amount'),
'max': None,
},
},
}
return result
def fetch_balance(self, params={}):
self.load_markets()
response = self.privateGetBalance(params)
#
# {
# data: [
# {
# "currency_id":"XRP",
# "total":"100",
# "available":"0",
# }
# ]
# }
#
data = self.safe_value(response, 'data')
result = {'info': data}
for i in range(0, len(data)):
balance = data[i]
currencyId = self.safe_string(balance, 'currency_id')
code = self.safe_currency_code(currencyId)
account = self.account()
account['total'] = self.safe_float(balance, 'total')
account['free'] = self.safe_float(balance, 'available')
result[code] = account
return self.parse_balance(result)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'market_id': market['id'],
}
response = self.publicGetOrderBook(self.extend(request, params))
#
# {
# data: [
# {side: 'buy', price: '0.000031', quantity: '10'},
# {side: 'buy', price: '0.00356007', quantity: '4.92156877'},
# {side: 'sell', price: '0.1857', quantity: '0.17'},
# ]
# }
#
data = self.safe_value(response, 'data', [])
dataBySide = self.group_by(data, 'side')
return self.parse_order_book(dataBySide, None, 'buy', 'sell', 'price', 'quantity')
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
request = {}
if symbols is not None:
marketIds = self.market_ids(symbols)
request['market_ids'] = ','.join(marketIds)
response = self.publicGetTicker(self.extend(request, params))
#
# {
# "data":[
# {
# "last":"0.022902",
# "low":"0.021693",
# "high":"0.024093",
# "change":"-0.000047",
# "base_volume":"15681.986",
# "quote_volume":"360.514403624",
# "market_id":"ETH-BTC",
# "time":"2020-04-12T18:43:38.000Z"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_tickers(data, symbols)
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'market_ids': market['id'],
}
response = self.publicGetTicker(self.extend(request, params))
#
# {
# "data":[
# {
# "last":"0.022902",
# "low":"0.021693",
# "high":"0.024093",
# "change":"-0.000047",
# "base_volume":"15681.986",
# "quote_volume":"360.514403624",
# "market_id":"ETH-BTC",
# "time":"2020-04-12T18:43:38.000Z"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
ticker = self.safe_value(data, 0)
if ticker is None:
raise BadResponse(self.id + ' fetchTicker() returned an empty response')
return self.parse_ticker(ticker, market)
def parse_ticker(self, ticker, market=None):
#
# {
# "last":"0.022902",
# "low":"0.021693",
# "high":"0.024093",
# "change":"-0.000047",
# "base_volume":"15681.986",
# "quote_volume":"360.514403624",
# "market_id":"ETH-BTC",
# "time":"2020-04-12T18:43:38.000Z"
# }
#
timestamp = self.parse8601(self.safe_string(ticker, 'time'))
marketId = self.safe_string(ticker, 'market_id')
symbol = self.safe_symbol(marketId, market, '-')
close = self.safe_float(ticker, 'last')
change = self.safe_float(ticker, 'change')
percentage = None
open = None
if change is not None:
if close is not None:
open = close - change
percentage = (change / open) * 100
baseVolume = self.safe_float(ticker, 'base_volume')
quoteVolume = self.safe_float(ticker, 'quote_volume')
vwap = self.vwap(baseVolume, quoteVolume)
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': None,
'bidVolume': None,
'ask': None,
'askVolume': None,
'vwap': vwap,
'open': open,
'close': close,
'last': close,
'previousClose': None, # previous day close
'change': change,
'percentage': percentage,
'average': None,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
market = None
request = {
'limit': 100,
'start_time': self.iso8601(0),
'end_time': self.iso8601(self.milliseconds()),
}
if symbol is not None:
market = self.market(symbol)
request['market_id'] = market['id']
if since is not None:
request['start_time'] = self.iso8601(since)
if limit is not None:
request['limit'] = limit
response = self.privateGetTradeHistory(self.extend(request, params))
#
# {
# data: [
# {
# "id":"BTC-USDT:183566",
# "order_id":"17209376",
# "side":"sell",
# "fee_amount":"0.657396569175",
# "fee_currency_id":"USDT",
# "status":"settled",
# "price":"6573.96569175",
# "quantity":"0.1",
# "cost":"657.396569175",
# "time":"2018-08-10T06:06:46.000Z",
# "market_id":"BTC-USDT"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, market, since, limit)
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'market_id': market['id'],
'limit': 100,
'start_time': '1970-01-01T00:00:00.000Z',
'end_time': self.iso8601(self.milliseconds()),
}
if since is not None:
request['start_time'] = self.iso8601(since)
if limit is not None:
request['limit'] = limit
response = self.publicGetTrade(self.extend(request, params))
#
# {
# "data":[
# {
# "id":"ETH-BTC:3331886",
# "price":"0.022981",
# "quantity":"12.337",
# "time":"2020-04-12T20:55:42.371Z",
# "side":"sell",
# "tick_direction":"down"
# },
# {
# "id":"ETH-BTC:3331885",
# "price":"0.022982",
# "quantity":"6.472",
# "time":"2020-04-12T20:55:39.652Z",
# "side":"sell",
# "tick_direction":"down"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, market, since, limit)
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {
# "id":"ETH-BTC:3331886",
# "price":"0.022981",
# "quantity":"12.337",
# "time":"2020-04-12T20:55:42.371Z",
# "side":"sell",
# "tick_direction":"down"
# }
#
# fetchMyTrades(private)
#
# {
# "id":"BTC-USDT:183566",
# "order_id":"17209376",
# "side":"sell",
# "fee_amount":"0.657396569175",
# "fee_currency_id":"USDT",
# "status":"settled",
# "price":"6573.96569175",
# "quantity":"0.1",
# "cost":"657.396569175",
# "time":"2018-08-10T06:06:46.000Z",
# "market_id":"BTC-USDT"
# }
#
timestamp = self.parse8601(self.safe_string(trade, 'time'))
id = self.safe_string(trade, 'id')
marketId = None
if id is not None:
parts = id.split(':')
marketId = self.safe_string(parts, 0)
marketId = self.safe_string(trade, 'market_id', marketId)
symbol = self.safe_symbol(marketId, market, '-')
side = self.safe_string(trade, 'side')
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'quantity')
cost = None
if price is not None:
if amount is not None:
cost = price * amount
orderId = self.safe_string(trade, 'order_id')
feeCost = self.safe_float(trade, 'fee_amount')
fee = None
if feeCost is not None:
feeCurrencyId = self.safe_string(trade, 'fee_currency_id')
feeCurrencyCode = self.safe_currency_code(feeCurrencyId)
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
}
return {
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': orderId,
'type': None,
'side': side,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
def fetch_time(self, params={}):
response = self.publicGetTime(params)
#
# {"data":"2020-04-12T18:54:25.390Z"}
#
timestamp = self.parse8601(self.safe_string(response, 'data'))
return timestamp
def normalize_ohlcv_timestamp(self, timestamp, timeframe, after=False):
duration = self.parse_timeframe(timeframe)
if timeframe == '1M':
iso8601 = self.iso8601(timestamp)
parts = iso8601.split('-')
year = self.safe_string(parts, 0)
month = self.safe_integer(parts, 1)
if after:
month = self.sum(month, 1)
if month < 10:
month = '0' + str(month)
else:
month = str(month)
return year + '-' + month + '-01T00:00:00.000Z'
elif timeframe == '1w':
timestamp = int(timestamp / 1000)
firstSunday = 259200 # 1970-01-04T00:00:00.000Z
difference = timestamp - firstSunday
numWeeks = self.integer_divide(difference, duration)
previousSunday = self.sum(firstSunday, numWeeks * duration)
if after:
previousSunday = self.sum(previousSunday, duration)
return self.iso8601(previousSunday * 1000)
else:
timestamp = int(timestamp / 1000)
timestamp = duration * int(timestamp / duration)
if after:
timestamp = self.sum(timestamp, duration)
return self.iso8601(timestamp * 1000)
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
interval = self.timeframes[timeframe]
limit = 100 if (limit is None) else limit
requestLimit = self.sum(limit, 1)
requestLimit = min(1000, requestLimit) # max 1000
request = {
'market_ids': market['id'],
'interval': interval,
'sort': 'asc', # 'asc' will always include the start_time, 'desc' will always include end_time
'limit': requestLimit, # max 1000
}
now = self.milliseconds()
duration = self.parse_timeframe(timeframe)
startTime = since
endTime = now
if since is None:
if limit is None:
raise ArgumentsRequired(self.id + ' fetchOHLCV() requires either a since argument or a limit argument')
else:
startTime = now - limit * duration * 1000
else:
if limit is None:
endTime = now
else:
endTime = self.sum(since, self.sum(limit, 1) * duration * 1000)
startTimeNormalized = self.normalize_ohlcv_timestamp(startTime, timeframe)
endTimeNormalized = self.normalize_ohlcv_timestamp(endTime, timeframe, True)
request['start_time'] = startTimeNormalized
request['end_time'] = endTimeNormalized
response = self.publicGetCandle(self.extend(request, params))
#
# {
# "data":[
# {
# "market_id":"ETH-BTC",
# "open":"0.02811",
# "close":"0.02811",
# "low":"0.02811",
# "high":"0.02811",
# "base_volume":"0.0005",
# "quote_volume":"0.000014055",
# "start_time":"2018-11-30T18:19:00.000Z",
# "end_time":"2018-11-30T18:20:00.000Z"
# },
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_ohlcvs(data, market, timeframe, since, limit)
def parse_ohlcv(self, ohlcv, market=None):
#
# {
# "market_id":"ETH-BTC",
# "open":"0.02811",
# "close":"0.02811",
# "low":"0.02811",
# "high":"0.02811",
# "base_volume":"0.0005",
# "quote_volume":"0.000014055",
# "start_time":"2018-11-30T18:19:00.000Z",
# "end_time":"2018-11-30T18:20:00.000Z"
# }
#
return [
self.parse8601(self.safe_string(ohlcv, 'start_time')),
self.safe_float(ohlcv, 'open'),
self.safe_float(ohlcv, 'high'),
self.safe_float(ohlcv, 'low'),
self.safe_float(ohlcv, 'close'),
self.safe_float(ohlcv, 'base_volume'),
]
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
since = self.parse8601(since)
request = {}
market = None
if symbol is not None:
market = self.market(symbol)
request['market_id'] = market['id']
response = self.privateGetOpenOrder(self.extend(request, params))
data = self.safe_value(response, 'data')
return self.parse_orders(data, market, since, limit)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
request = {
'start_time': self.iso8601(0),
'end_time': self.iso8601(self.milliseconds()),
'limit': 100,
}
market = None
if symbol is not None:
market = self.market(symbol)
request['market_id'] = market['id']
if since:
request['start_time'] = self.iso8601(since)
if limit:
request['limit'] = limit
response = self.privateGetOrderHistory(self.extend(request, params))
data = self.safe_value(response, 'data')
return self.parse_orders(data, market, since, limit)
def fetch_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'market_id': market['id'],
}
clientOrderId = self.safe_string_2(params, 'clientOrderId', 'client_order_id')
if clientOrderId is not None:
request['client_order_id'] = clientOrderId
else:
request['order_id'] = id
query = self.omit(params, ['clientOrderId', 'client_order_id'])
response = self.privateGetOrder(self.extend(request, query))
data = self.safe_value(response, 'data', [])
order = self.safe_value(data, 0)
return self.parse_order(order, market)
def parse_order_status(self, status):
statuses = {
'open': 'open',
'cancelled': 'canceled',
'filled': 'closed',
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
#
# {
# id: string,
# user_id: string,
# market_id: string,
# type: 'orderType',
# side: 'side',
# quantity: string,
# limit_price: string,
# time_in_force: 'timeInForce',
# filled_cost: string,
# filled_quantity: string,
# open_quantity: string,
# cancelled_quantity: string,
# status: 'orderStatus',
# time: 'date',
# client_order_id: string,
# }
#
status = self.parse_order_status(self.safe_string(order, 'status'))
id = self.safe_string(order, 'id')
type = self.safe_string(order, 'type')
side = self.safe_string(order, 'side')
marketId = self.safe_string(order, 'market_id')
symbol = self.safe_symbol(marketId, market, '-')
timestamp = self.parse8601(self.safe_string(order, 'time'))
price = self.safe_float(order, 'limit_price')
filled = self.safe_float(order, 'filled_quantity')
remaining = self.safe_float(order, 'open_quantity')
canceledAmount = self.safe_float(order, 'cancelled_quantity')
if canceledAmount is not None:
remaining = self.sum(remaining, canceledAmount)
amount = self.safe_float(order, 'quantity', self.sum(filled, remaining))
cost = self.safe_float_2(order, 'filled_cost', 'cost')
if type == 'market':
price = None
clientOrderId = self.safe_string(order, 'client_order_id')
if clientOrderId == '':
clientOrderId = None
timeInForce = self.safe_string_upper(order, 'time_in_force')
return self.safe_order({
'id': id,
'info': order,
'clientOrderId': clientOrderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'symbol': symbol,
'type': type,
'timeInForce': timeInForce,
'side': side,
'status': status,
'price': price,
'stopPrice': None,
'amount': amount,
'filled': filled,
'remaining': remaining,
'average': None,
'cost': cost,
'fee': None,
'trades': None,
})
def cost_to_precision(self, symbol, cost):
return self.decimal_to_precision(cost, TRUNCATE, self.markets[symbol]['precision']['cost'], self.precisionMode)
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
options = self.safe_value(self.options, 'timeInForce')
defaultTimeInForce = self.safe_value(options, type)
timeInForce = self.safe_string_2(params, 'timeInForce', 'time_in_force', defaultTimeInForce)
request = {
'market_id': market['id'],
'type': type,
'side': side,
'time_in_force': timeInForce,
}
clientOrderId = self.safe_string_2(params, 'clientOrderId', 'client_order_id')
if clientOrderId is not None:
request['client_order_id'] = clientOrderId
costToPrecision = None
if type == 'limit':
request['limit_price'] = self.price_to_precision(symbol, price)
request['quantity'] = self.amount_to_precision(symbol, amount)
elif type == 'market':
# for market buy it requires the amount of quote currency to spend
if side == 'buy':
cost = self.safe_float(params, 'cost')
createMarketBuyOrderRequiresPrice = self.safe_value(self.options, 'createMarketBuyOrderRequiresPrice', True)
if createMarketBuyOrderRequiresPrice:
if price is not None:
if cost is None:
cost = amount * price
elif cost is None:
raise InvalidOrder(self.id + " createOrder() requires the price argument for market buy orders to calculate total order cost(amount to spend), where cost = amount * price. Supply a price argument to createOrder() call if you want the cost to be calculated for you from price and amount, or, alternatively, add .options['createMarketBuyOrderRequiresPrice'] = False and supply the total cost value in the 'amount' argument or in the 'cost' extra parameter(the exchange-specific behaviour)")
else:
cost = amount if (cost is None) else cost
costToPrecision = self.cost_to_precision(symbol, cost)
request['cost'] = costToPrecision
else:
request['quantity'] = self.amount_to_precision(symbol, amount)
query = self.omit(params, ['timeInForce', 'time_in_force', 'clientOrderId', 'client_order_id'])
response = self.privatePostNewOrder(self.extend(request, query))
#
# {
# data: {
# id: string,
# user_id: string,
# market_id: string,
# type: 'orderType',
# side: 'side',
# quantity: string,
# limit_price: string,
# time_in_force: 'timeInForce',
# filled_cost: string,
# filled_quantity: string,
# open_quantity: string,
# cancelled_quantity: string,
# status: 'orderStatus',
# time: 'date',
# client_order_id: string,
# }
# }
#
data = self.safe_value(response, 'data')
order = self.parse_order(data, market)
# a workaround for incorrect huge amounts
# returned by the exchange on market buys
if (type == 'market') and (side == 'buy'):
order['amount'] = None
order['cost'] = float(costToPrecision)
order['remaining'] = None
return order
def cancel_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'market_id': market['id'],
'order_id': id,
}
response = self.privatePostCancelOrder(self.extend(request, params))
data = self.safe_value(response, 'data')
return self.parse_order(data)
def parse_deposit_address(self, depositAddress, currency=None):
address = self.safe_string(depositAddress, 'address')
tag = self.safe_string(depositAddress, 'destination_tag')
currencyId = self.safe_string(depositAddress, 'currency_id')
code = self.safe_currency_code(currencyId)
self.check_address(address)
return {
'currency': code,
'address': address,
'tag': tag,
'info': depositAddress,
}
def fetch_deposit_address(self, code, params={}):
self.load_markets()
currency = self.currency(code)
request = {
'currency_id': currency['id'],
}
response = self.privateGetDepositAddress(self.extend(request, params))
#
# {
# "data":[
# {
# "currency_id":"ETH",
# "address":"0x12e2caf3c4051ba1146e612f532901a423a9898a",
# "destination_tag":null
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
firstAddress = self.safe_value(data, 0)
if firstAddress is None:
raise InvalidAddress(self.id + ' fetchDepositAddress returned an empty response')
return self.parse_deposit_address(firstAddress, currency)
def fetch_deposit_addresses(self, codes=None, params={}):
self.load_markets()
request = {}
if codes:
currencyIds = []
for i in range(0, len(codes)):
currency = self.currency(codes[i])
currencyIds.append(currency['id'])
            request['currency_id'] = ','.join(currencyIds)  # join the exchange-specific ids, not the unified codes
response = self.privateGetDepositAddress(self.extend(request, params))
data = self.safe_value(response, 'data', [])
return self.parse_deposit_addresses(data)
def withdraw(self, code, amount, address, tag=None, params={}):
# In order to use self method
# you need to allow API withdrawal from the API Settings Page, and
# and register the list of withdrawal addresses and destination tags on the API Settings page
# you can only withdraw to the registered addresses using the API
self.check_address(address)
self.load_markets()
currency = self.currency(code)
if tag is None:
tag = ''
request = {
'currency_id': currency['id'],
# 'platform_id': 'ETH', # if omitted it will use the default platform for the currency
'address': address,
'destination_tag': tag,
'amount': self.currency_to_precision(code, amount),
# which currency to pay the withdrawal fees
# only applicable for currencies that accepts multiple withdrawal fee options
# 'fee_currency_id': 'ETH', # if omitted it will use the default fee policy for each currency
# whether the amount field includes fees
# 'include_fee': False, # makes sense only when fee_currency_id is equal to currency_id
}
response = self.privatePostWithdrawal(self.extend(request, params))
data = self.safe_value(response, 'data')
return self.parse_transaction(data, currency)
def parse_transaction(self, transaction, currency=None):
id = self.safe_string(transaction, 'id')
amount = self.safe_float(transaction, 'amount')
address = self.safe_string(transaction, 'address')
tag = self.safe_string(transaction, 'destination_tag')
txid = self.safe_string(transaction, 'hash')
timestamp = self.parse8601(self.safe_string(transaction, 'time'))
type = self.safe_string(transaction, 'type')
currencyId = self.safe_string(transaction, 'currency_id')
code = self.safe_currency_code(currencyId)
status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
feeCost = self.safe_float(transaction, 'fee')
fee = None
if feeCost is not None and feeCost != 0:
fee = {
'currency': code,
'cost': feeCost,
}
return {
'id': id,
'currency': code,
'amount': amount,
'addressFrom': None,
'address': address,
'addressTo': address,
'tagFrom': None,
'tag': tag,
'tagTo': tag,
'status': status,
'type': type,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'fee': fee,
'info': transaction,
}
def parse_transaction_status(self, status):
statuses = {
'requested': 'pending',
'pending': 'pending',
'confirming': 'pending',
'confirmed': 'pending',
'applying': 'pending',
'done': 'ok',
'cancelled': 'canceled',
'cancelling': 'canceled',
}
return self.safe_string(statuses, status, status)
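    # For example, parse_transaction_status('confirming') returns 'pending';
    # an unmapped status string falls through unchanged (safe_string's default).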
def nonce(self):
return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'][api] + '/'
query = self.omit(params, self.extract_params(path))
if api == 'accounts':
self.check_required_credentials()
url += self.implode_params(path, params)
auth = self.apiKey + ':' + self.secret
auth64 = self.string_to_base64(auth)
headers = {
'Authorization': 'Basic ' + self.decode(auth64),
'Content-Type': 'application/json',
}
if query:
body = self.json(query)
else:
url += self.version + '/'
if api == 'public':
url += self.implode_params(path, params)
if query:
url += '?' + self.urlencode(query)
elif api == 'private':
now = self.milliseconds()
self.check_required_credentials()
expires = self.safe_integer(self.options, 'expires')
if (expires is None) or (expires < now):
raise AuthenticationError(self.id + ' access token expired, call signIn() method')
accessToken = self.safe_string(self.options, 'accessToken')
headers = {
'Authorization': 'Bearer ' + accessToken,
}
url += self.implode_params(path, params)
if method == 'GET':
if query:
url += '?' + self.urlencode(query)
elif query:
body = self.json(query)
headers['Content-Type'] = 'application/json'
return {'url': url, 'method': method, 'body': body, 'headers': headers}
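    # Illustrative trace of sign() for a private GET (the endpoint name is
    # hypothetical): sign('balance', api='private', method='GET') yields
    # url = <api base>/<version>/balance with headers
    # {'Authorization': 'Bearer <self.options["accessToken"]>'}, so private
    # calls presuppose a non-expired token stored by sign_in().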
def sign_in(self, params={}):
self.check_required_credentials()
request = {
'grant_type': 'client_credentials', # the only supported value
}
response = self.accountsPostToken(self.extend(request, params))
#
# {
# access_token: '0ttDv/2hTTn3bLi8GP1gKaneiEQ6+0hOBenPrxNQt2s=',
# token_type: 'bearer',
# expires_in: 900
# }
#
expiresIn = self.safe_integer(response, 'expires_in')
accessToken = self.safe_string(response, 'access_token')
self.options['accessToken'] = accessToken
self.options['expires'] = self.sum(self.milliseconds(), expiresIn * 1000)
return response
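    # Illustrative token lifecycle: sign_in() caches the bearer token and its
    # expiry in self.options; with the documented expires_in of 900 seconds the
    # token lasts 15 minutes, after which sign() raises AuthenticationError for
    # private endpoints until sign_in() is called again.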
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return # fallback to default error handler
if 'errorCode' in response:
errorCode = self.safe_string(response, 'errorCode')
message = self.safe_string(response, 'message')
if errorCode is not None:
feedback = self.id + ' ' + body
self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
self.throw_broadly_matched_exception(self.exceptions['exact'], errorCode, feedback)
raise ExchangeError(feedback)
| [
"[email protected]"
] | |
8810e80afe9d5667581d1c646a07dad52c3242c2 | 131ccf66fb787e9b1f0773a25fa518d1f2a3c5d0 | /gui_programming/guimaker.py | f88dcbdb765fc650380d10a48a44bdb26e259768 | [] | no_license | jocogum10/learning-python-programming | a0ba62abde49fd79762bcb7ba4a94bf8126afa77 | 035858bd332e3970d95db8bce7b1175e450802db | refs/heads/master | 2020-07-07T17:08:00.743196 | 2019-12-13T05:32:47 | 2019-12-13T05:32:47 | 203,416,201 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,637 | py | """
################################################################################
An extended Frame that makes window menus and toolbars automatically.
Use GuiMakerMenu for embedded components (makes frame-based menus).
Use GuiMakerWindowMenu for top-level windows (makes Tk8.0 window menus).
See the self-test code (and PyEdit) for an example layout tree format.
################################################################################
"""
import sys
from tkinter import * # widget classes
from tkinter.messagebox import showinfo
class GuiMaker(Frame):
menuBar = [] # class defaults
toolBar = [] # change per instance in subclasses
helpButton = True # set these in start() if need self
def __init__(self, parent=None):
Frame.__init__(self, parent)
self.pack(expand=YES, fill=BOTH) # make frame stretchable
self.start() # for subclass: set menu/toolBar
self.makeMenuBar() # done here: build menu bar
self.makeToolBar() # done here: build toolbar
self.makeWidgets() # for subclass: add middle part
def makeMenuBar(self):
"""
make menu bar at the top (Tk8.0 menus below)
expand=no, fill=x so same width on resize
"""
menubar = Frame(self, relief=RAISED, bd=2)
menubar.pack(side=TOP, fill=X)
for (name, key, items) in self.menuBar:
mbutton = Menubutton(menubar, text=name, underline=key)
mbutton.pack(side=LEFT)
pulldown = Menu(mbutton)
self.addMenuItems(pulldown, items)
mbutton.config(menu=pulldown)
if self.helpButton:
Button(menubar, text = 'Help',
cursor = 'gumby',
relief = FLAT,
command = self.help).pack(side=RIGHT)
def addMenuItems(self, menu, items):
for item in items: # scan nested items list
if item == 'separator': # string: add separator
menu.add_separator({})
elif type(item) == list: # list: disabled item list
for num in item:
menu.entryconfig(num, state=DISABLED)
elif type(item[2]) != list:
menu.add_command(label = item[0], # command:
underline = item[1], # add command
command = item[2]) # cmd=callable
else:
pullover = Menu(menu)
self.addMenuItems(pullover, item[2]) # sublist:
menu.add_cascade(label = item[0], # make submenu
underline = item[1], # add cascade
menu = pullover)
def makeToolBar(self):
"""
make button bar at bottom, if any
expand=no, fill=x so same width on resize
this could support images too: see chapter 9,
would need prebuilt gifs or PIL for thumbnails
"""
if self.toolBar:
toolbar = Frame(self, cursor='hand2', relief=SUNKEN, bd=2)
toolbar.pack(side=BOTTOM, fill=X)
for (name, action, where) in self.toolBar:
Button(toolbar, text=name, command=action).pack(where)
def makeWidgets(self):
"""
make 'middle' part last, so menu/toolbar
is always on top/bottom and clipped last;
override this default, pack middle any side;
for grid: grid middle part in packed frame
"""
name = Label(self,
width=40, height=10,
relief=SUNKEN, bg='white',
text = self.__class__.__name__,
cursor = 'crosshair')
name.pack(expand=YES, fill=BOTH, side=TOP)
def help(self):
"override me in subclass"
showinfo('Help', 'Sorry, no help for ' + self.__class__.__name__)
def start(self):
"override me in subclass: set menu/toolbar with self"
pass
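# A minimal usage sketch; the HelloMaker/onHello names are illustrative and not
# part of the original API. Subclasses override start() to declare menu/toolbar
# specs and makeWidgets() to fill the middle of the frame.
class HelloMaker(GuiMaker):
    def start(self):
        self.menuBar = [('File', 0, [('Quit', 0, self.quit)])]
        self.toolBar = [('Hello', self.onHello, {'side': LEFT})]
    def makeWidgets(self):
        Label(self, text='Hello GuiMaker').pack(expand=YES, fill=BOTH)
    def onHello(self):
        showinfo('Hello', 'Hello from the toolbar')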
################################################################################
# Customize for Tk 8.0 main window menu bar, instead of a frame
################################################################################
GuiMakerFrameMenu = GuiMaker # use this for embedded component menus
class GuiMakerWindowMenu(GuiMaker): # use this for top-level window menus
def makeMenuBar(self):
menubar = Menu(self.master)
self.master.config(menu=menubar)
for (name, key, items) in self.menuBar:
pulldown = Menu(menubar)
self.addMenuItems(pulldown, items)
menubar.add_cascade(label=name, underline=key, menu=pulldown)
if self.helpButton:
if sys.platform[:3] == 'win':
menubar.add_command(label='Help', command=self.help)
else:
pulldown = Menu(menubar) # Linux needs real pull down
pulldown.add_command(label='About', command=self.help)
menubar.add_cascade(label='Help', menu=pulldown)
################################################################################
# Self-test when file run standalone: 'python guimaker.py'
################################################################################
if __name__ == '__main__':
from guimixin import GuiMixin # mix in a help method
menuBar = [
('File', 0,
[('Open', 0, lambda:0),
('Quit', 0, sys.exit)]),
('Edit', 0,
[('Cut', 0, lambda:0),
('Paste', 0, lambda:0)]) ]
toolBar = [('Quit', sys.exit, {'side': LEFT})]
class TestAppFrameMenu(GuiMixin, GuiMakerFrameMenu):
def start(self):
self.menuBar = menuBar
self.toolBar = toolBar
class TestAppWindowMenu(GuiMixin, GuiMakerWindowMenu):
def start(self):
self.menuBar = menuBar
self.toolBar = toolBar
class TestAppWindowMenuBasic(GuiMakerWindowMenu):
def start(self):
self.menuBar = menuBar
self.toolBar = toolBar # guimaker help, not guimixin
root = Tk()
TestAppFrameMenu(Toplevel())
TestAppWindowMenu(Toplevel())
TestAppWindowMenuBasic(root)
root.mainloop() | [
"[email protected]"
] | |
347f8b54dfb2cd1482e50fb225597255d806a74b | a2d36e471988e0fae32e9a9d559204ebb065ab7f | /huaweicloud-sdk-elb/huaweicloudsdkelb/v3/model/list_load_balancers_request.py | 65c6fc83e8a69f5da83a58da3b5a9f60ed29c66c | [
"Apache-2.0"
] | permissive | zhouxy666/huaweicloud-sdk-python-v3 | 4d878a90b8e003875fc803a61414788e5e4c2c34 | cc6f10a53205be4cb111d3ecfef8135ea804fa15 | refs/heads/master | 2023-09-02T07:41:12.605394 | 2021-11-12T03:20:11 | 2021-11-12T03:20:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,415 | py | # coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ListLoadBalancersRequest:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'admin_state_up': 'bool',
'availability_zone_list': 'list[str]',
'billing_info': 'list[str]',
'deletion_protection_enable': 'bool',
'description': 'list[str]',
'eips': 'list[str]',
'enterprise_project_id': 'list[str]',
'guaranteed': 'bool',
'id': 'list[str]',
'ip_version': 'list[int]',
'ipv6_vip_address': 'list[str]',
'ipv6_vip_port_id': 'list[str]',
'ipv6_vip_virsubnet_id': 'list[str]',
'l4_flavor_id': 'list[str]',
'l4_scale_flavor_id': 'list[str]',
'l7_flavor_id': 'list[str]',
'l7_scale_flavor_id': 'list[str]',
'limit': 'int',
'marker': 'str',
'member_address': 'list[str]',
'member_device_id': 'list[str]',
'name': 'list[str]',
'operating_status': 'list[str]',
'page_reverse': 'bool',
'provisioning_status': 'list[str]',
'publicips': 'list[str]',
'vip_address': 'list[str]',
'vip_port_id': 'list[str]',
'vip_subnet_cidr_id': 'list[str]',
'vpc_id': 'list[str]'
}
attribute_map = {
'admin_state_up': 'admin_state_up',
'availability_zone_list': 'availability_zone_list',
'billing_info': 'billing_info',
'deletion_protection_enable': 'deletion_protection_enable',
'description': 'description',
'eips': 'eips',
'enterprise_project_id': 'enterprise_project_id',
'guaranteed': 'guaranteed',
'id': 'id',
'ip_version': 'ip_version',
'ipv6_vip_address': 'ipv6_vip_address',
'ipv6_vip_port_id': 'ipv6_vip_port_id',
'ipv6_vip_virsubnet_id': 'ipv6_vip_virsubnet_id',
'l4_flavor_id': 'l4_flavor_id',
'l4_scale_flavor_id': 'l4_scale_flavor_id',
'l7_flavor_id': 'l7_flavor_id',
'l7_scale_flavor_id': 'l7_scale_flavor_id',
'limit': 'limit',
'marker': 'marker',
'member_address': 'member_address',
'member_device_id': 'member_device_id',
'name': 'name',
'operating_status': 'operating_status',
'page_reverse': 'page_reverse',
'provisioning_status': 'provisioning_status',
'publicips': 'publicips',
'vip_address': 'vip_address',
'vip_port_id': 'vip_port_id',
'vip_subnet_cidr_id': 'vip_subnet_cidr_id',
'vpc_id': 'vpc_id'
}
def __init__(self, admin_state_up=None, availability_zone_list=None, billing_info=None, deletion_protection_enable=None, description=None, eips=None, enterprise_project_id=None, guaranteed=None, id=None, ip_version=None, ipv6_vip_address=None, ipv6_vip_port_id=None, ipv6_vip_virsubnet_id=None, l4_flavor_id=None, l4_scale_flavor_id=None, l7_flavor_id=None, l7_scale_flavor_id=None, limit=None, marker=None, member_address=None, member_device_id=None, name=None, operating_status=None, page_reverse=None, provisioning_status=None, publicips=None, vip_address=None, vip_port_id=None, vip_subnet_cidr_id=None, vpc_id=None):
"""ListLoadBalancersRequest - a model defined in huaweicloud sdk"""
self._admin_state_up = None
self._availability_zone_list = None
self._billing_info = None
self._deletion_protection_enable = None
self._description = None
self._eips = None
self._enterprise_project_id = None
self._guaranteed = None
self._id = None
self._ip_version = None
self._ipv6_vip_address = None
self._ipv6_vip_port_id = None
self._ipv6_vip_virsubnet_id = None
self._l4_flavor_id = None
self._l4_scale_flavor_id = None
self._l7_flavor_id = None
self._l7_scale_flavor_id = None
self._limit = None
self._marker = None
self._member_address = None
self._member_device_id = None
self._name = None
self._operating_status = None
self._page_reverse = None
self._provisioning_status = None
self._publicips = None
self._vip_address = None
self._vip_port_id = None
self._vip_subnet_cidr_id = None
self._vpc_id = None
self.discriminator = None
if admin_state_up is not None:
self.admin_state_up = admin_state_up
if availability_zone_list is not None:
self.availability_zone_list = availability_zone_list
if billing_info is not None:
self.billing_info = billing_info
if deletion_protection_enable is not None:
self.deletion_protection_enable = deletion_protection_enable
if description is not None:
self.description = description
if eips is not None:
self.eips = eips
if enterprise_project_id is not None:
self.enterprise_project_id = enterprise_project_id
if guaranteed is not None:
self.guaranteed = guaranteed
if id is not None:
self.id = id
if ip_version is not None:
self.ip_version = ip_version
if ipv6_vip_address is not None:
self.ipv6_vip_address = ipv6_vip_address
if ipv6_vip_port_id is not None:
self.ipv6_vip_port_id = ipv6_vip_port_id
if ipv6_vip_virsubnet_id is not None:
self.ipv6_vip_virsubnet_id = ipv6_vip_virsubnet_id
if l4_flavor_id is not None:
self.l4_flavor_id = l4_flavor_id
if l4_scale_flavor_id is not None:
self.l4_scale_flavor_id = l4_scale_flavor_id
if l7_flavor_id is not None:
self.l7_flavor_id = l7_flavor_id
if l7_scale_flavor_id is not None:
self.l7_scale_flavor_id = l7_scale_flavor_id
if limit is not None:
self.limit = limit
if marker is not None:
self.marker = marker
if member_address is not None:
self.member_address = member_address
if member_device_id is not None:
self.member_device_id = member_device_id
if name is not None:
self.name = name
if operating_status is not None:
self.operating_status = operating_status
if page_reverse is not None:
self.page_reverse = page_reverse
if provisioning_status is not None:
self.provisioning_status = provisioning_status
if publicips is not None:
self.publicips = publicips
if vip_address is not None:
self.vip_address = vip_address
if vip_port_id is not None:
self.vip_port_id = vip_port_id
if vip_subnet_cidr_id is not None:
self.vip_subnet_cidr_id = vip_subnet_cidr_id
if vpc_id is not None:
self.vpc_id = vpc_id
@property
def admin_state_up(self):
"""Gets the admin_state_up of this ListLoadBalancersRequest.
        Administrative status of the load balancer. Only true is supported.
:return: The admin_state_up of this ListLoadBalancersRequest.
:rtype: bool
"""
return self._admin_state_up
@admin_state_up.setter
def admin_state_up(self, admin_state_up):
"""Sets the admin_state_up of this ListLoadBalancersRequest.
        Administrative status of the load balancer. Only true is supported.
:param admin_state_up: The admin_state_up of this ListLoadBalancersRequest.
:type: bool
"""
self._admin_state_up = admin_state_up
@property
def availability_zone_list(self):
"""Gets the availability_zone_list of this ListLoadBalancersRequest.
        Availability zones. Note: the available AZs can be queried via the nova API /v2/{project_id}/os-availability-zone
:return: The availability_zone_list of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._availability_zone_list
@availability_zone_list.setter
def availability_zone_list(self, availability_zone_list):
"""Sets the availability_zone_list of this ListLoadBalancersRequest.
        Availability zones. Note: the available AZs can be queried via the nova API /v2/{project_id}/os-availability-zone
:param availability_zone_list: The availability_zone_list of this ListLoadBalancersRequest.
:type: list[str]
"""
self._availability_zone_list = availability_zone_list
@property
def billing_info(self):
"""Gets the billing_info of this ListLoadBalancersRequest.
        Billing information for reserved resources. Empty (the default) means pay-per-use; non-empty means yearly/monthly billing. Only admin can update this field.
:return: The billing_info of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._billing_info
@billing_info.setter
def billing_info(self, billing_info):
"""Sets the billing_info of this ListLoadBalancersRequest.
        Billing information for reserved resources. Empty (the default) means pay-per-use; non-empty means yearly/monthly billing. Only admin can update this field.
:param billing_info: The billing_info of this ListLoadBalancersRequest.
:type: list[str]
"""
self._billing_info = billing_info
@property
def deletion_protection_enable(self):
"""Gets the deletion_protection_enable of this ListLoadBalancersRequest.
        Whether deletion protection is enabled. false means disabled; if left empty, both states are queried.
:return: The deletion_protection_enable of this ListLoadBalancersRequest.
:rtype: bool
"""
return self._deletion_protection_enable
@deletion_protection_enable.setter
def deletion_protection_enable(self, deletion_protection_enable):
"""Sets the deletion_protection_enable of this ListLoadBalancersRequest.
        Whether deletion protection is enabled. false means disabled; if left empty, both states are queried.
:param deletion_protection_enable: The deletion_protection_enable of this ListLoadBalancersRequest.
:type: bool
"""
self._deletion_protection_enable = deletion_protection_enable
@property
def description(self):
"""Gets the description of this ListLoadBalancersRequest.
        Description of the load balancer.
:return: The description of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._description
@description.setter
def description(self, description):
"""Sets the description of this ListLoadBalancersRequest.
        Description of the load balancer.
:param description: The description of this ListLoadBalancersRequest.
:type: list[str]
"""
self._description = description
@property
def eips(self):
"""Gets the eips of this ListLoadBalancersRequest.
        EIPs bound to the public-network ELB instance. Example: \"eips\": [ { \"eip_id\": \"a6ded276-c88a-4c58-95e0-5b6d1d2297b3\", \"eip_address\": \"2001:db8:a583:86:cf24:5cc5:8117:6eaa\", \"ip_version\": 6 } ] Specify in a query as: eips=eip_id=XXXX
:return: The eips of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._eips
@eips.setter
def eips(self, eips):
"""Sets the eips of this ListLoadBalancersRequest.
        EIPs bound to the public-network ELB instance. Example: \"eips\": [ { \"eip_id\": \"a6ded276-c88a-4c58-95e0-5b6d1d2297b3\", \"eip_address\": \"2001:db8:a583:86:cf24:5cc5:8117:6eaa\", \"ip_version\": 6 } ] Specify in a query as: eips=eip_id=XXXX
:param eips: The eips of this ListLoadBalancersRequest.
:type: list[str]
"""
self._eips = eips
@property
def enterprise_project_id(self):
"""Gets the enterprise_project_id of this ListLoadBalancersRequest.
        Enterprise project ID.
:return: The enterprise_project_id of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._enterprise_project_id
@enterprise_project_id.setter
def enterprise_project_id(self, enterprise_project_id):
"""Sets the enterprise_project_id of this ListLoadBalancersRequest.
        Enterprise project ID.
:param enterprise_project_id: The enterprise_project_id of this ListLoadBalancersRequest.
:type: list[str]
"""
self._enterprise_project_id = enterprise_project_id
@property
def guaranteed(self):
"""Gets the guaranteed of this ListLoadBalancersRequest.
        Shared load balancer: false; guaranteed-performance load balancer: true
:return: The guaranteed of this ListLoadBalancersRequest.
:rtype: bool
"""
return self._guaranteed
@guaranteed.setter
def guaranteed(self, guaranteed):
"""Sets the guaranteed of this ListLoadBalancersRequest.
        Shared load balancer: false; guaranteed-performance load balancer: true
:param guaranteed: The guaranteed of this ListLoadBalancersRequest.
:type: bool
"""
self._guaranteed = guaranteed
@property
def id(self):
"""Gets the id of this ListLoadBalancersRequest.
        Load balancer ID.
:return: The id of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this ListLoadBalancersRequest.
        Load balancer ID.
:param id: The id of this ListLoadBalancersRequest.
:type: list[str]
"""
self._id = id
@property
def ip_version(self):
"""Gets the ip_version of this ListLoadBalancersRequest.
        IP version. Valid values: 4 and 6. 4: IPv4; 6: IPv6
:return: The ip_version of this ListLoadBalancersRequest.
:rtype: list[int]
"""
return self._ip_version
@ip_version.setter
def ip_version(self, ip_version):
"""Sets the ip_version of this ListLoadBalancersRequest.
        IP version. Valid values: 4 and 6. 4: IPv4; 6: IPv6
:param ip_version: The ip_version of this ListLoadBalancersRequest.
:type: list[int]
"""
self._ip_version = ip_version
@property
def ipv6_vip_address(self):
"""Gets the ipv6_vip_address of this ListLoadBalancersRequest.
        IPv6 address of the dual-stack instance.
:return: The ipv6_vip_address of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._ipv6_vip_address
@ipv6_vip_address.setter
def ipv6_vip_address(self, ipv6_vip_address):
"""Sets the ipv6_vip_address of this ListLoadBalancersRequest.
        IPv6 address of the dual-stack instance.
:param ipv6_vip_address: The ipv6_vip_address of this ListLoadBalancersRequest.
:type: list[str]
"""
self._ipv6_vip_address = ipv6_vip_address
@property
def ipv6_vip_port_id(self):
"""Gets the ipv6_vip_port_id of this ListLoadBalancersRequest.
        IPv6 port of the dual-stack instance.
:return: The ipv6_vip_port_id of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._ipv6_vip_port_id
@ipv6_vip_port_id.setter
def ipv6_vip_port_id(self, ipv6_vip_port_id):
"""Sets the ipv6_vip_port_id of this ListLoadBalancersRequest.
        IPv6 port of the dual-stack instance.
:param ipv6_vip_port_id: The ipv6_vip_port_id of this ListLoadBalancersRequest.
:type: list[str]
"""
self._ipv6_vip_port_id = ipv6_vip_port_id
@property
def ipv6_vip_virsubnet_id(self):
"""Gets the ipv6_vip_virsubnet_id of this ListLoadBalancersRequest.
        IPv6 network ID of the dual-stack instance. Note: vpc_id, vip_subnet_cidr_id and ipv6_vip_virsubnet_id cannot all be empty.
:return: The ipv6_vip_virsubnet_id of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._ipv6_vip_virsubnet_id
@ipv6_vip_virsubnet_id.setter
def ipv6_vip_virsubnet_id(self, ipv6_vip_virsubnet_id):
"""Sets the ipv6_vip_virsubnet_id of this ListLoadBalancersRequest.
        IPv6 network ID of the dual-stack instance. Note: vpc_id, vip_subnet_cidr_id and ipv6_vip_virsubnet_id cannot all be empty.
:param ipv6_vip_virsubnet_id: The ipv6_vip_virsubnet_id of this ListLoadBalancersRequest.
:type: list[str]
"""
self._ipv6_vip_virsubnet_id = ipv6_vip_virsubnet_id
@property
def l4_flavor_id(self):
"""Gets the l4_flavor_id of this ListLoadBalancersRequest.
        Layer-4 flavor. Leave empty for pay-per-use; set by the user for yearly/monthly billing.
:return: The l4_flavor_id of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._l4_flavor_id
@l4_flavor_id.setter
def l4_flavor_id(self, l4_flavor_id):
"""Sets the l4_flavor_id of this ListLoadBalancersRequest.
        Layer-4 flavor. Leave empty for pay-per-use; set by the user for yearly/monthly billing.
:param l4_flavor_id: The l4_flavor_id of this ListLoadBalancersRequest.
:type: list[str]
"""
self._l4_flavor_id = l4_flavor_id
@property
def l4_scale_flavor_id(self):
"""Gets the l4_scale_flavor_id of this ListLoadBalancersRequest.
        Reserved elastic flavor.
:return: The l4_scale_flavor_id of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._l4_scale_flavor_id
@l4_scale_flavor_id.setter
def l4_scale_flavor_id(self, l4_scale_flavor_id):
"""Sets the l4_scale_flavor_id of this ListLoadBalancersRequest.
        Reserved elastic flavor.
:param l4_scale_flavor_id: The l4_scale_flavor_id of this ListLoadBalancersRequest.
:type: list[str]
"""
self._l4_scale_flavor_id = l4_scale_flavor_id
@property
def l7_flavor_id(self):
"""Gets the l7_flavor_id of this ListLoadBalancersRequest.
        Layer-7 flavor. Leave empty for pay-per-use; set by the user for yearly/monthly billing.
:return: The l7_flavor_id of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._l7_flavor_id
@l7_flavor_id.setter
def l7_flavor_id(self, l7_flavor_id):
"""Sets the l7_flavor_id of this ListLoadBalancersRequest.
        Layer-7 flavor. Leave empty for pay-per-use; set by the user for yearly/monthly billing.
:param l7_flavor_id: The l7_flavor_id of this ListLoadBalancersRequest.
:type: list[str]
"""
self._l7_flavor_id = l7_flavor_id
@property
def l7_scale_flavor_id(self):
"""Gets the l7_scale_flavor_id of this ListLoadBalancersRequest.
        Reserved elastic flavor.
:return: The l7_scale_flavor_id of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._l7_scale_flavor_id
@l7_scale_flavor_id.setter
def l7_scale_flavor_id(self, l7_scale_flavor_id):
"""Sets the l7_scale_flavor_id of this ListLoadBalancersRequest.
        Reserved elastic flavor.
:param l7_scale_flavor_id: The l7_scale_flavor_id of this ListLoadBalancersRequest.
:type: list[str]
"""
self._l7_scale_flavor_id = l7_scale_flavor_id
@property
def limit(self):
"""Gets the limit of this ListLoadBalancersRequest.
        Number of records returned per page.
:return: The limit of this ListLoadBalancersRequest.
:rtype: int
"""
return self._limit
@limit.setter
def limit(self, limit):
"""Sets the limit of this ListLoadBalancersRequest.
        Number of records returned per page.
:param limit: The limit of this ListLoadBalancersRequest.
:type: int
"""
self._limit = limit
@property
def marker(self):
"""Gets the marker of this ListLoadBalancersRequest.
        ID of the last record on the previous page. Usage: - Must be used together with limit. - If unspecified, the first page is queried. - Must not be empty or an invalid ID.
:return: The marker of this ListLoadBalancersRequest.
:rtype: str
"""
return self._marker
@marker.setter
def marker(self, marker):
"""Sets the marker of this ListLoadBalancersRequest.
        ID of the last record on the previous page. Usage: - Must be used together with limit. - If unspecified, the first page is queried. - Must not be empty or an invalid ID.
:param marker: The marker of this ListLoadBalancersRequest.
:type: str
"""
self._marker = marker
@property
def member_address(self):
"""Gets the member_address of this ListLoadBalancersRequest.
        IP address of the backend server.
:return: The member_address of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._member_address
@member_address.setter
def member_address(self, member_address):
"""Sets the member_address of this ListLoadBalancersRequest.
        IP address of the backend server.
:param member_address: The member_address of this ListLoadBalancersRequest.
:type: list[str]
"""
self._member_address = member_address
@property
def member_device_id(self):
"""Gets the member_device_id of this ListLoadBalancersRequest.
        ID of the Elastic Cloud Server associated with the backend server.
:return: The member_device_id of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._member_device_id
@member_device_id.setter
def member_device_id(self, member_device_id):
"""Sets the member_device_id of this ListLoadBalancersRequest.
        ID of the Elastic Cloud Server associated with the backend server.
:param member_device_id: The member_device_id of this ListLoadBalancersRequest.
:type: list[str]
"""
self._member_device_id = member_device_id
@property
def name(self):
"""Gets the name of this ListLoadBalancersRequest.
        Load balancer name.
:return: The name of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this ListLoadBalancersRequest.
        Load balancer name.
:param name: The name of this ListLoadBalancersRequest.
:type: list[str]
"""
self._name = name
@property
def operating_status(self):
"""Gets the operating_status of this ListLoadBalancersRequest.
        Operating status of the load balancer. Can be ONLINE, OFFLINE, DEGRADED, DISABLED or NO_MONITOR. Note: this field is reserved and currently unused.
:return: The operating_status of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._operating_status
@operating_status.setter
def operating_status(self, operating_status):
"""Sets the operating_status of this ListLoadBalancersRequest.
        Operating status of the load balancer. Can be ONLINE, OFFLINE, DEGRADED, DISABLED or NO_MONITOR. Note: this field is reserved and currently unused.
:param operating_status: The operating_status of this ListLoadBalancersRequest.
:type: list[str]
"""
self._operating_status = operating_status
@property
def page_reverse(self):
"""Gets the page_reverse of this ListLoadBalancersRequest.
        Pagination direction. true: page from back to front; false (default): page from front to back. Usage: must be used together with limit.
:return: The page_reverse of this ListLoadBalancersRequest.
:rtype: bool
"""
return self._page_reverse
@page_reverse.setter
def page_reverse(self, page_reverse):
"""Sets the page_reverse of this ListLoadBalancersRequest.
        Pagination direction. true: page from back to front; false (default): page from front to back. Usage: must be used together with limit.
:param page_reverse: The page_reverse of this ListLoadBalancersRequest.
:type: bool
"""
self._page_reverse = page_reverse
@property
def provisioning_status(self):
"""Gets the provisioning_status of this ListLoadBalancersRequest.
        Provisioning status of the load balancer. Can be ACTIVE, PENDING_CREATE or ERROR. Note: this field is reserved and currently unused.
:return: The provisioning_status of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._provisioning_status
@provisioning_status.setter
def provisioning_status(self, provisioning_status):
"""Sets the provisioning_status of this ListLoadBalancersRequest.
        Provisioning status of the load balancer. Can be ACTIVE, PENDING_CREATE or ERROR. Note: this field is reserved and currently unused.
:param provisioning_status: The provisioning_status of this ListLoadBalancersRequest.
:type: list[str]
"""
self._provisioning_status = provisioning_status
@property
def publicips(self):
"""Gets the publicips of this ListLoadBalancersRequest.
        Public IPs. Example: \"publicips\": [ { \"publicip_id\": \"a6ded276-c88a-4c58-95e0-5b6d1d2297b3\", \"publicip_address\": \"2001:db8:a583:86:cf24:5cc5:8117:6eaa\", \"publicip_ip_version\": 6 } ] Specify in a query as: publicips=publicip_id=XXXX,YYYY
:return: The publicips of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._publicips
@publicips.setter
def publicips(self, publicips):
"""Sets the publicips of this ListLoadBalancersRequest.
        Public IPs. Example: \"publicips\": [ { \"publicip_id\": \"a6ded276-c88a-4c58-95e0-5b6d1d2297b3\", \"publicip_address\": \"2001:db8:a583:86:cf24:5cc5:8117:6eaa\", \"publicip_ip_version\": 6 } ] Specify in a query as: publicips=publicip_id=XXXX,YYYY
:param publicips: The publicips of this ListLoadBalancersRequest.
:type: list[str]
"""
self._publicips = publicips
@property
def vip_address(self):
"""Gets the vip_address of this ListLoadBalancersRequest.
        Virtual IP address of the load balancer.
:return: The vip_address of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._vip_address
@vip_address.setter
def vip_address(self, vip_address):
"""Sets the vip_address of this ListLoadBalancersRequest.
        Virtual IP address of the load balancer.
:param vip_address: The vip_address of this ListLoadBalancersRequest.
:type: list[str]
"""
self._vip_address = vip_address
@property
def vip_port_id(self):
"""Gets the vip_port_id of this ListLoadBalancersRequest.
        Port ID of the load balancer's virtual IP.
:return: The vip_port_id of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._vip_port_id
@vip_port_id.setter
def vip_port_id(self, vip_port_id):
"""Sets the vip_port_id of this ListLoadBalancersRequest.
        Port ID of the load balancer's virtual IP.
:param vip_port_id: The vip_port_id of this ListLoadBalancersRequest.
:type: list[str]
"""
self._vip_port_id = vip_port_id
@property
def vip_subnet_cidr_id(self):
"""Gets the vip_subnet_cidr_id of this ListLoadBalancersRequest.
        ID of the subnet where the load balancer resides; only private networks are supported. Note: vpc_id, vip_subnet_cidr_id and ipv6_vip_virsubnet_id cannot all be empty.
:return: The vip_subnet_cidr_id of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._vip_subnet_cidr_id
@vip_subnet_cidr_id.setter
def vip_subnet_cidr_id(self, vip_subnet_cidr_id):
"""Sets the vip_subnet_cidr_id of this ListLoadBalancersRequest.
        ID of the subnet where the load balancer resides; only private networks are supported. Note: vpc_id, vip_subnet_cidr_id and ipv6_vip_virsubnet_id cannot all be empty.
:param vip_subnet_cidr_id: The vip_subnet_cidr_id of this ListLoadBalancersRequest.
:type: list[str]
"""
self._vip_subnet_cidr_id = vip_subnet_cidr_id
@property
def vpc_id(self):
"""Gets the vpc_id of this ListLoadBalancersRequest.
        VPC of the instance. If absent, it is derived from vip_subnet_cidr_id. Note: vpc_id, vip_subnet_cidr_id and ipv6_vip_virsubnet_id cannot all be empty.
:return: The vpc_id of this ListLoadBalancersRequest.
:rtype: list[str]
"""
return self._vpc_id
@vpc_id.setter
def vpc_id(self, vpc_id):
"""Sets the vpc_id of this ListLoadBalancersRequest.
        VPC of the instance. If absent, it is derived from vip_subnet_cidr_id. Note: vpc_id, vip_subnet_cidr_id and ipv6_vip_virsubnet_id cannot all be empty.
:param vpc_id: The vpc_id of this ListLoadBalancersRequest.
:type: list[str]
"""
self._vpc_id = vpc_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ListLoadBalancersRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
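if __name__ == "__main__":
    # Minimal self-check sketch: build a request and inspect its payload. The
    # ELB client that would actually send it (per the usual huaweicloudsdk
    # pattern) is an assumption and is not defined in this module.
    req = ListLoadBalancersRequest(name=["my-elb"], limit=10)
    print(req.to_dict())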
| [
"[email protected]"
] | |
01593c9ffc95662e33bc80059daecc2592dd829f | 08e26af5604fda61846c421d739c82ea0bd17271 | /product_account_purchase_sale/account_invoice.py | c25db1448d9f73d91fe3eab11331df7acb6e59cc | [] | no_license | germanponce/nishikawa_addons | 376342d6d45250eec85443abf4eb4f760256de85 | 765dd185272407175fbc14a8f4d702bf6e5e759d | refs/heads/master | 2021-01-25T04:09:07.391100 | 2014-07-14T14:57:21 | 2014-07-14T14:57:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,410 | py | # -*- encoding: utf-8 -*-
###########################################################################
# Module written for OpenERP, Open Source Management Solution
#
# Copyright (c) 2010 moylop260 - http://www.hesatecnica.com.com/
# All Rights Reserved.
# info skype: german_442 email: ([email protected])
############################################################################
# Coded by: german_442 email: ([email protected])
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv, fields
import time
from datetime import datetime, date
from tools.translate import _
from openerp import SUPERUSER_ID
class account_invoice(osv.osv):
_name = 'account.invoice'
_inherit ='account.invoice'
_columns = {
        'department_id': fields.many2one('hr.department', 'Department', help='Defines the department in charge of the purchase request'),
}
_default = {
}
account_invoice()
class account_invoice_line(osv.osv):
_inherit ='account.invoice.line'
_columns = {
'analytics_accounts_required': fields.boolean('Cuentas Analiticas Requeridas') ,
}
def product_id_change(self, cr, uid, ids, product, uom_id, qty=0, name='', type='out_invoice', partner_id=False, fposition_id=False, price_unit=False, currency_id=False, context=None, company_id=None):
value = {}
if context is None:
context = {}
company_id = company_id if company_id != None else context.get('company_id',False)
context = dict(context)
context.update({'company_id': company_id, 'force_company': company_id})
if not partner_id:
raise osv.except_osv(_('No Partner Defined!'),_("You must first select a partner!") )
if not product:
if type in ('in_invoice', 'in_refund'):
return {'value': {}, 'domain':{'product_uom':[]}}
else:
return {'value': {'price_unit': 0.0}, 'domain':{'product_uom':[]}}
part = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context)
fpos_obj = self.pool.get('account.fiscal.position')
fpos = fposition_id and fpos_obj.browse(cr, uid, fposition_id, context=context) or False
if part.lang:
context.update({'lang': part.lang})
result = {}
res = self.pool.get('product.product').browse(cr, uid, product, context=context)
if type in ('out_invoice','out_refund'):
a = res.property_account_income.id
if not a:
a = res.categ_id.property_account_income_categ.id
else:
a = res.property_account_expense.id
if not a:
a = res.categ_id.property_account_expense_categ.id
a = fpos_obj.map_account(cr, uid, fpos, a)
if a:
result['account_id'] = a
if type in ('out_invoice', 'out_refund'):
taxes = res.taxes_id and res.taxes_id or (a and self.pool.get('account.account').browse(cr, uid, a, context=context).tax_ids or False)
else:
taxes = res.supplier_taxes_id and res.supplier_taxes_id or (a and self.pool.get('account.account').browse(cr, uid, a, context=context).tax_ids or False)
tax_id = fpos_obj.map_tax(cr, uid, fpos, taxes)
if type in ('in_invoice', 'in_refund'):
result.update( {'price_unit': price_unit or res.standard_price,'invoice_line_tax_id': tax_id} )
else:
result.update({'price_unit': res.list_price, 'invoice_line_tax_id': tax_id})
result['name'] = res.partner_ref
result['uos_id'] = uom_id or res.uom_id.id
if res.description:
result['name'] += '\n'+res.description
domain = {'uos_id':[('category_id','=',res.uom_id.category_id.id)]}
res_final = {'value':result, 'domain':domain}
if not company_id or not currency_id:
return res_final
company = self.pool.get('res.company').browse(cr, uid, company_id, context=context)
currency = self.pool.get('res.currency').browse(cr, uid, currency_id, context=context)
if company.currency_id.id != currency.id:
if type in ('in_invoice', 'in_refund'):
res_final['value']['price_unit'] = res.standard_price
new_price = res_final['value']['price_unit'] * currency.rate
res_final['value']['price_unit'] = new_price
if result['uos_id'] and result['uos_id'] != res.uom_id.id:
selected_uom = self.pool.get('product.uom').browse(cr, uid, result['uos_id'], context=context)
new_price = self.pool.get('product.uom')._compute_price(cr, uid, res.uom_id.id, res_final['value']['price_unit'], result['uos_id'])
res_final['value']['price_unit'] = new_price
        #### Check whether the product requires analytic accounts
prod_obj = self.pool.get('product.product')
prod_b = prod_obj.browse(cr, uid, [product], context=None)[0]
if prod_b.analytics_accounts_required:
res_final['value'].update({'analytics_accounts_required':True})
return res_final
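    # Illustrative call sketch (ids and values are placeholders): triggered from
    # the invoice line's on_change, roughly
    #   res = self.product_id_change(cr, uid, [], product_id, uom_id, qty=1,
    #                                type='in_invoice', partner_id=partner_id,
    #                                company_id=company_id)
    # res['value'] then carries the computed account, taxes and price_unit, plus
    # the analytics_accounts_required flag set at the end of this method.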
account_invoice_line()
class account_account_template(osv.osv):
_name = "account.account.template"
_inherit = "account.account.template"
_columns = {
'name': fields.char('Name', size=256, required=True, select=True, translate=True),
}
account_account_template()
class account_account(osv.osv):
_name = "account.account"
_inherit = "account.account"
_columns = {
'name': fields.char('Name', size=256, required=True, select=True, translate=True),
}
account_account() | [
"[email protected]"
] | |
30563f1f0d1d655fea8cc0dad2b55e5530bab2b8 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-cph/huaweicloudsdkcph/v1/model/list_resource_instances_request.py | 0823fdc434041d9670e7c3631928d7a2eaaf42b5 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 4,217 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ListResourceInstancesRequest:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'resource_type': 'str',
'body': 'ListResourceInstancesRequestBody'
}
attribute_map = {
'resource_type': 'resource_type',
'body': 'body'
}
def __init__(self, resource_type=None, body=None):
"""ListResourceInstancesRequest
The model defined in huaweicloud sdk
        :param resource_type: Resource type. - cph-server: Cloud Phone server
:type resource_type: str
:param body: Body of the ListResourceInstancesRequest
:type body: :class:`huaweicloudsdkcph.v1.ListResourceInstancesRequestBody`
"""
self._resource_type = None
self._body = None
self.discriminator = None
self.resource_type = resource_type
if body is not None:
self.body = body
@property
def resource_type(self):
"""Gets the resource_type of this ListResourceInstancesRequest.
        Resource type. - cph-server: Cloud Phone server
:return: The resource_type of this ListResourceInstancesRequest.
:rtype: str
"""
return self._resource_type
@resource_type.setter
def resource_type(self, resource_type):
"""Sets the resource_type of this ListResourceInstancesRequest.
        Resource type. - cph-server: Cloud Phone server
:param resource_type: The resource_type of this ListResourceInstancesRequest.
:type resource_type: str
"""
self._resource_type = resource_type
@property
def body(self):
"""Gets the body of this ListResourceInstancesRequest.
:return: The body of this ListResourceInstancesRequest.
:rtype: :class:`huaweicloudsdkcph.v1.ListResourceInstancesRequestBody`
"""
return self._body
@body.setter
def body(self, body):
"""Sets the body of this ListResourceInstancesRequest.
:param body: The body of this ListResourceInstancesRequest.
:type body: :class:`huaweicloudsdkcph.v1.ListResourceInstancesRequestBody`
"""
self._body = body
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ListResourceInstancesRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
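if __name__ == "__main__":
    # Minimal self-check sketch: 'cph-server' is the documented resource type;
    # the CPH client that would send this request is assumed elsewhere.
    req = ListResourceInstancesRequest(resource_type="cph-server")
    print(req.to_dict())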
| [
"[email protected]"
] | |
056124ade6036e7d9c1b4817404a25f132abcf7f | ecba842cc189499da2c98248e92a458dbcc0dc67 | /apps/website/privacy/urls.py | 59aa42a56262eec8c222c517c823f3eb3f7c6516 | [] | no_license | aquaristar/hhlearn | c23e94ab93221419db74409f44d8310244212190 | ec409b7886bacb33cd3f5c3a724243a30158cd54 | refs/heads/master | 2023-03-10T15:46:39.740438 | 2019-11-16T19:19:02 | 2019-11-16T19:19:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 202 | py | from django.conf.urls import patterns, include, url
urlpatterns = patterns('apps.website.privacy.views',
url(r'^privacy/$', 'privacy', name='privacy'),
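    # (illustrative) further routes would be appended here, for example:
    # url(r'^privacy/policy/$', 'policy', name='privacy_policy'),  # hypothetical view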
) | [
"[email protected]"
] | |
a578e9df112d8212f39e3e751254ec4e1957cceb | 99b062cb9f5f3ff10c9f1fa00e43f6e8151a43a6 | /algorithm/day21/순열2.py | 0a6081a5a78fa25bfd7c44e27f558a5b94a4ee49 | [] | no_license | HSx3/TIL | 92acc90758015c2e31660617bd927f7f100f5f64 | 981c9aaaf09c930d980205f68a28f2fc8006efcb | refs/heads/master | 2020-04-11T21:13:36.239246 | 2019-05-08T08:18:03 | 2019-05-08T08:18:03 | 162,099,042 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 302 | py | def myprint(n):
for i in range(n):
print("%d" % (a[i]), end=' ')
print()
def perm(n, k):
if n == k:
myprint(n)
else:
for i in range(k, n):
a[i], a[k] = a[k], a[i]
perm(n, k+1)
a[i], a[k] = a[k], a[i]
a = [1, 2, 3]
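# perm swaps a[k] with each a[i] (k <= i < n) and recurses on k + 1, so the
# call below prints all 3! = 6 orderings of [1, 2, 3], in this order:
#   1 2 3
#   1 3 2
#   2 1 3
#   2 3 1
#   3 2 1
#   3 1 2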
perm(3, 0) | [
"[email protected]"
] | |
77943a4d3e4d1148d94b9ad235dc96195e234ab2 | 0e478f3d8b6c323c093455428c9094c45de13bac | /src/OTLMOW/OTLModel/Datatypes/KlVerkeersregelaarVoltage.py | 34ad4ce0c129e30a204bef55ade1b07e3f23d16f | [
"MIT"
] | permissive | davidvlaminck/OTLMOW | c6eae90b2cab8a741271002cde454427ca8b75ba | 48f8c357c475da1d2a1bc7820556843d4b37838d | refs/heads/main | 2023-01-12T05:08:40.442734 | 2023-01-10T15:26:39 | 2023-01-10T15:26:39 | 432,681,113 | 3 | 1 | MIT | 2022-06-20T20:36:00 | 2021-11-28T10:28:24 | Python | UTF-8 | Python | false | false | 1,838 | py | # coding=utf-8
import random
from OTLMOW.OTLModel.Datatypes.KeuzelijstField import KeuzelijstField
from OTLMOW.OTLModel.Datatypes.KeuzelijstWaarde import KeuzelijstWaarde
# Generated with OTLEnumerationCreator. To modify: extend, do not edit
class KlVerkeersregelaarVoltage(KeuzelijstField):
"""Keuzelijst met de voorkomende voltages gebruikt voor verkeersregelaars."""
naam = 'KlVerkeersregelaarVoltage'
label = 'Verkeersregelaar voltage'
objectUri = 'https://wegenenverkeer.data.vlaanderen.be/ns/onderdeel#KlVerkeersregelaarVoltage'
definition = 'Keuzelijst met de voorkomende voltages gebruikt voor verkeersregelaars.'
status = 'ingebruik'
codelist = 'https://wegenenverkeer.data.vlaanderen.be/id/conceptscheme/KlVerkeersregelaarVoltage'
options = {
'230': KeuzelijstWaarde(invulwaarde='230',
label='230',
status='ingebruik',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerkeersregelaarVoltage/230'),
'40': KeuzelijstWaarde(invulwaarde='40',
label='40',
status='ingebruik',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerkeersregelaarVoltage/40'),
'42': KeuzelijstWaarde(invulwaarde='42',
label='42',
status='ingebruik',
objectUri='https://wegenenverkeer.data.vlaanderen.be/id/concept/KlVerkeersregelaarVoltage/42')
}
@classmethod
def create_dummy_data(cls):
return random.choice(list(map(lambda x: x.invulwaarde,
filter(lambda option: option.status == 'ingebruik', cls.options.values()))))
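    # Illustrative read-only access (values taken from the options above):
    #   KlVerkeersregelaarVoltage.options['230'].label   -> '230'
    #   KlVerkeersregelaarVoltage.create_dummy_data()    -> a random value whose
    #                                                       status is 'ingebruik'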
| [
"[email protected]"
] | |
6177c3e5145dab0ebb77f902ac0a558478083544 | 1ee2cd179e9eb2ec7541dec5b14ce993624181b8 | /openrasp_iast/plugin/scanner/directory_basic.py | 4883ffe987a3f1031767825e28eed46ad47c6f17 | [
"Apache-2.0"
] | permissive | Ze4lfRoG/openrasp-iast | 0c8492d5c9fbe0c5b3d994f8aa703628361dd405 | 0fd4cdaae642a759cffe214de51c392b75aa828e | refs/heads/master | 2020-10-01T09:05:36.359241 | 2019-12-11T12:06:43 | 2019-12-11T12:06:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,299 | py | #!/usr/bin/env python3
# -*- coding: UTF-8 -*-
"""
Copyright 2017-2019 Baidu Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from core.components.plugin import scan_plugin_base
class ScanPlugin(scan_plugin_base.ScanPluginBase):
plugin_info = {
"name": "directory_basic",
"show_name": "目录遍历检测插件",
"description": "基础目录遍历漏洞检测插件"
}
def mutant(self, rasp_result_ins):
"""
        Generate test vectors
"""
if not rasp_result_ins.has_hook_type("directory"):
return
linux_payload_list = [
("../../../../../../../../../../../../../../../../../../../../etc", "/etc"),
("../../../../etc", "/etc"),
("/etc", "/etc")
]
windows_payload_list = [
("..\\..\\..\\..\\..\\..\\..\\..\\..\\openrasp_dir", ":/openrasp_dir"),
("file://c:\\openrasp_dir", "c:\\openrasp_dir")
]
mac_payload_list = [
("../../../../../../../../../../../../../../../../../../../../private/etc", "/private/etc"),
("../../../private/etc", "/private/etc"),
("/private/etc", "/private/etc")
]
server_os = rasp_result_ins.get_server_info()["os"]
if server_os == "Windows":
payload_list = windows_payload_list
elif server_os == "Mac":
payload_list = mac_payload_list
else:
payload_list = linux_payload_list
        # Collect all parameters to be tested
request_data_ins = self.new_request_data(rasp_result_ins)
test_params = self.mutant_helper.get_params_list(
request_data_ins, ["get", "post", "json", "headers", "cookies"])
for param in test_params:
if not request_data_ins.is_param_concat_in_hook("directory", param["value"].rstrip("/\\")):
continue
payload_seq = self.gen_payload_seq()
for payload in payload_list:
request_data_ins = self.new_request_data(
rasp_result_ins, payload_seq, payload[1])
request_data_ins.set_param(
param["type"], param["name"], payload[0])
request_data_list = [request_data_ins]
yield request_data_list
def check(self, request_data_list):
"""
        Check the request result
"""
request_data_ins = request_data_list[0]
feature = request_data_ins.get_payload_info()["feature"]
rasp_result_ins = request_data_ins.get_rasp_result()
if rasp_result_ins is None:
return None
if self.checker.check_concat_in_hook(rasp_result_ins, "directory", feature):
return "读取的目录可被用户输入控制"
else:
return None
| [
"[email protected]"
] | |
cad384be9aede5c74227c0ca4d556d1ada8cbe9a | 772e04b18f36fe1bffb05c16ef4eff3ba765fd13 | /gcnvisualizer/test/test_visualizer.py | 853d514babfbb1580b6492b0b4ad3a106332f9ae | [
"LicenseRef-scancode-other-permissive"
] | permissive | clinfo/kGCN | 3c74f552dd9d71d470a3173012b01733a1262688 | 32328d5a41e6ed7491b3edb705ff94658fc95d3f | refs/heads/master | 2023-08-16T19:43:17.149381 | 2023-08-03T00:08:11 | 2023-08-03T00:08:11 | 194,075,235 | 110 | 38 | NOASSERTION | 2022-02-04T17:09:55 | 2019-06-27T10:31:57 | Python | UTF-8 | Python | false | false | 503 | py | import unittest
import numpy as np
import pytest
from gcnvisualizer import GCNVisualizer
def test_load_normal_pickle_file(multi_modal_profeat):
for filename in multi_modal_profeat:
g = GCNVisualizer(filename, loglevel='ERROR')
assert ['smiles', 'feature',
'adjacency', 'check_scores',
'feature_IG', 'adjacency_IG',
'profeat_IG', 'vector_modal'] == (list(g.ig_dict.keys()))
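# Note: `multi_modal_profeat` is assumed to be a pytest fixture (typically
# defined in a conftest.py) yielding paths to pickled IG-result files, so this
# file is meant to be collected by pytest rather than plain unittest.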
if __name__ == "__main__":
    pytest.main([__file__])
| [
"[email protected]"
] | |
ca837dfcbf930d6469d02412264df601a0216855 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_347/ch1_2020_03_11_20_09_49_458556.py | a589fa5640321f7bdfd640839fc404d8e838e5a3 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 69 | py | def calcula_valor_devido (c, t, i):
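    # compound interest: amount due M = c * (1 + i) ** t  (principal c, periods t, rate i)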
M = c*((1+i)**t)
return M | [
"[email protected]"
] | |
4941f9ebcf4458f99db5d63c738817d92fa69967 | 5e557741c8867bca4c4bcf2d5e67409211d059a3 | /.circleci/cimodel/data/pytorch_build_data.py | e2172b660e3114b5e40a445762f40a2702cc4f22 | [
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0"
] | permissive | Pandinosaurus/pytorch | a2bb724cfc548f0f2278b5af2fd8b1d2758adb76 | bb8978f605e203fbb780f03010fefbece35ac51c | refs/heads/master | 2023-05-02T20:07:23.577610 | 2021-11-05T14:01:30 | 2021-11-05T14:04:40 | 119,666,381 | 2 | 0 | NOASSERTION | 2021-11-05T19:55:56 | 2018-01-31T09:37:34 | C++ | UTF-8 | Python | false | false | 9,147 | py | from cimodel.lib.conf_tree import ConfigNode, X, XImportant
CONFIG_TREE_DATA = [
("xenial", [
("gcc", [
("5.4", [ # All this subtree rebases to master and then build
("3.6", [
("important", [X(True)]),
]),
]),
# TODO: bring back libtorch test
]),
("cuda", [
("10.2", [
("3.6", [
# Build are needed for slow_gradcheck
('build_only', [X(True)]),
("slow_gradcheck", [
# If you update this slow gradcheck, you should
# also update docker_definitions.py to make sure
# the docker image match the config used here
(True, [
('shard_test', [XImportant(True)]),
]),
]),
# UNCOMMENT THE BELOW TO REENABLE LIBTORCH
# ("libtorch", [
# (True, [
# ('build_only', [X(True)]),
# ]),
# ]),
]),
]),
]),
]),
("bionic", [
("clang", [
("9", [
("3.6", [
("xla", [XImportant(True)]),
]),
]),
]),
# @jithunnair-amd believes Jenkins builds are sufficient
# ("rocm", [
# ("3.9", [
# ("3.6", [
# ('build_only', [XImportant(True)]),
# ]),
# ]),
# ]),
]),
]
def get_major_pyver(dotted_version):
parts = dotted_version.split(".")
return "py" + parts[0]
class TreeConfigNode(ConfigNode):
def __init__(self, parent, node_name, subtree):
super(TreeConfigNode, self).__init__(parent, self.modify_label(node_name))
self.subtree = subtree
self.init2(node_name)
def modify_label(self, label):
return label
def init2(self, node_name):
pass
def get_children(self):
return [self.child_constructor()(self, k, v) for (k, v) in self.subtree]
class TopLevelNode(TreeConfigNode):
def __init__(self, node_name, subtree):
super(TopLevelNode, self).__init__(None, node_name, subtree)
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return DistroConfigNode
class DistroConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["distro_name"] = node_name
def child_constructor(self):
distro = self.find_prop("distro_name")
next_nodes = {
"xenial": XenialCompilerConfigNode,
"bionic": BionicCompilerConfigNode,
}
return next_nodes[distro]
class PyVerConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["pyver"] = node_name
self.props["abbreviated_pyver"] = get_major_pyver(node_name)
if node_name == "3.9":
self.props["abbreviated_pyver"] = "py3.9"
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return ExperimentalFeatureConfigNode
class ExperimentalFeatureConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["experimental_feature"] = node_name
def child_constructor(self):
experimental_feature = self.find_prop("experimental_feature")
next_nodes = {
"asan": AsanConfigNode,
"xla": XlaConfigNode,
"mlc": MLCConfigNode,
"vulkan": VulkanConfigNode,
"parallel_tbb": ParallelTBBConfigNode,
"noarch": NoarchConfigNode,
"parallel_native": ParallelNativeConfigNode,
"onnx": ONNXConfigNode,
"libtorch": LibTorchConfigNode,
"important": ImportantConfigNode,
"build_only": BuildOnlyConfigNode,
"shard_test": ShardTestConfigNode,
"cuda_gcc_override": CudaGccOverrideConfigNode,
"pure_torch": PureTorchConfigNode,
"slow_gradcheck": SlowGradcheckConfigNode,
}
return next_nodes[experimental_feature]
class SlowGradcheckConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["is_slow_gradcheck"] = True
def child_constructor(self):
return ExperimentalFeatureConfigNode
class PureTorchConfigNode(TreeConfigNode):
def modify_label(self, label):
return "PURE_TORCH=" + str(label)
def init2(self, node_name):
self.props["is_pure_torch"] = node_name
def child_constructor(self):
return ImportantConfigNode
class XlaConfigNode(TreeConfigNode):
def modify_label(self, label):
return "XLA=" + str(label)
def init2(self, node_name):
self.props["is_xla"] = node_name
def child_constructor(self):
return ImportantConfigNode
class MLCConfigNode(TreeConfigNode):
def modify_label(self, label):
return "MLC=" + str(label)
def init2(self, node_name):
self.props["is_mlc"] = node_name
def child_constructor(self):
return ImportantConfigNode
class AsanConfigNode(TreeConfigNode):
def modify_label(self, label):
return "Asan=" + str(label)
def init2(self, node_name):
self.props["is_asan"] = node_name
def child_constructor(self):
return ExperimentalFeatureConfigNode
class ONNXConfigNode(TreeConfigNode):
def modify_label(self, label):
return "Onnx=" + str(label)
def init2(self, node_name):
self.props["is_onnx"] = node_name
def child_constructor(self):
return ImportantConfigNode
class VulkanConfigNode(TreeConfigNode):
def modify_label(self, label):
return "Vulkan=" + str(label)
def init2(self, node_name):
self.props["is_vulkan"] = node_name
def child_constructor(self):
return ImportantConfigNode
class ParallelTBBConfigNode(TreeConfigNode):
def modify_label(self, label):
return "PARALLELTBB=" + str(label)
def init2(self, node_name):
self.props["parallel_backend"] = "paralleltbb"
def child_constructor(self):
return ImportantConfigNode
class NoarchConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["is_noarch"] = node_name
def child_constructor(self):
return ImportantConfigNode
class ParallelNativeConfigNode(TreeConfigNode):
def modify_label(self, label):
return "PARALLELNATIVE=" + str(label)
def init2(self, node_name):
self.props["parallel_backend"] = "parallelnative"
def child_constructor(self):
return ImportantConfigNode
class LibTorchConfigNode(TreeConfigNode):
def modify_label(self, label):
return "BUILD_TEST_LIBTORCH=" + str(label)
def init2(self, node_name):
self.props["is_libtorch"] = node_name
def child_constructor(self):
return ExperimentalFeatureConfigNode
class CudaGccOverrideConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["cuda_gcc_override"] = node_name
def child_constructor(self):
return ExperimentalFeatureConfigNode
class BuildOnlyConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["build_only"] = node_name
def child_constructor(self):
return ExperimentalFeatureConfigNode
class ShardTestConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["shard_test"] = node_name
def child_constructor(self):
return ImportantConfigNode
class ImportantConfigNode(TreeConfigNode):
def modify_label(self, label):
return "IMPORTANT=" + str(label)
def init2(self, node_name):
self.props["is_important"] = node_name
def get_children(self):
return []
class XenialCompilerConfigNode(TreeConfigNode):
def modify_label(self, label):
return label or "<unspecified>"
def init2(self, node_name):
self.props["compiler_name"] = node_name
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return XenialCompilerVersionConfigNode if self.props["compiler_name"] else PyVerConfigNode
class BionicCompilerConfigNode(TreeConfigNode):
def modify_label(self, label):
return label or "<unspecified>"
def init2(self, node_name):
self.props["compiler_name"] = node_name
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return BionicCompilerVersionConfigNode if self.props["compiler_name"] else PyVerConfigNode
class XenialCompilerVersionConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["compiler_version"] = node_name
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return PyVerConfigNode
class BionicCompilerVersionConfigNode(TreeConfigNode):
def init2(self, node_name):
self.props["compiler_version"] = node_name
# noinspection PyMethodMayBeStatic
def child_constructor(self):
return PyVerConfigNode
| [
"[email protected]"
] | |
182933ad1e32acc47eb2cfc12c855e4c86b85ade | e116a28a8e4d07bb4de1812fde957a38155eb6df | /polar_test2.py | 0ee481b4a0480d2965c11c666d56aaca4fe2291a | [] | no_license | gl-coding/EasyPyEcharts | 5582ddf6be3158f13663778c1038767a87756216 | f9dbe8ad7389a6e2629643c9b7af7b9dc3bfccd5 | refs/heads/master | 2020-09-29T20:48:46.260306 | 2019-12-10T12:52:24 | 2019-12-10T12:52:24 | 227,119,587 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 500 | py | #encoding=utf-8
from pyecharts import Polar
radius = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
polar = Polar("Polar coordinates - stacked bar chart example", width=1200, height=600)
polar.add("", [1, 2, 3, 4, 3, 5, 1], radius_data=radius, type='barAngle', is_stack=True)
polar.add("", [2, 4, 6, 1, 2, 3, 1], radius_data=radius, type='barAngle', is_stack=True)
polar.add("", [1, 2, 3, 4, 1, 2, 5], radius_data=radius, type='barAngle', is_stack=True)
polar.show_config()
polar.render()
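# Note: this targets the legacy pyecharts 0.x API (Polar imported from the
# top-level package). render() writes render.html in the working directory by
# default; a path can be passed to choose the output file, e.g.:
# polar.render('polar_stacked_bar.html')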
| [
"[email protected]"
] | |
bae46eca925d4eec064cfa40ac5ad479eccddd16 | 6a01a9287a4c23c7f11b7c5399cfb96bbe42eba8 | /python/scripts/get_nearest_k_features.py | 068a702adc3492255203e07630813f3fd49b6ade | [
"MIT"
] | permissive | xguse/gmm-to-gff-transcripts-vs-snps | 3c25bf2752aee76174d5dab92060fe7269caf99f | 75337135ab8ff6d840af3cfccfe6404a06777a54 | refs/heads/master | 2021-01-19T01:50:33.473897 | 2016-08-02T20:31:18 | 2016-08-02T20:31:18 | 54,731,430 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,943 | py | """For each SNP file, produce a bed representing the nearest k gene or mapped transcript features and its distance from the SNP."""
import pybedtools as pbt
import pandas as pd
k_number = snakemake.params.k_number
snp_beds = snakemake.input.snp_beds
gene_model_subtracted = snakemake.input.gene_model_subtracted
gene_models = snakemake.input.gene_models
nearest_features_beds = snakemake.output.nearest_features_beds
snps_in_features = snakemake.output.snps_in_features
headers = ["SNP_chrom",
"SNP_start",
"SNP_end",
"feature_set_name",
"chrom",
"chromStart",
"chromEnd",
"name",
"score",
"strand",
"thickStart",
"thickEnd",
"itemRgb",
"blockCount",
"blockSizes",
"blockStarts",
"distance"
]
for snp_bed, nearest_bed, feature_hit_file in zip(snp_beds, nearest_features_beds, snps_in_features):
snp_bed = pbt.BedTool(snp_bed)
gene_model_subtracted_bed = pbt.BedTool(gene_model_subtracted)
gene_models_bed = pbt.BedTool(gene_models)
k_nearest = snp_bed.closest([gene_model_subtracted_bed.fn,
gene_models_bed.fn],
k=k_number,
names=['novel_mapped_tx', 'official_annotations'],
D='ref', # Include SIGNED distances from SNP based on the ref genome
t='all', # Return all members of a distance "tie"
                               mdb='each', # Return `k_number` of neighbors for each `names` database
)
k_nearest.saveas(nearest_bed)
    nearest_df = pd.read_csv(nearest_bed, sep="\t", names=headers)
    # A signed distance of 0 means the SNP falls inside the feature itself.
    in_features = nearest_df.query(""" abs(distance) <= 0 """)
in_features.to_excel(feature_hit_file, index=False)
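# A minimal sketch of the Snakemake rule that would drive this script (rule
# name, file names, and k value are illustrative assumptions, not taken from
# the real workflow):
# rule get_nearest_k_features:
#     input:
#         snp_beds=["snps_a.bed"],
#         gene_model_subtracted="novel_tx.bed",
#         gene_models="annotations.bed",
#     output:
#         nearest_features_beds=["snps_a.nearest.bed"],
#         snps_in_features=["snps_a.in_features.xls"],
#     params:
#         k_number=5,
#     script:
#         "scripts/get_nearest_k_features.py"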
| [
"[email protected]"
] |