Dataset columns (⌀ marks a nullable field):

| column | type | length / range / distinct values |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3 to 616 |
| content_id | string | length 40 |
| detected_licenses | list | length 0 to 112 |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64 ⌀ | 4.92k to 681M |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] ⌀ | 2012-06-04 01:52:49 to 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us] ⌀ | 2008-05-22 07:58:19 to 2023-08-21 12:35:19 |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | length 3 to 10.2M |
| authors | list | length 1 |
| author_id | string | length 1 to 132 |
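If this preview corresponds to a dataset hosted on the Hugging Face Hub (an assumption; the dataset id below is a placeholder), the rows can be streamed with the `datasets` library rather than downloaded in full:

```python
from datasets import load_dataset

# "org/dataset-name" is a placeholder; substitute the actual dataset id.
ds = load_dataset("org/dataset-name", split="train", streaming=True)
for row in ds:
    # Each row pairs the repository metadata with the raw file content.
    print(row["repo_name"], row["path"], row["length_bytes"])
    break
```

Sample rows follow; in this flattened preview each row lists the metadata fields separated by `|`, then the raw file content, then the redacted author fields.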
58a0b89d9a44e9b44d96235ba45354df6142d209 | b15848c78b6ed07d27cae74b90ae99a27d7acf24 | /DataParser/DataParser/settings.py | dae1081c2349a6f3414aead9e32dbee48c5bbd29 | [
"MIT"
]
| permissive | CodeBoss86/DataParser | ba988462de6e1cc1ae156e3407fbdea06fa5efc8 | c9e09f0975145a4ca0a3645699ee91adee49cd2c | refs/heads/main | 2023-01-19T01:51:31.178645 | 2020-11-17T13:38:47 | 2020-11-17T13:38:47 | 316,596,514 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,872 | py | """
Django settings for DataParser project.
Generated by 'django-admin startproject' using Django 3.1.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
# from corsheaders.defaults import default_headers
import os
from dotenv import load_dotenv
from pathlib import Path
# from celery.schedules import crontab
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
ENV_PATH = BASE_DIR / '.env'
load_dotenv(ENV_PATH)
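# Example .env consumed below (illustrative values only, not from this repo):
#   SECRET_KEY=change-me
#   DEBUG=True
#   ALLOWED_HOSTS=localhost,127.0.0.1
#   DB_NAME=dataparser
#   DB_USER=postgres
#   DB_PASSWORD=secret
#   DB_HOST=127.0.0.1
#   DB_PORT=5432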
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.getenv('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = os.getenv('DEBUG', 'False') == 'True'  # getenv returns a string; any non-empty value (even "False") would otherwise be truthy
splittedHosts = os.getenv('ALLOWED_HOSTS', '').split(',')  # default to '' so a missing variable does not raise AttributeError
ALLOWED_HOSTS = splittedHosts
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'core',
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
# 'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
DJANGO_ALLOW_ASYNC_UNSAFE = True
ROOT_URLCONF = 'DataParser.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'DataParser.wsgi.application'
ASGI_APPLICATION = 'DataParser.asgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': BASE_DIR / 'db.sqlite3',
# }
# }
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.getenv('DB_NAME'),
'USER': os.getenv('DB_USER'),
'PASSWORD': os.getenv('DB_PASSWORD'),
'HOST': os.getenv('DB_HOST'),
'PORT': os.getenv('DB_PORT'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
ASGI_APPLICATION = 'DataParser.routing.application'  # NOTE: overrides the ASGI_APPLICATION assigned earlier in this file
CORS_ORIGIN_ALLOW_ALL = True
CORS_ALLOW_CREDENTIALS = False
| [
"[email protected]"
]
| |
ca34e03ef4a90a8d5d4c34a0ada17be32fc3c867 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/BuildLinks1.10/test_input/CJ_16_1/16_1_1_evan176_solve.py | 176dc3d014d5ea7ed28dc4e8bea96de713789acf | []
| no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 581 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import math
import time
def compute(word):
result = [word[0]]
for i in range(1, len(word)):
alpha = word[i]
if alpha >= result[0]:
result.insert(0, alpha)
else:
result.append(alpha)
return ''.join(result)
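# e.g. compute("abc") -> "cba": a letter >= the current front is prepended,
# anything smaller is appended, building the lexicographically largest result.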
if __name__ == "__main__":
with open(sys.argv[1], 'r') as f:
cases = int(f.readline())
for i in range(cases):
word = f.readline().strip()
result = compute(word)
print('Case #{}: {}'.format(i+1, result))
| [
"[[email protected]]"
]
| |
62fc3c89e7939ee66309da0c228d3a0ca205b6c6 | 71eb367210e8ffd3b4964a8c99e3ac6f2920fdbb | /wedding/management/commands/make_backup.py | f92cd208fa723e3a4afbcc78c347424c2bb91e03 | [
"MIT"
]
| permissive | jsayles/wedding | 392771dc894fb311414b2d34ceb4319318d8eefb | 242d28d0271d58909b2c5ff5457d909efaecd3c0 | refs/heads/master | 2020-04-18T01:26:57.433729 | 2015-09-04T15:18:03 | 2015-09-04T15:18:03 | 28,720,827 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 996 | py | import os
import time
import urllib
import sys
import datetime
from django.core.management.base import BaseCommand, CommandError
from wedding.backup import BackupManager
class Command(BaseCommand):
help = "Creates a backup containing an SQL dump and the media files."
args = ""
requires_model_validation = False
def handle(self, *labels, **options):
manager = BackupManager()
        print(manager.make_backup())
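        # Run through Django's management interface, e.g.:
        #   python manage.py make_backup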
# Copyright 2011 Trevor F. Smith (http://trevor.smith.name/) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
| [
"[email protected]"
]
| |
f8b71f47242faeeccc05326262d862d05d57a7fe | e7b7cc34f77c71e61aa0fa05bcc62f54fc2fc0e1 | /BinarySearch/q374_guess_number_higher_or_lower.py | 000686ff073c0f98c294124c4f8a8ca531d32f01 | []
| no_license | sevenhe716/LeetCode | 41d2ef18f5cb317858c9b69d00bcccb743cbdf48 | 4a1747b6497305f3821612d9c358a6795b1690da | refs/heads/master | 2020-03-16T16:12:27.461172 | 2019-04-22T13:27:54 | 2019-04-22T13:27:54 | 130,221,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,562 | py | # Time: O(n)
# Space: O(1)
# Approach:
# binary search
# The guess API is already defined for you.
# @param num, your guess
# @return -1 if my number is lower, 1 if my number is higher, otherwise return 0
import bisect
pick = 0
def guess(num):
if num == pick:
return 0
elif num > pick:
return -1
else:
return 1
class Solution(object):
def guessNumber(self, n):
"""
:type n: int
:rtype: int
"""
lo, hi = 1, n
while lo <= hi:
mid = lo + (hi - lo) // 2
result = guess(mid)
if result == 0:
return mid
elif result == 1:
lo = mid + 1
else:
hi = mid - 1
return -1
# Ternary search: splits the range into thirds, dropping the probe count to roughly log3(2n)
class Solution1:
def guessNumber(self, n):
"""
:type n: int
:rtype: int
"""
low, high = 1, n
while low <= high:
mid1 = low + (high - low) // 3
mid2 = high - (high - low) // 3
res1, res2 = guess(mid1), guess(mid2)
if res1 == 0:
return mid1
if res2 == 0:
return mid2
elif res1 < 0:
high = mid1 - 1
elif res2 > 0:
low = mid2 + 1
else:
low, high = mid1 + 1, mid2 - 1
return -1
def guessNumber1(self, n):
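        # -guess(i) makes the virtual sequence non-decreasing over i
        # (-1 for i < pick, 0 at pick, 1 beyond); bisect_right for -1 then
        # returns the first index whose value exceeds -1, i.e. pick itself.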
class C: __getitem__ = lambda _, i: -guess(i)
return bisect.bisect(C(), -1, 1, n)
| [
"[email protected]"
]
| |
b1347c88770f1eb0a81a06dfaf9e693cbf5b465a | b4afd14e3b4e9cff0a99906a69587e348b243aeb | /mocc/beida/pythonds/stackop.py | 424989f76facb6d29739792e959118a1d1b1b7d9 | []
| no_license | zhankq/pythonlearn | d694df23826cda6ba662e852e531e96a10ab2092 | cb714fbb8257193029f958e73e0f9bd6a68d77f1 | refs/heads/master | 2021-12-16T13:51:23.381206 | 2021-12-03T01:13:36 | 2021-12-03T01:13:36 | 205,632,135 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 342 | py | class Stack:
def __init__(self):
self.items = []
def isEmpty(self):
return self.items == []
def push(self,item):
self.items.append(item)
def pop(self):
return self.items.pop()
def peek(self):
return self.items[len(self.items)-1]
def size(self):
return len(self.items)
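# Illustrative usage:
#   s = Stack(); s.push(4); s.push('dog')
#   s.peek()  # -> 'dog'
#   s.pop()   # -> 'dog'
#   s.size()  # -> 1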
| [
"[email protected]"
]
| |
7a81a710368d8388719fd9da8283fa4d6989e5c2 | d13ee6238418d047f9fe6ddbd5525fd0487d4233 | /hc/front/tests/test_channels.py | 1007a821f8abff8784f2b2d318f195c0357cf4d7 | [
"BSD-3-Clause"
]
| permissive | iphoting/healthchecks | b4ffb7cd2a254c1a8daa490608ff4d5a96c560da | 924fc7df60dbf97b82a1f82989507459802f7028 | refs/heads/heroku | 2022-03-06T08:32:11.626016 | 2019-10-07T14:37:20 | 2022-02-19T09:37:57 | 82,822,882 | 11 | 7 | BSD-3-Clause | 2021-09-28T07:59:39 | 2017-02-22T15:51:02 | Python | UTF-8 | Python | false | false | 6,130 | py | import json
from hc.api.models import Channel
from hc.test import BaseTestCase
class ChannelsTestCase(BaseTestCase):
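    # self.project, self.channels_url and self.bobs_membership are fixtures
    # presumably set up by hc.test.BaseTestCase.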
def test_it_formats_complex_slack_value(self):
ch = Channel(kind="slack", project=self.project)
ch.value = json.dumps(
{
"ok": True,
"team_name": "foo-team",
"incoming_webhook": {"url": "http://example.org", "channel": "#bar"},
}
)
ch.save()
self.client.login(username="[email protected]", password="password")
r = self.client.get(self.channels_url)
self.assertContains(r, "foo-team", status_code=200)
self.assertContains(r, "#bar")
def test_it_shows_webhook_post_data(self):
ch = Channel(kind="webhook", project=self.project)
ch.value = json.dumps(
{
"method_down": "POST",
"url_down": "http://down.example.com",
"body_down": "foobar",
"headers_down": {},
"method_up": "GET",
"url_up": "http://up.example.com",
"body_up": "",
"headers_up": {},
}
)
ch.save()
self.client.login(username="[email protected]", password="password")
r = self.client.get(self.channels_url)
self.assertEqual(r.status_code, 200)
# These are inside a modal:
self.assertContains(r, "http://down.example.com")
self.assertContains(r, "http://up.example.com")
self.assertContains(r, "foobar")
def test_it_shows_pushover_details(self):
ch = Channel(kind="po", project=self.project)
ch.value = "fake-key|0"
ch.save()
self.client.login(username="[email protected]", password="password")
r = self.client.get(self.channels_url)
self.assertEqual(r.status_code, 200)
self.assertContains(r, "(normal priority)")
def test_it_shows_unconfirmed_email(self):
channel = Channel(project=self.project, kind="email")
channel.value = "[email protected]"
channel.save()
self.client.login(username="[email protected]", password="password")
r = self.client.get(self.channels_url)
self.assertEqual(r.status_code, 200)
self.assertContains(r, "Unconfirmed")
def test_it_shows_down_only_note_for_email(self):
channel = Channel(project=self.project, kind="email")
channel.value = json.dumps(
{"value": "[email protected]", "up": False, "down": True}
)
channel.save()
self.client.login(username="[email protected]", password="password")
r = self.client.get(self.channels_url)
self.assertEqual(r.status_code, 200)
self.assertContains(r, "(down only)")
def test_it_shows_up_only_note_for_email(self):
channel = Channel(project=self.project, kind="email")
channel.value = json.dumps(
{"value": "[email protected]", "up": True, "down": False}
)
channel.save()
self.client.login(username="[email protected]", password="password")
r = self.client.get(self.channels_url)
self.assertEqual(r.status_code, 200)
self.assertContains(r, "(up only)")
def test_it_shows_sms_number(self):
ch = Channel(kind="sms", project=self.project)
ch.value = json.dumps({"value": "+123"})
ch.save()
self.client.login(username="[email protected]", password="password")
r = self.client.get(self.channels_url)
self.assertEqual(r.status_code, 200)
self.assertContains(r, "SMS to +123")
def test_it_shows_channel_issues_indicator(self):
Channel.objects.create(kind="sms", project=self.project, last_error="x")
self.client.login(username="[email protected]", password="password")
r = self.client.get(self.channels_url)
self.assertContains(r, "broken-channels", status_code=200)
def test_it_hides_actions_from_readonly_users(self):
self.bobs_membership.role = "r"
self.bobs_membership.save()
Channel.objects.create(project=self.project, kind="webhook", value="{}")
self.client.login(username="[email protected]", password="password")
r = self.client.get(self.channels_url)
self.assertNotContains(r, "Add Integration", status_code=200)
self.assertNotContains(r, "ic-delete")
self.assertNotContains(r, "edit_webhook")
def test_it_shows_down_only_note_for_sms(self):
channel = Channel(project=self.project, kind="sms")
channel.value = json.dumps({"value": "+123123123", "up": False, "down": True})
channel.save()
self.client.login(username="[email protected]", password="password")
r = self.client.get(self.channels_url)
self.assertEqual(r.status_code, 200)
self.assertContains(r, "(down only)")
def test_it_shows_up_only_note_for_sms(self):
channel = Channel(project=self.project, kind="sms")
channel.value = json.dumps({"value": "+123123123", "up": True, "down": False})
channel.save()
self.client.login(username="[email protected]", password="password")
r = self.client.get(self.channels_url)
self.assertEqual(r.status_code, 200)
self.assertContains(r, "(up only)")
def test_it_shows_disabled_note(self):
ch = Channel(kind="slack", project=self.project)
ch.value = "https://example.org"
ch.disabled = True
ch.save()
self.client.login(username="[email protected]", password="password")
r = self.client.get(self.channels_url)
self.assertContains(r, "label-danger", status_code=200)
def test_it_shows_fix_button_for_disabled_email(self):
ch = Channel(kind="email", project=self.project)
ch.value = "[email protected]"
ch.disabled = True
ch.save()
self.client.login(username="[email protected]", password="password")
r = self.client.get(self.channels_url)
self.assertContains(r, "Fix…", status_code=200)
| [
"[email protected]"
]
| |
3cd329b8c34f33fda57e67ec19ffd58aa08cc7d6 | 6044266e775c87afed99397c8bb88366fbbca0e7 | /scrapy_projt/python_itertools/zip_longest_fillvalue.py | b9edce215a1bab2bb5e70645bae16021409cd99a | []
| no_license | ranafge/all-documnent-projects | e4434b821354076f486639419598fd54039fb5bd | c9d65ddea291c53b8e101357547ac63a36406ed9 | refs/heads/main | 2023-05-08T20:01:20.343856 | 2021-05-30T10:44:28 | 2021-05-30T10:44:28 | 372,186,355 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,270 | py | from itertools import zip_longest
import re
from itertools import chain
dates = ['21/11/2044', '31/12/2018', '23/9/3000', '25/12/2007']
text = ['What are dates? ', ', is an example.\n', ', is another format as well.\n',
', also exists, but is a bit ludicrous\n', ', are examples but more commonly used']
print([w for x in zip_longest(text, dates, fillvalue='') for w in x if w])
ls = ['1 Paris-SG 42 20 13 3 4 +33',
'2 Lille 42 20 12 6 2 +20',
'3 Lyon 40 20 11 7 2 +20',
'4 Monaco 36 20 11 3 6 +10']
convert_2d_list = [i.split(maxsplit=2) for i in ls]
print(convert_2d_list)
my_list_dict = {
'L1': ['a', 'b', 'c', 'd'],
'L2': ['e', 'f', 'g', 'h']
}
def check_value_return_key(c):
    # Return the key whose value list contains c, or None if no list does.
    # The original returned None on the first non-matching key, so values
    # stored in later lists were never found.
    for k, v in my_list_dict.items():
        if c in v:
            return k
    return None
print(check_value_return_key('g'))
def find_key(c):
    # Raise only after every list has been checked, not on the first miss.
    for k, v in my_list_dict.items():
        if c in v:
            return k
    raise Exception("value '{}' not found".format(c))
find_key("a")
a = [[[5],[3]],[[4],[5]],[[6],[7]]]
print([list(chain.from_iterable(l)) for l in a])
my_list = [0, 1, 2, 2, 1, 20, 21, 21, 20, 3, 23, 22]
num_map = {j:i for i, j in enumerate(sorted(set(my_list)))}
print(num_map)
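# num_map ranks each distinct value in sorted order, e.g. for my_list above:
# {0: 0, 1: 1, 2: 2, 3: 3, 20: 4, 21: 5, 22: 6, 23: 7}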
| [
"[email protected]"
]
| |
b6c69394d9cb24e853932d6a9d1f96608694f81a | 79b1d3d8ffbda5297fff6fefe2528e303bf2110a | /RSGGenFragment/RSToQQ/RSGravitonToQuarkQuark_W-0p25_M_1500_TuneCUETP8M1_13TeV_pythia8_cfi.py | 6e503b562929e62717577f7d52137212a9732aca | []
| no_license | yguler/MCFragments-1 | 25745a043653d02be3a4c242c1a85af221fc34b3 | 7c4d10ee59e00f997221109bf006819fd645b92f | refs/heads/master | 2021-01-13T14:09:12.811554 | 2016-12-11T15:57:37 | 2016-12-11T15:57:37 | 76,184,433 | 0 | 0 | null | 2016-12-11T15:59:22 | 2016-12-11T15:59:22 | null | UTF-8 | Python | false | false | 1,323 | py | import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
comEnergy = cms.double(13000.0),
crossSection = cms.untracked.double(0.00000782),
filterEfficiency = cms.untracked.double(1),
maxEventsToPrint = cms.untracked.int32(0),
pythiaHepMCVerbosity = cms.untracked.bool(False),
pythiaPylistVerbosity = cms.untracked.int32(1),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CUEP8M1SettingsBlock,
processParameters = cms.vstring(
'ExtraDimensionsG*:ffbar2G* = on',
'ExtraDimensionsG*:kappaMG = 2.276101242',
'5100039:m0 = 1500',
'5100039:onMode = off',
'5100039:onIfAny = 1 2 3 4 5'
),
parameterSets = cms.vstring('pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters',
)
)
)
ProductionFilterSequence = cms.Sequence(generator)
| [
"[email protected]"
]
| |
567e938c3da300c10dac470fe7bba73fefa308e1 | 8ca34f6da28f4b2cb2ae7a242e2156581426a950 | /apps/customer/migrations/0006_remove_job_job_type_remove_job_status.py | 501fd52e93fbb98072802b9b099caa2cb8297ea6 | []
| no_license | gray-adeyi/prime | 7e2360424560beb24742f93aa3f7b3b5cd484150 | 83b728db767e6f1b2237e10400fa95861ce1c8f3 | refs/heads/main | 2022-06-17T19:00:52.432315 | 2022-05-19T10:19:56 | 2022-05-19T10:19:56 | 225,469,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 426 | py | # Generated by Django 4.0.3 on 2022-05-04 10:43
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('customer', '0005_alter_job_copies'),
]
operations = [
migrations.RemoveField(
model_name='job',
name='job_type',
),
migrations.RemoveField(
model_name='job',
name='status',
),
]
| [
"[email protected]"
]
| |
b581261136eb5820caa1c37ee4e42eee9145a808 | 32dda10669e459cf37c31f426fa709001d2c75b0 | /leetcode_cn/solved/pg_709.py | 3d384ea50d36704b8ae5931bf4436c70958659b5 | []
| no_license | fastso/learning-python | 3300f50d06871245d0bfcbe9d201224580f70852 | d21dbd1b9f31017cdb1ed9b9ffd1e53ffe326572 | refs/heads/master | 2023-02-10T14:43:53.726247 | 2023-01-26T10:14:59 | 2023-01-26T10:14:59 | 193,454,718 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 244 | py | class Solution:
def toLowerCase(self, s: str) -> str:
l = list(s)
for i in range(len(l)):
o = ord(l[i])
if 64 < o < 91:
o += 32
l[i] = chr(o)
return ''.join(l)
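# e.g. Solution().toLowerCase("Hello") -> "hello": codes 65-90 ('A'-'Z')
# are shifted by 32 into 97-122 ('a'-'z').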
| [
"[email protected]"
]
| |
6e638314f02ee8aa6919f68c5b79ab506004a312 | df9a467c0d47eafde9bf5d2181347ad00bf53c06 | /leetcode/most_liked/739_daily_temperatures.py | b1783bb29cf96d7abdb26011f592ae371ea26b9f | []
| no_license | eunjungchoi/algorithm | 63d904d92e16ab0917faa585326e9281d61d6000 | 1c9528e26752b723e1d128b020f6c5291ed5ca19 | refs/heads/master | 2023-01-06T20:54:06.567512 | 2020-11-14T11:13:05 | 2020-11-14T11:13:05 | 288,323,344 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,646 | py | # Given a list of daily temperatures T, return a list such that, for each day in the input,
# tells you how many days you would have to wait until a warmer temperature.
# If there is no future day for which this is possible, put 0 instead.
#
# For example, given the list of temperatures T = [73, 74, 75, 71, 69, 72, 76, 73],
# your output should be [1, 1, 4, 2, 1, 1, 0, 0].
#
# Note: The length of temperatures will be in the range [1, 30000].
# Each temperature will be an integer in the range [30, 100].
# Given the list T of daily Fahrenheit temperatures, report for each day how many days you must wait for a warmer temperature.
from typing import List
class Solution:
def dailyTemperatures(self, T: List[int]) -> List[int]:
        # Compare against values kept on the stack
stack = [0]
results = [0] * len(T)
        # Keep pushing indices onto the stack; at a point where the temperature rises, compare the current temperature
        # with the ones at the stacked indices, and while it is higher, pop that index and record the gap between the
        # current index and the popped one as the answer.
for i, temp in enumerate(T):
while stack and temp > T[stack[-1]]:
last = stack.pop()
results[last] = i - last
stack.append(i)
return results
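# For T = [73, 74, 75, 71, 69, 72, 76, 73] this returns [1, 1, 4, 2, 1, 1, 0, 0].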
# 37 / 37 test cases passed.
# Status: Accepted
# Runtime: 492 ms
# Memory Usage: 17.2 MB
#
# Your runtime beats 71.54 % of python3 submissions.
# Your memory usage beats 89.19 % of python3 submissions.
# Reference: "Python Algorithm Interview" (book).
| [
"[email protected]"
]
| |
c949fe10046ed1243b9b5b457337815e7cd492b2 | 124df74bce796598d224c4380c60c8e95756f761 | /pythonPackages/matplotlib/doc/conf.py | f5e23c3021a3bf6281ee5318d4e0041ff5fd7269 | []
| no_license | Mapoet/AWIPS-Test | 19059bbd401573950995c8cc442ddd45588e6c9f | 43c5a7cc360b3cbec2ae94cb58594fe247253621 | refs/heads/master | 2020-04-17T03:35:57.762513 | 2017-02-06T17:17:58 | 2017-02-06T17:17:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,924 | py | # -*- coding: utf-8 -*-
#
# Matplotlib documentation build configuration file, created by
# sphinx-quickstart on Fri May 2 12:33:25 2008.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# The contents of this file are pickled, so don't put values in the namespace
# that aren't pickleable (module imports are okay, they're removed automatically).
#
# All configuration values have a default value; values that are commented out
# serve to show the default value.
import sys, os
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
sys.path.append(os.path.abspath('sphinxext'))
# Import support for ipython console session syntax highlighting (lives
# in the sphinxext directory defined above)
import ipython_console_highlighting
# General configuration
# ---------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['matplotlib.sphinxext.mathmpl', 'math_symbol_table',
'sphinx.ext.autodoc', 'matplotlib.sphinxext.only_directives',
'matplotlib.sphinxext.plot_directive', 'inheritance_diagram',
'gen_gallery', 'gen_rst']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'contents'
# General substitutions.
project = 'Matplotlib'
copyright = '2008, John Hunter, Darren Dale, Michael Droettboom'
# The default replacements for |version| and |release|, also used in various
# other places throughout the built documents.
#
# The short X.Y version.
import matplotlib
version = matplotlib.__version__
# The full version, including alpha/beta/rc tags.
release = version
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
unused_docs = []
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Plot directive configuration
# ----------------------------
plot_formats = ['png', 'hires.png', 'pdf']
# Options for HTML output
# -----------------------
# The style sheet to use for HTML and HTML Help pages. A file of that name
# must exist either in Sphinx' static/ path, or in one of the custom paths
# given in html_static_path.
#html_style = 'matplotlib.css'
html_style = 'mpl.css'
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# The name of an image file (within the static path) to place at the top of
# the sidebar.
#html_logo = 'logo.png'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If nonempty, this is the file name suffix for generated HTML files. The
# default is ``".html"``.
html_file_suffix = '.html'
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Content template for the index page.
html_index = 'index.html'
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Custom sidebar templates, maps page names to templates.
html_sidebars = {'index': 'indexsidebar.html',
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
html_additional_pages = {'index': 'index.html', 'gallery':'gallery.html'}
# If false, no module index is generated.
#html_use_modindex = True
# If true, the reST sources are included in the HTML build as _sources/<name>.
#html_copy_source = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.
html_use_opensearch = 'False'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Matplotlibdoc'
# Options for LaTeX output
# ------------------------
# The paper size ('letter' or 'a4').
latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
latex_font_size = '11pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, document class [howto/manual]).
latex_documents = [
('contents', 'Matplotlib.tex', 'Matplotlib', 'Darren Dale, Michael Droettboom, Eric Firing, John Hunter', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
latex_logo = None
# Additional stuff for the LaTeX preamble.
latex_preamble = """
\usepackage{amsmath}
\usepackage{amsfonts}
\usepackage{amssymb}
\usepackage{txfonts}
"""
# Documents to append as an appendix to all manuals.
latex_appendices = []
# If false, no module index is generated.
latex_use_modindex = True
latex_use_parts = True
# Show both class-level docstring and __init__ docstring in class
# documentation
autoclass_content = 'both'
| [
"[email protected]"
]
| |
805056a25de493b432d80c6096bb9e9609fc3573 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /EjjBGn7hkmhgxqJej_11.py | ad87f45d4681248fbbf11c2febfac2a7ccef7ffa | []
| no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 881 | py | """
A word nest is created by taking a starting word, and generating a new string
by placing the word _inside_ itself. This process is then repeated.
Nesting 3 times with the word "incredible":
start = incredible
first = incre|incredible|dible
second = increin|incredible|credibledible
third = increinincr|incredible|ediblecredibledible
The final nest is `"increinincrincredibleediblecredibledible"` (depth = 3).
Given a _starting word_ and the _final word nest_ , return the _depth_ of the
word nest.
### Examples
word_nest("floor", "floor") ➞ 0
word_nest("code", "cocodccococodededeodeede") ➞ 5
word_nest("incredible", "increinincrincredibleediblecredibledible") ➞ 3
### Notes
N/A
"""
def word_nest(word, nest, c=0):
    if nest == word:
        return c
    nest = nest.replace(word, '')
    return word_nest(word, nest, c + 1)
| [
"[email protected]"
]
| |
bceec50928f3d2382b8e0575b6918c9538c23f91 | 6bd223ac5bbfe95d45a5f2f052b8b26cf4a4722d | /hydrocode/scripts/dump_replayer.py | bf86ab19442023e3bed9a08314cbb4866c61ebf3 | [
"BSD-3-Clause"
]
| permissive | ajaykumarr123/software | ff2ddf9589571e5ed62f6f1e2325e4553686f436 | e0b46eed87636afedc9be3a671edf70fc6cc6cb5 | refs/heads/master | 2022-04-23T11:36:55.535254 | 2020-04-27T02:16:34 | 2020-04-27T02:18:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,147 | py | #!/usr/bin/env python3
#Script for replaying raw FPGA data dumps. Read Hydrophones Code wiki entry.
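#Usage: ./dump_replayer.py <dump.mat>
#(the .mat file must contain a "raw_samples_interleaved" array)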
import socket, time, sys
import scipy.io
import numpy
PKT_LEN = 512 #total number of samples in an FPGA packet
NO_CH = 4 #number of channels
SAMPL_RATE = 200000
ADDR = "127.0.0.1" #local host because we are sending the data to the same machine
PORT = 8899 #hydromathd listens on this port
#loading mat file specified from terminal
data = scipy.io.loadmat(sys.argv[1])
#initializing UDP networking
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#sending packets
for pkt_no in range(len(data["raw_samples_interleaved"]) // PKT_LEN):
#forming a packet from the data. 'H' is unsigned 16 bit integer
send_buff = data["raw_samples_interleaved"][pkt_no * PKT_LEN : (pkt_no + 1) * PKT_LEN].astype('H')
#converting packet into a bytes array
	payload = numpy.asarray(send_buff).tobytes()  # .tobytes() returns a new bytes object; the original call discarded it
#sending packet
sock.sendto(payload, (ADDR, PORT))
#waiting for the amount of time the FPGA would take to send another packet
time.sleep(float(PKT_LEN) / float(NO_CH) / float(SAMPL_RATE))
| [
"[email protected]"
]
| |
d5b520dadfbbdd4d46f80f779e68c7bee555ae7c | 0613b082bd90462e190bc51943356ce6ce990815 | /baseinfo/forms.py | f92317a93a34016d026958d648ff845db9dae301 | []
| no_license | Hamidnet220/salary | 1068aac4bc921436c03b627899370a86ca5e99be | 4dc1f32dfa1d990e6c9f527b4a8d0e1df939262a | refs/heads/master | 2020-05-04T18:09:24.086491 | 2019-04-22T20:22:32 | 2019-04-22T20:22:32 | 179,342,004 | 0 | 1 | null | 2019-04-11T10:43:29 | 2019-04-03T17:53:36 | Python | UTF-8 | Python | false | false | 6,158 | py | from django import forms
from .models import *
from django.utils.translation import ugettext_lazy as _
class EmployerForm(forms.ModelForm):
class Meta:
model= Employer
fields='__all__'
def save_record(self):
Employer.objects.create(**self.cleaned_data)
def update_record(self,id):
Employer.objects.filter(id=id).update(**self.cleaned_data)
class EmployeeStatusForm(forms.Form):
title = forms.CharField(label="عنوان وضعیت کارکنان:",max_length=50)
description = forms.CharField(label="توضیحات:",widget=forms.Textarea)
def save_record(self):
EmployeeStatus.objects.create(**self.cleaned_data)
class WorkStatusForm(forms.Form):
title = forms.CharField(label="عنوان وضعیت کاری:",max_length=50)
description = forms.CharField(label="توضیحات:",widget=forms.Textarea,required=False)
def save_record(self):
WorkStatus.objects.create(**self.cleaned_data)
class MaritalStatusForm(forms.Form):
title = forms.CharField(label="عنوان وضعیت تاهل:",max_length=20)
description = forms.CharField(label="توضیحات:",widget=forms.Textarea,required=False)
def save_record(self):
MaritalStatus.objects.create(**self.cleaned_data)
class BankForm(forms.Form):
title = forms.CharField(label="نام بانک:",max_length=50)
description = forms.CharField(label="توضیحات:",required=False,widget=forms.Textarea)
def save_record(self):
Bank.objects.create(**self.cleaned_data)
class WorkGroupForm(forms.Form):
title = forms.CharField(label="عنوان گروه شغلی:",max_length=100)
child_benefit = forms.DecimalField(label="مبلغ حق اولاد برای یک نفر:",max_digits=50,decimal_places=2)
dwelling_benefit= forms.DecimalField(label="مبلغ حق مسکن:",max_digits=50,decimal_places=2)
Bon_benefit = forms.DecimalField(label="مبلغ بن:",max_digits=50,decimal_places=2)
def save_record(self):
WorkGroup.objects.create(**self.cleaned_data)
class WorkPlaceForm(forms.Form):
title = forms.CharField(label="عنوان محل کار:",max_length=60)
description = forms.CharField(label="توضیحات:",required=False,widget=forms.Textarea)
def save_record(self):
WorkPlace.objects.create(**self.cleaned_data)
class PostPlaceForm(forms.Form):
title = forms.CharField(label="عنوان محل پست:",max_length=60)
number_of_employee = forms.IntegerField(label="تعداد نفرات پست")
post_status = forms.ModelChoiceField(WorkStatus.objects.all(),label="وضعیت پست")
decription = forms.CharField(label="توضیحات:",required=False,widget=forms.Textarea)
def save_record(self):
PostPlace.objects.create(**self.cleaned_data)
class AddMilitarySerStatus(forms.ModelForm):
class Meta:
model=MilitaryServiceStat
fields= '__all__'
def save_record(self):
MilitaryServiceStat.objects.create(**self.cleaned_data)
def update_record(self,id):
MilitaryServiceStat.objects.filter(id=id).update(**self.cleaned_data)
class AddCityForm(forms.ModelForm):
class Meta:
model=City
fields= '__all__'
def save_record(self):
City.objects.create(**self.cleaned_data)
def update_record(self,id):
City.objects.filter(id=id).update(**self.cleaned_data)
class AddCountryForm(forms.ModelForm):
class Meta:
model=Country
fields= '__all__'
def save_record(self):
Country.objects.create(**self.cleaned_data)
def update_record(self,id):
Country.objects.filter(id=id).update(**self.cleaned_data)
class EmployeeForm(forms.Form):
employer = forms.ModelChoiceField(Employer.objects.all(),label="نام کارفرما:")
firstname = forms.CharField(label="نام:",max_length=50)
lastname = forms.CharField(label="نام خانوادگی:",max_length=50)
fathername = forms.CharField(label="نام پدر:",max_length=50)
national_code = forms.CharField(label="شماره ملی:",max_length=10)
id_number = forms.CharField(label="شماره شناسنامه:",max_length=10)
insurance_id = forms.CharField(label="کد بیمه:",max_length=10)
employee_status = forms.ModelChoiceField(EmployeeStatus.objects.all(),label="وضعیت پرسنل:")
work_place = forms.ModelChoiceField(WorkPlace.objects.all(),label="محل کار:")
post_place = forms.ModelChoiceField(PostPlace.objects.all(),label="محل پست:")
work_status = forms.ModelChoiceField(WorkStatus.objects.all(),label="وضعیت شغلی:")
marital_status = forms.ModelChoiceField(MaritalStatus.objects.all(),label="وضعیت تاهل:")
children_count = forms.IntegerField(label="تعداد فرزند")
work_group = forms.ModelChoiceField(WorkGroup.objects.all(),label="گروه شغلی:")
tax_exempt = forms.BooleanField(label="معافیت از پرداخت مالیات:")
indsurence_exempt= forms.BooleanField(label="معافیت از پرداخت بیمه:")
tel = forms.CharField(label="تلفن تماس:",max_length=19,required=False)
mobile = forms.CharField(label="شماره همراه:",max_length=19,required=False)
description = forms.CharField(label="توضسحات:",required=False,widget=forms.Textarea)
def save_record(self):
Employee.objects.create(**self.cleaned_data)
class EmployeeFormModel(forms.ModelForm):
class Meta:
model=Employee
fields='__all__'
def update_record(self,id):
Employee.objects.filter(id=id).update(**self.cleaned_data)
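# Note: EmployeeForm (a plain Form) handles record creation, while
# EmployeeFormModel (a ModelForm) handles updates; both target the Employee model.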
# Constant form
class ConstantForm(forms.ModelForm):
class Meta:
model=Constant
fields="__all__"
def save_record(self):
Constant.objects.create(**self.cleaned_data)
def update_record(self,id):
Constant.objects.filter(id=id).update(**self.cleaned_data)
| [
"[email protected]"
]
| |
fccd134ed2431e7cce33642e9fc7705ec4904734 | 9092e62932da86fb2af69e0529e4cbb082cfea22 | /wifiName32Pwd63.py | 9c30ac8432ecedb77930c68c8c6746ec52684028 | []
| no_license | FengZiQ/flushbonding | d09915ce4285530e3d082c0aaea029790ffbdd9d | 5ce631c9d09790846a31332eb8e76460e5f3f08e | refs/heads/master | 2020-04-01T22:29:13.256997 | 2019-06-05T02:25:14 | 2019-06-05T02:25:14 | 153,711,075 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,614 | py | # coding=utf-8
import time
from to_log import to_log
from QRCodeOfNetworkConfig import wifi_mode
from dmSupport import get_device_attribute
from configFile import data_for_networkTest, open_picture
from honorRouter import Configuration
rc = Configuration()
to_log('SSID长度32/密码长度63网络配置测试\n')
if rc.wc(name='123a'*8, pwd='12'*30 + 'abc', secure=2):
    # Generate the QR code for network configuration (32-char SSID / 63-char password)
wifi_mode(name='123a'*8, pwd='12'*30 + 'abc', pr='usb', dh='dhcp')
    # Give the device time to join the network
time.sleep(15)
    # Capture the current system time
nowTimestamp = time.strftime('%Y-%m-%d %H-%M-%S', time.localtime(time.time()))
    # Fetch the device attributes
da = get_device_attribute(data_for_networkTest.get('deviceNo'))
    # Minute-precision timestamp with the last minute digit bumped by one, to tolerate the clock ticking over during the check
correction_time = nowTimestamp[:-4] + str(int(nowTimestamp[-4]) + 1)
if da.get('time', 'failed')[:-3] == nowTimestamp[:-3] or da.get('time', 'failed')[:-3] == correction_time:
if da.get('persist.net.type') == 'wifi' and da.get('persist.net.dhcp') == 'true':
to_log('SSID长度32/密码长度63网络配置测试Pass\n')
to_log('配网方式:'+da.get('persist.net.type', ''))
to_log('DHCP:' + da.get('persist.net.dhcp', ''))
to_log('IP:' + da.get('sys.net.ip', ''))
to_log('MAC:' + da.get('system.net.wifi.mac', '') + '\n')
else:
to_log('请检查断言参数\n')
            # Open the device info code image
open_picture('deviceInfoCode.png')
else:
to_log('SSID长度32/密码长度63网络配置测试Failed\n')
rc.finished()
| [
"[email protected]"
]
| |
16840e785de669798985dd9040d55e3037b2f01a | 66a82c2eb7f9facff4cb0aa72f21a713dbb1cf61 | /devices/SIP04_FZJ/01_Test/test_sip04_01.py | 8f9e8dfee1412bdb8d75db1ffa146684f3c7300e | [
"MIT"
]
| permissive | geophysics-ubonn/reda_testing | 894eefa8f5cddf288c639c00404c6bd12339dad7 | c32f3faa685b77974b88ba1126a02afabfe5fd2d | refs/heads/master | 2023-06-04T00:16:43.503287 | 2020-12-21T13:23:48 | 2020-12-21T13:23:48 | 110,421,246 | 0 | 1 | NOASSERTION | 2019-06-25T09:50:57 | 2017-11-12T09:50:26 | Python | UTF-8 | Python | false | false | 314 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import reda
basepath = os.path.dirname(__file__) + os.sep
def test_load_mat():
sip = reda.SIP()
sip.import_sip04(basepath + 'Data/sip_dataA.mat')
def test_load_csv():
sip2 = reda.SIP()
sip2.import_sip04(basepath + 'Data/sip_dataA.csv')
| [
"[email protected]"
]
| |
43178e2ed1238f75334f622fe978141d5825a140 | e5e2b7da41fda915cb849f031a0223e2ac354066 | /sdk/python/pulumi_azure_native/certificateregistration/v20190801/app_service_certificate_order_certificate.py | bc9c20160c568bfb9fc5b6e93455dd3c70b706ac | [
"BSD-3-Clause",
"Apache-2.0"
]
| permissive | johnbirdau/pulumi-azure-native | b7d3bdddeb7c4b319a7e43a892ddc6e25e3bfb25 | d676cc331caa0694d8be99cb90b93fa231e3c705 | refs/heads/master | 2023-05-06T06:48:05.040357 | 2021-06-01T20:42:38 | 2021-06-01T20:42:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,243 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = ['AppServiceCertificateOrderCertificateArgs', 'AppServiceCertificateOrderCertificate']
@pulumi.input_type
class AppServiceCertificateOrderCertificateArgs:
def __init__(__self__, *,
certificate_order_name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
key_vault_id: Optional[pulumi.Input[str]] = None,
key_vault_secret_name: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a AppServiceCertificateOrderCertificate resource.
:param pulumi.Input[str] certificate_order_name: Name of the certificate order.
:param pulumi.Input[str] resource_group_name: Name of the resource group to which the resource belongs.
:param pulumi.Input[str] key_vault_id: Key Vault resource Id.
:param pulumi.Input[str] key_vault_secret_name: Key Vault secret name.
:param pulumi.Input[str] kind: Kind of resource.
:param pulumi.Input[str] location: Resource Location.
:param pulumi.Input[str] name: Name of the certificate.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
pulumi.set(__self__, "certificate_order_name", certificate_order_name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if key_vault_id is not None:
pulumi.set(__self__, "key_vault_id", key_vault_id)
if key_vault_secret_name is not None:
pulumi.set(__self__, "key_vault_secret_name", key_vault_secret_name)
if kind is not None:
pulumi.set(__self__, "kind", kind)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="certificateOrderName")
def certificate_order_name(self) -> pulumi.Input[str]:
"""
Name of the certificate order.
"""
return pulumi.get(self, "certificate_order_name")
@certificate_order_name.setter
def certificate_order_name(self, value: pulumi.Input[str]):
pulumi.set(self, "certificate_order_name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
Name of the resource group to which the resource belongs.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="keyVaultId")
def key_vault_id(self) -> Optional[pulumi.Input[str]]:
"""
Key Vault resource Id.
"""
return pulumi.get(self, "key_vault_id")
@key_vault_id.setter
def key_vault_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_vault_id", value)
@property
@pulumi.getter(name="keyVaultSecretName")
def key_vault_secret_name(self) -> Optional[pulumi.Input[str]]:
"""
Key Vault secret name.
"""
return pulumi.get(self, "key_vault_secret_name")
@key_vault_secret_name.setter
def key_vault_secret_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_vault_secret_name", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
Resource Location.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the certificate.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
class AppServiceCertificateOrderCertificate(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
certificate_order_name: Optional[pulumi.Input[str]] = None,
key_vault_id: Optional[pulumi.Input[str]] = None,
key_vault_secret_name: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Key Vault container ARM resource for a certificate that is purchased through Azure.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] certificate_order_name: Name of the certificate order.
:param pulumi.Input[str] key_vault_id: Key Vault resource Id.
:param pulumi.Input[str] key_vault_secret_name: Key Vault secret name.
:param pulumi.Input[str] kind: Kind of resource.
:param pulumi.Input[str] location: Resource Location.
:param pulumi.Input[str] name: Name of the certificate.
:param pulumi.Input[str] resource_group_name: Name of the resource group to which the resource belongs.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: AppServiceCertificateOrderCertificateArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Key Vault container ARM resource for a certificate that is purchased through Azure.
:param str resource_name: The name of the resource.
:param AppServiceCertificateOrderCertificateArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(AppServiceCertificateOrderCertificateArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
certificate_order_name: Optional[pulumi.Input[str]] = None,
key_vault_id: Optional[pulumi.Input[str]] = None,
key_vault_secret_name: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = AppServiceCertificateOrderCertificateArgs.__new__(AppServiceCertificateOrderCertificateArgs)
if certificate_order_name is None and not opts.urn:
raise TypeError("Missing required property 'certificate_order_name'")
__props__.__dict__["certificate_order_name"] = certificate_order_name
__props__.__dict__["key_vault_id"] = key_vault_id
__props__.__dict__["key_vault_secret_name"] = key_vault_secret_name
__props__.__dict__["kind"] = kind
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["tags"] = tags
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:certificateregistration/v20190801:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-native:certificateregistration:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-nextgen:certificateregistration:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-native:certificateregistration/v20150801:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-nextgen:certificateregistration/v20150801:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-native:certificateregistration/v20180201:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-nextgen:certificateregistration/v20180201:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-native:certificateregistration/v20200601:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-nextgen:certificateregistration/v20200601:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-native:certificateregistration/v20200901:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-nextgen:certificateregistration/v20200901:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-native:certificateregistration/v20201001:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-nextgen:certificateregistration/v20201001:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-native:certificateregistration/v20201201:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-nextgen:certificateregistration/v20201201:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-native:certificateregistration/v20210101:AppServiceCertificateOrderCertificate"), pulumi.Alias(type_="azure-nextgen:certificateregistration/v20210101:AppServiceCertificateOrderCertificate")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(AppServiceCertificateOrderCertificate, __self__).__init__(
'azure-native:certificateregistration/v20190801:AppServiceCertificateOrderCertificate',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'AppServiceCertificateOrderCertificate':
"""
Get an existing AppServiceCertificateOrderCertificate resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = AppServiceCertificateOrderCertificateArgs.__new__(AppServiceCertificateOrderCertificateArgs)
__props__.__dict__["key_vault_id"] = None
__props__.__dict__["key_vault_secret_name"] = None
__props__.__dict__["kind"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
return AppServiceCertificateOrderCertificate(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="keyVaultId")
def key_vault_id(self) -> pulumi.Output[Optional[str]]:
"""
Key Vault resource Id.
"""
return pulumi.get(self, "key_vault_id")
@property
@pulumi.getter(name="keyVaultSecretName")
def key_vault_secret_name(self) -> pulumi.Output[Optional[str]]:
"""
Key Vault secret name.
"""
return pulumi.get(self, "key_vault_secret_name")
@property
@pulumi.getter
def kind(self) -> pulumi.Output[Optional[str]]:
"""
Kind of resource.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
Resource Location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource Name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[str]:
"""
Status of the Key Vault secret.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
| [
"[email protected]"
]
| |
26e617807d9d999827f851c37be1d219170066df | 484c462c29e3c2f8ac280b79c11db6982c6a8ca6 | /python-driver-master/tests/unit/test_policies.py | 56fd5440fee65e8a31452c8ce87cf75ac5064999 | [
"Apache-2.0"
]
| permissive | thelma1944/Python_Stuff | b5fa53bf008bb5e865204201b144fe20e7f87565 | 077131a2c9f247396dca86fdf18933d38ae8d501 | refs/heads/master | 2021-06-05T12:25:35.779070 | 2020-10-03T18:20:16 | 2020-10-03T18:20:16 | 16,077,931 | 0 | 1 | null | 2021-03-26T00:30:14 | 2014-01-20T17:36:16 | Python | UTF-8 | Python | false | false | 31,810 | py | try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
from itertools import islice, cycle
from mock import Mock
from random import randint
import sys
import struct
from threading import Thread
from cassandra import ConsistencyLevel
from cassandra.cluster import Cluster
from cassandra.metadata import Metadata
from cassandra.policies import (RoundRobinPolicy, DCAwareRoundRobinPolicy,
TokenAwarePolicy, SimpleConvictionPolicy,
HostDistance, ExponentialReconnectionPolicy,
RetryPolicy, WriteType,
DowngradingConsistencyRetryPolicy, ConstantReconnectionPolicy,
LoadBalancingPolicy, ConvictionPolicy, ReconnectionPolicy, FallthroughRetryPolicy)
from cassandra.pool import Host
from cassandra.query import Statement
class TestLoadBalancingPolicy(unittest.TestCase):
def test_non_implemented(self):
"""
Code coverage for interface-style base class
"""
policy = LoadBalancingPolicy()
host = Host("ip1", SimpleConvictionPolicy)
host.set_location_info("dc1", "rack1")
self.assertRaises(NotImplementedError, policy.distance, host)
self.assertRaises(NotImplementedError, policy.populate, None, host)
self.assertRaises(NotImplementedError, policy.make_query_plan)
self.assertRaises(NotImplementedError, policy.on_up, host)
self.assertRaises(NotImplementedError, policy.on_down, host)
self.assertRaises(NotImplementedError, policy.on_add, host)
self.assertRaises(NotImplementedError, policy.on_remove, host)
class TestRoundRobinPolicy(unittest.TestCase):
def test_basic(self):
hosts = [0, 1, 2, 3]
policy = RoundRobinPolicy()
policy.populate(None, hosts)
qplan = list(policy.make_query_plan())
self.assertEqual(sorted(qplan), hosts)
def test_multiple_query_plans(self):
hosts = [0, 1, 2, 3]
policy = RoundRobinPolicy()
policy.populate(None, hosts)
for i in xrange(20):
qplan = list(policy.make_query_plan())
self.assertEqual(sorted(qplan), hosts)
def test_single_host(self):
policy = RoundRobinPolicy()
policy.populate(None, [0])
qplan = list(policy.make_query_plan())
self.assertEqual(qplan, [0])
def test_status_updates(self):
hosts = [0, 1, 2, 3]
policy = RoundRobinPolicy()
policy.populate(None, hosts)
policy.on_down(0)
policy.on_remove(1)
policy.on_up(4)
policy.on_add(5)
qplan = list(policy.make_query_plan())
self.assertEqual(sorted(qplan), [2, 3, 4, 5])
def test_thread_safety(self):
hosts = range(100)
policy = RoundRobinPolicy()
policy.populate(None, hosts)
def check_query_plan():
for i in range(100):
qplan = list(policy.make_query_plan())
self.assertEqual(sorted(qplan), hosts)
threads = [Thread(target=check_query_plan) for i in range(4)]
map(lambda t: t.start(), threads)
map(lambda t: t.join(), threads)
def test_thread_safety_during_modification(self):
hosts = range(100)
policy = RoundRobinPolicy()
policy.populate(None, hosts)
errors = []
def check_query_plan():
try:
for i in xrange(100):
list(policy.make_query_plan())
except Exception as exc:
errors.append(exc)
def host_up():
for i in xrange(1000):
policy.on_up(randint(0, 99))
def host_down():
for i in xrange(1000):
policy.on_down(randint(0, 99))
threads = []
for i in range(5):
threads.append(Thread(target=check_query_plan))
threads.append(Thread(target=host_up))
threads.append(Thread(target=host_down))
        # make the GIL switch after every instruction, maximizing
        # the chance of race conditions
original_interval = sys.getcheckinterval()
try:
sys.setcheckinterval(0)
map(lambda t: t.start(), threads)
map(lambda t: t.join(), threads)
finally:
sys.setcheckinterval(original_interval)
if errors:
self.fail("Saw errors: %s" % (errors,))
def test_no_live_nodes(self):
"""
Ensure query plan for a downed cluster will execute without errors
"""
hosts = [0, 1, 2, 3]
policy = RoundRobinPolicy()
policy.populate(None, hosts)
for i in range(4):
policy.on_down(i)
qplan = list(policy.make_query_plan())
self.assertEqual(qplan, [])
class DCAwareRoundRobinPolicyTest(unittest.TestCase):
def test_no_remote(self):
hosts = []
for i in range(4):
h = Host(i, SimpleConvictionPolicy)
h.set_location_info("dc1", "rack1")
hosts.append(h)
policy = DCAwareRoundRobinPolicy("dc1")
policy.populate(None, hosts)
qplan = list(policy.make_query_plan())
self.assertEqual(sorted(qplan), sorted(hosts))
def test_with_remotes(self):
hosts = [Host(i, SimpleConvictionPolicy) for i in range(4)]
for h in hosts[:2]:
h.set_location_info("dc1", "rack1")
for h in hosts[2:]:
h.set_location_info("dc2", "rack1")
local_hosts = set(h for h in hosts if h.datacenter == "dc1")
remote_hosts = set(h for h in hosts if h.datacenter != "dc1")
# allow all of the remote hosts to be used
policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=2)
policy.populate(Mock(spec=Metadata), hosts)
qplan = list(policy.make_query_plan())
self.assertEqual(set(qplan[:2]), local_hosts)
self.assertEqual(set(qplan[2:]), remote_hosts)
# allow only one of the remote hosts to be used
policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=1)
policy.populate(Mock(spec=Metadata), hosts)
qplan = list(policy.make_query_plan())
self.assertEqual(set(qplan[:2]), local_hosts)
used_remotes = set(qplan[2:])
self.assertEqual(1, len(used_remotes))
self.assertIn(qplan[2], remote_hosts)
# allow no remote hosts to be used
policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=0)
policy.populate(Mock(spec=Metadata), hosts)
qplan = list(policy.make_query_plan())
self.assertEqual(2, len(qplan))
self.assertEqual(local_hosts, set(qplan))
def test_get_distance(self):
policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=0)
host = Host("ip1", SimpleConvictionPolicy)
host.set_location_info("dc1", "rack1")
policy.populate(Mock(spec=Metadata), [host])
self.assertEqual(policy.distance(host), HostDistance.LOCAL)
# used_hosts_per_remote_dc is set to 0, so ignore it
remote_host = Host("ip2", SimpleConvictionPolicy)
remote_host.set_location_info("dc2", "rack1")
self.assertEqual(policy.distance(remote_host), HostDistance.IGNORED)
# dc2 isn't registered in the policy's live_hosts dict
policy.used_hosts_per_remote_dc = 1
self.assertEqual(policy.distance(remote_host), HostDistance.IGNORED)
# make sure the policy has both dcs registered
policy.populate(Mock(spec=Metadata), [host, remote_host])
self.assertEqual(policy.distance(remote_host), HostDistance.REMOTE)
# since used_hosts_per_remote_dc is set to 1, only the first
# remote host in dc2 will be REMOTE, the rest are IGNORED
second_remote_host = Host("ip3", SimpleConvictionPolicy)
second_remote_host.set_location_info("dc2", "rack1")
policy.populate(Mock(spec=Metadata), [host, remote_host, second_remote_host])
distances = set([policy.distance(remote_host), policy.distance(second_remote_host)])
self.assertEqual(distances, set([HostDistance.REMOTE, HostDistance.IGNORED]))
def test_status_updates(self):
hosts = [Host(i, SimpleConvictionPolicy) for i in range(4)]
for h in hosts[:2]:
h.set_location_info("dc1", "rack1")
for h in hosts[2:]:
h.set_location_info("dc2", "rack1")
policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=1)
policy.populate(Mock(spec=Metadata), hosts)
policy.on_down(hosts[0])
policy.on_remove(hosts[2])
new_local_host = Host(4, SimpleConvictionPolicy)
new_local_host.set_location_info("dc1", "rack1")
policy.on_up(new_local_host)
new_remote_host = Host(5, SimpleConvictionPolicy)
new_remote_host.set_location_info("dc9000", "rack1")
policy.on_add(new_remote_host)
# we now have two local hosts and two remote hosts in separate dcs
qplan = list(policy.make_query_plan())
self.assertEqual(set(qplan[:2]), set([hosts[1], new_local_host]))
self.assertEqual(set(qplan[2:]), set([hosts[3], new_remote_host]))
# since we have hosts in dc9000, the distance shouldn't be IGNORED
self.assertEqual(policy.distance(new_remote_host), HostDistance.REMOTE)
policy.on_down(new_local_host)
policy.on_down(hosts[1])
qplan = list(policy.make_query_plan())
self.assertEqual(set(qplan), set([hosts[3], new_remote_host]))
policy.on_down(new_remote_host)
policy.on_down(hosts[3])
qplan = list(policy.make_query_plan())
self.assertEqual(qplan, [])
def test_no_live_nodes(self):
"""
Ensure query plan for a downed cluster will execute without errors
"""
hosts = []
for i in range(4):
h = Host(i, SimpleConvictionPolicy)
h.set_location_info("dc1", "rack1")
hosts.append(h)
policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=1)
policy.populate(Mock(spec=Metadata), hosts)
for host in hosts:
policy.on_down(host)
qplan = list(policy.make_query_plan())
self.assertEqual(qplan, [])
def test_no_nodes(self):
"""
Ensure query plan for an empty cluster will execute without errors
"""
policy = DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=1)
policy.populate(None, [])
qplan = list(policy.make_query_plan())
self.assertEqual(qplan, [])
class TokenAwarePolicyTest(unittest.TestCase):
def test_wrap_round_robin(self):
cluster = Mock(spec=Cluster)
cluster.metadata = Mock(spec=Metadata)
hosts = [Host(str(i), SimpleConvictionPolicy) for i in range(4)]
for host in hosts:
host.set_up()
def get_replicas(keyspace, packed_key):
index = struct.unpack('>i', packed_key)[0]
return list(islice(cycle(hosts), index, index + 2))
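        # i.e. each routing key maps to two consecutive hosts, wrapping around
        # the end of the list (key 3 -> hosts[3] and hosts[0])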
cluster.metadata.get_replicas.side_effect = get_replicas
policy = TokenAwarePolicy(RoundRobinPolicy())
policy.populate(cluster, hosts)
for i in range(4):
query = Statement(routing_key=struct.pack('>i', i))
qplan = list(policy.make_query_plan(None, query))
replicas = get_replicas(None, struct.pack('>i', i))
other = set(h for h in hosts if h not in replicas)
self.assertEquals(replicas, qplan[:2])
self.assertEquals(other, set(qplan[2:]))
        # with no routing key, make_query_plan should fall back to the child
        # (round-robin) policy
for i in range(4):
qplan = list(policy.make_query_plan())
self.assertEquals(set(qplan), set(hosts))
def test_wrap_dc_aware(self):
cluster = Mock(spec=Cluster)
cluster.metadata = Mock(spec=Metadata)
hosts = [Host(str(i), SimpleConvictionPolicy) for i in range(4)]
for host in hosts:
host.set_up()
for h in hosts[:2]:
h.set_location_info("dc1", "rack1")
for h in hosts[2:]:
h.set_location_info("dc2", "rack1")
def get_replicas(keyspace, packed_key):
index = struct.unpack('>i', packed_key)[0]
# return one node from each DC
if index % 2 == 0:
return [hosts[0], hosts[2]]
else:
return [hosts[1], hosts[3]]
cluster.metadata.get_replicas.side_effect = get_replicas
policy = TokenAwarePolicy(DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=1))
policy.populate(cluster, hosts)
for i in range(4):
query = Statement(routing_key=struct.pack('>i', i))
qplan = list(policy.make_query_plan(None, query))
replicas = get_replicas(None, struct.pack('>i', i))
# first should be the only local replica
self.assertIn(qplan[0], replicas)
self.assertEquals(qplan[0].datacenter, "dc1")
# then the local non-replica
self.assertNotIn(qplan[1], replicas)
self.assertEquals(qplan[1].datacenter, "dc1")
# then one of the remotes (used_hosts_per_remote_dc is 1, so we
# shouldn't see two remotes)
self.assertEquals(qplan[2].datacenter, "dc2")
self.assertEquals(3, len(qplan))
class FakeCluster:
def __init__(self):
self.metadata = Mock(spec=Metadata)
def test_get_distance(self):
"""
Same test as DCAwareRoundRobinPolicyTest.test_get_distance()
Except a FakeCluster is needed for the metadata variable and
policy.child_policy is needed to change child policy settings
"""
policy = TokenAwarePolicy(DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=0))
host = Host("ip1", SimpleConvictionPolicy)
host.set_location_info("dc1", "rack1")
policy.populate(self.FakeCluster(), [host])
self.assertEqual(policy.distance(host), HostDistance.LOCAL)
# used_hosts_per_remote_dc is set to 0, so ignore it
remote_host = Host("ip2", SimpleConvictionPolicy)
remote_host.set_location_info("dc2", "rack1")
self.assertEqual(policy.distance(remote_host), HostDistance.IGNORED)
# dc2 isn't registered in the policy's live_hosts dict
policy._child_policy.used_hosts_per_remote_dc = 1
self.assertEqual(policy.distance(remote_host), HostDistance.IGNORED)
# make sure the policy has both dcs registered
policy.populate(self.FakeCluster(), [host, remote_host])
self.assertEqual(policy.distance(remote_host), HostDistance.REMOTE)
# since used_hosts_per_remote_dc is set to 1, only the first
# remote host in dc2 will be REMOTE, the rest are IGNORED
second_remote_host = Host("ip3", SimpleConvictionPolicy)
second_remote_host.set_location_info("dc2", "rack1")
policy.populate(self.FakeCluster(), [host, remote_host, second_remote_host])
distances = set([policy.distance(remote_host), policy.distance(second_remote_host)])
self.assertEqual(distances, set([HostDistance.REMOTE, HostDistance.IGNORED]))
def test_status_updates(self):
"""
Same test as DCAwareRoundRobinPolicyTest.test_status_updates()
"""
hosts = [Host(i, SimpleConvictionPolicy) for i in range(4)]
for h in hosts[:2]:
h.set_location_info("dc1", "rack1")
for h in hosts[2:]:
h.set_location_info("dc2", "rack1")
policy = TokenAwarePolicy(DCAwareRoundRobinPolicy("dc1", used_hosts_per_remote_dc=1))
policy.populate(self.FakeCluster(), hosts)
policy.on_down(hosts[0])
policy.on_remove(hosts[2])
new_local_host = Host(4, SimpleConvictionPolicy)
new_local_host.set_location_info("dc1", "rack1")
policy.on_up(new_local_host)
new_remote_host = Host(5, SimpleConvictionPolicy)
new_remote_host.set_location_info("dc9000", "rack1")
policy.on_add(new_remote_host)
# we now have two local hosts and two remote hosts in separate dcs
qplan = list(policy.make_query_plan())
self.assertEqual(set(qplan[:2]), set([hosts[1], new_local_host]))
self.assertEqual(set(qplan[2:]), set([hosts[3], new_remote_host]))
# since we have hosts in dc9000, the distance shouldn't be IGNORED
self.assertEqual(policy.distance(new_remote_host), HostDistance.REMOTE)
policy.on_down(new_local_host)
policy.on_down(hosts[1])
qplan = list(policy.make_query_plan())
self.assertEqual(set(qplan), set([hosts[3], new_remote_host]))
policy.on_down(new_remote_host)
policy.on_down(hosts[3])
qplan = list(policy.make_query_plan())
self.assertEqual(qplan, [])
class ConvictionPolicyTest(unittest.TestCase):
def test_not_implemented(self):
"""
Code coverage for interface-style base class
"""
conviction_policy = ConvictionPolicy(1)
self.assertRaises(NotImplementedError, conviction_policy.add_failure, 1)
self.assertRaises(NotImplementedError, conviction_policy.reset)
class SimpleConvictionPolicyTest(unittest.TestCase):
def test_basic_responses(self):
"""
Code coverage for SimpleConvictionPolicy
"""
conviction_policy = SimpleConvictionPolicy(1)
self.assertEqual(conviction_policy.add_failure(1), True)
self.assertEqual(conviction_policy.reset(), None)
class ReconnectionPolicyTest(unittest.TestCase):
def test_basic_responses(self):
"""
Code coverage for interface-style base class
"""
policy = ReconnectionPolicy()
self.assertRaises(NotImplementedError, policy.new_schedule)
class ConstantReconnectionPolicyTest(unittest.TestCase):
def test_bad_vals(self):
"""
Test initialization values
"""
self.assertRaises(ValueError, ConstantReconnectionPolicy, -1, 0)
def test_schedule(self):
"""
Test ConstantReconnectionPolicy schedule
"""
delay = 2
max_attempts = 100
policy = ConstantReconnectionPolicy(delay=delay, max_attempts=max_attempts)
schedule = list(policy.new_schedule())
self.assertEqual(len(schedule), max_attempts)
        for d in schedule:
            self.assertEqual(d, delay)
def test_schedule_negative_max_attempts(self):
"""
Test how negative max_attempts are handled
"""
delay = 2
max_attempts = -100
try:
ConstantReconnectionPolicy(delay=delay, max_attempts=max_attempts)
self.fail('max_attempts should throw ValueError when negative')
except ValueError:
pass
class ExponentialReconnectionPolicyTest(unittest.TestCase):
def test_bad_vals(self):
self.assertRaises(ValueError, ExponentialReconnectionPolicy, -1, 0)
self.assertRaises(ValueError, ExponentialReconnectionPolicy, 0, -1)
self.assertRaises(ValueError, ExponentialReconnectionPolicy, 9000, 1)
def test_schedule(self):
policy = ExponentialReconnectionPolicy(base_delay=2, max_delay=100)
schedule = list(policy.new_schedule())
self.assertEqual(len(schedule), 64)
for i, delay in enumerate(schedule):
if i == 0:
self.assertEqual(delay, 2)
elif i < 6:
self.assertEqual(delay, schedule[i - 1] * 2)
else:
self.assertEqual(delay, 100)
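        # i.e. the schedule exercised above is 2, 4, 8, 16, 32, 64, then 100
        # repeated for the remaining 58 of the 64 attempts (capped at max_delay)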
class RetryPolicyTest(unittest.TestCase):
def test_read_timeout(self):
policy = RetryPolicy()
# if this is the second or greater attempt, rethrow
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=1, received_responses=2,
data_retrieved=True, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# if we didn't get enough responses, rethrow
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=2, received_responses=1,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# if we got enough responses, but also got a data response, rethrow
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=2, received_responses=2,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# we got enough responses but no data response, so retry
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=2, received_responses=2,
data_retrieved=False, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, 'ONE')
def test_write_timeout(self):
policy = RetryPolicy()
# if this is the second or greater attempt, rethrow
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.SIMPLE,
required_responses=1, received_responses=2, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# if it's not a BATCH_LOG write, don't retry it
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.SIMPLE,
required_responses=1, received_responses=2, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# retry BATCH_LOG writes regardless of received responses
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.BATCH_LOG,
required_responses=10000, received_responses=1, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, 'ONE')
def test_unavailable(self):
"""
        Mirror the test_write_timeout scenarios, ensuring on_unavailable always RETHROWs
"""
policy = RetryPolicy()
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE",
required_replicas=1, alive_replicas=2, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE",
required_replicas=1, alive_replicas=2, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE",
required_replicas=10000, alive_replicas=1, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
class FallthroughRetryPolicyTest(unittest.TestCase):
"""
    Run the same scenarios as RetryPolicyTest, but ensure every one of them RETHROWs
"""
def test_read_timeout(self):
policy = FallthroughRetryPolicy()
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=1, received_responses=2,
data_retrieved=True, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=2, received_responses=1,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=2, received_responses=2,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=2, received_responses=2,
data_retrieved=False, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
def test_write_timeout(self):
policy = FallthroughRetryPolicy()
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.SIMPLE,
required_responses=1, received_responses=2, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.SIMPLE,
required_responses=1, received_responses=2, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.BATCH_LOG,
required_responses=10000, received_responses=1, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
def test_unavailable(self):
policy = FallthroughRetryPolicy()
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE",
required_replicas=1, alive_replicas=2, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE",
required_replicas=1, alive_replicas=2, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE",
required_replicas=10000, alive_replicas=1, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
class DowngradingConsistencyRetryPolicyTest(unittest.TestCase):
def test_read_timeout(self):
policy = DowngradingConsistencyRetryPolicy()
# if this is the second or greater attempt, rethrow
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=1, received_responses=2,
data_retrieved=True, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# if we didn't get enough responses, retry at a lower consistency
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=4, received_responses=3,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, ConsistencyLevel.THREE)
# if we didn't get enough responses, retry at a lower consistency
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=3, received_responses=2,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, ConsistencyLevel.TWO)
        # the retry consistency level goes down based on the number of received responses
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=3, received_responses=1,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, ConsistencyLevel.ONE)
# if we got no responses, rethrow
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=3, received_responses=0,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
        # if we got enough responses but no data, retry
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=3, received_responses=3,
data_retrieved=False, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, 'ONE')
# if we got enough responses, but also got a data response, rethrow
retry, consistency = policy.on_read_timeout(
query=None, consistency="ONE", required_responses=2, received_responses=2,
data_retrieved=True, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
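        # downgrade mapping exercised above: 3 received -> THREE, 2 -> TWO,
        # 1 -> ONE, and 0 received -> RETHROW (nothing left to downgrade to)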
def test_write_timeout(self):
policy = DowngradingConsistencyRetryPolicy()
# if this is the second or greater attempt, rethrow
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.SIMPLE,
required_responses=1, received_responses=2, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# ignore failures on these types of writes
for write_type in (WriteType.SIMPLE, WriteType.BATCH, WriteType.COUNTER):
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=write_type,
required_responses=1, received_responses=2, retry_num=0)
self.assertEqual(retry, RetryPolicy.IGNORE)
# downgrade consistency level on unlogged batch writes
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.UNLOGGED_BATCH,
required_responses=3, received_responses=1, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, ConsistencyLevel.ONE)
# retry batch log writes at the same consistency level
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=WriteType.BATCH_LOG,
required_responses=3, received_responses=1, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, "ONE")
# timeout on an unknown write_type
retry, consistency = policy.on_write_timeout(
query=None, consistency="ONE", write_type=None,
required_responses=1, received_responses=2, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
def test_unavailable(self):
policy = DowngradingConsistencyRetryPolicy()
# if this is the second or greater attempt, rethrow
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE", required_replicas=3, alive_replicas=1, retry_num=1)
self.assertEqual(retry, RetryPolicy.RETHROW)
self.assertEqual(consistency, None)
# downgrade consistency on unavailable exceptions
retry, consistency = policy.on_unavailable(
query=None, consistency="ONE", required_replicas=3, alive_replicas=1, retry_num=0)
self.assertEqual(retry, RetryPolicy.RETRY)
self.assertEqual(consistency, ConsistencyLevel.ONE)
| [
"[email protected]"
]
| |
d94ad0d4184ebc4fb4df9f9e567f480fa0b69e93 | 5a7375bdcd7fba344d9d8e424c42e4ff6e58e5cd | /00_algo_prob/2529_ineuality.py | f007230e9a61a1f36461d2b4bf68aa212163e80e | []
| no_license | jhee514/Algorithms | 1d9d9f8bf11b957393ad1a169fa1a61f86d77da5 | 0ebed8f99a63eae2f9122033ab4e13b2b499fb52 | refs/heads/master | 2021-07-21T01:33:22.838431 | 2020-10-28T15:21:19 | 2020-10-28T15:21:19 | 226,996,192 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,123 | py | import sys
sys.stdin = open("2529_input.txt", "r")
"""
Take a permutation of k+1 of the digits 0-9,
place the inequality signs between adjacent digits,
and prune invalid candidates while iterating.
"""
import itertools
def sol(k, data):
nums = list(range(10))
min_num, max_num = 10 ** (k + 1), 0
perms = itertools.permutations(nums, k + 1)
for p in perms:
for i in range(k):
if data[i] == '>' and p[i] < p[i + 1]:
break
elif data[i] == '<' and p[i] > p[i + 1]:
break
# > < < < > > > < <
else:
str_num = ''
for pp in p:
str_num += str(pp)
if int(str_num) < min_num:
min_num = int(str_num)
str_min = str_num
if int(str_num) > max_num:
max_num = int(str_num)
                str_max = str_num
print(str_max)
print(str_min)
T = 2
for tc in range(T):
k = int(input())
data = list(map(str, input().split()))
sol(k, data)
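# Example (BOJ 2529 sample): k = 2 with signs "< >" prints
#   897   (8 < 9 > 7, the maximum)
#   021   (0 < 2 > 1, the minimum)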
| [
"[email protected]"
]
| |
c8ae48a6f79a42bf74407f3d6801a041d64be011 | 6a63e40b1d30b6a810c89d910ac3f8f5954002ee | /src/pretalx/submission/migrations/0039_submission_created.py | c73cbfb2440b187adbb54d325d4ffb85e8724bf3 | [
"Apache-2.0"
]
| permissive | orlando/pretalx | 47b7ab3e3258d667183066b84227b785199711b2 | 15f90dc2545f210eaf870ffbdfe0a27c70bfa0ec | refs/heads/master | 2020-09-10T20:26:49.867462 | 2019-11-15T01:19:07 | 2019-11-15T01:19:07 | 221,826,314 | 2 | 0 | NOASSERTION | 2019-11-15T02:21:05 | 2019-11-15T02:21:04 | null | UTF-8 | Python | false | false | 411 | py | # Generated by Django 2.2.1 on 2019-05-01 20:36
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('submission', '0038_auto_20190429_0750'),
]
operations = [
migrations.AddField(
model_name='submission',
name='created',
field=models.DateTimeField(auto_now_add=True, null=True),
),
]
| [
"[email protected]"
]
| |
f7a4637914542b39f88b631fb0e91e6734546313 | 68b8e6549345ba020bdd7ac1eeef03af26c251fd | /tests/test_ia_markov.py | 0857a5521d2e26b80eed17cc8bbbfa75236d8b76 | [
"BSD-3-Clause",
"BSD-2-Clause"
]
| permissive | Abelarm/python-ia-markov | 8826e31746ce4de26cd023db26b986249c9cf88a | 8bef5d620b77a0944924263af6042396cf9b768b | refs/heads/master | 2020-03-30T10:44:01.936698 | 2018-10-01T18:21:45 | 2018-10-01T18:21:45 | 151,133,654 | 0 | 0 | BSD-2-Clause | 2018-10-01T18:00:03 | 2018-10-01T18:00:02 | null | UTF-8 | Python | false | false | 83 | py |
import ia_markov
def test_main():
assert ia_markov # use your library here
| [
"[email protected]"
]
| |
f2613ac43e286ee6c63cc7b579b00d0c613e1729 | d532b85841b459c61d88d380e88dd08d29836d43 | /solutions/1488_avoid_flood_in_the_city.py | 1789aba0bebf606b5ccb155577af2e6cf7b5dc09 | [
"MIT"
]
| permissive | YiqunPeng/leetcode_pro | ad942468df5506de9dc48a4019933f658e2a3121 | 4a508a982b125a3a90ea893ae70863df7c99cc70 | refs/heads/master | 2022-05-15T09:32:02.699180 | 2022-05-14T16:32:17 | 2022-05-14T16:32:17 | 182,453,966 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 621 | py | class Solution:
def avoidFlood(self, rains: List[int]) -> List[int]:
"""Hash table.
"""
n = len(rains)
res = [1] * n
f = {}
s = []
for i, r in enumerate(rains):
if r > 0:
if r in f:
idx = bisect.bisect_left(s, f[r])
if idx == len(s):
return []
else:
res[s[idx]] = r
s.pop(idx)
f[r] = i
res[i] = -1
else:
s.append(i)
return res
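        # Illustrative check (the call below is hypothetical, not part of the
        # LeetCode harness):
        #   Solution().avoidFlood([1, 2, 0, 0, 2, 1]) -> [-1, -1, 2, 1, -1, -1]
        #   (day 2 dries lake 2, day 3 dries lake 1, so neither floods)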
| [
"[email protected]"
]
| |
ffbdf922a169191795e21b24f226334344e6b2b8 | 8a08d39142c7b5c7dc9300717f0db6dad295ec92 | /antelope_core/providers/parse_math.py | 8fb0f24a50ac68f93528c7d0a658cd62da7d7e04 | [
"BSD-3-Clause"
]
| permissive | msm-sardar/core | 3eac85248914ada808882b9dedefd889756be504 | bc88a1ed3e4c1defcbc83fa86356451ac34c178c | refs/heads/master | 2023-08-24T03:56:31.892812 | 2021-10-14T01:12:02 | 2021-10-14T01:12:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 729 | py | import ast
def parse_math(expression):
"""
A function I got off stackoverflow that enables python to parse user input as a mathematical expression.
probably a huge security risk. but it enables the user to enter missing characterization values during runtime.
:param expression:
:return:
"""
try:
tree = ast.parse(expression, mode='eval')
except SyntaxError:
return # not a Python expression
if not all(isinstance(node, (ast.Expression, ast.UnaryOp, ast.unaryop, ast.BinOp, ast.operator, ast.Num))
for node in ast.walk(tree)):
return # not a mathematical expression (numbers and operators)
return eval(compile(tree, filename='', mode='eval'))
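# Illustrative usage -- a minimal sketch, assuming Python < 3.12 (where numeric
# literals still satisfy isinstance(..., ast.Num); newer versions emit only
# ast.Constant, so the whitelist above would reject plain numbers there):
#   parse_math('2 + 3 * 4')         -> 14
#   parse_math('__import__("os")')  -> None  (names/calls are rejected)
#   parse_math('not a number')      -> None  (not a Python expression)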
| [
"[email protected]"
]
| |
fb7be942dde3ebb78f195e731981df98417bf374 | 01df468685c9f393b9559cb68df349ef7abcf5a6 | /panelapp/panels/urls.py | 2d9e2ae0ab52028ab81a18a96b567f8bf2b09c0b | [
"Apache-2.0"
]
| permissive | victorskl/panelapp | 481af901472cd960da2d0abf17239b8d484524be | 4dfdd31f6036db5cb4e692961ef9bcbe92d39a23 | refs/heads/master | 2020-05-07T16:28:08.946472 | 2019-01-23T11:04:41 | 2019-01-23T11:04:41 | 180,684,104 | 1 | 0 | null | 2019-04-11T00:28:26 | 2019-04-11T00:28:26 | null | UTF-8 | Python | false | false | 10,310 | py | from django.conf.urls import url
from django.views.generic import RedirectView
from .views import AdminView
from .views import AdminUploadGenesView
from .views import AdminUploadPanelsView
from .views import AdminUploadReviewsView
from .views import EntitiesListView
from .views import CreatePanelView
from .views import EntityDetailView
from .views import GenePanelView
from .views import PanelsIndexView
from .views import UpdatePanelView
from .views import PromotePanelView
from .views import PanelAddEntityView
from .views import PanelEditEntityView
from .views import PanelMarkNotReadyView
from .views import GenePanelSpanshotView
from .views import EntityReviewView
from .views import MarkEntityReadyView
from .views import DownloadPanelTSVView
from .views import DownloadPanelVersionTSVView
from .views import MarkGeneNotReadyView
from .views import ComparePanelsView
from .views import CompareGeneView
from .views import CopyReviewsView
from .views import DownloadAllGenes
from .views import DownloadAllPanels
from .views import ActivityListView
from .views import DownloadAllSTRs
from .views import DownloadAllRegions
from .views import GeneDetailRedirectView
from .views import RedirectGenesToEntities
from .views import OldCodeURLRedirect
from .ajax_views import ClearPublicationsAjaxView
from .ajax_views import ClearPhoenotypesAjaxView
from .ajax_views import ClearModeOfPathogenicityAjaxView
from .ajax_views import ClearSourcesAjaxView
from .ajax_views import ClearSingleSourceAjaxView
from .ajax_views import DeletePanelAjaxView
from .ajax_views import DeleteEntityAjaxView
from .ajax_views import RejectPanelAjaxView
from .ajax_views import ApprovePanelAjaxView
from .ajax_views import UpdateEntityTagsAjaxView
from .ajax_views import UpdateEntityMOPAjaxView
from .ajax_views import UpdateEntityMOIAjaxView
from .ajax_views import UpdateEntityPhenotypesAjaxView
from .ajax_views import UpdateEntityPublicationsAjaxView
from .ajax_views import UpdateEntityRatingAjaxView
from .ajax_views import DeleteEntityEvaluationAjaxView
from .ajax_views import GetEntityCommentFormAjaxView
from .ajax_views import DeleteEntityCommentAjaxView
from .ajax_views import SubmitEntityCommentFormAjaxView
from .ajax_views import ApproveEntityAjaxView
app_name = 'panels'
entity_regex = r'[\w\-\.\$\~\@\#\ ]+'
entity_types = 'gene|str|region'
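# Illustrative URLs these patterns accept (assuming the project mounts this
# urlconf under /panels/, which app_name suggests but does not guarantee):
#   /panels/42/gene/BRCA1/        -> name="evaluation"
#   /panels/42/str/AR_CAG/review  -> name="review_entity"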
urlpatterns = [
url(r'^$', PanelsIndexView.as_view(), name="index"),
url(r'^compare/$', ComparePanelsView.as_view(), name="compare_panels_form"),
url(r'^compare/(?P<panel_1_id>[0-9]+)/(?P<panel_2_id>[0-9]+)$', ComparePanelsView.as_view(), name="compare"),
url(r'^compare/(?P<panel_1_id>[0-9]+)/(?P<panel_2_id>[0-9]+)/(?P<gene_symbol>[\w\-]+)$',
CompareGeneView.as_view(), name="compare_genes"),
url(r'^copy/(?P<panel_1_id>[0-9]+)/(?P<panel_2_id>[0-9]+)$', CopyReviewsView.as_view(), name="copy_reviews"),
url(r'^(?P<pk>[0-9]+)/$', GenePanelView.as_view(), name="detail"),
url(r'^(?P<pk>[0-9]+)/update$', UpdatePanelView.as_view(), name="update"),
url(r'^(?P<pk>[0-9]+)/promote$', PromotePanelView.as_view(), name="promote"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/add'.format(types=entity_types), PanelAddEntityView.as_view(), name="add_entity"),
url(r'^(?P<pk>[0-9]+)/delete$', DeletePanelAjaxView.as_view(), name="delete_panel"),
url(r'^(?P<pk>[0-9]+)/reject$', RejectPanelAjaxView.as_view(), name="reject_panel"),
url(r'^(?P<pk>[0-9]+)/approve$', ApprovePanelAjaxView.as_view(), name="approve_panel"),
url(r'^(?P<pk>[0-9]+)/download/(?P<categories>[0-4]+)/$',
DownloadPanelTSVView.as_view(), name="download_panel_tsv"),
url(r'^(?P<pk>[0-9]+)/download_version/$',
DownloadPanelVersionTSVView.as_view(), name="download_old_panel_tsv"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_name>{})/$'.format(entity_regex), RedirectGenesToEntities.as_view(), name="redirect_previous_structure"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/$'.format(types=entity_types, regex=entity_regex), GenePanelSpanshotView.as_view(), name="evaluation"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/edit$'.format(types=entity_types, regex=entity_regex), PanelEditEntityView.as_view(), name="edit_entity"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/review$'.format(types=entity_types, regex=entity_regex), EntityReviewView.as_view(), name="review_entity"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/mark_as_ready$'.format(types=entity_types, regex=entity_regex),
MarkEntityReadyView.as_view(), name="mark_entity_as_ready"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/mark_as_not_ready$'.format(types=entity_types, regex=entity_regex),
MarkGeneNotReadyView.as_view(), name="mark_entity_as_not_ready"),
# AJAX endpoints
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/delete$'.format(types=entity_types, regex=entity_regex), DeleteEntityAjaxView.as_view(), name="delete_entity"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/approve$'.format(types=entity_types, regex=entity_regex), ApproveEntityAjaxView.as_view(), name="approve_entity"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/clear_entity_sources$'.format(types=entity_types, regex=entity_regex),
ClearSourcesAjaxView.as_view(), name="clear_entity_sources"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/clear_entity_source/(?P<source>(.*))/$'.format(types=entity_types, regex=entity_regex),
ClearSingleSourceAjaxView.as_view(), name="clear_entity_source"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/clear_entity_phenotypes$'.format(types=entity_types, regex=entity_regex),
ClearPhoenotypesAjaxView.as_view(), name="clear_entity_phenotypes"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/clear_entity_publications$'.format(types=entity_types, regex=entity_regex),
ClearPublicationsAjaxView.as_view(), name="clear_entity_publications"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/clear_entity_mode_of_pathogenicity$'.format(types=entity_types, regex=entity_regex),
ClearModeOfPathogenicityAjaxView.as_view(), name="clear_entity_mode_of_pathogenicity"),
# AJAX Review endpoints
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/update_entity_tags/$'.format(types=entity_types, regex=entity_regex),
UpdateEntityTagsAjaxView.as_view(), name="update_entity_tags"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/update_entity_rating/$'.format(types=entity_types, regex=entity_regex),
UpdateEntityRatingAjaxView.as_view(), name="update_entity_rating"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/update_entity_moi/$'.format(types=entity_types, regex=entity_regex),
UpdateEntityMOIAjaxView.as_view(), name="update_entity_moi"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/update_entity_mop/$'.format(types=entity_types, regex=entity_regex),
UpdateEntityMOPAjaxView.as_view(), name="update_entity_mop"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/update_entity_phenotypes/$'.format(types=entity_types, regex=entity_regex),
UpdateEntityPhenotypesAjaxView.as_view(), name="update_entity_phenotypes"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/update_entity_publications/$'.format(types=entity_types, regex=entity_regex),
UpdateEntityPublicationsAjaxView.as_view(), name="update_entity_publications"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/delete_evaluation/(?P<evaluation_pk>[0-9]+)/$'.format(types=entity_types, regex=entity_regex),
DeleteEntityEvaluationAjaxView.as_view(), name="delete_evaluation_by_user"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/edit_comment/(?P<comment_pk>[0-9]+)/$'.format(types=entity_types, regex=entity_regex),
GetEntityCommentFormAjaxView.as_view(), name="edit_comment_by_user"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/submit_edit_comment/(?P<comment_pk>[0-9]+)/$'.format(types=entity_types, regex=entity_regex),
SubmitEntityCommentFormAjaxView.as_view(), name="submit_edit_comment_by_user"),
url(r'^(?P<pk>[0-9]+)/(?P<entity_type>({types}))/(?P<entity_name>{regex})/delete_comment/(?P<comment_pk>[0-9]+)/$'.format(types=entity_types, regex=entity_regex),
DeleteEntityCommentAjaxView.as_view(), name="delete_comment_by_user"),
    url(r'^(?P<pk>[0-9]+)/mark_not_ready$', PanelMarkNotReadyView.as_view(), name="mark_not_ready"),
url(r'^(?P<pk>[a-z0-9]{24})/(?P<uri>.*|$)', OldCodeURLRedirect.as_view(), name="old_code_url_redirect"),
url(r'^create/', CreatePanelView.as_view(), name="create"),
url(r'^entities/$', EntitiesListView.as_view(), name="entities_list"),
url(r'^genes/$', RedirectView.as_view(url='/panels/entities'), name="genes_list"),
url(r'^entities/(?P<slug>{regex})$'.format(types=entity_types, regex=entity_regex), EntityDetailView.as_view(), name="entity_detail"),
url(r'^genes/(?P<slug>{regex})$'.format(types=entity_types, regex=entity_regex), GeneDetailRedirectView.as_view()),
url(r'^activity/$', ActivityListView.as_view(), name="activity"),
url(r'^admin/', AdminView.as_view(), name="admin"),
url(r'^upload_genes/', AdminUploadGenesView.as_view(), name="upload_genes"),
url(r'^download_genes/', DownloadAllGenes.as_view(), name="download_genes"),
url(r'^download_strs/', DownloadAllSTRs.as_view(), name="download_strs"),
url(r'^download_regions/', DownloadAllRegions.as_view(), name="download_regions"),
url(r'^upload_panel/', AdminUploadPanelsView.as_view(), name="upload_panels"),
url(r'^download_panel/', DownloadAllPanels.as_view(), name="download_panels"),
url(r'^upload_reviews/', AdminUploadReviewsView.as_view(), name="upload_reviews"),
]
| [
"[email protected]"
]
| |
19231243102cae313e9ffe1fb4aa503ac094635f | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_permutation.py | 995d0cfcfbff595c0f8b2d0a59d0d980653557db | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 392 | py |
# class header
class _PERMUTATION():
def __init__(self,):
self.name = "PERMUTATION"
self.definitions = [u'any of the various ways in which a set of things can be ordered: ', u'one of several different forms: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
]
| |
a2676e558ee7b10567e7d3604eccdaaab446eb0f | c1b7655fbbf5e647c9de01d55bf31f044e26b7bf | /HE_cell_classification/predict/predict_Local.py | 58a36cc8ee6751d13abcac3b49b2f6dc8a825d63 | []
| no_license | sara-kassani/UNMaSk | ef170ddcfd7b8b5599e7d412d547084848308eb1 | c03f56a6e926fe14b1923470d22a112892116e38 | refs/heads/master | 2023-07-17T12:38:46.086746 | 2021-04-29T19:59:48 | 2021-04-29T19:59:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,115 | py | import os
from parse_arguments import get_parsed_arguments
from classifier.sccnn_classifier import SccnnClassifier
from classifier.subpackages import NetworkOptions
#########comment the below two lines if its running on a cpu environment###############
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
#########comment the below two lines if its running on a cpu environment###############
################################################################################################
#exp_dir-> checkpoint_path
#data_dir-> cws_path
#result_dir-> classification result_path
#detection_dir-> detection_path
#tissue_segment_dir-> tissue_segmentation_result_path if available( this parameter is optional)
################################################################################################
args = get_parsed_arguments()
opts = NetworkOptions.NetworkOptions(exp_dir=args.exp_dir,
num_examples_per_epoch_train=1,
num_examples_per_epoch_valid=1,
image_height=51,
image_width=51,
in_feat_dim=3,
in_label_dim=1,
num_of_classes=4,
batch_size=100,
data_dir=args.data_dir,
results_dir=args.results_dir,
detection_results_path=args.detection_results_path,
tissue_segment_dir=args.tissue_segment_dir,
preprocessed_dir=None,
current_epoch_num=0,
file_name_pattern=args.file_name_pattern,
pre_process=False,
color_code_file='HE_Fib_Lym_Tum_Others.csv')
opts.results_dir = (os.path.join(opts.results_dir, '2020ENS_TA_DUKE_HE_TEST'))
if not os.path.isdir(opts.results_dir):
os.makedirs(opts.results_dir)
if not os.path.isdir(os.path.join(opts.results_dir, 'mat')):
os.makedirs(os.path.join(opts.results_dir, 'mat'))
if not os.path.isdir(os.path.join(opts.results_dir, 'annotated_images')):
os.makedirs(os.path.join(opts.results_dir, 'annotated_images'))
if not os.path.isdir(os.path.join(opts.results_dir, 'csv')):
os.makedirs(os.path.join(opts.results_dir, 'csv'))
Network = SccnnClassifier(batch_size=opts.batch_size,
image_height=opts.image_height,
image_width=opts.image_width,
in_feat_dim=opts.in_feat_dim,
in_label_dim=opts.in_label_dim,
num_of_classes=opts.num_of_classes)
#print(opts)
Network.generate_output(opts=opts)
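# Hypothetical invocation sketch -- the real flag names live in
# parse_arguments.get_parsed_arguments() (not shown here) and are assumed to
# mirror the attribute names used above:
#   python predict_Local.py --exp_dir checkpoints/ --data_dir cws/ \
#       --results_dir results/ --detection_results_path detections/ \
#       --file_name_pattern 'Da*.jpg'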
| [
"[email protected]"
]
| |
84d3852ea9e37451d2df07cf5855edabe663ba12 | 754f71f70dfd6a22944d8d872c6d2f1d6983ac14 | /tests/serial_frame_builder/test_miso_frame_builder.py | a78831ae697787e71eac1215a585a220bf59fbf5 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
]
| permissive | Sensirion/python-shdlc-driver | 052685da8db5629fa5929da65000210db82358e7 | 31e9683c27004ee05edf89996d656bc50f5bdb3a | refs/heads/master | 2021-06-10T10:35:47.299481 | 2021-03-19T08:47:12 | 2021-03-19T08:47:12 | 144,961,065 | 6 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,667 | py | # -*- coding: utf-8 -*-
# (c) Copyright 2019 Sensirion AG, Switzerland
from __future__ import absolute_import, division, print_function
from sensirion_shdlc_driver.serial_frame_builder import \
ShdlcSerialMisoFrameBuilder
from sensirion_shdlc_driver.errors import ShdlcResponseError
import pytest
def test_initial_data_empty():
"""
    Test if the initial value and type of the "data" property are correct.
"""
builder = ShdlcSerialMisoFrameBuilder()
assert type(builder.data) is bytearray
assert len(builder.data) == 0
def test_initial_start_received_false():
"""
    Test if the initial value and type of the "start_received" property are
correct.
"""
builder = ShdlcSerialMisoFrameBuilder()
assert type(builder.start_received) is bool
assert builder.start_received is False
def test_add_data_appends():
"""
Test if the "add_data()" method appends the passed data to the object.
"""
builder = ShdlcSerialMisoFrameBuilder()
builder.add_data(b"\x00\x01\x02")
assert builder.data == b"\x00\x01\x02"
builder.add_data(b"\x03\x04\x05")
assert builder.data == b"\x00\x01\x02\x03\x04\x05"
builder.add_data(b"\xfd\xfe\xff")
assert builder.data == b"\x00\x01\x02\x03\x04\x05\xfd\xfe\xff"
def test_add_data_raises_if_max_length_reached():
"""
Test if the "add_data()" method raises an ShdlcResponseError if no valid
frame is contained and the maximum frame length is reached.
"""
builder = ShdlcSerialMisoFrameBuilder()
builder.add_data(b"\x00" * 500)
with pytest.raises(ShdlcResponseError):
builder.add_data(b"\x00" * 23)
def test_add_data():
"""
    Test if the return type and value of the "add_data()" method are correct.
"""
builder = ShdlcSerialMisoFrameBuilder()
assert type(builder.add_data(b"")) is bool
assert builder.add_data(b"") is False
assert builder.add_data(b"\x00\x01\x02") is False # some rubbish
assert builder.add_data(b"\x7e\x00\x00") is False # frame START
assert builder.add_data(b"\x00\x00\x7e") is True # frame STOP
assert builder.add_data(b"\x00\x01\x02") is True # some rubbish
def test_initial_start_received():
"""
Test if the return value of the "start_received" property is correct after
adding data with "add_data()".
"""
builder = ShdlcSerialMisoFrameBuilder()
builder.add_data(b"\x00\x01\x02") # some rubbish
assert builder.start_received is False
builder.add_data(b"\x7e\x00\x00") # frame START
assert builder.start_received is True
builder.add_data(b"\x00\x00\x7e") # frame STOP
assert builder.start_received is True
builder.add_data(b"\x00\x01\x02") # some rubbish
assert builder.start_received is True
@pytest.mark.parametrize("raw,exp_addr,exp_cmd,exp_state,exp_data", [
pytest.param(b"\x7e\x00\x00\x00\x00\xff\x7e",
0x00,
0x00,
0x00,
b"",
id="all_zeros_nodata"),
pytest.param(b"\x7e\x00\x00\x00\xff" + b"\x00" * 255 + b"\x00\x7e",
0x00,
0x00,
0x00,
b"\x00" * 255,
id="all_zeros_withdata"),
pytest.param(b"\x7e\xff\xff\xff\xff" + b"\xff" * 255 + b"\x02\x7e",
0xFF,
0xFF,
0xFF,
b"\xff" * 255,
id="all_0xFF_withdata"),
pytest.param(b"\x7e\x7d\x5e\x7d\x5d\x7d\x31\x03\x12\x7d\x33\x14\xb7\x7e",
0x7e,
0x7d,
0x11,
b"\x12\x13\x14",
id="byte_stuffing_in_address_command_state_and_data"),
pytest.param(b"\x7e\x00\x01\x00\xff" + b"\x7d\x5e" * 255 + b"\x7d\x5d\x7e",
0x00,
0x01,
0x00,
b"\x7e" * 255,
id="byte_stuffing_in_data_and_checksum"),
])
def test_interpret_data_valid(raw, exp_addr, exp_cmd, exp_state, exp_data):
"""
    Test if the return type and value of the "interpret_data()" method are correct.
"""
builder = ShdlcSerialMisoFrameBuilder()
assert builder.add_data(raw) is True
recv_addr, recv_cmd, recv_state, recv_data = builder.interpret_data()
assert type(recv_addr) is int
assert type(recv_cmd) is int
assert type(recv_state) is int
assert type(recv_data) is bytes
assert recv_addr == exp_addr
assert recv_cmd == exp_cmd
assert recv_state == exp_state
assert recv_data == exp_data
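# The vectors above follow the SHDLC framing this builder decodes: 0x7e
# delimits a frame; inside it, 0x7e is stuffed as 0x7d 0x5e and 0x7d as
# 0x7d 0x5d; the byte before the stop delimiter is a checksum -- the
# bit-inverted low byte of the sum of address, command, state, length and
# data bytes (e.g. the all-0xFF frame sums to 0x101FD, giving 0xFD -> 0x02).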
@pytest.mark.parametrize("raw", [
pytest.param(b"\x7e\x7e",
id="empty"),
pytest.param(b"\x7e\x00\x00\x00\xff\x7e",
id="too_short"),
pytest.param(b"\x7e\x00\x00\x00\xff" + b"\x00" * 256 + b"\x00\x7e",
id="too_long"),
pytest.param(b"\x7e\x00\x00\x00\x01\xfe\x7e",
id="too_less_data"),
pytest.param(b"\x7e\x00\x00\x00\x00\x00\xff\x7e",
id="too_much_data"),
pytest.param(b"\x7e\x00\x00\x00\x00\xfe\x7e",
id="nodata_wrong_checksum"),
pytest.param(b"\x7e\xff\xff\xff\xff" + b"\xff" * 255 + b"\x00\x7e",
id="all_0xFF_wrong_checksum"),
])
def test_interpret_data_invalid(raw):
"""
Test if "interpret_data()" raises an ShdlcResponseError on invalid data.
"""
builder = ShdlcSerialMisoFrameBuilder()
assert builder.add_data(raw) is True
with pytest.raises(ShdlcResponseError):
builder.interpret_data()
| [
"[email protected]"
]
| |
45b9872b67aeb1490a5490178ca9f94fe40a84b1 | 0b414a080c9853997bfba016c7f66e5f11d80a14 | /cj_env/lib/python3.6/site-packages/pysmi/compiler.py | 6d049e77452977b1da1898285226848b68702db1 | []
| no_license | alkhor/Cable_Journal | 2bd4bf00210f78c08fcc5508c13833b5e8aa3c46 | e64fb1bfcc4d1b7844b2e0a10653264d58039259 | refs/heads/master | 2021-01-22T19:09:33.562313 | 2018-04-15T19:42:16 | 2018-04-15T19:42:16 | 100,772,711 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,824 | py | #
# This file is part of pysmi software.
#
# Copyright (c) 2015-2017, Ilya Etingof <[email protected]>
# License: http://pysmi.sf.net/license.html
#
import sys
import os
import time
try:
from pwd import getpwuid
except ImportError:
# noinspection PyPep8
getpwuid = lambda x: ['<unknown>']
from pysmi import __name__ as packageName
from pysmi import __version__ as packageVersion
from pysmi.mibinfo import MibInfo
from pysmi.codegen.symtable import SymtableCodeGen
from pysmi import error
from pysmi import debug
class MibStatus(str):
"""Indicate MIB transformation result.
*MibStatus* is a subclass of Python string type. Some additional
attributes may be set to indicate the details.
The following *MibStatus* class instances are defined:
* *compiled* - MIB is successfully transformed
    * *untouched* - fresh transformed version of this MIB already exists
* *failed* - MIB transformation failed. *error* attribute carries details.
* *unprocessed* - MIB transformation required but waived for some reason
* *missing* - ASN.1 MIB source can't be found
* *borrowed* - MIB transformation failed but pre-transformed version was used
"""
def setOptions(self, **kwargs):
n = self.__class__(self)
for k in kwargs:
setattr(n, k, kwargs[k])
return n
statusCompiled = MibStatus('compiled')
statusUntouched = MibStatus('untouched')
statusFailed = MibStatus('failed')
statusUnprocessed = MibStatus('unprocessed')
statusMissing = MibStatus('missing')
statusBorrowed = MibStatus('borrowed')
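# Illustrative sketch of how callers consume these statuses (the attribute
# names follow the setOptions() calls made later in this module):
#   status = statusFailed.setOptions(error=exc)
#   status == 'failed'   # True -- MibStatus subclasses str
#   status.error         # the PySmiError that caused the failure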
class MibCompiler(object):
"""Top-level, user-facing, composite MIB compiler object.
MibCompiler implements high-level MIB transformation processing logic.
It executes its actions by calling the following specialized objects:
* *readers* - to acquire ASN.1 MIB data
* *searchers* - to see if transformed MIB already exists and no processing is necessary
* *parser* - to parse ASN.1 MIB into AST
* *code generator* - to perform actual MIB transformation
* *borrowers* - to fetch pre-transformed MIB if transformation is impossible
* *writer* - to store transformed MIB data
Required components must be passed to MibCompiler on instantiation. Those
components are: *parser*, *codegenerator* and *writer*.
Optional components could be set or modified at later phases of MibCompiler
life. Unlike singular, required components, optional one can be present
in sequences to address many possible sources of data. They are
*readers*, *searchers* and *borrowers*.
Examples: ::
from pysmi.reader.localfile import FileReader
from pysmi.searcher.pyfile import PyFileSearcher
from pysmi.searcher.pypackage import PyPackageSearcher
from pysmi.searcher.stub import StubSearcher
from pysmi.writer.pyfile import PyFileWriter
from pysmi.parser.smi import SmiV2Parser
from pysmi.codegen.pysnmp import PySnmpCodeGen, baseMibs
mibCompiler = MibCompiler(SmiV2Parser(),
PySnmpCodeGen(),
PyFileWriter('/tmp/pysnmp/mibs'))
mibCompiler.addSources(FileReader('/usr/share/snmp/mibs'))
mibCompiler.addSearchers(PyFileSearcher('/tmp/pysnmp/mibs'))
mibCompiler.addSearchers(PyPackageSearcher('pysnmp.mibs'))
mibCompiler.addSearchers(StubSearcher(*baseMibs))
results = mibCompiler.compile('IF-MIB', 'IP-MIB')
"""
indexFile = 'index'
def __init__(self, parser, codegen, writer):
"""Creates an instance of *MibCompiler* class.
Args:
parser: ASN.1 MIB parser object
codegen: MIB transformation object
writer: transformed MIB storing object
"""
self._parser = parser
self._codegen = codegen
self._symbolgen = SymtableCodeGen()
self._writer = writer
self._sources = []
self._searchers = []
self._borrowers = []
def addSources(self, *sources):
"""Add more ASN.1 MIB source repositories.
MibCompiler.compile will invoke each of configured source objects
in order of their addition asking each to fetch MIB module specified
by name.
Args:
sources: reader object(s)
Returns:
reference to itself (can be used for call chaining)
"""
self._sources.extend(sources)
debug.logger & debug.flagCompiler and debug.logger(
'current MIB source(s): %s' % ', '.join([str(x) for x in self._sources]))
return self
def addSearchers(self, *searchers):
"""Add more transformed MIBs repositories.
MibCompiler.compile will invoke each of configured searcher objects
in order of their addition asking each if already transformed MIB
module already exists and is more recent than specified.
Args:
searchers: searcher object(s)
Returns:
reference to itself (can be used for call chaining)
"""
self._searchers.extend(searchers)
debug.logger & debug.flagCompiler and debug.logger(
'current compiled MIBs location(s): %s' % ', '.join([str(x) for x in self._searchers]))
return self
def addBorrowers(self, *borrowers):
"""Add more transformed MIBs repositories to borrow MIBs from.
Whenever MibCompiler.compile encounters MIB module which neither of
the *searchers* can find or fetched ASN.1 MIB module can not be
parsed (due to syntax errors), these *borrowers* objects will be
invoked in order of their addition asking each if already transformed
MIB can be fetched (borrowed).
Args:
borrowers: borrower object(s)
Returns:
reference to itself (can be used for call chaining)
"""
self._borrowers.extend(borrowers)
debug.logger & debug.flagCompiler and debug.logger(
'current MIB borrower(s): %s' % ', '.join([str(x) for x in self._borrowers]))
return self
def compile(self, *mibnames, **options):
"""Transform requested and possibly referred MIBs.
The *compile* method should be invoked when *MibCompiler* object
is operational meaning at least *sources* are specified.
Once called with a MIB module name, *compile* will:
* fetch ASN.1 MIB module with given name by calling *sources*
* make sure no such transformed MIB already exists (with *searchers*)
* parse ASN.1 MIB text with *parser*
* perform actual MIB transformation into target format with *code generator*
* may attempt to borrow pre-transformed MIB through *borrowers*
* write transformed MIB through *writer*
The above sequence will be performed for each MIB name given in
*mibnames* and may be performed for all MIBs referred to from
MIBs being processed.
Args:
mibnames: list of ASN.1 MIBs names
options: options that affect the way PySMI components work
Returns:
A dictionary of MIB module names processed (keys) and *MibStatus*
class instances (values)
"""
processed = {}
parsedMibs = {}
failedMibs = {}
borrowedMibs = {}
builtMibs = {}
symbolTableMap = {}
mibsToParse = [x for x in mibnames]
while mibsToParse:
mibname = mibsToParse.pop(0)
if mibname in parsedMibs:
debug.logger & debug.flagCompiler and debug.logger('MIB %s already parsed' % mibname)
continue
if mibname in failedMibs:
debug.logger & debug.flagCompiler and debug.logger('MIB %s already failed' % mibname)
continue
for source in self._sources:
debug.logger & debug.flagCompiler and debug.logger('trying source %s' % source)
try:
fileInfo, fileData = source.getData(mibname)
for mibTree in self._parser.parse(fileData):
mibInfo, symbolTable = self._symbolgen.genCode(
mibTree, symbolTableMap
)
symbolTableMap[mibInfo.name] = symbolTable
parsedMibs[mibInfo.name] = fileInfo, mibInfo, mibTree
if mibname in failedMibs:
del failedMibs[mibname]
mibsToParse.extend(mibInfo.imported)
debug.logger & debug.flagCompiler and debug.logger(
'%s (%s) read from %s, immediate dependencies: %s' % (
mibInfo.name, mibname, fileInfo.path, ', '.join(mibInfo.imported) or '<none>'))
break
except error.PySmiReaderFileNotFoundError:
debug.logger & debug.flagCompiler and debug.logger('no %s found at %s' % (mibname, source))
continue
except error.PySmiError:
exc_class, exc, tb = sys.exc_info()
exc.source = source
exc.mibname = mibname
exc.msg += ' at MIB %s' % mibname
debug.logger & debug.flagCompiler and debug.logger('%serror %s from %s' % (
options.get('ignoreErrors') and 'ignoring ' or 'failing on ', exc, source))
failedMibs[mibname] = exc
processed[mibname] = statusFailed.setOptions(error=exc)
else:
exc = error.PySmiError('MIB source %s not found' % mibname)
exc.mibname = mibname
                debug.logger & debug.flagCompiler and debug.logger('no %s found anywhere' % mibname)
if mibname not in failedMibs:
failedMibs[mibname] = exc
if mibname not in processed:
processed[mibname] = statusMissing
debug.logger & debug.flagCompiler and debug.logger(
            'MIBs analyzed %s, MIBs failed %s' % (len(parsedMibs), len(failedMibs)))
#
# See what MIBs need generating
#
for mibname in parsedMibs.copy():
fileInfo, mibInfo, mibTree = parsedMibs[mibname]
debug.logger & debug.flagCompiler and debug.logger('checking if %s requires updating' % mibname)
for searcher in self._searchers:
try:
searcher.fileExists(mibname, fileInfo.mtime, rebuild=options.get('rebuild'))
except error.PySmiFileNotFoundError:
debug.logger & debug.flagCompiler and debug.logger(
'no compiled MIB %s available through %s' % (mibname, searcher))
continue
except error.PySmiFileNotModifiedError:
debug.logger & debug.flagCompiler and debug.logger(
'will be using existing compiled MIB %s found by %s' % (mibname, searcher))
del parsedMibs[mibname]
processed[mibname] = statusUntouched
break
except error.PySmiError:
exc_class, exc, tb = sys.exc_info()
exc.searcher = searcher
exc.mibname = mibname
exc.msg += ' at MIB %s' % mibname
debug.logger & debug.flagCompiler and debug.logger('error from %s: %s' % (searcher, exc))
continue
else:
debug.logger & debug.flagCompiler and debug.logger(
'no suitable compiled MIB %s found anywhere' % mibname)
if options.get('noDeps') and mibname not in mibnames:
debug.logger & debug.flagCompiler and debug.logger(
'excluding imported MIB %s from code generation' % mibname)
del parsedMibs[mibname]
processed[mibname] = statusUntouched
continue
debug.logger & debug.flagCompiler and debug.logger(
'MIBs parsed %s, MIBs failed %s' % (len(parsedMibs), len(failedMibs)))
#
# Generate code for parsed MIBs
#
for mibname in parsedMibs.copy():
fileInfo, mibInfo, mibTree = parsedMibs[mibname]
debug.logger & debug.flagCompiler and debug.logger('compiling %s read from %s' % (mibname, fileInfo.path))
comments = [
'ASN.1 source %s' % fileInfo.path,
'Produced by %s-%s at %s' % (packageName, packageVersion, time.asctime()),
'On host %s platform %s version %s by user %s' % (
hasattr(os, 'uname') and os.uname()[1] or '?', hasattr(os, 'uname') and os.uname()[0] or '?',
hasattr(os, 'uname') and os.uname()[2] or '?',
hasattr(os, 'getuid') and getpwuid(os.getuid())[0] or '?'),
'Using Python version %s' % sys.version.split('\n')[0]
]
try:
mibInfo, mibData = self._codegen.genCode(
mibTree,
symbolTableMap,
comments=comments,
genTexts=options.get('genTexts'),
textFilter=options.get('textFilter')
)
builtMibs[mibname] = fileInfo, mibInfo, mibData
del parsedMibs[mibname]
debug.logger & debug.flagCompiler and debug.logger(
'%s read from %s and compiled by %s' % (mibname, fileInfo.path, self._writer))
except error.PySmiError:
exc_class, exc, tb = sys.exc_info()
exc.handler = self._codegen
exc.mibname = mibname
exc.msg += ' at MIB %s' % mibname
debug.logger & debug.flagCompiler and debug.logger('error from %s: %s' % (self._codegen, exc))
processed[mibname] = statusFailed.setOptions(error=exc)
failedMibs[mibname] = exc
del parsedMibs[mibname]
debug.logger & debug.flagCompiler and debug.logger(
        'MIBs built %s, MIBs failed %s' % (len(builtMibs), len(failedMibs)))
#
# Try to borrow pre-compiled MIBs for failed ones
#
for mibname in failedMibs.copy():
if options.get('noDeps') and mibname not in mibnames:
debug.logger & debug.flagCompiler and debug.logger('excluding imported MIB %s from borrowing' % mibname)
continue
for borrower in self._borrowers:
debug.logger & debug.flagCompiler and debug.logger('trying to borrow %s from %s' % (mibname, borrower))
try:
fileInfo, fileData = borrower.getData(
mibname,
genTexts=options.get('genTexts')
)
borrowedMibs[mibname] = fileInfo, MibInfo(name=mibname, imported=[]), fileData
del failedMibs[mibname]
debug.logger & debug.flagCompiler and debug.logger('%s borrowed with %s' % (mibname, borrower))
break
except error.PySmiError:
debug.logger & debug.flagCompiler and debug.logger('error from %s: %s' % (borrower, sys.exc_info()[1]))
debug.logger & debug.flagCompiler and debug.logger(
'MIBs available for borrowing %s, MIBs failed %s' % (len(borrowedMibs), len(failedMibs)))
#
# See what MIBs need borrowing
#
for mibname in borrowedMibs.copy():
debug.logger & debug.flagCompiler and debug.logger('checking if failed MIB %s requires borrowing' % mibname)
fileInfo, mibInfo, mibData = borrowedMibs[mibname]
for searcher in self._searchers:
try:
searcher.fileExists(mibname, fileInfo.mtime, rebuild=options.get('rebuild'))
except error.PySmiFileNotFoundError:
debug.logger & debug.flagCompiler and debug.logger(
'no compiled MIB %s available through %s' % (mibname, searcher))
continue
except error.PySmiFileNotModifiedError:
debug.logger & debug.flagCompiler and debug.logger(
'will be using existing compiled MIB %s found by %s' % (mibname, searcher))
del borrowedMibs[mibname]
processed[mibname] = statusUntouched
break
except error.PySmiError:
exc_class, exc, tb = sys.exc_info()
exc.searcher = searcher
exc.mibname = mibname
exc.msg += ' at MIB %s' % mibname
debug.logger & debug.flagCompiler and debug.logger('error from %s: %s' % (searcher, exc))
continue
else:
debug.logger & debug.flagCompiler and debug.logger(
'no suitable compiled MIB %s found anywhere' % mibname)
if options.get('noDeps') and mibname not in mibnames:
debug.logger & debug.flagCompiler and debug.logger(
'excluding imported MIB %s from borrowing' % mibname)
processed[mibname] = statusUntouched
else:
debug.logger & debug.flagCompiler and debug.logger('will borrow MIB %s' % mibname)
builtMibs[mibname] = borrowedMibs[mibname]
processed[mibname] = statusBorrowed.setOptions(
path=fileInfo.path, file=fileInfo.file,
alias=fileInfo.name
)
del borrowedMibs[mibname]
debug.logger & debug.flagCompiler and debug.logger(
'MIBs built %s, MIBs failed %s' % (len(builtMibs), len(failedMibs)))
#
# We could attempt to ignore missing/failed MIBs
#
if failedMibs and not options.get('ignoreErrors'):
debug.logger & debug.flagCompiler and debug.logger('failing with problem MIBs %s' % ', '.join(failedMibs))
for mibname in builtMibs:
processed[mibname] = statusUnprocessed
return processed
debug.logger & debug.flagCompiler and debug.logger(
'proceeding with built MIBs %s, failed MIBs %s' % (', '.join(builtMibs), ', '.join(failedMibs)))
#
# Store compiled MIBs
#
for mibname in builtMibs.copy():
fileInfo, mibInfo, mibData = builtMibs[mibname]
try:
if options.get('writeMibs', True):
self._writer.putData(
mibname, mibData, dryRun=options.get('dryRun')
)
debug.logger & debug.flagCompiler and debug.logger('%s stored by %s' % (mibname, self._writer))
del builtMibs[mibname]
if mibname not in processed:
processed[mibname] = statusCompiled.setOptions(
path=fileInfo.path,
file=fileInfo.file,
alias=fileInfo.name,
oid=mibInfo.oid,
oids=mibInfo.oids,
identity=mibInfo.identity,
enterprise=mibInfo.enterprise,
compliance=mibInfo.compliance,
)
except error.PySmiError:
exc_class, exc, tb = sys.exc_info()
exc.handler = self._codegen
exc.mibname = mibname
exc.msg += ' at MIB %s' % mibname
debug.logger & debug.flagCompiler and debug.logger('error %s from %s' % (exc, self._writer))
processed[mibname] = statusFailed.setOptions(error=exc)
failedMibs[mibname] = exc
del builtMibs[mibname]
debug.logger & debug.flagCompiler and debug.logger(
'MIBs modified: %s' % ', '.join([x for x in processed if processed[x] in ('compiled', 'borrowed')]))
return processed
def buildIndex(self, processedMibs, **options):
comments = [
'Produced by %s-%s at %s' % (packageName, packageVersion, time.asctime()),
'On host %s platform %s version %s by user %s' % (
hasattr(os, 'uname') and os.uname()[1] or '?', hasattr(os, 'uname') and os.uname()[0] or '?',
                hasattr(os, 'uname') and os.uname()[2] or '?', hasattr(os, 'getuid') and getpwuid(os.getuid())[0] or '?'),
'Using Python version %s' % sys.version.split('\n')[0]
]
try:
self._writer.putData(
self.indexFile,
self._codegen.genIndex(
processedMibs,
comments=comments,
old_index_data=self._writer.getData(self.indexFile)
),
dryRun=options.get('dryRun')
)
except error.PySmiError:
exc_class, exc, tb = sys.exc_info()
exc.msg += ' at MIB index %s' % self.indexFile
debug.logger & debug.flagCompiler and debug.logger('error %s when building %s' % (exc, self.indexFile))
if options.get('ignoreErrors'):
return
if hasattr(exc, 'with_traceback'):
raise exc.with_traceback(tb)
else:
raise exc
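# ---------------------------------------------------------------------------
# Illustrative usage sketch, not part of the original module. The reader,
# searcher and writer class names follow the usual PySMI layout and are
# assumptions; the 'rebuild'/'ignoreErrors' keyword options are the ones the
# compile() method above actually reads.
#
#   compiler = MibCompiler(SmiStarParser(), PySnmpCodeGen(),
#                          PyFileWriter('/tmp/pymibs'))
#   compiler.addSources(FileReader('/usr/share/snmp/mibs'))
#   compiler.addSearchers(PyFileSearcher('/tmp/pymibs'))
#   results = compiler.compile('IF-MIB', rebuild=True, ignoreErrors=True)
#   for mib, status in sorted(results.items()):
#       print('%s: %s' % (mib, status))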
| [
"[email protected]"
]
| |
123b1cbb1b81c70709c950c532951eaeed017c86 | 1523e2fff267279bbf99a44a71b7482081dd1141 | /The_Watch/The_Watch/wsgi.py | 2cbac8cc2093ca421a722294adc2ee44bfc89a4c | [
"MIT"
]
| permissive | Kipngetich33/The-Watch | 4c77f5e365553ab5af9b7a9c4a5bea71139d47c0 | 96e39937c0015eae749836f6215d60ae5cb86e51 | refs/heads/master | 2021-05-02T07:20:46.854250 | 2018-02-12T08:37:36 | 2018-02-12T08:37:36 | 120,872,467 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 396 | py | """
WSGI config for The_Watch project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "The_Watch.settings")
application = get_wsgi_application()
| [
"[email protected]"
]
| |
6fc833e1360cd1461a185e6418da611f9ec80004 | f10db3b11131ddf2bf5026e42cdd72c275e49693 | /ToolsX/leetcode/0069/0069_4.py | fc86c98645998a5d503330fc7b69982f3ce3ac41 | []
| no_license | JunLei-MI/PythonX | 36def40e33c9ebb64ce28af2b5da010393b08356 | efea806d49f07d78e3db0390696778d4a7fc6c28 | refs/heads/master | 2023-04-07T10:58:45.647430 | 2021-01-25T16:54:37 | 2021-04-15T13:41:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,166 | py | class Solution:
def mySqrt(self, x: int) -> int:
"""
        Scanning every value from 0 to n is definitely too slow; it can be
        optimized to stop at n/2.
        1
        Even computing the square just once still times out, so binary
        search it is.
        2
        Be sure to hand-check 0 and 1.
        Watch how the while condition and the updates interact:
        with `low <= high`, the later `low = mid` needs +1 and `high = mid`
        needs -1; when the loop exits, high is one less than low, so
        return high.
        3
        Binary search.
        4
        Bit manipulation, building the value from the highest bit down.
https://leetcode.com/problems/sqrtx/discuss/25048/Share-my-O(log-n)-Solution-using-bit-manipulation
>>> Solution().mySqrt(1060472158)
32564
"""
answer = 0
        bit = 1 << 15 # assuming a 32-bit int, the root fits in 16 bits, so start at bit 15
        while bit > 0:
            answer |= bit # tentatively set this bit to 1
            if answer * answer > x: # too big with this bit set, so it cannot be kept -- undo it
                answer ^= bit # `bit` has exactly one 1; XOR clears that bit of answer
bit >>= 1
return answer
if __name__ == '__main__':
import doctest
doctest.testmod(verbose=True)
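    # Extra spot checks of the bit-by-bit construction (illustrative;
    # verified by hand: 3*3 = 9 <= 10 < 16 = 4*4).
    assert Solution().mySqrt(0) == 0
    assert Solution().mySqrt(1) == 1
    assert Solution().mySqrt(10) == 3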
| [
"[email protected]"
]
| |
3fbef31ab44f7f7928253701aacca5637318f44b | e267d1dbb7eb7cad239b18cffe6ddc53ae45aa9a | /tests/test_validators.py | f4ff4da249c4db81979e8286293a8a41471d1559 | []
| no_license | papercapp/DisposableEmailChecker | 038fa91f60f2798d687ca846d5836200af30f624 | 60f055f9102a4f9e967d740e4446e5c7ac76c351 | refs/heads/master | 2020-05-29T08:52:05.319215 | 2015-11-04T02:00:38 | 2015-11-04T02:00:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 732 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
from django.test import TestCase
from django.core.exceptions import ValidationError
from disposable_email_checker import validators
from disposable_email_checker.emails import email_domain_loader
class TestDisposableEmailValidator(TestCase):
def setUp(self):
self.disposable_email = "fake.mcfakerston@{domain}".format(
domain=random.choice(email_domain_loader())
)
self.not_a_disposable_email = "[email protected]"
def test_validator(self):
self.assertRaises(ValidationError, validators.validate_disposable_email, self.disposable_email)
validators.validate_disposable_email(self.not_a_disposable_email)
| [
"[email protected]"
]
| |
02aea388baeecdf450749332637825ef25ee1e47 | dce2e3b11804fdb141feaa48299fa8cd751f0e5d | /1154.一年中的第几天.py | a811e8c92ef10d247014b84f42c7884b8caf4f93 | []
| permissive | Cosmos-Break/leetcode | bf056efb6f3eb6448df7fb3fc4869992a3e7eb48 | 9f5f3d24e35b0a482ed40594ea665e9068324dcc | refs/heads/main | 2023-06-26T04:29:25.135826 | 2021-07-19T12:29:29 | 2021-07-19T12:29:29 | 293,397,157 | 0 | 0 | MIT | 2020-09-07T01:55:39 | 2020-09-07T01:55:38 | null | UTF-8 | Python | false | false | 427 | py | #
# @lc app=leetcode.cn id=1154 lang=python3
#
# [1154] Day of the Year
#
# @lc code=start
class Solution:
def dayOfYear(self, data: str) -> int:
year = int(data[0:4])
month = int(data[5:7])
day = int(data[8:])
dic = [31,28,31,30,31,30,31,31,30,31,30,31]
if year%400==0 or year%4==0 and year%100!=0:
dic[1]=29
return sum(dic[:month-1])+day
# @lc code=end
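# Quick sanity checks (illustrative; arithmetic verified by hand):
# 2019-02-10 -> 31 (Jan) + 10 = 41; 2020-03-01 -> 31 + 29 + 1 = 61 (leap year).
assert Solution().dayOfYear('2019-02-10') == 41
assert Solution().dayOfYear('2020-03-01') == 61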
| [
"[email protected]"
]
| |
601163c7d28fcfe7bcc3af912c65849c60ba7f67 | 4e7db10524c938c8c6e687521def2889e20ec646 | /P6/list3.1.py | 4faa6a4724c1f365462ad94093c9c7a93b2d057b | []
| no_license | mpigrobot/python | e5cf60ca438e0d5e63a1e87a266a9e255bc07271 | bf9262657a7401f37de38318db768e630fab97a9 | refs/heads/master | 2020-03-15T15:37:03.228080 | 2018-03-31T07:33:07 | 2018-03-31T07:33:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 78 | py | L='ABCDEFG'[:3]
print L  # ABC
X='ABCDEFG'[-3:]
print X  # EFG
Y='ABCDEFG'[::2]  # every second character: ACEG
print Y | [
"[email protected]"
]
| |
15c60558a5d48ed336761321bdefd509bf9ccd07 | 3185dc605853fdaf942fd06e206225793b198638 | /剑指offer/No20_表示数值的字符串.py | a60b824774b72020c1d40e4aef19394be63143f8 | []
| no_license | fank-cd/books_exercise_code | cb81ee8ec8167a5f5e3bfc58d3c1d6d931ca9286 | 1e8109adb82f741df1203658d4bf272f09a651b8 | refs/heads/master | 2021-07-11T01:15:11.980179 | 2020-06-29T04:01:53 | 2020-06-29T04:01:53 | 156,671,075 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,444 | py | # Interview Question 20: strings that represent numeric values
# Problem: implement a function that decides whether a string represents a
# numeric value (covering both integers and decimals). For example, the
# strings "+100", "5e2", "-123", "3.1416" and "-1E-16" all represent numbers,
# while "12E", "1a3.14", "1.2.3", "+-5" and "12e+5.4" do not.
# (Author's note: couldn't quite make sense of the problem statement at
# first; leaving the code here as a provisional attempt.)
def is_numeric(string):
if not isinstance(string, str):
return False
index = 0
result, index = scan_integer(string, index)
if index < len(string) and string[index] == '.':
index += 1
has_float, index = scan_unsigned_integer(string, index)
result = result or has_float
if index < len(string) and string[index] in ('e', 'E'):
index += 1
has_exp, index = scan_integer(string, index)
result = result and has_exp
return result and index == len(string)
def scan_integer(string, index):
if index < len(string) and string[index] in ('-', '+'):
index += 1
return scan_unsigned_integer(string, index)
def scan_unsigned_integer(string, index):
old_index = index
while index < len(string) and string[index] in '0123456789':
index += 1
return (old_index != index), index
if __name__ == "__main__":
print(is_numeric("+100"))
print(is_numeric("5e2"))
print(is_numeric("-200"))
print(is_numeric("3.1415926"))
print(is_numeric("1.34e-2"))
print(is_numeric("1.34e"))
| [
"[email protected]"
]
| |
35c16b5dd609e24fbc243144ddcb65eef3a54569 | 71aea3429ecb5b4ccf415078809654b6e97c2cb6 | /server/config.py | f91344f2181cace25b677f057fdaf6951c423276 | [
"MIT"
]
| permissive | Nukesor/spacesurvival | dcbb8f0441c23367cd4c32beb260e336d8de06a7 | 1b02f2027f172ebbbf4f944641b7f0b5d0b5bb92 | refs/heads/master | 2021-01-19T09:27:03.809556 | 2017-12-04T13:03:17 | 2017-12-04T13:03:17 | 82,110,806 | 2 | 0 | null | 2017-11-20T13:16:30 | 2017-02-15T21:54:37 | Rust | UTF-8 | Python | false | false | 1,274 | py | """Various configs for different environments."""
from datetime import timedelta
class BaseConfig:
"""Base config."""
DEBUG = False
SECRET_KEY = 'lolololol'
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_DATABASE_URI = 'postgres://localhost/browsergame'
AUTH_TOKEN_TIMEOUT = timedelta(days=365)
MAIL_PORT = 465
MAIL_USE_SSL = True
MAIL_USERNAME = 'username'
MAIL_PASSWORD = 'password'
PASSLIB_SCHEMES = ["argon2"]
SECURITY_CONFIRMABLE = True
SECURITY_TRACKABLE = True
MODULE_FILE_PATH = "server/data/module_data.json"
RESEARCH_FILE_PATH = "server/data/research_data.json"
CORS_ALLOW_ORIGIN = ''
CORS_ALLOW_METHODS = ''
CORS_ALLOW_HEADERS = ''
class DevConfig(BaseConfig):
"""Develop config."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgres://localhost/browsergame-dev'
class TestConfig(BaseConfig):
"""Testing config."""
SQLALCHEMY_DATABASE_URI = 'postgres://localhost/browsergame-test'
class ProdConfig(BaseConfig):
"""Production config."""
SQLALCHEMY_DATABASE_URI = 'postgres://localhost/browsergame'
AUTH_TOKEN_TIMEOUT = 30 * 12 * 30 * 24 * 3600
configs = {
'develop': DevConfig,
'testing': TestConfig,
'production': ProdConfig,
}
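# Illustrative sketch of wiring these configs into an app. Flask is an
# assumption inferred from the MAIL_*/SECURITY_* keys, and the create_app
# factory below is hypothetical, not part of this project:
#
#   from flask import Flask
#
#   def create_app(env='develop'):
#       app = Flask(__name__)
#       app.config.from_object(configs[env])
#       return app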
| [
"[email protected]"
]
| |
94f7bb0c107ba916893a8ac8be11f4eaab3b3588 | f1738cd603e0b2e31143f4ebf7eba403402aecd6 | /ucs/base/univention-updater/conffiles/15_ucs-online-version.py | 69852d3acc488fb8ccf3b4f613225d51383ef948 | []
| no_license | m-narayan/smart | 92f42bf90d7d2b24f61915fac8abab70dd8282bc | 1a6765deafd8679079b64dcc35f91933d37cf2dd | refs/heads/master | 2016-08-05T17:29:30.847382 | 2013-01-04T04:50:26 | 2013-01-04T04:50:26 | 7,079,786 | 8 | 6 | null | 2015-04-29T08:54:12 | 2012-12-09T14:56:27 | Python | UTF-8 | Python | false | false | 1,777 | py | # Copyright (C) 2011-2012 Univention GmbH
#
# http://www.univention.de/
#
# All rights reserved.
#
# The source code of this program is made available
# under the terms of the GNU Affero General Public License version 3
# (GNU AGPL V3) as published by the Free Software Foundation.
#
# Binary versions of this program provided by Univention to you as
# well as other copyrighted, protected or trademarked materials like
# Logos, graphics, fonts, specific documentations and configurations,
# cryptographic keys etc. are subject to a license agreement between
# you and Univention and not subject to the GNU AGPL V3.
#
# In the case you use this program under the terms of the GNU AGPL V3,
# the program is provided in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License with the Debian GNU/Linux or Univention distribution in file
# /usr/share/common-licenses/AGPL-3; if not, see
# <http://www.gnu.org/licenses/>.
import os, shutil
FILE_NAME='/etc/apt/sources.list.d/15_ucs-online-version.list'
def preinst(baseConfig, changes):
if os.path.exists('%s.old' % FILE_NAME):
os.remove('%s.old' % FILE_NAME)
if os.path.exists(FILE_NAME):
shutil.copyfile('%s' % FILE_NAME, '%s.old' % FILE_NAME)
def postinst(baseConfig, changes):
if os.path.exists(FILE_NAME):
res=open(FILE_NAME, 'r').readlines()
if len(res) <= 1:
os.remove(FILE_NAME)
if os.path.exists('%s.old' % FILE_NAME):
shutil.copyfile('%s.old' % FILE_NAME, '%s' % FILE_NAME)
if os.path.exists('%s.old' % FILE_NAME):
os.remove('%s.old' % FILE_NAME)
pass
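# Illustrative note (an assumption based on the conffiles/ location): these
# hooks are run by Univention Config Registry around rewrites of the template
# file, so a manual dry run could look like the following -- both arguments
# are unused above, hence the placeholders:
#
#   preinst(None, {})
#   # ... /etc/apt/sources.list.d/15_ucs-online-version.list is rewritten ...
#   postinst(None, {})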
| [
"[email protected]"
]
| |
a17bcec1354f60b3df6a6f22d277fb5f3cb5e399 | 305e473c17f47b815668377bc90e13642510aace | /punch_version.py | 7e73a53c562eed61ec708d5241c654d92f5179d0 | [
"MIT"
]
| permissive | xrmx/mau | 5d6f66811b1356331c98547cc7c778ff3a04d6ff | 0aafa67a1b6f02eda72fe60ea2775454c3ad0866 | refs/heads/main | 2023-03-21T06:22:14.447284 | 2021-03-11T10:58:05 | 2021-03-11T10:58:05 | 347,347,655 | 0 | 0 | MIT | 2021-03-13T11:08:21 | 2021-03-13T11:08:20 | null | UTF-8 | Python | false | false | 30 | py | major = 1
minor = 3
patch = 0
| [
"[email protected]"
]
| |
ac1f4677532bd69943d43bfac731b473a9f32705 | 41e2cf24f0ff3a11a98bb00e03c598dde35452c4 | /project/migrations/0009_googleapisetup.py | f2a0baa279dd151205113e1a9a0a64bb2a0691f5 | []
| no_license | anushamokashi/mob | f5dbedc729073092f94323feca6d95dee24087a2 | 37bc0eb033bc23d37e9d4fb9bb8b2b456553ff7f | refs/heads/master | 2020-04-24T08:36:56.008212 | 2019-02-21T09:09:04 | 2019-02-21T09:09:04 | 171,810,613 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,355 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-08-31 10:27
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import django_extensions.db.fields
class Migration(migrations.Migration):
dependencies = [
('project', '0008_project_table_append_by_underscore'),
]
operations = [
migrations.CreateModel(
name='GoogleAPISetup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')),
('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')),
('apikey', models.CharField(blank=True, max_length=200, null=True)),
('clientid', models.CharField(blank=True, max_length=200, null=True)),
('project_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='project.Project')),
],
options={
'ordering': ('-modified', '-created'),
'abstract': False,
'get_latest_by': 'modified',
},
),
]
| [
"[email protected]"
]
| |
ad02f8785f62b23517182467691e772ea5ff368c | 981fbe20320ce16e5746c3d492545fbd30bcef02 | /screen_cap/http_screen_cap.py | 1e3c46dd41dba62f9d329daa7ebf9789613794af | []
| no_license | jinjin123/zabbix-api | f73e32c3433356c19df623066d457f5d7e0709e6 | 471116d0dcd5074b1047d4065c87e7f32c9aa9ff | refs/heads/master | 2021-01-25T06:45:16.371094 | 2017-07-26T12:23:39 | 2017-07-26T12:23:39 | 93,605,146 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,641 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# takes arguments to fetch the host-to-graph mapping
# both hostid and graphid are required; every host has a distinct id, and the
# graphid is obtained by drilling down from the hostid: hostid=xx&graphid=xx&
import json, traceback
import datetime
import cookielib, urllib2,urllib
import time
class ZabbixGraph():
def __init__(self,url="http://172.16.102.128:81/index.php",name="admin",password="zabbix"):
self.url=url
self.name=name
self.passwd=password
        # generate the login cookies at initialization time
cookiejar = cookielib.CookieJar()
urlOpener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookiejar))
values = {"name":self.name,'password':self.passwd,'autologin':1,"enter":'Sign in'}
data = urllib.urlencode(values)
request = urllib2.Request(url, data)
try:
urlOpener.open(request,timeout=10)
self.urlOpener=urlOpener
except urllib2.HTTPError, e:
print e
def GetGraph(self,url="http://172.16.102.128:81/chart2.php",values={'width': 800, 'height': 200, 'hostid': '', 'graphid': '', 'stime': time.strftime('%Y%m%d%H%M%S', time.localtime(time.time())), 'period': 3600},image_dir="/home/azureuser"):
data=urllib.urlencode(values)
request = urllib2.Request(url,data)
url = self.urlOpener.open(request)
image = url.read()
imagename="%s/%s_%s_%s.jpg" % (image_dir, values["hostid"], values["graphid"], values["stime"])
#imagename="%s/%s_%s.jpg" % (image_dir, values["graphid"], values["stime"])
f=open(imagename,'wb')
f.write(image)
return '1'
if __name__ == "__main__":
#hostid = ['10107','10108','10109','10110','10111','10112']
hostid = ['10107','10108']
#graphidm = ['594','566','566','594','601','608']
graphidm = ['594','566']
graphidd = ['624','643']
#graphidd = ['624','643','','','','','']
graph = ZabbixGraph()
stime = time.strftime('%Y%m%d%H%M%S', time.localtime(time.time()))
values = {'width': 800, 'height': 200, 'hostid': '10107', 'graphid': '594', 'stime': stime, 'period': 300}
graph.GetGraph("http://172.16.102.128:81/chart2.php",values,"/root/screen")
#for h in hostid:
# for m in graphidm:
# values = {'width': 800, 'height': 200, 'hostid': h, 'graphid': m, 'stime': stime, 'period': 300}
# graph.GetGraph("http://172.16.102.128:81/chart2.php",values,"/root/screen")
#for d in graphidd:
# values = {'width': 800, 'height': 200, 'hostid': h, 'graphid': d, 'stime': stime, 'period': 300}
# graph.GetGraph("http://172.16.102.128:81/chart2.php",values,"/root/screen")
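    # Illustrative notes on the chart parameters:
    # - 'stime' is the chart start time (YYYYmmddHHMMSS) and 'period' is the
    #   window length in seconds, so period=3600 would render the last hour:
    #     values = {'width': 800, 'height': 200, 'hostid': '10109',
    #               'graphid': '601', 'stime': stime, 'period': 3600}
    #     graph.GetGraph("http://172.16.102.128:81/chart2.php", values, "/root/screen")
    # - Caution: the default `values` dict in GetGraph is built once at import
    #   time, so its 'stime' is frozen at module load; pass a fresh dict as
    #   the code above does.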
| [
"[email protected]"
]
| |
bf42f669890aa2afb5de8d642415984eadf63771 | 60a6ba6e5f3faca2b1e17c1e90917efc3cfc561a | /aoc2018/day7/day7_part2.py | 675c2a0599f50e1f486089a078f71bc1a088a2c2 | [
"MIT"
]
| permissive | GetPastTheMonkey/advent-of-code | f462f5e2b72d913e39484446ce92a043d455091c | 7a5ee30dbafaf8ef6f9bf9936e484efd024aa308 | refs/heads/master | 2023-01-14T09:45:00.553575 | 2022-12-25T10:59:19 | 2022-12-25T13:00:44 | 160,684,715 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,782 | py | from os.path import join, dirname, realpath
from re import match
# Specify number of workers
worker_count = 5
workers = [{
"task": None,
"remaining": 0
} for _ in range(worker_count)]
# Load file
tasks = dict()
for i in range(ord("A"), ord("Z")+1):
tasks[chr(i)] = dict()
tasks[chr(i)]["requirements"] = []
tasks[chr(i)]["duration"] = 60 + (i - 64) # 60 + position of character in alphabet -> A = 60+1, B = 60+2, ...
tasks[chr(i)]["has_worker"] = False
with open(join(dirname(realpath(__file__)), "input.txt")) as f:
for line in f:
m = match("^Step (?P<req>[A-Z]) must be finished before step (?P<step>[A-Z]) can begin\.$", line)
step = m.group("step")
reqs = m.group("req")
tasks[step]["requirements"].append(reqs)
def find_empty_tasks(req):
empty_list = []
for key, data in req.items():
if not data["requirements"] and not data["has_worker"]:
empty_list.append(key)
empty_list.sort()
return empty_list
def distribute_work(req, w):
empty_tasks = find_empty_tasks(req)
if empty_tasks:
print("[ITERATION {}] - Tasks with empty requirements: {}".format(iterations, empty_tasks))
for worker in w:
# If the worker is idle and there is still an empty task, then work on it
if worker["task"] is None and len(empty_tasks) > 0:
t = empty_tasks.pop(0)
worker["task"] = t
worker["remaining"] = req[t]["duration"]
req[t]["has_worker"] = True
return req, w
def do_work(w):
for worker in w:
if worker["task"] is not None:
worker["remaining"] -= 1
def remove_finished_tasks(req, w):
removed_tasks = []
# Loop through workers and remove finished tasks
for worker in w:
if worker["task"] is not None and worker["remaining"] == 0:
# Remove task from req dict
print("[ITERATION {}] - Finished task {}".format(iterations, worker["task"]))
req.pop(worker["task"])
removed_tasks.append(worker["task"])
worker["task"] = None
# Create new task dict
new_tasks = dict()
for key, value in req.items():
new_tasks[key] = {
"requirements": [],
"duration": value["duration"],
"has_worker": value["has_worker"]
}
for r in value["requirements"]:
if r not in removed_tasks:
new_tasks[key]["requirements"].append(r)
return new_tasks, w
iterations = 0
while tasks:
tasks, workers = distribute_work(tasks, workers)
do_work(workers)
iterations += 1
tasks, workers = remove_finished_tasks(tasks, workers)
print("Finished after {} iterations (with {} workers)".format(iterations, worker_count))
| [
"[email protected]"
]
| |
f5d4cf6f485d762c5643ead19f6f44edcc5d2d96 | 0485a490f466bd1d02eaae96d277888781208c0e | /tests/single_instruction_translation_validation/mcsema/register-variants/movb_r8_rh/Output/test-z3.py | e85a3ad441dea371dd1ab92ebdf22d518b6ae522 | [
"LicenseRef-scancode-unknown-license-reference",
"NCSA"
]
| permissive | Mthandazo42/validating-binary-decompilation | c0e2d54cd79e609bfa35802975bddfa52e646fad | c0fcd6f099e38195dcbbac9e8c13a825865c5cb5 | refs/heads/master | 2022-11-11T13:18:13.033044 | 2020-06-25T05:49:01 | 2020-06-25T05:49:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,269 | py | #############################################
######## Auto Generated Proof Scripts #######
#############################################
import z3
import sys
status=True
test_name="UnK"
if(len(sys.argv) > 1):
test_name = sys.argv[1]
def solve(msg, lvar, xvar, s):
global status
s.set("timeout", 60000)
res = s.check()
if(z3.unknown == res):
print(test_name + "::" + msg + "::unk")
status = "Unknown"
if(z3.sat == res):
if("UNDEF" in xvar.sexpr()):
print(test_name + "::" + msg + "::undef-sat")
else:
m = s.model()
print(test_name + "::" + msg + "::sat")
print("\n")
print("query", s)
print("\n")
print("model", m)
print("\n")
print("xvar =", m.evaluate(xvar))
print("lvar =", m.evaluate(lvar))
print("\n")
status = False
##############################
## X86 specific variables ####
##############################
### GPRs
VX_RAX = z3.BitVec('VX_RAX',64)
VX_RBX = z3.BitVec('VX_RBX',64)
VX_RCX = z3.BitVec('VX_RCX',64)
VX_RDX = z3.BitVec('VX_RDX',64)
VX_RSI = z3.BitVec('VX_RSI',64)
VX_RDI = z3.BitVec('VX_RDI',64)
### Flags
VX_CF = z3.BitVec('VX_CF',1)
VX_PF = z3.BitVec('VX_PF',1)
VX_ZF = z3.BitVec('VX_ZF',1)
VX_SF = z3.BitVec('VX_SF',1)
VX_AF = z3.BitVec('VX_AF',1)
VX_OF = z3.BitVec('VX_OF',1)
### YMM Registers
VX_YMM1 = z3.BitVec('VX_YMM1', 256)
VX_YMM2 = z3.BitVec('VX_YMM2', 256)
## Undef
VX_UNDEF_1 = z3.BitVec('VX_UNDEF_1', 1)
VX_UNDEF_BOOL = z3.Bool('VX_UNDEF_BOOL')
##############################
## X86 specific variables ####
##############################
### GPRs
VL_RAX = z3.BitVec('VL_RAX',64)
VL_RBX = z3.BitVec('VL_RBX',64)
VL_RCX = z3.BitVec('VL_RCX',64)
VL_RDX = z3.BitVec('VL_RDX',64)
VL_RSI = z3.BitVec('VL_RSI',64)
VL_RDI = z3.BitVec('VL_RDI',64)
### Flags
VL_CF = z3.BitVec('VL_CF',8)
VL_PF = z3.BitVec('VL_PF',8)
VL_ZF = z3.BitVec('VL_ZF',8)
VL_SF = z3.BitVec('VL_SF',8)
VL_AF = z3.BitVec('VL_AF',8)
VL_OF = z3.BitVec('VL_OF',8)
### YMM Registers
VL_YMM1_0 = z3.BitVec('VL_YMM1_0', 64)
VL_YMM1_1 = z3.BitVec('VL_YMM1_1', 64)
VL_YMM1_2 = z3.BitVec('VL_YMM1_2', 64)
VL_YMM1_3 = z3.BitVec('VL_YMM1_3', 64)
VL_YMM2_0 = z3.BitVec('VL_YMM2_0', 64)
VL_YMM2_1 = z3.BitVec('VL_YMM2_1', 64)
VL_YMM2_2 = z3.BitVec('VL_YMM2_2', 64)
VL_YMM2_3 = z3.BitVec('VL_YMM2_3', 64)
##############################
## Proof variables ###########
##############################
V_R = z3.BitVec('V_R',64)
V_F = z3.BitVec('V_F',1)
V_Y = z3.BitVec('V_Y',256)
## Solver instance
s = z3.Solver()
##############################
## Default constraints #######
##############################
### GPRs
s.add(VX_RAX == VL_RAX)
s.add(VX_RBX == VL_RBX)
s.add(VX_RCX == VL_RCX)
s.add(VX_RDX == VL_RDX)
s.add(VX_RDI == VL_RDI)
s.add(VX_RSI == VL_RSI)
### Flags
s.add(z3.Or(VL_CF == 0, VL_CF == 1))
s.add(z3.Or(VL_ZF == 0, VL_ZF == 1))
s.add(z3.Or(VL_PF == 0, VL_PF == 1))
s.add(z3.Or(VL_SF == 0, VL_SF == 1))
s.add(z3.Or(VL_AF == 0, VL_AF == 1))
s.add(z3.Or(VL_OF == 0, VL_OF == 1))
s.add(z3.Extract(0,0, VL_CF) == VX_CF)
s.add(z3.Extract(0,0, VL_SF) == VX_SF)
s.add(z3.Extract(0,0, VL_ZF) == VX_ZF)
s.add(z3.Extract(0,0, VL_PF) == VX_PF)
s.add(z3.Extract(0,0, VL_AF) == VX_AF)
s.add(z3.Extract(0,0, VL_OF) == VX_OF)
### Ymms
s.add(z3.Concat(VL_YMM1_3, VL_YMM1_2, VL_YMM1_1, VL_YMM1_0) == VX_YMM1)
s.add(z3.Concat(VL_YMM2_3, VL_YMM2_2, VL_YMM2_1, VL_YMM2_0) == VX_YMM2)
## =******= AF =******=
s.push()
lvar = (V_F == z3.Extract(0, 0, z3.Extract(7, 0, VL_AF)))
xvar = (V_F == VX_AF)
s.add(lvar != xvar)
solve("AF", lvar, xvar, s)
s.pop()
## =******= CF =******=
s.push()
lvar = (V_F == z3.Extract(0, 0, z3.Extract(7, 0, VL_CF)))
xvar = (V_F == VX_CF)
s.add(lvar != xvar)
solve("CF", lvar, xvar, s)
s.pop()
## =******= OF =******=
s.push()
lvar = (V_F == z3.Extract(0, 0, z3.Extract(7, 0, VL_OF)))
xvar = (V_F == VX_OF)
s.add(lvar != xvar)
solve("OF", lvar, xvar, s)
s.pop()
## =******= PF =******=
s.push()
lvar = (V_F == z3.Extract(0, 0, z3.Extract(7, 0, VL_PF)))
xvar = (V_F == VX_PF)
s.add(lvar != xvar)
solve("PF", lvar, xvar, s)
s.pop()
## =******= RAX =******=
s.push()
lvar = (V_R == z3.Concat(z3.Extract(63, 56, VL_RAX), z3.Extract(55, 48, VL_RAX), z3.Extract(47, 40, VL_RAX), z3.Extract(39, 32, VL_RAX), z3.Extract(31, 24, VL_RAX), z3.Extract(23, 16, VL_RAX), z3.Extract(15, 8, VL_RAX), z3.Extract(7, 0, VL_RAX)))
xvar = (V_R == VX_RAX)
s.add(lvar != xvar)
solve("RAX", lvar, xvar, s)
s.pop()
## =******= RBX =******=
s.push()
lvar = (V_R == z3.Concat(z3.Extract(63, 56, VL_RBX), z3.Extract(55, 48, VL_RBX), z3.Extract(47, 40, VL_RBX), z3.Extract(39, 32, VL_RBX), z3.Extract(31, 24, VL_RBX), z3.Extract(23, 16, VL_RBX), z3.Extract(15, 8, VL_RBX), z3.Extract(7, 0, (z3.Concat(z3.BitVecVal(0, 56), z3.Extract(15, 8, VL_RAX)) & z3.BitVecVal(256 - 1, 64)))))
xvar = (V_R == z3.Concat(z3.Extract(63, 8, VX_RBX), z3.Extract(15, 8, VX_RAX)))
s.add(lvar != xvar)
solve("RBX", lvar, xvar, s)
s.pop()
## =******= RCX =******=
s.push()
lvar = (V_R == z3.Concat(z3.Extract(63, 56, VL_RCX), z3.Extract(55, 48, VL_RCX), z3.Extract(47, 40, VL_RCX), z3.Extract(39, 32, VL_RCX), z3.Extract(31, 24, VL_RCX), z3.Extract(23, 16, VL_RCX), z3.Extract(15, 8, VL_RCX), z3.Extract(7, 0, VL_RCX)))
xvar = (V_R == VX_RCX)
s.add(lvar != xvar)
solve("RCX", lvar, xvar, s)
s.pop()
## =******= RDX =******=
s.push()
lvar = (V_R == z3.Concat(z3.Extract(63, 56, VL_RDX), z3.Extract(55, 48, VL_RDX), z3.Extract(47, 40, VL_RDX), z3.Extract(39, 32, VL_RDX), z3.Extract(31, 24, VL_RDX), z3.Extract(23, 16, VL_RDX), z3.Extract(15, 8, VL_RDX), z3.Extract(7, 0, VL_RDX)))
xvar = (V_R == VX_RDX)
s.add(lvar != xvar)
solve("RDX", lvar, xvar, s)
s.pop()
## =******= SF =******=
s.push()
lvar = (V_F == z3.Extract(0, 0, z3.Extract(7, 0, VL_SF)))
xvar = (V_F == VX_SF)
s.add(lvar != xvar)
solve("SF", lvar, xvar, s)
s.pop()
## =******= ZF =******=
s.push()
lvar = (V_F == z3.Extract(0, 0, z3.Extract(7, 0, VL_ZF)))
xvar = (V_F == VX_ZF)
s.add(lvar != xvar)
solve("ZF", lvar, xvar, s)
s.pop()
if(status == True):
    print('\x1b[6;30;42m' + 'Test-Pass: ' + '\x1b[0m' + test_name)
else:
    if(status == False):
        print('\x1b[0;30;41m' + 'Test-Fail: ' + '\x1b[0m' + test_name)
    else:
        print('\x1b[6;30;47m' + 'Test-Unk: ' + '\x1b[0m' + test_name)
| [
"[email protected]"
]
| |
0ccb62474a0317f86dfe9138ec3b8c5878be2948 | fb00b570251ba52df467e4cc030a30e778f8a970 | /Atividade 02 - semana 04/questão4_semana4_atividade02_runcodes.py | a3048ea0063d9e885ce77e9effdf8b688eb5e1ef | []
| no_license | SirLeonardoFerreira/Atividades-ifpi | 7379f9df4640fd1ee3623d80e4341f495e855895 | e366ee3f801dc9a1876c7399a2eefd37a03d0a55 | refs/heads/master | 2023-01-05T04:03:30.774277 | 2020-11-02T00:56:10 | 2020-11-02T00:56:10 | 287,967,575 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,426 | py | def signo(dia, mes):
if (21 <= dia <= 31 and mes == 3) or (1 <= dia <= 19 and mes == 4):
return 'Áries'
elif (20 <= dia <= 30 and mes == 4) or (1 <= dia <= 20 and mes == 5):
return 'Touro'
elif (21 <= dia <= 31 and mes == 5) or (1 <= dia <= 21 and mes == 6):
return 'Gêmeos'
elif (22 <= dia <= 30 and mes == 6) or (1 <= dia <= 22 and mes == 7):
return 'Câncer'
elif (23 <= dia <= 31 and mes == 7) or (1 <= dia <= 22 and mes == 8):
return 'Leão'
elif (23 <= dia <= 31 and mes == 8) or (1 <= dia <= 22 and mes == 9):
return 'Virgem'
elif (23 <= dia <= 30 and mes == 9) or (1 <= dia <= 22 and mes == 10):
return 'Libra'
elif (23 <= dia <= 31 and mes == 10) or (1 <= dia <= 21 and mes == 11):
return 'Escorpião'
elif (22 <= dia <= 30 and mes == 11) or (1 <= dia <= 21 and mes == 12):
return 'Sagitário'
elif (22 <= dia <= 31 and mes == 12) or (1 <= dia <= 19 and mes == 1):
return 'Capricórnio'
elif (20 <= dia <= 31 and mes == 1) or (1 <= dia <= 18 and mes == 2):
return 'Aquário'
elif (19 <= dia <= 29 and mes == 2) or (1 <= dia <= 20 and mes == 3):
return 'Peixes'
def main():
dia_nascimento = int(input())
mes_nascimento = int(input())
mensagem_signo = signo(dia_nascimento, mes_nascimento)
print(f'{mensagem_signo}')
if __name__=='__main__':
main()
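# Spot checks (illustrative; dates picked to hit three different branches):
#   signo(21, 3)  -> 'Áries'
#   signo(25, 12) -> 'Capricórnio'
#   signo(1, 1)   -> 'Capricórnio'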
| [
"[email protected]"
]
| |
6adaf26c83041f163d6f9002d77e24deeb133c0f | 30ea9abff7438755bfc8a483ae843152d3e49b9b | /力扣习题/118杨辉三角/pascalstriangle.py | 28285769718b8d071b795a07cd59ee1e588a6057 | [
"MIT"
]
| permissive | houcy/AlgorithmLearning | 2dee945a4f9fefc981020c365664bcd65e5994c4 | 92e3dd6ae8d27cd8fb1a3a7035b2f7e0eb86a7dc | refs/heads/master | 2022-12-25T19:55:51.323740 | 2020-10-09T04:24:11 | 2020-10-09T04:24:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,049 | py | class Solution:
'''
    Iterative (non-recursive) solution
'''
def generate(self, numRows: int) -> [[]]:
res = []
if numRows == 1:
res.append([1])
elif numRows > 1:
# res.append([1])
for k in range(0, numRows):
item = []
for i in range(0, k + 1):
if i == 0 or i == k:
item.append(1)
else:
item.append(res[-1][i - 1] + res[-1][i])
res.append(item[:])
item.clear()
return res
s = Solution()
print(s.generate(5))
class Solution2:
'''
    Recursive solution
'''
def generate(self, numRows: int) -> [[]]:
if numRows == 0:
return []
elif numRows == 1:
return [[1]]
else:
item = []
res = self.generate(numRows - 1)
item.append(1)
for i in range(1, numRows - 1):
item.append(res[-1][i - 1] + res[-1][i])
item.append(1)
res.append(item)
return res
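# Illustrative cross-check: the recursive version matches the iterative one
# (module-level, mirroring the smoke test after Solution above).
assert Solution().generate(5) == Solution2().generate(5)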
class Solution3:
'''
    Shameless trick: just return a slice of a precomputed table
'''
def generate(self, numRows: int) -> [[]]:
res = [
[1],
[1,1],
[1,2,1],
[1,3,3,1],
[1,4,6,4,1],
[1,5,10,10,5,1],
[1,6,15,20,15,6,1],
[1,7,21,35,35,21,7,1],
[1,8,28,56,70,56,28,8,1],
[1,9,36,84,126,126,84,36,9,1],
[1,10,45,120,210,252,210,120,45,10,1],
[1,11,55,165,330,462,462,330,165,55,11,1],
[1,12,66,220,495,792,924,792,495,220,66,12,1],
[1,13,78,286,715,1287,1716,1716,1287,715,286,78,13,1],
[1,14,91,364,1001,2002,3003,3432,3003,2002,1001,364,91,14,1],
[1,15,105,455,1365,3003,5005,6435,6435,5005,3003,1365,455,105,15,1],
[1,16,120,560,1820,4368,8008,11440,12870,11440,8008,4368,1820,560,120,16,1],
[1,17,136,680,2380,6188,12376,19448,24310,24310,19448,12376,6188,2380,680,136,17,1],
[1,18,153,816,3060,8568,18564,31824,43758,48620,43758,31824,18564,8568,3060,816,153,18,1],
[1,19,171,969,3876,11628,27132,50388,75582,92378,92378,75582,50388,27132,11628,3876,969,171,19,1],
[1,20,190,1140,4845,15504,38760,77520,125970,167960,184756,167960,125970,77520,38760,15504,4845,1140,190,20,1],
[1,21,210,1330,5985,20349,54264,116280,203490,293930,352716,352716,293930,203490,116280,54264,20349,5985,1330,210,21,1],
[1,22,231,1540,7315,26334,74613,170544,319770,497420,646646,705432,646646,497420,319770,170544,74613,26334,7315,1540,231,22,1],
[1,23,253,1771,8855,33649,100947,245157,490314,817190,1144066,1352078,1352078,1144066,817190,490314,245157,100947,33649,8855,1771,253,23,1],
[1,24,276,2024,10626,42504,134596,346104,735471,1307504,1961256,2496144,2704156,2496144,1961256,1307504,735471,346104,134596,42504,10626,2024,276,24,1],
[1,25,300,2300,12650,53130,177100,480700,1081575,2042975,3268760,4457400,5200300,5200300,4457400,3268760,2042975,1081575,480700,177100,53130,12650,2300,300,25,1],
[1,26,325,2600,14950,65780,230230,657800,1562275,3124550,5311735,7726160,9657700,10400600,9657700,7726160,5311735,3124550,1562275,657800,230230,65780,14950,2600,325,26,1],
[1,27,351,2925,17550,80730,296010,888030,2220075,4686825,8436285,13037895,17383860,20058300,20058300,17383860,13037895,8436285,4686825,2220075,888030,296010,80730,17550,2925,351,27,1],
[1,28,378,3276,20475,98280,376740,1184040,3108105,6906900,13123110,21474180,30421755,37442160,40116600,37442160,30421755,21474180,13123110,6906900,3108105,1184040,376740,98280,20475,3276,378,28,1],
[1,29,406,3654,23751,118755,475020,1560780,4292145,10015005,20030010,34597290,51895935,67863915,77558760,77558760,67863915,51895935,34597290,20030010,10015005,4292145,1560780,475020,118755,23751,3654,406,29,1]
]
return res[0:numRows] | [
"[email protected]"
]
| |
825accd3872929d9287bb3b4c66b0585d16507fe | 350db570521d3fc43f07df645addb9d6e648c17e | /1299_Replace_Elements_with_Greatest_Element_on_Right_Side/solution.py | c1d77900854ca9a59cc3073bb3f87162f7eb586d | []
| no_license | benjaminhuanghuang/ben-leetcode | 2efcc9185459a1dd881c6e2ded96c42c5715560a | a2cd0dc5e098080df87c4fb57d16877d21ca47a3 | refs/heads/master | 2022-12-10T02:30:06.744566 | 2022-11-27T04:06:52 | 2022-11-27T04:06:52 | 236,252,145 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 269 | py | '''
1299. Replace Elements with Greatest Element on Right Side
Level: Easy
https://leetcode.com/problems/replace-elements-with-greatest-element-on-right-side
'''
'''
Solution:
'''
from typing import List


class Solution:
    def replaceElements(self, arr: List[int]) -> List[int]:
        # Walk right-to-left, carrying the greatest value seen so far;
        # the last element always becomes -1.
        greatest = -1
        for i in range(len(arr) - 1, -1, -1):
            arr[i], greatest = greatest, max(greatest, arr[i])
        return arr
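# Example from the problem statement:
#   Solution().replaceElements([17, 18, 5, 4, 6, 1]) -> [18, 6, 6, 6, 1, -1]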
| [
"[email protected]"
]
| |
c4004c31f6f741fa0ea0b2920df0bf29178c8391 | 1c6283303ceb883add8de4ee07c5ffcfc2e93fab | /Jinja2/lib/python3.7/site-packages/uhd_restpy/testplatform/sessions/ixnetwork/globals/protocolstack/egtpglobalsbase/egtpglobalsbase.py | 7fa4e1ff57e59ad6470ff3f0e36395a91ba0bda3 | []
| no_license | pdobrinskiy/devcore | 0f5b3dfc2f3bf1e44abd716f008a01c443e14f18 | 580c7df6f5db8c118990cf01bc2b986285b9718b | refs/heads/main | 2023-07-29T20:28:49.035475 | 2021-09-14T10:02:16 | 2021-09-14T10:02:16 | 405,919,390 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,711 | py | # MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
from typing import List, Any, Union
class EgtpGlobalsBase(Base):
"""
The EgtpGlobalsBase class encapsulates a list of egtpGlobalsBase resources that are managed by the user.
A list of resources can be retrieved from the server using the EgtpGlobalsBase.find() method.
The list can be managed by using the EgtpGlobalsBase.add() and EgtpGlobalsBase.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'egtpGlobalsBase'
_SDM_ATT_MAP = {
'EnableDynamicQosCtrl': 'enableDynamicQosCtrl',
'EnableGatewayArp': 'enableGatewayArp',
'GatewayArpRequestRate': 'gatewayArpRequestRate',
'MaxMbrUAndD': 'maxMbrUAndD',
'MaxOutstandingGatewayArpRequests': 'maxOutstandingGatewayArpRequests',
'MaxOutstandingReleases': 'maxOutstandingReleases',
'MaxOutstandingRequests': 'maxOutstandingRequests',
'ObjectId': 'objectId',
'SendOneArpFromEachInterface': 'sendOneArpFromEachInterface',
'SetupRateInitial': 'setupRateInitial',
'TeardownRateInitial': 'teardownRateInitial',
'TsSpec': 'tsSpec',
'UseMaxRatesForDcp': 'useMaxRatesForDcp',
}
_SDM_ENUM_MAP = {
}
def __init__(self, parent, list_op=False):
super(EgtpGlobalsBase, self).__init__(parent, list_op)
@property
def EnableDynamicQosCtrl(self):
# type: () -> bool
"""
Returns
-------
- bool: Enable Dynamic QoS Enforcement
"""
return self._get_attribute(self._SDM_ATT_MAP['EnableDynamicQosCtrl'])
@EnableDynamicQosCtrl.setter
def EnableDynamicQosCtrl(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['EnableDynamicQosCtrl'], value)
@property
def EnableGatewayArp(self):
# type: () -> bool
"""
Returns
-------
- bool: When enabled, every IP address will ARP the specified gateway.
"""
return self._get_attribute(self._SDM_ATT_MAP['EnableGatewayArp'])
@EnableGatewayArp.setter
def EnableGatewayArp(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['EnableGatewayArp'], value)
@property
def GatewayArpRequestRate(self):
# type: () -> int
"""
Returns
-------
- number: Maximum ARP request rate
"""
return self._get_attribute(self._SDM_ATT_MAP['GatewayArpRequestRate'])
@GatewayArpRequestRate.setter
def GatewayArpRequestRate(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['GatewayArpRequestRate'], value)
@property
def MaxMbrUAndD(self):
# type: () -> int
"""
Returns
-------
- number:
"""
return self._get_attribute(self._SDM_ATT_MAP['MaxMbrUAndD'])
@MaxMbrUAndD.setter
def MaxMbrUAndD(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['MaxMbrUAndD'], value)
@property
def MaxOutstandingGatewayArpRequests(self):
# type: () -> int
"""
Returns
-------
- number: Threshold at which the plugin begins throttling back the number of new ARP requests sent out.
"""
return self._get_attribute(self._SDM_ATT_MAP['MaxOutstandingGatewayArpRequests'])
@MaxOutstandingGatewayArpRequests.setter
def MaxOutstandingGatewayArpRequests(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['MaxOutstandingGatewayArpRequests'], value)
@property
def MaxOutstandingReleases(self):
# type: () -> int
"""
Returns
-------
- number:
"""
return self._get_attribute(self._SDM_ATT_MAP['MaxOutstandingReleases'])
@MaxOutstandingReleases.setter
def MaxOutstandingReleases(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['MaxOutstandingReleases'], value)
@property
def MaxOutstandingRequests(self):
# type: () -> int
"""
Returns
-------
- number:
"""
return self._get_attribute(self._SDM_ATT_MAP['MaxOutstandingRequests'])
@MaxOutstandingRequests.setter
def MaxOutstandingRequests(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['MaxOutstandingRequests'], value)
@property
def ObjectId(self):
# type: () -> str
"""
Returns
-------
- str: Unique identifier for this object
"""
return self._get_attribute(self._SDM_ATT_MAP['ObjectId'])
@property
def SendOneArpFromEachInterface(self):
# type: () -> bool
"""
Returns
-------
- bool: When set, each interface will send one ARP request.
"""
return self._get_attribute(self._SDM_ATT_MAP['SendOneArpFromEachInterface'])
@SendOneArpFromEachInterface.setter
def SendOneArpFromEachInterface(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['SendOneArpFromEachInterface'], value)
@property
def SetupRateInitial(self):
# type: () -> int
"""
Returns
-------
- number: Initial setup rate
"""
return self._get_attribute(self._SDM_ATT_MAP['SetupRateInitial'])
@SetupRateInitial.setter
def SetupRateInitial(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['SetupRateInitial'], value)
@property
def TeardownRateInitial(self):
# type: () -> int
"""
Returns
-------
- number: Initial teardown rate
"""
return self._get_attribute(self._SDM_ATT_MAP['TeardownRateInitial'])
@TeardownRateInitial.setter
def TeardownRateInitial(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP['TeardownRateInitial'], value)
@property
def TsSpec(self):
# type: () -> str
"""
Returns
-------
- str:
"""
return self._get_attribute(self._SDM_ATT_MAP['TsSpec'])
@TsSpec.setter
def TsSpec(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP['TsSpec'], value)
@property
def UseMaxRatesForDcp(self):
# type: () -> bool
"""
Returns
-------
- bool: Use default rates (DCP mode)
"""
return self._get_attribute(self._SDM_ATT_MAP['UseMaxRatesForDcp'])
@UseMaxRatesForDcp.setter
def UseMaxRatesForDcp(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP['UseMaxRatesForDcp'], value)
def update(self, EnableDynamicQosCtrl=None, EnableGatewayArp=None, GatewayArpRequestRate=None, MaxMbrUAndD=None, MaxOutstandingGatewayArpRequests=None, MaxOutstandingReleases=None, MaxOutstandingRequests=None, SendOneArpFromEachInterface=None, SetupRateInitial=None, TeardownRateInitial=None, TsSpec=None, UseMaxRatesForDcp=None):
# type: (bool, bool, int, int, int, int, int, bool, int, int, str, bool) -> EgtpGlobalsBase
"""Updates egtpGlobalsBase resource on the server.
Args
----
- EnableDynamicQosCtrl (bool): Enable Dynamic QoS Enforcement
- EnableGatewayArp (bool): When enabled, every IP address will ARP the specified gateway.
- GatewayArpRequestRate (number): Maximum ARP request rate
- MaxMbrUAndD (number):
- MaxOutstandingGatewayArpRequests (number): Threshold at which the plugin begins throttling back the number of new ARP requests sent out.
- MaxOutstandingReleases (number):
- MaxOutstandingRequests (number):
- SendOneArpFromEachInterface (bool): When set, each interface will send one ARP request.
- SetupRateInitial (number): Initial setup rate
- TeardownRateInitial (number): Initial teardown rate
- TsSpec (str):
- UseMaxRatesForDcp (bool): Use default rates (DCP mode)
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, EnableDynamicQosCtrl=None, EnableGatewayArp=None, GatewayArpRequestRate=None, MaxMbrUAndD=None, MaxOutstandingGatewayArpRequests=None, MaxOutstandingReleases=None, MaxOutstandingRequests=None, SendOneArpFromEachInterface=None, SetupRateInitial=None, TeardownRateInitial=None, TsSpec=None, UseMaxRatesForDcp=None):
# type: (bool, bool, int, int, int, int, int, bool, int, int, str, bool) -> EgtpGlobalsBase
"""Adds a new egtpGlobalsBase resource on the server and adds it to the container.
Args
----
- EnableDynamicQosCtrl (bool): Enable Dynamic QoS Enforcement
- EnableGatewayArp (bool): When enabled, every IP address will ARP the specified gateway.
- GatewayArpRequestRate (number): Maximum ARP request rate
- MaxMbrUAndD (number):
- MaxOutstandingGatewayArpRequests (number): Threshold at which the plugin begins throttling back the number of new ARP requests sent out.
- MaxOutstandingReleases (number):
- MaxOutstandingRequests (number):
- SendOneArpFromEachInterface (bool): When set, each interface will send one ARP request.
- SetupRateInitial (number): Initial setup rate
- TeardownRateInitial (number): Initial teardown rate
- TsSpec (str):
- UseMaxRatesForDcp (bool): Use default rates (DCP mode)
Returns
-------
- self: This instance with all currently retrieved egtpGlobalsBase resources using find and the newly added egtpGlobalsBase resources available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
"""Deletes all the contained egtpGlobalsBase resources in this instance from the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(self, EnableDynamicQosCtrl=None, EnableGatewayArp=None, GatewayArpRequestRate=None, MaxMbrUAndD=None, MaxOutstandingGatewayArpRequests=None, MaxOutstandingReleases=None, MaxOutstandingRequests=None, ObjectId=None, SendOneArpFromEachInterface=None, SetupRateInitial=None, TeardownRateInitial=None, TsSpec=None, UseMaxRatesForDcp=None):
# type: (bool, bool, int, int, int, int, int, str, bool, int, int, str, bool) -> EgtpGlobalsBase
"""Finds and retrieves egtpGlobalsBase resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve egtpGlobalsBase resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all egtpGlobalsBase resources from the server.
Args
----
- EnableDynamicQosCtrl (bool): Enable Dynamic QoS Enforcement
- EnableGatewayArp (bool): When enabled, every IP address will ARP the specified gateway.
- GatewayArpRequestRate (number): Maximum ARP request rate
- MaxMbrUAndD (number):
- MaxOutstandingGatewayArpRequests (number): Threshold at which the plugin begins throttling back the number of new ARP requests sent out.
- MaxOutstandingReleases (number):
- MaxOutstandingRequests (number):
- ObjectId (str): Unique identifier for this object
- SendOneArpFromEachInterface (bool): When set, each interface will send one ARP request.
- SetupRateInitial (number): Initial setup rate
- TeardownRateInitial (number): Initial teardown rate
- TsSpec (str):
- UseMaxRatesForDcp (bool): Use default rates (DCP mode)
Returns
-------
- self: This instance with matching egtpGlobalsBase resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of egtpGlobalsBase data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the egtpGlobalsBase resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
| [
"[email protected]"
]
| |
ee2798634484ddb530cf7b917209d4cafc4a2673 | 6906a911f03e369569352893728275fee287680b | /manage.py | ee088666d8485d80bbf23484229d28f3d7dc11e1 | []
| no_license | crowdbotics-apps/joe-1319 | ab62638b43c303219230789c2c000e6e32377591 | 34cb120f2eac820357206348b3f281d81561ca51 | refs/heads/master | 2022-12-10T23:07:45.356298 | 2019-03-12T21:18:58 | 2019-03-12T21:18:58 | 175,297,690 | 0 | 0 | null | 2022-12-08T19:43:51 | 2019-03-12T21:17:56 | Python | UTF-8 | Python | false | false | 806 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "joe_1319.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| [
"[email protected]"
]
| |
ca5d2f735aaee931762726e44f8ffc69d56dab76 | ddd35c693194aefb9c009fe6b88c52de7fa7c444 | /Live 10.1.18/ATOM/channel_strip.py | fbd60c5b943b861e81fa7cbe0be8417f4de3f5ce | []
| no_license | notelba/midi-remote-scripts | 819372d9c22573877c7912091bd8359fdd42585d | e3ec6846470eed7da8a4d4f78562ed49dc00727b | refs/heads/main | 2022-07-30T00:18:33.296376 | 2020-10-04T00:00:12 | 2020-10-04T00:00:12 | 301,003,961 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 989 | py | # uncompyle6 version 3.7.4
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.8.5 (default, Aug 12 2020, 00:00:00)
# [GCC 10.2.1 20200723 (Red Hat 10.2.1-1)]
# Embedded file name: c:\Jenkins\live\output\Live\win_64_static\Release\python-bundle\MIDI Remote Scripts\ATOM\channel_strip.py
# Compiled at: 2020-05-05 13:23:28
from __future__ import absolute_import, print_function, unicode_literals
from ableton.v2.base import liveobj_valid
from ableton.v2.control_surface.components import ChannelStripComponent as ChannelStripComponentBase
class ChannelStripComponent(ChannelStripComponentBase):
empty_color = b'Mixer.EmptyTrack'
def _update_select_button(self):
if liveobj_valid(self._track) and self.song.view.selected_track == self._track:
self.select_button.color = b'Mixer.Selected'
else:
self.select_button.color = b'DefaultButton.Off'
# okay decompiling /home/deniz/data/projects/midiremote/Live 10.1.18/ATOM/channel_strip.pyc
| [
"[email protected]"
]
| |
13b235a66727792736ec940ae4bc3cc630a0c1fb | d44215864e30ad8039a1a294875e4222e3d23ebd | /build/geometry-hydro-devel/tf/catkin_generated/pkg.installspace.context.pc.py | dbd62ce2cd49cece7fca3f4fcc8794848494ff9a | []
| no_license | prathyusha-shine/abhiyan1.0 | 5c3eebfbbacb8b364180b9c2bd377c73cf29e693 | bf9be6462c132465ddbf8c20b1e9a4e1eabd596e | refs/heads/master | 2020-12-31T01:23:32.911145 | 2015-05-31T06:19:16 | 2015-05-31T06:19:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 540 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/sudha/catkin_ws/install/include".split(';') if "/home/sudha/catkin_ws/install/include" != "" else []
PROJECT_CATKIN_DEPENDS = "geometry_msgs;message_filters;message_runtime;roscpp;sensor_msgs;std_msgs;tf2_ros;rosconsole".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-ltf".split(';') if "-ltf" != "" else []
PROJECT_NAME = "tf"
PROJECT_SPACE_DIR = "/home/sudha/catkin_ws/install"
PROJECT_VERSION = "1.10.8"
| [
"sudha@sudha.(none)"
]
| sudha@sudha.(none) |
6d1ebf41ab4811a8adc2865d675e4b20db67c5ee | bc547e7d9e4b2c1e49edc2daaa735c9afb87f5ae | /test/test_all_fault_handlers.py | 49068eb0020cb52f7c87f865ccede508daaabba8 | [
"MIT"
]
| permissive | farisachugthai/dynamic_ipython | f7ed092ff23b785fc8c545390c581338a64b9bda | 7572a01f09998812830379644c45af4df67a3e45 | refs/heads/master | 2022-11-05T11:48:48.344585 | 2021-08-28T04:25:05 | 2021-08-28T04:25:05 | 178,786,145 | 7 | 0 | MIT | 2022-10-25T10:16:39 | 2019-04-01T04:35:37 | Python | UTF-8 | Python | false | false | 1,785 | py | #!/usr/bin/env python3
import os
import shutil
import tempfile
import unittest
from os.path import abspath, realpath, isfile, exists
import pytest
from IPython.testing.globalipapp import get_ipython
from default_profile.startup.all_fault_handlers import tempdir, in_tempdir, in_dir
def remove_tmpdir(dir):
try:
shutil.rmtree(dir)
except (NotADirectoryError, FileNotFoundError, OSError):
pass
except PermissionError:
raise
@pytest.fixture
def cwd():
return os.path.abspath(os.path.curdir)
class FixturesTest(unittest.TestCase):
def setUp(self):
# unittest's version of the tmpdir fixture
self.tmpdir = tempfile.mkdtemp()
self.addCleanup(remove_tmpdir, self.tmpdir)
# def test_rehashx_does_not_raise(self):
# are you allowed to do this?
# would something like this work
# with self.assertRaises(None):
# Wait this isn't a context manager??? hold the fuck up.
# with not self.assertRaises(Exception):
# get_ipython().run_line_magic('rehashx')
def test_tempdir():
with tempdir() as tmpdir:
fname = os.path.join(tmpdir, 'example_file.txt')
with open(fname, 'wt') as fobj:
            fobj.write('a string\n')
assert not exists(tmpdir)
def test_in_tempdir(cwd):
with in_tempdir() as tmpdir:
with open('test.txt', 'wt') as f:
f.write('some text')
assert isfile('test.txt')
assert isfile(os.path.join(tmpdir, 'test.txt'))
assert not exists(tmpdir)
def test_given_directory(cwd):
# Test InGivenDirectory
with in_dir(cwd) as tmpdir:
assert tmpdir == abspath(cwd)
with in_dir(cwd) as tmpdir:
assert tmpdir == cwd
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
]
| |
3ca71c49a6da7cfa420cf73cb3475c330888bddc | 0e65380b2cf1386a21d1d54f22240f5b3fd3d0d0 | /1121.py | 2c12ee6c777dba4225e75560c187371785338d59 | []
| no_license | hwanginbeom/python | b6023e6082e8942f06a859c7fd63e5a2a405772f | 01afbd4f54cda1e2994f2454ff2b01fef3e13116 | refs/heads/master | 2018-11-05T10:52:42.414380 | 2018-08-28T09:30:41 | 2018-08-28T09:30:41 | 105,129,965 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 49 | py |
IntVar()  # makes the value an integer
 | [
"[email protected]"
]
| |
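# --- Illustrative example (added): the note above refers to Tkinter's
# IntVar, an integer-valued variable object that is usually bound to
# widgets. A minimal standalone demonstration (Tk() needs a display):
import tkinter as tk
root = tk.Tk()
count = tk.IntVar(master=root, value=0)  # holds an integer value
count.set(3)
print(count.get())  # -> 3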
aba1fe1222e36f72353fd0c6c5a21047cc2cedee | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03212/s618244450.py | 185edf86b67bdc8518f2d9341edb2c2cdcd3ecfc | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 310 | py |
# Count the numbers <= N whose decimal digits are drawn only from {3, 5, 7}
# and that use each of those three digits at least once.
from itertools import product
N = input()
L = len(N)
check_num = [3, 5, 7]
check = []
for l in range(1, L + 1):
    # enumerate every l-digit string over the digits 3, 5, 7
    for p in product(range(3), repeat=l):
        c = ''
        for p_ in p:
            c += str(check_num[p_])
        # keep it only if all three digits appear and it does not exceed N
        if len(set(c)) == 3 and int(c) <= int(N):
            check.append(int(c))
print(len(check))
"[email protected]"
]
| |
71687720ff526965a20c77c9db597830ce3187b5 | 714058081fe435ed89b94cfa94587338e64672cb | /marqeta/response_models/digital_wallet_token_hash.py | 44fe321135afec4b24ddcf5ac0ed83bccebdd7f4 | [
"MIT"
]
| permissive | andyw8/marqeta-python | bc194944c08e8c8327a8a20bac3dc615b2e2a95f | 23e0a66a5d7b20f3f992e44ae22b33a0eebdbce2 | refs/heads/master | 2020-05-20T14:25:39.398668 | 2019-04-01T23:53:55 | 2019-04-01T23:53:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 623 | py |
from datetime import datetime, date
import json
class DigitalWalletTokenHash(object):
def __init__(self, json_response):
self.json_response = json_response
def __str__(self):
return json.dumps(self.json_response, default=self.json_serial)
@staticmethod
def json_serial(o):
if isinstance(o, datetime) or isinstance(o, date):
return o.__str__()
@property
def token(self):
return self.json_response.get('token', None)
def __repr__(self):
return '<Marqeta.response_models.digital_wallet_token_hash.DigitalWalletTokenHash>' + self.__str__()
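# --- Usage sketch (added for illustration; the payload below is a
# placeholder, not taken from Marqeta's API docs): the model wraps a raw
# JSON response and exposes its fields as properties.
if __name__ == "__main__":
    demo = DigitalWalletTokenHash({"token": "abc123"})
    print(demo.token)  # -> abc123
    print(demo)        # -> {"token": "abc123"}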
| [
"[email protected]"
]
| |
c167c5819bfa452fa8fdba057ff142fbdbde00fe | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /090_logging/examples/nuke/34-python_in_production-logging_to_a_qt_widget/logger.py | c0e05b78da6905f18952e733200c169b31a72bf1 | []
| no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 2,774 | py |
import logging
import sys
class Logger(object):
LOGGER_NAME = "Zurbrigg"
FORMAT_DEFAULT = "[%(name)s][%(levelname)s] %(message)s"
LEVEL_DEFAULT = logging.DEBUG
PROPAGATE_DEFAULT = True
_logger_obj = None
@classmethod
def logger_obj(cls):
if not cls._logger_obj:
if cls.logger_exists():
cls._logger_obj = logging.getLogger(cls.LOGGER_NAME)
else:
cls._logger_obj = logging.getLogger(cls.LOGGER_NAME)
cls._logger_obj.setLevel(cls.LEVEL_DEFAULT)
cls._logger_obj.propagate = cls.PROPAGATE_DEFAULT
fmt = logging.Formatter(cls.FORMAT_DEFAULT)
stream_handler = logging.StreamHandler(sys.stderr)
stream_handler.setFormatter(fmt)
cls._logger_obj.addHandler(stream_handler)
return cls._logger_obj
@classmethod
def logger_exists(cls):
return cls.LOGGER_NAME in logging.Logger.manager.loggerDict.keys()
@classmethod
def set_level(cls, level):
lg = cls.logger_obj()
lg.setLevel(level)
@classmethod
def set_propagate(cls, propagate):
lg = cls.logger_obj()
lg.propagate = propagate
@classmethod
def debug(cls, msg, *args, **kwargs):
lg = cls.logger_obj()
lg.debug(msg, *args, **kwargs)
@classmethod
def info(cls, msg, *args, **kwargs):
lg = cls.logger_obj()
lg.info(msg, *args, **kwargs)
@classmethod
def warning(cls, msg, *args, **kwargs):
lg = cls.logger_obj()
lg.warning(msg, *args, **kwargs)
@classmethod
def error(cls, msg, *args, **kwargs):
lg = cls.logger_obj()
lg.error(msg, *args, **kwargs)
@classmethod
def critical(cls, msg, *args, **kwargs):
lg = cls.logger_obj()
lg.critical(msg, *args, **kwargs)
@classmethod
def log(cls, level, msg, *args, **kwargs):
lg = cls.logger_obj()
lg.log(level, msg, *args, **kwargs)
@classmethod
def exception(cls, msg, *args, **kwargs):
lg = cls.logger_obj()
lg.exception(msg, *args, **kwargs)
@classmethod
def write_to_file(cls, path, level=logging.WARNING):
file_handler = logging.FileHandler(path)
file_handler.setLevel(level)
fmt = logging.Formatter("[%(asctime)s][%(levelname)s] %(message)s")
file_handler.setFormatter(fmt)
lg = cls.logger_obj()
lg.addHandler(file_handler)
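# --- Illustrative sketch (added): the folder name for this example,
# "logging_to_a_qt_widget", suggests the Logger above ultimately feeds a Qt
# widget. A minimal handler for that, assuming a PySide2/PyQt5
# QPlainTextEdit is passed in (the widget wiring is an assumption, not part
# of the original file):
class QtWidgetHandler(logging.Handler):
    def __init__(self, widget):
        super().__init__()
        self.widget = widget  # e.g. a QPlainTextEdit instance
    def emit(self, record):
        # format the record and append it to the widget's text area
        self.widget.appendPlainText(self.format(record))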
if __name__ == "__main__":
Logger.set_propagate(False)
Logger.debug("debug message")
Logger.info("info message")
Logger.warning("warning message")
Logger.error("error message")
Logger.critical("critical message")
| [
"[email protected]"
]
| |
907e51e3e9abb9e4b37491c1122a2c555afe1fcc | 42674d7355d852e6ec7071830bb87d781ab63ad3 | /bitmovin/resources/models/manifests/dash/audio_adaptation_set.py | 71e54986b06f54a233eb3ec0495f83cff6d90a84 | [
"Unlicense"
]
| permissive | bitmovin/bitmovin-python | 57b1eb5deb7e38f3079e0ded546ec762753c3132 | d183718d640117dd75141da261901dc2f60433b0 | refs/heads/master | 2023-07-11T02:40:59.277881 | 2020-01-28T14:49:15 | 2020-01-28T14:49:15 | 72,857,798 | 46 | 27 | Unlicense | 2019-12-17T13:59:51 | 2016-11-04T15:01:56 | Python | UTF-8 | Python | false | false | 663 | py |
from .abstract_adaptation_set import AbstractAdaptationSet
class AudioAdaptationSet(AbstractAdaptationSet):
def __init__(self, lang, id_=None, custom_data=None):
super().__init__(id_=id_, custom_data=custom_data)
self.lang = lang
@classmethod
def parse_from_json_object(cls, json_object):
adaptation_set = AbstractAdaptationSet.parse_from_json_object(json_object=json_object)
id_ = adaptation_set.id
custom_data = adaptation_set.customData
lang = json_object['lang']
audio_adaptation_set = AudioAdaptationSet(id_=id_, custom_data=custom_data, lang=lang)
return audio_adaptation_set
| [
"[email protected]"
]
| |
df6ef7c6944817a25ccbfd8559fcf9785e64e3cc | 974c5a4f101d0e6f4dfa5fc2f7c641c9d2bd8184 | /sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2021_07_01/operations/_managed_clusters_operations.py | bdbec1f83cfe156583cef0b5a15d8fa90b4f386b | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
]
| permissive | gaoyp830/azure-sdk-for-python | 4816f04c554dcffb7510a6b7044b0c86a2dd32e1 | 1c66defa502b754abcc9e5afa444ca03c609342f | refs/heads/master | 2022-10-20T21:33:44.281041 | 2022-09-29T17:03:13 | 2022-09-29T17:03:13 | 250,355,505 | 0 | 0 | MIT | 2020-03-26T19:42:13 | 2020-03-26T19:42:12 | null | UTF-8 | Python | false | false | 144,531 | py |
# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
from urllib.parse import parse_qs, urljoin, urlparse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
from ..._serialization import Serializer
from .._vendor import _convert_request, _format_url_section
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_get_os_options_request(
location: str, subscription_id: str, *, resource_type: Optional[str] = None, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/providers/Microsoft.ContainerService/locations/{location}/osOptions/default",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"location": _SERIALIZER.url("location", location, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
if resource_type is not None:
_params["resource-type"] = _SERIALIZER.query("resource_type", resource_type, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_request(subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url", "/subscriptions/{subscriptionId}/providers/Microsoft.ContainerService/managedClusters"
)
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_by_resource_group_request(resource_group_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_get_upgrade_profile_request(
resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/upgradeProfiles/default",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_get_access_profile_request(
resource_group_name: str, resource_name: str, role_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/accessProfiles/{roleName}/listCredential",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
"roleName": _SERIALIZER.url("role_name", role_name, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_cluster_admin_credentials_request(
resource_group_name: str,
resource_name: str,
subscription_id: str,
*,
server_fqdn: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/listClusterAdminCredential",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
if server_fqdn is not None:
_params["server-fqdn"] = _SERIALIZER.query("server_fqdn", server_fqdn, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_cluster_user_credentials_request(
resource_group_name: str,
resource_name: str,
subscription_id: str,
*,
server_fqdn: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/listClusterUserCredential",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
if server_fqdn is not None:
_params["server-fqdn"] = _SERIALIZER.query("server_fqdn", server_fqdn, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_cluster_monitoring_user_credentials_request(
resource_group_name: str,
resource_name: str,
subscription_id: str,
*,
server_fqdn: Optional[str] = None,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/listClusterMonitoringUserCredential",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
if server_fqdn is not None:
_params["server-fqdn"] = _SERIALIZER.query("server_fqdn", server_fqdn, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_get_request(resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_create_or_update_request(
resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_update_tags_request(
resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PATCH", url=_url, params=_params, headers=_headers, **kwargs)
def build_delete_request(
resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
def build_reset_service_principal_profile_request(
resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/resetServicePrincipalProfile",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_reset_aad_profile_request(
resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/resetAADProfile",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_rotate_cluster_certificates_request(
resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/rotateClusterCertificates",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_stop_request(
resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/stop",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_start_request(
resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/start",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_run_command_request(
resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/runCommand",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="POST", url=_url, params=_params, headers=_headers, **kwargs)
def build_get_command_result_request(
resource_group_name: str, resource_name: str, command_id: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/commandResults/{commandId}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
"commandId": _SERIALIZER.url("command_id", command_id, "str"),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_list_outbound_network_dependencies_endpoints_request(
resource_group_name: str, resource_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/outboundNetworkDependenciesEndpoints",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str"),
"resourceGroupName": _SERIALIZER.url("resource_group_name", resource_group_name, "str", min_length=1),
"resourceName": _SERIALIZER.url(
"resource_name",
resource_name,
"str",
max_length=63,
min_length=1,
pattern=r"^[a-zA-Z0-9]$|^[a-zA-Z0-9][-_a-zA-Z0-9]{0,61}[a-zA-Z0-9]$",
),
}
_url = _format_url_section(_url, **path_format_arguments)
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
class ManagedClustersOperations: # pylint: disable=too-many-public-methods
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.containerservice.v2021_07_01.ContainerServiceClient`'s
:attr:`managed_clusters` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def get_os_options(
self, location: str, resource_type: Optional[str] = None, **kwargs: Any
) -> _models.OSOptionProfile:
"""Gets supported OS options in the specified subscription.
Gets supported OS options in the specified subscription.
:param location: The name of a supported Azure region. Required.
:type location: str
:param resource_type: The resource type for which the OS options needs to be returned. Default
value is None.
:type resource_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: OSOptionProfile or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2021_07_01.models.OSOptionProfile
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.OSOptionProfile]
request = build_get_os_options_request(
location=location,
subscription_id=self._config.subscription_id,
resource_type=resource_type,
api_version=api_version,
template_url=self.get_os_options.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("OSOptionProfile", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_os_options.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.ContainerService/locations/{location}/osOptions/default"} # type: ignore
@distributed_trace
def list(self, **kwargs: Any) -> Iterable["_models.ManagedCluster"]:
"""Gets a list of managed clusters in the specified subscription.
Gets a list of managed clusters in the specified subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ManagedCluster or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.containerservice.v2021_07_01.models.ManagedCluster]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedClusterListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urlparse(next_link)
_next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("ManagedClusterListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.ContainerService/managedClusters"} # type: ignore
@distributed_trace
def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> Iterable["_models.ManagedCluster"]:
"""Lists managed clusters in the specified subscription and resource group.
Lists managed clusters in the specified subscription and resource group.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ManagedCluster or the result of cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.containerservice.v2021_07_01.models.ManagedCluster]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedClusterListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_by_resource_group.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urlparse(next_link)
_next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("ManagedClusterListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_by_resource_group.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters"} # type: ignore
@distributed_trace
def get_upgrade_profile(
self, resource_group_name: str, resource_name: str, **kwargs: Any
) -> _models.ManagedClusterUpgradeProfile:
"""Gets the upgrade profile of a managed cluster.
Gets the upgrade profile of a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagedClusterUpgradeProfile or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2021_07_01.models.ManagedClusterUpgradeProfile
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedClusterUpgradeProfile]
request = build_get_upgrade_profile_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get_upgrade_profile.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("ManagedClusterUpgradeProfile", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_upgrade_profile.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/upgradeProfiles/default"} # type: ignore
@distributed_trace
def get_access_profile(
self, resource_group_name: str, resource_name: str, role_name: str, **kwargs: Any
) -> _models.ManagedClusterAccessProfile:
"""Gets an access profile of a managed cluster.
**WARNING**\ : This API will be deprecated. Instead use `ListClusterUserCredentials
<https://docs.microsoft.com/rest/api/aks/managedclusters/listclusterusercredentials>`_ or
`ListClusterAdminCredentials
<https://docs.microsoft.com/rest/api/aks/managedclusters/listclusteradmincredentials>`_ .
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param role_name: The name of the role for managed cluster accessProfile resource. Required.
:type role_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagedClusterAccessProfile or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2021_07_01.models.ManagedClusterAccessProfile
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedClusterAccessProfile]
request = build_get_access_profile_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
role_name=role_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get_access_profile.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("ManagedClusterAccessProfile", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_access_profile.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/accessProfiles/{roleName}/listCredential"} # type: ignore
@distributed_trace
def list_cluster_admin_credentials(
self, resource_group_name: str, resource_name: str, server_fqdn: Optional[str] = None, **kwargs: Any
) -> _models.CredentialResults:
"""Lists the admin credentials of a managed cluster.
Lists the admin credentials of a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param server_fqdn: server fqdn type for credentials to be returned. Default value is None.
:type server_fqdn: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CredentialResults or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2021_07_01.models.CredentialResults
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.CredentialResults]
request = build_list_cluster_admin_credentials_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
server_fqdn=server_fqdn,
api_version=api_version,
template_url=self.list_cluster_admin_credentials.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("CredentialResults", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_cluster_admin_credentials.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/listClusterAdminCredential"} # type: ignore
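    # Usage sketch (added; placeholders): CredentialResults carries one or
    # more kubeconfigs, each exposing the raw kubeconfig bytes via ``value``:
    #
    #     creds = client.managed_clusters.list_cluster_admin_credentials(
    #         "<resource-group>", "<cluster-name>")
    #     kubeconfig_bytes = creds.kubeconfigs[0].value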
@distributed_trace
def list_cluster_user_credentials(
self, resource_group_name: str, resource_name: str, server_fqdn: Optional[str] = None, **kwargs: Any
) -> _models.CredentialResults:
"""Lists the user credentials of a managed cluster.
Lists the user credentials of a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param server_fqdn: server fqdn type for credentials to be returned. Default value is None.
:type server_fqdn: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CredentialResults or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2021_07_01.models.CredentialResults
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.CredentialResults]
request = build_list_cluster_user_credentials_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
server_fqdn=server_fqdn,
api_version=api_version,
template_url=self.list_cluster_user_credentials.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("CredentialResults", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_cluster_user_credentials.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/listClusterUserCredential"} # type: ignore
@distributed_trace
def list_cluster_monitoring_user_credentials(
self, resource_group_name: str, resource_name: str, server_fqdn: Optional[str] = None, **kwargs: Any
) -> _models.CredentialResults:
"""Lists the cluster monitoring user credentials of a managed cluster.
Lists the cluster monitoring user credentials of a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param server_fqdn: server fqdn type for credentials to be returned. Default value is None.
:type server_fqdn: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CredentialResults or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2021_07_01.models.CredentialResults
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.CredentialResults]
request = build_list_cluster_monitoring_user_credentials_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
server_fqdn=server_fqdn,
api_version=api_version,
template_url=self.list_cluster_monitoring_user_credentials.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("CredentialResults", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_cluster_monitoring_user_credentials.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/listClusterMonitoringUserCredential"} # type: ignore
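# Hedged sketch: same call shape as above, but returning the monitoring user's
# kubeconfig; reuses the hypothetical `client` from the previous sketch.
#
#   creds = client.managed_clusters.list_cluster_monitoring_user_credentials(
#       "my-rg", "my-aks"
#   )
#   print(creds.kubeconfigs[0].name)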
@distributed_trace
def get(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> _models.ManagedCluster:
"""Gets a managed cluster.
Gets a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagedCluster or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2021_07_01.models.ManagedCluster
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedCluster]
request = build_get_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("ManagedCluster", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}"} # type: ignore
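# Hedged sketch: a plain GET resolving to a ManagedCluster model; attribute names
# below follow this package's ManagedCluster model, with `client` as above.
#
#   cluster = client.managed_clusters.get("my-rg", "my-aks")
#   print(cluster.kubernetes_version, cluster.provisioning_state)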
def _create_or_update_initial(
self, resource_group_name: str, resource_name: str, parameters: Union[_models.ManagedCluster, IO], **kwargs: Any
) -> _models.ManagedCluster:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedCluster]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ManagedCluster")
request = build_create_or_update_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("ManagedCluster", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("ManagedCluster", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}"} # type: ignore
@overload
def begin_create_or_update(
self,
resource_group_name: str,
resource_name: str,
parameters: _models.ManagedCluster,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.ManagedCluster]:
"""Creates or updates a managed cluster.
Creates or updates a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param parameters: The managed cluster to create or update. Required.
:type parameters: ~azure.mgmt.containerservice.v2021_07_01.models.ManagedCluster
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either ManagedCluster or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.containerservice.v2021_07_01.models.ManagedCluster]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_create_or_update(
self,
resource_group_name: str,
resource_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.ManagedCluster]:
"""Creates or updates a managed cluster.
Creates or updates a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param parameters: The managed cluster to create or update. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either ManagedCluster or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.containerservice.v2021_07_01.models.ManagedCluster]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_create_or_update(
self, resource_group_name: str, resource_name: str, parameters: Union[_models.ManagedCluster, IO], **kwargs: Any
) -> LROPoller[_models.ManagedCluster]:
"""Creates or updates a managed cluster.
Creates or updates a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param parameters: The managed cluster to create or update. Is either a model type or an IO
 type. Required.
:type parameters: ~azure.mgmt.containerservice.v2021_07_01.models.ManagedCluster or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either ManagedCluster or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.containerservice.v2021_07_01.models.ManagedCluster]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedCluster]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial( # type: ignore
resource_group_name=resource_group_name,
resource_name=resource_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("ManagedCluster", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}"} # type: ignore
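# Hedged sketch of the LRO pattern: begin_create_or_update returns an LROPoller,
# so .result() blocks until provisioning completes. The ManagedCluster body is a
# minimal illustrative shape, not a validated production configuration.
#
#   from azure.mgmt.containerservice.v2021_07_01 import models
#
#   poller = client.managed_clusters.begin_create_or_update(
#       "my-rg",
#       "my-aks",
#       models.ManagedCluster(
#           location="eastus",
#           dns_prefix="myaks",
#           agent_pool_profiles=[
#               models.ManagedClusterAgentPoolProfile(
#                   name="nodepool1", count=1, vm_size="Standard_DS2_v2", mode="System"
#               )
#           ],
#           identity=models.ManagedClusterIdentity(type="SystemAssigned"),
#       ),
#   )
#   cluster = poller.result()  # waits for the long-running operation to finish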
def _update_tags_initial(
self, resource_group_name: str, resource_name: str, parameters: Union[_models.TagsObject, IO], **kwargs: Any
) -> _models.ManagedCluster:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedCluster]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "TagsObject")
request = build_update_tags_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._update_tags_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("ManagedCluster", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}"} # type: ignore
@overload
def begin_update_tags(
self,
resource_group_name: str,
resource_name: str,
parameters: _models.TagsObject,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.ManagedCluster]:
"""Updates tags on a managed cluster.
Updates tags on a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param parameters: Parameters supplied to the Update Managed Cluster Tags operation. Required.
:type parameters: ~azure.mgmt.containerservice.v2021_07_01.models.TagsObject
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either ManagedCluster or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.containerservice.v2021_07_01.models.ManagedCluster]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_update_tags(
self,
resource_group_name: str,
resource_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.ManagedCluster]:
"""Updates tags on a managed cluster.
Updates tags on a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param parameters: Parameters supplied to the Update Managed Cluster Tags operation. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either ManagedCluster or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.containerservice.v2021_07_01.models.ManagedCluster]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_update_tags(
self, resource_group_name: str, resource_name: str, parameters: Union[_models.TagsObject, IO], **kwargs: Any
) -> LROPoller[_models.ManagedCluster]:
"""Updates tags on a managed cluster.
Updates tags on a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param parameters: Parameters supplied to the Update Managed Cluster Tags operation. Is either
 a model type or an IO type. Required.
:type parameters: ~azure.mgmt.containerservice.v2021_07_01.models.TagsObject or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either ManagedCluster or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.containerservice.v2021_07_01.models.ManagedCluster]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.ManagedCluster]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_tags_initial( # type: ignore
resource_group_name=resource_group_name,
resource_name=resource_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("ManagedCluster", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}"} # type: ignore
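# Hedged sketch: TagsObject carries only the replacement tag dictionary; the
# poller resolves to the updated ManagedCluster. Tag values are placeholders.
#
#   from azure.mgmt.containerservice.v2021_07_01 import models
#
#   poller = client.managed_clusters.begin_update_tags(
#       "my-rg", "my-aks", models.TagsObject(tags={"env": "dev"})
#   )
#   print(poller.result().tags)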
def _delete_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, resource_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_delete_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}"} # type: ignore
@distributed_trace
def begin_delete(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> LROPoller[None]:
"""Deletes a managed cluster.
Deletes a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
resource_name=resource_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}"} # type: ignore
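# Hedged sketch: deletion yields LROPoller[None], so .wait() (or .result(),
# which returns None here) blocks until the cluster is removed.
#
#   poller = client.managed_clusters.begin_delete("my-rg", "my-aks")
#   poller.wait()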
def _reset_service_principal_profile_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
resource_name: str,
parameters: Union[_models.ManagedClusterServicePrincipalProfile, IO],
**kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[None]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ManagedClusterServicePrincipalProfile")
request = build_reset_service_principal_profile_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._reset_service_principal_profile_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_reset_service_principal_profile_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/resetServicePrincipalProfile"} # type: ignore
@overload
def begin_reset_service_principal_profile(
self,
resource_group_name: str,
resource_name: str,
parameters: _models.ManagedClusterServicePrincipalProfile,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[None]:
"""Reset the Service Principal Profile of a managed cluster.
This action cannot be performed on a cluster that is not using a service principal.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param parameters: The service principal profile to set on the managed cluster. Required.
:type parameters:
~azure.mgmt.containerservice.v2021_07_01.models.ManagedClusterServicePrincipalProfile
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_reset_service_principal_profile(
self,
resource_group_name: str,
resource_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[None]:
"""Reset the Service Principal Profile of a managed cluster.
This action cannot be performed on a cluster that is not using a service principal.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param parameters: The service principal profile to set on the managed cluster. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_reset_service_principal_profile(
self,
resource_group_name: str,
resource_name: str,
parameters: Union[_models.ManagedClusterServicePrincipalProfile, IO],
**kwargs: Any
) -> LROPoller[None]:
"""Reset the Service Principal Profile of a managed cluster.
This action cannot be performed on a cluster that is not using a service principal.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param parameters: The service principal profile to set on the managed cluster. Is either a
 model type or an IO type. Required.
:type parameters:
~azure.mgmt.containerservice.v2021_07_01.models.ManagedClusterServicePrincipalProfile or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._reset_service_principal_profile_initial( # type: ignore
resource_group_name=resource_group_name,
resource_name=resource_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset_service_principal_profile.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/resetServicePrincipalProfile"} # type: ignore
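# Hedged sketch: rotating a service principal secret on an SP-based cluster;
# the client id and secret below are placeholders.
#
#   from azure.mgmt.containerservice.v2021_07_01 import models
#
#   profile = models.ManagedClusterServicePrincipalProfile(
#       client_id="<app-id>", secret="<new-secret>"
#   )
#   client.managed_clusters.begin_reset_service_principal_profile(
#       "my-rg", "my-aks", profile
#   ).wait()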
def _reset_aad_profile_initial( # pylint: disable=inconsistent-return-statements
self,
resource_group_name: str,
resource_name: str,
parameters: Union[_models.ManagedClusterAADProfile, IO],
**kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[None]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "ManagedClusterAADProfile")
request = build_reset_aad_profile_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._reset_aad_profile_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_reset_aad_profile_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/resetAADProfile"} # type: ignore
@overload
def begin_reset_aad_profile(
self,
resource_group_name: str,
resource_name: str,
parameters: _models.ManagedClusterAADProfile,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[None]:
"""Reset the AAD Profile of a managed cluster.
Reset the AAD Profile of a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param parameters: The AAD profile to set on the Managed Cluster. Required.
:type parameters: ~azure.mgmt.containerservice.v2021_07_01.models.ManagedClusterAADProfile
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_reset_aad_profile(
self,
resource_group_name: str,
resource_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[None]:
"""Reset the AAD Profile of a managed cluster.
Reset the AAD Profile of a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param parameters: The AAD profile to set on the Managed Cluster. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_reset_aad_profile(
self,
resource_group_name: str,
resource_name: str,
parameters: Union[_models.ManagedClusterAADProfile, IO],
**kwargs: Any
) -> LROPoller[None]:
"""Reset the AAD Profile of a managed cluster.
Reset the AAD Profile of a managed cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param parameters: The AAD profile to set on the Managed Cluster. Is either a model type or an
 IO type. Required.
:type parameters: ~azure.mgmt.containerservice.v2021_07_01.models.ManagedClusterAADProfile or
IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._reset_aad_profile_initial( # type: ignore
resource_group_name=resource_group_name,
resource_name=resource_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset_aad_profile.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/resetAADProfile"} # type: ignore
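# Hedged sketch: resetting the AAD profile to a managed-AAD configuration;
# the field choices below are illustrative only.
#
#   from azure.mgmt.containerservice.v2021_07_01 import models
#
#   aad = models.ManagedClusterAADProfile(managed=True, enable_azure_rbac=True)
#   client.managed_clusters.begin_reset_aad_profile("my-rg", "my-aks", aad).wait()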
def _rotate_cluster_certificates_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, resource_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_rotate_cluster_certificates_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._rotate_cluster_certificates_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_rotate_cluster_certificates_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/rotateClusterCertificates"} # type: ignore
@distributed_trace
def begin_rotate_cluster_certificates(
self, resource_group_name: str, resource_name: str, **kwargs: Any
) -> LROPoller[None]:
"""Rotates the certificates of a managed cluster.
See `Certificate rotation <https://docs.microsoft.com/azure/aks/certificate-rotation>`_ for
more details about rotating managed cluster certificates.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._rotate_cluster_certificates_initial( # type: ignore
resource_group_name=resource_group_name,
resource_name=resource_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_rotate_cluster_certificates.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/rotateClusterCertificates"} # type: ignore
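# Hedged sketch: certificate rotation takes no request body; waiting on the
# poller returns once the rotation has finished server-side.
#
#   client.managed_clusters.begin_rotate_cluster_certificates("my-rg", "my-aks").wait()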
def _stop_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, resource_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_stop_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._stop_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_stop_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/stop"} # type: ignore
@distributed_trace
def begin_stop(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> LROPoller[None]:
"""Stops a Managed Cluster.
This can only be performed on Azure Virtual Machine Scale Sets backed clusters. Stopping a
cluster stops the control plane and agent nodes entirely, while maintaining all object and
cluster state. A cluster does not accrue charges while it is stopped. See `stopping a cluster
<https://docs.microsoft.com/azure/aks/start-stop-cluster>`_ for more details about stopping a
cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._stop_initial( # type: ignore
resource_group_name=resource_group_name,
resource_name=resource_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_stop.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/stop"} # type: ignore
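# Hedged sketch: stop is a no-body LRO; per the docstring above, the cluster
# keeps its state but accrues no charges while stopped.
#
#   client.managed_clusters.begin_stop("my-rg", "my-aks").wait()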
def _start_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, resource_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_start_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._start_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_start_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/start"} # type: ignore
@distributed_trace
def begin_start(self, resource_group_name: str, resource_name: str, **kwargs: Any) -> LROPoller[None]:
"""Starts a previously stopped Managed Cluster.
See `starting a cluster <https://docs.microsoft.com/azure/aks/start-stop-cluster>`_ for more
details about starting a cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._start_initial( # type: ignore
resource_group_name=resource_group_name,
resource_name=resource_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_start.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/start"} # type: ignore
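# Hedged sketch: restarting the cluster stopped in the previous example.
#
#   client.managed_clusters.begin_start("my-rg", "my-aks").wait()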
def _run_command_initial(
self,
resource_group_name: str,
resource_name: str,
request_payload: Union[_models.RunCommandRequest, IO],
**kwargs: Any
) -> Optional[_models.RunCommandResult]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.RunCommandResult]]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(request_payload, (IO, bytes)):
_content = request_payload
else:
_json = self._serialize.body(request_payload, "RunCommandRequest")
request = build_run_command_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._run_command_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("RunCommandResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_run_command_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/runCommand"} # type: ignore
@overload
def begin_run_command(
self,
resource_group_name: str,
resource_name: str,
request_payload: _models.RunCommandRequest,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.RunCommandResult]:
"""Submits a command to run against the Managed Cluster.
AKS will create a pod to run the command. This is primarily useful for private clusters. For
more information see `AKS Run Command
<https://docs.microsoft.com/azure/aks/private-clusters#aks-run-command-preview>`_.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param request_payload: The run command request. Required.
:type request_payload: ~azure.mgmt.containerservice.v2021_07_01.models.RunCommandRequest
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either RunCommandResult or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.containerservice.v2021_07_01.models.RunCommandResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_run_command(
self,
resource_group_name: str,
resource_name: str,
request_payload: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.RunCommandResult]:
"""Submits a command to run against the Managed Cluster.
AKS will create a pod to run the command. This is primarily useful for private clusters. For
more information see `AKS Run Command
<https://docs.microsoft.com/azure/aks/private-clusters#aks-run-command-preview>`_.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param request_payload: The run command request. Required.
:type request_payload: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either RunCommandResult or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.containerservice.v2021_07_01.models.RunCommandResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_run_command(
self,
resource_group_name: str,
resource_name: str,
request_payload: Union[_models.RunCommandRequest, IO],
**kwargs: Any
) -> LROPoller[_models.RunCommandResult]:
"""Submits a command to run against the Managed Cluster.
AKS will create a pod to run the command. This is primarily useful for private clusters. For
more information see `AKS Run Command
<https://docs.microsoft.com/azure/aks/private-clusters#aks-run-command-preview>`_.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
        :param request_payload: The run command request. Is either a model type or an IO type. Required.
:type request_payload: ~azure.mgmt.containerservice.v2021_07_01.models.RunCommandRequest or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either RunCommandResult or the result of
cls(response)
:rtype:
~azure.core.polling.LROPoller[~azure.mgmt.containerservice.v2021_07_01.models.RunCommandResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.RunCommandResult]
polling = kwargs.pop("polling", True) # type: Union[bool, PollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = self._run_command_initial( # type: ignore
resource_group_name=resource_group_name,
resource_name=resource_name,
request_payload=request_payload,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("RunCommandResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(PollingMethod, ARMPolling(lro_delay, **kwargs)) # type: PollingMethod
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_run_command.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/runCommand"} # type: ignore
@distributed_trace
def get_command_result(
self, resource_group_name: str, resource_name: str, command_id: str, **kwargs: Any
) -> Optional[_models.RunCommandResult]:
"""Gets the results of a command which has been run on the Managed Cluster.
Gets the results of a command which has been run on the Managed Cluster.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:param command_id: Id of the command. Required.
:type command_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RunCommandResult or None or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2021_07_01.models.RunCommandResult or None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[Optional[_models.RunCommandResult]]
request = build_get_command_result_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
command_id=command_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get_command_result.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("RunCommandResult", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_command_result.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/commandResults/{commandId}"} # type: ignore
@distributed_trace
def list_outbound_network_dependencies_endpoints(
self, resource_group_name: str, resource_name: str, **kwargs: Any
) -> Iterable["_models.OutboundEnvironmentEndpoint"]:
"""Gets a list of egress endpoints (network endpoints of all outbound dependencies) in the
specified managed cluster.
Gets a list of egress endpoints (network endpoints of all outbound dependencies) in the
specified managed cluster. The operation returns properties of each egress endpoint.
:param resource_group_name: The name of the resource group. Required.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource. Required.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either OutboundEnvironmentEndpoint or the result of
cls(response)
:rtype:
~azure.core.paging.ItemPaged[~azure.mgmt.containerservice.v2021_07_01.models.OutboundEnvironmentEndpoint]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-07-01")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.OutboundEnvironmentEndpointCollection]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_outbound_network_dependencies_endpoints_request(
resource_group_name=resource_group_name,
resource_name=resource_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_outbound_network_dependencies_endpoints.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urlparse(next_link)
_next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("OutboundEnvironmentEndpointCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_outbound_network_dependencies_endpoints.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/outboundNetworkDependenciesEndpoints"} # type: ignore
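# Hedged usage sketch (not part of the generated SDK): one way a caller might
# exercise the operations above. `client` is assumed to be a
# ContainerServiceClient whose `managed_clusters` attribute is this operations
# class; the resource names are placeholders.
def _example_managed_clusters_usage(client):
    # submit a command to the (possibly private) cluster and block on the LRO
    poller = client.managed_clusters.begin_run_command(
        resource_group_name="my-rg",
        resource_name="my-cluster",
        request_payload=_models.RunCommandRequest(command="kubectl get pods -A"),
    )
    result = poller.result()  # RunCommandResult once the operation finishes
    # page through every egress endpoint the cluster depends on
    for endpoint in client.managed_clusters.list_outbound_network_dependencies_endpoints(
        resource_group_name="my-rg", resource_name="my-cluster"
    ):
        print(endpoint)
    return result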
3712937801b4655d2f06e615f42f6119be1d0be2 | d9e5f868392cc846a14577e2578332dd389766a5 | /ex13.py | 2a4652a2c2319f92b92f4fdfda224686a6f5811d | []
| no_license | quanlidavid/Learn_Python_the_Hard_Way | 8d8d9c9906d1e6b0de1a1dae78fbf4fd150c466c | bc591552efbeb2db588c831bf5280cbe21e11246 | refs/heads/master | 2021-05-16T11:18:13.171264 | 2017-09-27T05:56:20 | 2017-09-27T05:56:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 260 | py |
from sys import argv
script, first, second, third = argv
print("The script is called:", script)
print("Your first variable is:", first)
print("Your second variable is:", second)
print("Your third variable is:", third)
list1=['aa','bb']
a,b=list1
print(a,b)
e27cf93f24bc53f6f16fd551ed429b1aca98d4d2 | 480bee2fee71fa5f91fcece256918795adfb3eda | /detector/model.py | 7ebca4a47e922b335504cca41e45677a4865c1e2 | []
| no_license | favyen/skyquery | f71d0095681660e4bce5324ae866371fe51e9e3a | dce2639314aaa06cba0d56aab1f7794744c22090 | refs/heads/master | 2023-08-22T17:48:08.697538 | 2021-09-27T02:14:52 | 2021-09-27T02:14:52 | 412,963,924 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,272 | py |
import numpy
import tensorflow as tf
import os
import os.path
import random
import math
import time
from PIL import Image
BATCH_SIZE = 4
KERNEL_SIZE = 3
class Model:
def _conv_layer(self, name, input_var, stride, in_channels, out_channels, options = {}):
activation = options.get('activation', 'relu')
dropout = options.get('dropout', None)
padding = options.get('padding', 'SAME')
batchnorm = options.get('batchnorm', False)
transpose = options.get('transpose', False)
with tf.variable_scope(name) as scope:
if not transpose:
filter_shape = [KERNEL_SIZE, KERNEL_SIZE, in_channels, out_channels]
else:
filter_shape = [KERNEL_SIZE, KERNEL_SIZE, out_channels, in_channels]
kernel = tf.get_variable(
'weights',
shape=filter_shape,
initializer=tf.truncated_normal_initializer(stddev=math.sqrt(2.0 / KERNEL_SIZE / KERNEL_SIZE / in_channels)),
dtype=tf.float32
)
biases = tf.get_variable(
'biases',
shape=[out_channels],
initializer=tf.constant_initializer(0.0),
dtype=tf.float32
)
if not transpose:
output = tf.nn.bias_add(
tf.nn.conv2d(
input_var,
kernel,
[1, stride, stride, 1],
padding=padding
),
biases
)
else:
batch = tf.shape(input_var)[0]
side = tf.shape(input_var)[1]
output = tf.nn.bias_add(
tf.nn.conv2d_transpose(
input_var,
kernel,
[batch, side * stride, side * stride, out_channels],
[1, stride, stride, 1],
padding=padding
),
biases
)
if batchnorm:
output = tf.contrib.layers.batch_norm(output, center=True, scale=True, is_training=self.is_training, decay=0.99)
if dropout is not None:
output = tf.nn.dropout(output, keep_prob=1-dropout)
if activation == 'relu':
return tf.nn.relu(output, name=scope.name)
elif activation == 'sigmoid':
return tf.nn.sigmoid(output, name=scope.name)
elif activation == 'none':
return output
else:
raise Exception('invalid activation {} specified'.format(activation))
def _fc_layer(self, name, input_var, input_size, output_size, options = {}):
activation = options.get('activation', 'relu')
dropout = options.get('dropout', None)
batchnorm = options.get('batchnorm', False)
with tf.variable_scope(name) as scope:
weights = tf.get_variable(
'weights',
shape=[input_size, output_size],
initializer=tf.truncated_normal_initializer(stddev=math.sqrt(2.0 / input_size)),
dtype=tf.float32
)
biases = tf.get_variable(
'biases',
shape=[output_size],
initializer=tf.constant_initializer(0.0),
dtype=tf.float32
)
output = tf.matmul(input_var, weights) + biases
if batchnorm:
output = tf.contrib.layers.batch_norm(output, center=True, scale=True, is_training=self.is_training, decay=0.99)
if dropout is not None:
output = tf.nn.dropout(output, keep_prob=1-dropout)
if activation == 'relu':
return tf.nn.relu(output, name=scope.name)
elif activation == 'sigmoid':
return tf.nn.sigmoid(output, name=scope.name)
elif activation == 'none':
return output
else:
raise Exception('invalid activation {} specified'.format(activation))
def __init__(self, bn=False, size=(512, 512), input_channels=6):
tf.reset_default_graph()
self.is_training = tf.placeholder(tf.bool)
self.inputs = tf.placeholder(tf.uint8, [None, size[0], size[1], input_channels])
self.float_inputs = tf.cast(self.inputs, tf.float32)/255.0# + tf.random.normal(tf.shape(self.inputs), stddev=0.04)*tf.cast(self.is_training, tf.float32)
        self.targets = tf.placeholder(tf.float32, [None, size[0]//4, size[1]//4])
        self.masks = tf.placeholder(tf.float32, [None, size[0]//4, size[1]//4])
self.learning_rate = tf.placeholder(tf.float32)
# layers
self.layer1 = self._conv_layer('layer1', self.float_inputs, 2, input_channels, 32, {'batchnorm': False}) # -> 256x256x32
self.layer2 = self._conv_layer('layer2', self.layer1, 2, 32, 64, {'batchnorm': bn}) # -> 128x128x64
self.layer3 = self._conv_layer('layer3', self.layer2, 2, 64, 64, {'batchnorm': bn}) # -> 64x64x64
self.layer4 = self._conv_layer('layer4', self.layer3, 2, 64, 64, {'batchnorm': bn}) # -> 32x32x64
self.layer5 = self._conv_layer('layer5', self.layer4, 1, 64, 64, {'batchnorm': bn}) # -> 32x32x64
self.layer6 = self._conv_layer('layer6', self.layer5, 2, 64, 64, {'batchnorm': bn, 'transpose': True}) # -> 64x64x64
self.layer7 = self._conv_layer('layer7', self.layer6, 2, 64, 64, {'batchnorm': bn, 'transpose': True}) # -> 128x128x64
#self.layer7 = tf.concat([self.layer2, tf.image.resize(self.layer5, [128, 128], method=tf.image.ResizeMethod.NEAREST_NEIGHBOR)], axis=3)
self.pre_outputs = self._conv_layer('pre_outputs', self.layer7, 1, 64, 1, {'activation': 'none', 'batchnorm': False})[:, :, :, 0] # -> 128x128x1
self.outputs = tf.nn.sigmoid(self.pre_outputs)
self.loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=self.targets, logits=self.pre_outputs) * self.masks)
with tf.control_dependencies(tf.get_collection(tf.GraphKeys.UPDATE_OPS)):
self.optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate).minimize(self.loss)
self.init_op = tf.initialize_all_variables()
self.saver = tf.train.Saver(max_to_keep=None)
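# Hedged usage sketch (not part of the original repo): builds the graph and
# runs a single training step on zeroed dummy data. Shapes follow the
# placeholders above (inputs at full size, targets/masks at 1/4 resolution).
def _example_train_step():
    model = Model(bn=True, size=(512, 512), input_channels=6)
    with tf.Session() as sess:
        sess.run(model.init_op)
        feed = {
            model.inputs: numpy.zeros((BATCH_SIZE, 512, 512, 6), dtype='uint8'),
            model.targets: numpy.zeros((BATCH_SIZE, 128, 128), dtype='float32'),
            model.masks: numpy.ones((BATCH_SIZE, 128, 128), dtype='float32'),
            model.is_training: True,
            model.learning_rate: 1e-4,
        }
        _, loss = sess.run([model.optimizer, model.loss], feed_dict=feed)
        return loss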
fe6aa27d544a7bc06532e7cb5bfad0801c9b1eba | 8174d11add088a2413d5a7fdf8233059c3876f52 | /docs/examples/pool.py | 9265af58ef12bca9806eb7e1896aa4e7797bc85c | [
"MIT"
]
| permissive | AraHaan/aioredis | 903eaaefb243c8bc8d70c9178baf721446c9cc7f | 19be499015a8cf32580e937cbfd711fd48489eca | refs/heads/master | 2023-03-17T03:16:46.281210 | 2022-02-22T14:33:33 | 2022-02-22T14:33:33 | 82,424,636 | 1 | 0 | MIT | 2023-03-07T15:57:29 | 2017-02-19T00:20:56 | Python | UTF-8 | Python | false | false | 648 | py |
import asyncio
import aioredis
async def main():
redis = aioredis.from_url("redis://localhost", max_connections=10)
await redis.execute_command("set", "my-key", "value")
val = await redis.execute_command("get", "my-key")
print("raw value:", val)
async def main_pool():
pool = aioredis.ConnectionPool.from_url("redis://localhost", max_connections=10)
redis = aioredis.Redis(connection_pool=pool)
await redis.execute_command("set", "my-key", "value")
val = await redis.execute_command("get", "my-key")
print("raw value:", val)
if __name__ == "__main__":
asyncio.run(main())
asyncio.run(main_pool())
4943e2a33bb554208d68eb6a684117fda0462433 | 44c372cd25a2496979fa29a1dc9131c54487d945 | /data/zh50w/zh50w_process.py | 0c551f731118ff55f6d1cef12a2e33090afd341d | [
"MIT"
]
| permissive | zhongerqiandan/OpenDialog | 4ff4e65d0ade1efdd3029475634ae1cf38c7bdd3 | f478b2a912c8c742da5ced510ac40da59217ddb3 | refs/heads/master | 2023-01-22T09:07:54.385604 | 2020-12-04T02:00:17 | 2020-12-04T02:00:17 | 318,419,052 | 0 | 1 | MIT | 2020-12-04T05:58:37 | 2020-12-04T05:58:37 | null | UTF-8 | Python | false | false | 5,482 | py |
import csv
import random
from tqdm import tqdm
import ipdb
import sys
import pickle
sys.path.append('..')
from utils import read_stop_words
from collections import Counter
from gensim.summarization import bm25
from elasticsearch import Elasticsearch
'''
TODO
1. adding the responses into elasticsearch for q-r match
'''
class ESUtils:
def __init__(self, index_name, create_index=False):
self.es = Elasticsearch()
self.index = index_name
if create_index:
mapping = {
'properties': {
'context': {
'type': 'text',
'analyzer': 'ik_max_word',
'search_analyzer': 'ik_max_word'
}
}
}
if self.es.indices.exists(index=self.index):
self.es.indices.delete(index=self.index)
rest = self.es.indices.create(index=self.index)
print(rest)
rest = self.es.indices.put_mapping(body=mapping, index=self.index)
print(rest)
def insert_pairs(self, pairs):
count = self.es.count(index=self.index)['count']
print(f'[!] begin of the idx: {count}')
for i, qa in enumerate(tqdm(pairs)):
data = {
'context': qa[0],
'response': qa[1]
}
self.es.index(index=self.index, body=data)
print(f'[!] insert data over, whole size: {self.es.count(index=self.index)["count"]}')
class ESChat:
def __init__(self, index_name):
self.es = Elasticsearch()
self.index = index_name
def search(self, query, samples=10):
dsl = {
'query': {
'match': {
'context': query
}
}
}
hits = self.es.search(index=self.index, body=dsl, size=samples)['hits']['hits']
rest = []
for h in hits:
rest.append({'score': h['_score'], 'context': h['_source']['context'],
'response': h['_source']['response']
})
return rest
def chat(self):
sentence = input('You are speaking: ').strip()
while sentence:
if sentence == 'exit':
break
rest = self.search(sentence)
for idx, i in enumerate(rest):
print(f'ESChat({idx}/{len(rest)}): {i["response"]}')
sentence = input('You are speaking: ').strip()
def read_file(path):
with open(path) as f:
data = f.read()
dialogs = data.split('\n\n')
dialogs = [dialog.split('\n') for dialog in dialogs if dialog.strip()]
random.shuffle(dialogs)
return dialogs
def write_file(dialogs, mode='train', samples=10):
chatbot = ESChat('retrieval_chatbot')
with open(f'{mode}.csv', 'w') as f:
f = csv.writer(f)
f.writerow(['Context', 'Response'] + [f'Retrieval_{i+1}' for i in range(samples)])
# f.writerow(['Context', 'Response'])
error_counter = 0
responses = [i[1] for i in dialogs]
for dialog in tqdm(dialogs):
rest = [i['response'] for i in chatbot.search(dialog[0], samples=samples+1)]
if dialog[1] in rest:
rest.remove(dialog[1])
dialog = list(dialog) + rest
if len(dialog) != samples + 2:
error_counter += 1
dialog.extend(random.sample(responses, samples+3-len(dialog)))
# assert len(dialog) == samples + 2, f'{len(dialog)} retrieval utterances are obtained'
f.writerow(dialog[:samples+2])
print(f'[!] finish writing the file {mode}.csv, error counter: {error_counter}')
def process_data(dialogs, samples=10, max_len=10, max_utter_len=50):
data = []
for dialog in tqdm(dialogs):
# dialog = [' '.join(list(jieba.cut(i))) for i in dialog]
context, response = dialog[-(max_len+1):-1], dialog[-1]
context = [i[-max_utter_len:] for i in context]
context = ' <eou> '.join(context)
data.append((context, response))
return data
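# Hedged mini-example (illustration): what process_data produces for a single
# three-turn dialog -- the context turns joined by ' <eou> ', plus the reply.
def _example_process_data():
    dialogs = [['你好', '你好呀', '最近怎么样']]
    return process_data(dialogs)  # [('你好 <eou> 你好呀', '最近怎么样')]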
def retrieval_model():
chatbot = ESChat('retrieval_chatbot')
print(f'[!] load retrieval model from ElasticSearch, default 10 replys.')
return chatbot
if __name__ == "__main__":
import sys
if sys.argv[1] == 'process':
data = read_file('train.txt')
whole_size = len(data)
train_size = (0, int(0.95 * whole_size))
dev_size = (train_size[1], train_size[1] + int(0.025 * whole_size))
test_size = (dev_size[1], whole_size)
print(f'data size: train({train_size[1]-train_size[0]}); dev({dev_size[1]-dev_size[0]}); test({test_size[1]-test_size[0]})')
train_data = data[train_size[0]:train_size[1]]
dev_data = data[dev_size[0]:dev_size[1]]
test_data = data[test_size[0]:test_size[1]]
train_data = process_data(train_data)
dev_data = process_data(dev_data)
test_data = process_data(test_data)
# write file
write_file(train_data, mode='train')
write_file(dev_data, mode='dev')
write_file(test_data, mode='test')
else:
# test elasticsearch
# data = read_file('zh50w/train.txt')
# pairs = [(' . '.join(i[:-1]), i[-1]) for i in data]
# ut = ESUtils('retrieval_chatbot', create_index=True)
# ut.insert_pairs(pairs)
chatbot = ESChat('retrieval_chatbot')
chatbot.chat()
fe8763de336ee65092b7aaec84eea8912eb81c8c | df75b4d24416bb764db61931457f367872d8a66c | /django_states/main/migrations/0006_auto__add_field_statecapital_state__chg_field_statecapital_latitude__c.py | fa52b64db591b04f547b608edbe24fd3731be7db | []
| no_license | Bofahda/states | bb1f7caf8409e363ba2cb67974464854f14570d8 | 11016ac07040177e81e53b1ea88739b4de0ea936 | refs/heads/master | 2020-12-24T16:58:56.789855 | 2015-08-12T09:20:53 | 2015-08-12T09:20:53 | 40,591,309 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,729 | py |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'StateCapital.state'
db.add_column(u'main_statecapital', 'state',
self.gf('django.db.models.fields.related.OneToOneField')(to=orm['main.State'], unique=True, null=True),
keep_default=False)
# Changing field 'StateCapital.latitude'
db.alter_column(u'main_statecapital', 'latitude', self.gf('django.db.models.fields.FloatField')(null=True))
# Changing field 'StateCapital.longitude'
db.alter_column(u'main_statecapital', 'longitude', self.gf('django.db.models.fields.FloatField')(null=True))
# Changing field 'StateCapital.population'
db.alter_column(u'main_statecapital', 'population', self.gf('django.db.models.fields.IntegerField')(null=True))
def backwards(self, orm):
# Deleting field 'StateCapital.state'
db.delete_column(u'main_statecapital', 'state_id')
# Changing field 'StateCapital.latitude'
db.alter_column(u'main_statecapital', 'latitude', self.gf('django.db.models.fields.FloatField')(default=1))
# Changing field 'StateCapital.longitude'
db.alter_column(u'main_statecapital', 'longitude', self.gf('django.db.models.fields.FloatField')(default=1))
# Changing field 'StateCapital.population'
db.alter_column(u'main_statecapital', 'population', self.gf('django.db.models.fields.IntegerField')(default=1))
models = {
u'main.state': {
'Meta': {'object_name': 'State'},
'abbreviation': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'})
},
u'main.statecapital': {
'Meta': {'object_name': 'StateCapital'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'longitude': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'population': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'state': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['main.State']", 'unique': 'True', 'null': 'True'})
}
}
    complete_apps = ['main']
3748a9e8a475776c784dacc5951e89171f92e72b | cb2a4180ffc0df4296737134230397069de8da21 | /accounts/signals.py | d6f2b8bc1882f084fe375b450158fdf3249fc531 | [
"MIT"
]
| permissive | fagrimacs/fagrimacs_production | 8a9cef4e1d73360301fd66f4f0b70ea4868ef610 | ea1a8f92c41c416309cc1fdd8deb02f41a9c95a0 | refs/heads/master | 2022-12-23T22:08:27.768479 | 2020-09-24T10:10:35 | 2020-09-24T10:10:35 | 295,315,768 | 0 | 0 | MIT | 2020-09-24T10:10:36 | 2020-09-14T05:44:21 | JavaScript | UTF-8 | Python | false | false | 646 | py |
from django.db.models.signals import pre_save
from django.dispatch import receiver
from .models import UserProfile
@receiver(pre_save, sender=UserProfile)
def delete_prev_profile_pic(sender, instance, **kwargs):
if instance.pk:
try:
prev_profile = UserProfile.objects.get(
pk=instance.pk).profile_pic
except UserProfile.DoesNotExist:
return
else:
new_profile = instance.profile_pic
if prev_profile and prev_profile.url != new_profile.url:
if prev_profile != 'profile_pics/user.png':
prev_profile.delete(save=False)
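# Hedged note (assumption): for this pre_save receiver to be registered, the
# module must be imported at startup, conventionally from the app config:
#
#     class AccountsConfig(AppConfig):
#         name = 'accounts'
#
#         def ready(self):
#             from . import signals  # noqa: F401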
2265604085f0b363acfc4bbfcfd9c1294885eb23 | 626b14ce13986b6d5e03143e151004247659625a | /Day01-15/code/Day07/dict2.py | 1f89c849d510ca7c3702747ec28763684b8c1a4f | []
| no_license | Focavn/Python-100-Days | c7586ecf7ae3f1fd42f024558bb998be23ee9df8 | d8de6307aeff9fe31fd752bd7725b9cc3fbc084b | refs/heads/master | 2021-08-08T17:57:02.025178 | 2020-09-17T11:58:04 | 2020-09-17T11:58:04 | 220,427,144 | 0 | 0 | null | 2019-11-08T08:59:43 | 2019-11-08T08:59:41 | null | UTF-8 | Python | false | false | 575 | py |
"""
Common dictionary operations
Version: 0.1
Author: 骆昊 (Luo Hao)
Date: 2018-03-06
"""
def main():
stu = {'name': '骆昊', 'age': 38, 'gender': True}
print(stu)
print(stu.keys())
print(stu.values())
print(stu.items())
for elem in stu.items():
print(elem)
print(elem[0], elem[1])
if 'age' in stu:
stu['age'] = 20
print(stu)
stu.setdefault('score', 60)
print(stu)
stu.setdefault('score', 100)
print(stu)
stu['score'] = 100
print(stu)
if __name__ == '__main__':
main()
216d0e5c1001e89b218aef24c8cabfa7ee8027a8 | 5a310398592ddb75d27dc67c9b45198e31cb0d55 | /rfid-v1.py | d941851aa03ed4b0f8dbaef378689460a5bf2f2a | []
| no_license | ch-tseng/rfidDoor2 | 97871be9d431515425180b8e0893400a9b147831 | ed04b794d6c70dc223bb2f75e5d7367bea8353b4 | refs/heads/master | 2021-01-20T04:21:23.102422 | 2017-05-04T05:03:02 | 2017-05-04T05:03:02 | 89,674,676 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,950 | py |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import socket
import urllib.request
import logging
import json
#import base64
import binascii
import sys
import time
# A UDP server
# Set up a UDP server
UDPSock = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
# Listen on port 21567
# (to all IP addresses on this system)
listen_addr = ("",8080)
UDPSock.bind(listen_addr)
debugPrint = False
urlHeadString = "http://data.sunplusit.com/Api/DoorRFIDInfo?code=83E4621643F7B2E148257244000655E3&rfid="
#-----------------------------------------
# logging setup
logger = logging.getLogger('msg')
hdlr = logging.FileHandler('/home/chtseng/rfidDoor/msg.log')
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.INFO)
def is_json(myjson):
try:
json_object = json.loads(myjson)
except ValueError:
return False
return True
while True:
data,addr = UDPSock.recvfrom(1024)
#tmpTAGS, tmpTIMES = scanTAGS(binascii.b2a_hex(data).decode('ascii'))
readHEX = binascii.b2a_hex(data).decode('ascii')
logger.info('Received rfid:' + readHEX)
if(debugPrint==True):
print (readHEX)
try:
webReply = urllib.request.urlopen(urlHeadString + readHEX).read()
webReply = webReply.decode('utf-8').rstrip()
logger.info('webReply: {}'.format(webReply))
if(debugPrint==True):
print('webReply: {}'.format(webReply))
print(urlHeadString + readHEX)
print("webReply:" + webReply)
# listTAGs = webReply.split("")
except Exception:
print("Unexpected error:", sys.exc_info()[0])
logger.info('Unexpected error:' + str(sys.exc_info()[0]))
webReply = "[]"
pass
if(is_json(webReply)==True):
jsonReply = json.loads(webReply)
if(debugPrint==True):
print (jsonReply)
#time.sleep(1)
a14ab6df974107ea56998dc17be7330c4bf7fa0f | e6c65e2e354336a4bea5b6a4ccbccd3682915fe2 | /out-bin/py/google/fhir/models/run_locally.runfiles/com_google_fhir/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/pywrap_tensorflow_internal.py | 57359e7e1d2bc639df434b43db03044bd4c69c0c | [
"Apache-2.0"
]
| permissive | rasalt/fhir-datalab | c30ab773d84983dd04a37e9d0ddec8bf2824b8a4 | 3e329fc8b4226d3e3a4a7c23c306a86e7a9ea0de | refs/heads/master | 2021-10-09T05:51:04.593416 | 2018-12-21T18:11:03 | 2018-12-22T05:38:32 | 162,744,237 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 188 | py |
/home/rkharwar/.cache/bazel/_bazel_rkharwar/0ddaa3627472ad9d1367a008236ce2f5/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/pywrap_tensorflow_internal.py
64541b443d026560b213cf649fddf14d9174859e | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/bob/894c37be40744bf289920a1bd2eb6ba4.py | 8a84f39bc980357d36a643c97a4bffbd58c75679 | []
| no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 233 | py |
def hey(phrase):
phrase = phrase.strip()
if len(phrase) == 0:
return 'Fine. Be that way!'
elif phrase.isupper():
return 'Whoa, chill out!'
    elif phrase.endswith('?'):
return 'Sure.'
else:
return 'Whatever.'
62c90294b18a2c3fd268af603a53b8a22b86605c | 30754a148b79903d6e49399f1f270c79934ce389 | /fuzzinator/ui/tui/tui_listener.py | 9427825e28a0e23a7c0a9b1feb0cd1a50d817e82 | [
"BSD-3-Clause"
]
| permissive | syedkhalid/fuzzinator | 720ffc552c595b50de46e4e4e51f3a01cdc9aa77 | f90b58605de563e77b85ed0d54d2beb29efc7d14 | refs/heads/master | 2021-04-09T17:31:06.625840 | 2018-03-12T14:37:18 | 2018-03-12T15:21:27 | 125,814,277 | 1 | 0 | BSD-3-Clause | 2018-03-19T06:53:29 | 2018-03-19T06:53:29 | null | UTF-8 | Python | false | false | 1,054 | py |
# Copyright (c) 2016-2017 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import inspect
import os
from fuzzinator.listener import EventListener
class TuiListener(EventListener):
def __init__(self, pipe, events, lock):
for fn, _ in inspect.getmembers(EventListener, predicate=inspect.isfunction):
setattr(self, fn, self.Trampoline(name=fn, pipe=pipe, events=events, lock=lock))
class Trampoline(object):
def __init__(self, name, pipe, events, lock):
self.name = name
self.pipe = pipe
self.events = events
self.lock = lock
def __call__(self, **kwargs):
with self.lock:
try:
self.events.put_nowait({'fn': self.name, 'kwargs': kwargs})
os.write(self.pipe, b'x')
                except:
                    # the TUI side may already be gone; drop the event silently
                    pass
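# Hedged sketch (assumption, not part of Fuzzinator): how the TUI side might
# drain the queue that Trampoline fills. `pipe_read_fd` is the read end of the
# pipe whose write end was given to TuiListener, and `handler` is any object
# with methods named after the EventListener callbacks.
def drain_events(pipe_read_fd, events, handler):
    os.read(pipe_read_fd, 1)  # consume one wake-up byte written by __call__
    while not events.empty():
        event = events.get_nowait()
        getattr(handler, event['fn'])(**event['kwargs'])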
834d806a1de11b9ede080c5f7971ceaf79826ab9 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/2/d_7.py | ab8cf5b7f1a98268b126e4f73b7ff347efe89e40 | []
| no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py |
import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'd_7':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
    main(sys.argv[1])
06d1a676a79e4717ef3a8f9091ba8612972c4f88 | af829a7bb04f515b01dc78aaeb318991ead50d24 | /cart/forms.py | 05c7b1e1972cd2dd46c070cb532696742bea4a12 | []
| no_license | marcinpelszyk/Djnago-ecom | 75ffffb1d6fcd5457d9db8bf166610b15994203f | 8ae049087c952b52f287dd58f6a91a2e83113921 | refs/heads/main | 2023-06-08T20:14:22.007418 | 2021-06-14T20:56:26 | 2021-06-14T20:56:26 | 376,601,973 | 0 | 0 | null | 2021-06-14T20:56:27 | 2021-06-13T17:26:48 | HTML | UTF-8 | Python | false | false | 192 | py |
from django import forms
from .models import OrderItem
class AddCartForm(forms.ModelForm):
class Meta:
model = OrderItem
fields = ['quantity']
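# Hedged usage sketch (assumption): how a view might use AddCartForm. The FK
# fields attached before saving are illustrative -- they depend on OrderItem.
def _example_add_to_cart(request, order, product):
    form = AddCartForm(request.POST or None)
    if form.is_valid():
        item = form.save(commit=False)
        item.order = order        # assumed FK on OrderItem
        item.product = product    # assumed FK on OrderItem
        item.save()
    return form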
8829070ac250ac2824845aa0c2f13fbe2be8478b | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_19002.py | 3593dffbce1abc3e5313e425c261f44a9b5adabd | []
| no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 86 | py |
# No datetime module in Python: how to install one via easy_install?
# Shell command (not Python):
#   easy_install DateTime
c7129370443f104ccf475efa0da13bda8d448769 | 600df3590cce1fe49b9a96e9ca5b5242884a2a70 | /third_party/catapult/third_party/gsutil/gslib/hashing_helper.py | c26831fe3af861fcb437fa8a1e588da4bb246152 | [
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0",
"BSD-3-Clause"
]
| permissive | metux/chromium-suckless | efd087ba4f4070a6caac5bfbfb0f7a4e2f3c438a | 72a05af97787001756bae2511b7985e61498c965 | refs/heads/orig | 2022-12-04T23:53:58.681218 | 2017-04-30T10:59:06 | 2017-04-30T23:35:58 | 89,884,931 | 5 | 3 | BSD-3-Clause | 2022-11-23T20:52:53 | 2017-05-01T00:09:08 | null | UTF-8 | Python | false | false | 16,892 | py |
# -*- coding: utf-8 -*-
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions for hashing functionality."""
import base64
import binascii
from hashlib import md5
import os
from boto import config
import crcmod
from gslib.exception import CommandException
from gslib.util import DEFAULT_FILE_BUFFER_SIZE
from gslib.util import MIN_SIZE_COMPUTE_LOGGING
from gslib.util import TRANSFER_BUFFER_SIZE
from gslib.util import UsingCrcmodExtension
SLOW_CRCMOD_WARNING = """
WARNING: You have requested checksumming but your crcmod installation isn't
using the module's C extension, so checksumming will run very slowly. For help
installing the extension, please see:
$ gsutil help crcmod
"""
_SLOW_CRCMOD_DOWNLOAD_WARNING = """
WARNING: Downloading this composite object requires integrity checking with
CRC32c, but your crcmod installation isn't using the module's C extension,
so the hash computation will likely throttle download performance. For help
installing the extension, please see:
$ gsutil help crcmod
To disable slow integrity checking, see the "check_hashes" option in your
boto config file.
"""
_SLOW_CRC_EXCEPTION_TEXT = """
Downloading this composite object requires integrity checking with CRC32c,
but your crcmod installation isn't using the module's C extension, so the
hash computation will likely throttle download performance. For help
installing the extension, please see:
$ gsutil help crcmod
To download regardless of crcmod performance or to skip slow integrity
checks, see the "check_hashes" option in your boto config file.
NOTE: It is strongly recommended that you not disable integrity checks. Doing so
could allow data corruption to go undetected during uploading/downloading."""
_NO_HASH_CHECK_WARNING = """
WARNING: This download will not be validated since your crcmod installation
doesn't use the module's C extension, so the hash computation would likely
throttle download performance. For help in installing the extension, please
see:
$ gsutil help crcmod
To force integrity checking, see the "check_hashes" option in your boto config
file.
"""
# Configuration values for hashing.
CHECK_HASH_IF_FAST_ELSE_FAIL = 'if_fast_else_fail'
CHECK_HASH_IF_FAST_ELSE_SKIP = 'if_fast_else_skip'
CHECK_HASH_ALWAYS = 'always'
CHECK_HASH_NEVER = 'never'
# Table storing polynomial values of x^(2^k) mod CASTAGNOLI_POLY for all k < 31,
# where x^(2^k) and CASTAGNOLI_POLY are both considered polynomials. This is
# sufficient since x^(2^31) mod CASTAGNOLI_POLY = x.
X_POW_2K_TABLE = [2, 4, 16, 256, 65536, 517762881, 984302966,
408362264, 1503875210, 2862076957, 3884826397, 1324787473,
621200174, 1758783527, 1416537776, 1180494764, 648569364,
2521473789, 994858823, 1728245375, 3498467999, 4059169852,
3345064394, 2828422810, 2429203150, 3336788029, 860151998,
2102628683, 1033187991, 4243778976, 1123580069]
# Castagnoli polynomial and its degree.
CASTAGNOLI_POLY = 4812730177
DEGREE = 32
def ConcatCrc32c(crc_a, crc_b, num_bytes_in_b):
"""Computes CRC32C for concat(A, B) given crc(A), crc(B) and len(B).
An explanation of the algorithm can be found at
crcutil.googlecode.com/files/crc-doc.1.0.pdf.
Args:
crc_a: A 32-bit integer representing crc(A) with least-significant
coefficient first.
crc_b: Same as crc_a.
num_bytes_in_b: Length of B in bytes.
Returns:
CRC32C for concat(A, B)
"""
if not num_bytes_in_b:
return crc_a
return _ExtendByZeros(crc_a, 8 * num_bytes_in_b) ^ crc_b
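# Hedged self-check (illustration, not part of gsutil): verifies ConcatCrc32c
# against a straight CRC32C over the concatenated bytes, using the crcmod
# module already imported above.
def _example_concat_crc32c():
  crc_a = crcmod.predefined.Crc('crc-32c')
  crc_a.update('hello ')
  crc_b = crcmod.predefined.Crc('crc-32c')
  crc_b.update('world')
  combined = ConcatCrc32c(crc_a.crcValue, crc_b.crcValue, len('world'))
  whole = crcmod.predefined.Crc('crc-32c')
  whole.update('hello world')
  assert combined == whole.crcValue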
def _CrcMultiply(p, q):
"""Multiplies two polynomials together modulo CASTAGNOLI_POLY.
Args:
p: The first polynomial.
q: The second polynomial.
Returns:
Result of the multiplication.
"""
result = 0
top_bit = 1 << DEGREE
for _ in range(DEGREE):
if p & 1:
result ^= q
q <<= 1
if q & top_bit:
q ^= CASTAGNOLI_POLY
p >>= 1
return result
def _ExtendByZeros(crc, num_bits):
"""Given crc representing polynomial P(x), compute P(x)*x^num_bits.
Args:
    crc: crc representing polynomial P(x).
num_bits: number of bits in crc.
Returns:
P(x)*x^num_bits
"""
def _ReverseBits32(crc):
return int('{0:032b}'.format(crc, width=32)[::-1], 2)
crc = _ReverseBits32(crc)
i = 0
while num_bits != 0:
if num_bits & 1:
crc = _CrcMultiply(crc, X_POW_2K_TABLE[i % len(X_POW_2K_TABLE)])
i += 1
num_bits >>= 1
crc = _ReverseBits32(crc)
return crc
def _CalculateHashFromContents(fp, hash_alg):
"""Calculates a base64 digest of the contents of a seekable stream.
This function resets the file pointer to position 0.
Args:
fp: An already-open file object.
hash_alg: Instance of hashing class initialized to start state.
Returns:
Hash of the stream in hex string format.
"""
hash_dict = {'placeholder': hash_alg}
fp.seek(0)
CalculateHashesFromContents(fp, hash_dict)
fp.seek(0)
return hash_dict['placeholder'].hexdigest()
def CalculateHashesFromContents(fp, hash_dict, callback_processor=None):
"""Calculates hashes of the contents of a file.
Args:
fp: An already-open file object (stream will be consumed).
hash_dict: Dict of (string alg_name: initialized hashing class)
Hashing class will be populated with digests upon return.
callback_processor: Optional callback processing class that implements
Progress(integer amount of bytes processed).
"""
while True:
data = fp.read(DEFAULT_FILE_BUFFER_SIZE)
if not data:
break
for hash_alg in hash_dict.itervalues():
hash_alg.update(data)
if callback_processor:
callback_processor.Progress(len(data))
def CalculateB64EncodedCrc32cFromContents(fp):
"""Calculates a base64 CRC32c checksum of the contents of a seekable stream.
This function sets the stream position 0 before and after calculation.
Args:
fp: An already-open file object.
Returns:
CRC32c checksum of the file in base64 format.
"""
return _CalculateB64EncodedHashFromContents(
fp, crcmod.predefined.Crc('crc-32c'))
def CalculateB64EncodedMd5FromContents(fp):
"""Calculates a base64 MD5 digest of the contents of a seekable stream.
This function sets the stream position 0 before and after calculation.
Args:
fp: An already-open file object.
Returns:
MD5 digest of the file in base64 format.
"""
return _CalculateB64EncodedHashFromContents(fp, md5())
def CalculateMd5FromContents(fp):
"""Calculates a base64 MD5 digest of the contents of a seekable stream.
This function sets the stream position 0 before and after calculation.
Args:
fp: An already-open file object.
Returns:
MD5 digest of the file in hex format.
"""
return _CalculateHashFromContents(fp, md5())
def Base64EncodeHash(digest_value):
"""Returns the base64-encoded version of the input hex digest value."""
return base64.encodestring(binascii.unhexlify(digest_value)).rstrip('\n')
def Base64ToHexHash(base64_hash):
"""Returns the hex digest value of the input base64-encoded hash.
Args:
base64_hash: Base64-encoded hash, which may contain newlines and single or
double quotes.
Returns:
Hex digest of the input argument.
"""
return binascii.hexlify(base64.decodestring(base64_hash.strip('\n"\'')))
def _CalculateB64EncodedHashFromContents(fp, hash_alg):
"""Calculates a base64 digest of the contents of a seekable stream.
This function sets the stream position 0 before and after calculation.
Args:
fp: An already-open file object.
hash_alg: Instance of hashing class initialized to start state.
Returns:
Hash of the stream in base64 format.
"""
return Base64EncodeHash(_CalculateHashFromContents(fp, hash_alg))
def GetUploadHashAlgs():
"""Returns a dict of hash algorithms for validating an uploaded object.
This is for use only with single object uploads, not compose operations
such as those used by parallel composite uploads (though it can be used to
validate the individual components).
Returns:
dict of (algorithm_name: hash_algorithm)
"""
check_hashes_config = config.get(
'GSUtil', 'check_hashes', CHECK_HASH_IF_FAST_ELSE_FAIL)
if check_hashes_config == 'never':
return {}
return {'md5': md5}
def GetDownloadHashAlgs(logger, consider_md5=False, consider_crc32c=False):
"""Returns a dict of hash algorithms for validating an object.
Args:
logger: logging.Logger for outputting log messages.
consider_md5: If True, consider using a md5 hash.
consider_crc32c: If True, consider using a crc32c hash.
Returns:
Dict of (string, hash algorithm).
Raises:
CommandException if hash algorithms satisfying the boto config file
cannot be returned.
"""
check_hashes_config = config.get(
'GSUtil', 'check_hashes', CHECK_HASH_IF_FAST_ELSE_FAIL)
if check_hashes_config == CHECK_HASH_NEVER:
return {}
hash_algs = {}
if consider_md5:
hash_algs['md5'] = md5
elif consider_crc32c:
# If the cloud provider supplies a CRC, we'll compute a checksum to
# validate if we're using a native crcmod installation and MD5 isn't
# offered as an alternative.
if UsingCrcmodExtension(crcmod):
hash_algs['crc32c'] = lambda: crcmod.predefined.Crc('crc-32c')
elif not hash_algs:
if check_hashes_config == CHECK_HASH_IF_FAST_ELSE_FAIL:
raise CommandException(_SLOW_CRC_EXCEPTION_TEXT)
elif check_hashes_config == CHECK_HASH_IF_FAST_ELSE_SKIP:
logger.warn(_NO_HASH_CHECK_WARNING)
elif check_hashes_config == CHECK_HASH_ALWAYS:
logger.warn(_SLOW_CRCMOD_DOWNLOAD_WARNING)
hash_algs['crc32c'] = lambda: crcmod.predefined.Crc('crc-32c')
else:
raise CommandException(
'Your boto config \'check_hashes\' option is misconfigured.')
return hash_algs
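# Hedged example (illustration): pairing the algorithm factories returned by
# GetDownloadHashAlgs with live digester instances, the shape the download
# code paths expect.
def _example_download_digesters(logger):
  hash_algs = GetDownloadHashAlgs(logger, consider_md5=True)
  return dict((alg, factory()) for alg, factory in hash_algs.iteritems())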
class HashingFileUploadWrapper(object):
"""Wraps an input stream in a hash digester and exposes a stream interface.
This class provides integrity checking during file uploads via the
following properties:
Calls to read will appropriately update digesters with all bytes read.
Calls to seek (assuming it is supported by the wrapped stream) using
os.SEEK_SET will catch up / reset the digesters to the specified
position. If seek is called with a different os.SEEK mode, the caller
must return to the original position using os.SEEK_SET before further
reads.
Calls to seek are fast if the desired position is equal to the position at
the beginning of the last read call (we only need to re-hash bytes
from that point on).
"""
def __init__(self, stream, digesters, hash_algs, src_url, logger):
"""Initializes the wrapper.
Args:
stream: Input stream.
digesters: dict of {string: hash digester} containing digesters, where
string is the name of the hash algorithm.
hash_algs: dict of {string: hash algorithm} for resetting and
recalculating digesters. String is the name of the hash algorithm.
src_url: Source FileUrl that is being copied.
logger: For outputting log messages.
"""
if not digesters:
raise CommandException('HashingFileUploadWrapper used with no digesters.')
elif not hash_algs:
raise CommandException('HashingFileUploadWrapper used with no hash_algs.')
self._orig_fp = stream
self._digesters = digesters
self._src_url = src_url
self._logger = logger
self._seek_away = None
self._digesters_previous = {}
for alg in self._digesters:
self._digesters_previous[alg] = self._digesters[alg].copy()
self._digesters_previous_mark = 0
self._digesters_current_mark = 0
self._hash_algs = hash_algs
def read(self, size=-1): # pylint: disable=invalid-name
""""Reads from the wrapped file pointer and calculates hash digests.
Args:
      size: The amount of bytes to read. If omitted or negative, the entire
contents of the file will be read, hashed, and returned.
Returns:
Bytes from the wrapped stream.
Raises:
CommandException if the position of the wrapped stream is unknown.
"""
if self._seek_away is not None:
raise CommandException('Read called on hashing file pointer in an '
'unknown position; cannot correctly compute '
'digest.')
data = self._orig_fp.read(size)
self._digesters_previous_mark = self._digesters_current_mark
for alg in self._digesters:
self._digesters_previous[alg] = self._digesters[alg].copy()
self._digesters[alg].update(data)
self._digesters_current_mark += len(data)
return data
def tell(self): # pylint: disable=invalid-name
"""Returns the current stream position."""
return self._orig_fp.tell()
def seekable(self): # pylint: disable=invalid-name
"""Returns true if the stream is seekable."""
return self._orig_fp.seekable()
def seek(self, offset, whence=os.SEEK_SET): # pylint: disable=invalid-name
"""Seeks in the wrapped file pointer and catches up hash digests.
Args:
offset: The offset to seek to.
      whence: os.SEEK_SET, os.SEEK_CUR, or os.SEEK_END.
Returns:
Return value from the wrapped stream's seek call.
"""
if whence != os.SEEK_SET:
# We do not catch up hashes for non-absolute seeks, and rely on the
# caller to seek to an absolute position before reading.
self._seek_away = self._orig_fp.tell()
else:
# Hashes will be correct and it's safe to call read().
self._seek_away = None
if offset < self._digesters_previous_mark:
# This is earlier than our earliest saved digest, so we need to
# reset the digesters and scan from the beginning.
for alg in self._digesters:
self._digesters[alg] = self._hash_algs[alg]()
self._digesters_current_mark = 0
self._orig_fp.seek(0)
self._CatchUp(offset)
elif offset == self._digesters_previous_mark:
# Just load the saved digests.
self._digesters_current_mark = self._digesters_previous_mark
for alg in self._digesters:
self._digesters[alg] = self._digesters_previous[alg]
elif offset < self._digesters_current_mark:
# Reset the position to our previous digest and scan forward.
self._digesters_current_mark = self._digesters_previous_mark
for alg in self._digesters:
self._digesters[alg] = self._digesters_previous[alg]
self._orig_fp.seek(self._digesters_previous_mark)
self._CatchUp(offset - self._digesters_previous_mark)
else:
# Scan forward from our current digest and position.
self._orig_fp.seek(self._digesters_current_mark)
self._CatchUp(offset - self._digesters_current_mark)
return self._orig_fp.seek(offset, whence)
def _CatchUp(self, bytes_to_read):
"""Catches up hashes, but does not return data and uses little memory.
Before calling this function, digesters_current_mark should be updated
to the current location of the original stream and the self._digesters
should be current to that point (but no further).
Args:
bytes_to_read: Number of bytes to catch up from the original stream.
"""
if self._orig_fp.tell() != self._digesters_current_mark:
raise CommandException(
'Invalid mark when catching up hashes. Stream position %s, hash '
'position %s' % (self._orig_fp.tell(), self._digesters_current_mark))
for alg in self._digesters:
if bytes_to_read >= MIN_SIZE_COMPUTE_LOGGING:
self._logger.info('Catching up %s for %s...', alg,
self._src_url.url_string)
self._digesters_previous[alg] = self._digesters[alg].copy()
self._digesters_previous_mark = self._digesters_current_mark
bytes_remaining = bytes_to_read
bytes_this_round = min(bytes_remaining, TRANSFER_BUFFER_SIZE)
while bytes_this_round:
data = self._orig_fp.read(bytes_this_round)
bytes_remaining -= bytes_this_round
for alg in self._digesters:
self._digesters[alg].update(data)
bytes_this_round = min(bytes_remaining, TRANSFER_BUFFER_SIZE)
self._digesters_current_mark += bytes_to_read
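# Hedged usage sketch (illustration, not part of gsutil): streaming a local
# file through the wrapper so the digesters see every byte an upload loop
# consumes. `src_url` and `logger` stand in for the real FileUrl and Logger.
def _example_wrapped_upload(path, src_url, logger):
  digesters = {'md5': md5()}
  hash_algs = {'md5': md5}
  with open(path, 'rb') as fp:
    wrapped = HashingFileUploadWrapper(fp, digesters, hash_algs, src_url,
                                       logger)
    while wrapped.read(TRANSFER_BUFFER_SIZE):
      pass
  return digesters['md5'].hexdigest()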
d264e28ed2341e4155dedc2bdc2156861c78747e | e7ce273f404f82fd8672c97e50b386509c8f9870 | /Advanced/File_Handling/Directory_Traversal.py | 5aa0fbcbb285a6c22e60ca7405b8dd188b7a9b8b | []
| no_license | rzlatkov/Softuni | 3edca300f8ecdcfd86e332557712e17552bc91c3 | a494e35bff965b2b9dccc90e1381d5a1a23737a1 | refs/heads/main | 2023-07-02T12:49:59.737043 | 2021-08-13T20:47:07 | 2021-08-13T20:47:07 | 319,088,872 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,352 | py |
# Directory Traversal
import os
USER = os.getlogin()
USED_PATH = '/home/rzlatkov/Softuni/Advanced/File_Handling/' # first level traverse only
ROOT_PATH = './' # traverse everything
REPORT_PATH_WINDOWS = f'C:\\Users\\{USER}\\Desktop\\report.txt' # for Win users
REPORT_PATH_LINUX = f'/home/{USER}/Desktop/report.txt' # I am coding on a Linux (Manjaro)
def traverse(path):
dictionary_of_files = {}
path = os.walk(path)
for _,_, files in path:
for f in files:
            _, extension = os.path.splitext(f)
            if not extension:
                continue  # skip files without an extension (f.index('.') would raise)
if not extension in dictionary_of_files:
dictionary_of_files[extension] = []
dictionary_of_files[extension].append(f)
return dictionary_of_files
def sort_extensions(dictionary_of_files):
return dict(sorted(dictionary_of_files.items(), key=lambda x: x[0]))
def sort_filenames(dictionary_list_values):
    return sorted(dictionary_list_values)
def write_to_report(result, report_path):
with open(report_path, 'w') as writer:
for ext, fnames in result.items():
writer.write(ext + '\n')
sorted_fnames = sort_filenames(fnames)
for f in sorted_fnames:
writer.write(f"- - - {f}\n")
files = traverse(USED_PATH)
sorted_ext = sort_extensions(files)
write_to_report(sorted_ext, REPORT_PATH_LINUX)
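# Expected report shape (illustrative, assuming the directory holds a.py and
# notes.txt; extensions are sorted, then filenames within each extension):
# .py
# - - - a.py
# .txt
# - - - notes.txt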
| [
"[email protected]"
]
| |
704361a75a15b4ff7147ee4334cde26f9da2f4dd | 180beda50f31031bd3ba1668067bdb73fc1a7686 | /website/members/management/commands/generate_member_invoices.py | e73095aaebe1d829fb36e519f1585796e73b12cd | [
"MIT"
]
| permissive | eamanu/asoc_members | 053a05563a20ff4cafd09020367f3d60f149392e | bf2e99e9c63c60a59bdfd10ca1812d78851cbde6 | refs/heads/master | 2020-11-26T14:20:46.857545 | 2020-01-06T12:57:30 | 2020-01-06T12:57:30 | 229,101,268 | 0 | 0 | MIT | 2019-12-19T17:01:15 | 2019-12-19T17:01:14 | null | UTF-8 | Python | false | false | 7,788 | py | import datetime
import os
from django.conf import settings
from django.core.mail import EmailMessage
from django.core.management.base import BaseCommand
from django.db.models import Max
from members import logic
from members.models import Quota, Person, Payment, Member, PaymentStrategy
from . import _afip, _gdrive
INVOICES_FROM = '2018-08-01 00:00+03'
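# Argentina local time (UTC-3), used below to render payment timestamps on invoices.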
GMTminus3 = datetime.timezone(datetime.timedelta(hours=-3))
# mail stuff
MAIL_SUBJECT = "Factura por pago de cuota(s) a la Asociación Civil Python Argentina"
MAIL_TEXT = """\
Hola!
Adjunta va la factura por el pago hecho en fecha {payment_date:%Y-%m-%d}.
¡Gracias! Saludos,
--
. Lalita
.
Asociación Civil Python Argentina
http://ac.python.org.ar/
(claro, este mail es automático, soy une bot, pero contestá el mail sin problemas que
le va a llegar al humane correspondiente)
"""
PDF_MIMETYPE = 'application/pdf'
def _send_mail(payment_date, recipient, attach_path):
text = MAIL_TEXT.format(payment_date=payment_date)
mail = EmailMessage(MAIL_SUBJECT, text, settings.EMAIL_FROM, [recipient])
filename = os.path.basename(attach_path)
with open(attach_path, "rb") as fh:
attach_content = fh.read()
mail.attach(filename, attach_content, PDF_MIMETYPE)
mail.send()
class Command(BaseCommand):
help = "Generate the missing invoices"
def add_arguments(self, parser):
parser.add_argument('--limit', type=int, nargs='?', default=1)
parser.add_argument(
            '--invoice-date', type=str, nargs='?',
            help="Invoice date (YYYY-MM-DD), forces limit=1")  # bare % in argparse help strings breaks --help formatting
def handle(self, *args, **options):
limit = options['limit']
invoice_date = options['invoice_date']
if invoice_date is None:
invoice_date = datetime.date.today()
else:
invoice_date = datetime.datetime.strptime(invoice_date, "%Y-%m-%d").date()
limit = 1
print("Forcing invoice date to {} (also limit=1)".format(invoice_date))
records = []
# check AFIP
_afip.verify_service()
# get the greatest invoice number used (once, will keep updated later)
_max_invoice_number_query = Payment.objects.aggregate(Max('invoice_number'))
max_invoice_number = _max_invoice_number_query['invoice_number__max']
print("Found max invoice number {}".format(max_invoice_number))
# get payments after we started automatically that still have no invoice generated
payments_per_invoice = {}
persons_per_invoice = {}
payments = (
Payment.objects.filter(timestamp__gte=INVOICES_FROM, invoice_ok=False)
.exclude(strategy__platform=PaymentStrategy.CREDIT)
.order_by('timestamp', 'pk').all()
)
print("Found {} payments to process".format(len(payments)))
if len(payments) > limit:
payments = payments[:limit]
print(" truncating to {}".format(limit))
for payment in payments:
print("Generating invoice for payment", payment)
record = {
'invoice_date': invoice_date,
}
records.append(record)
# get the related member (if None, or multiple, still not supported!)
_members = Member.objects.filter(patron=payment.strategy.patron).all()
assert len(_members) == 1, "multiple or no members for the patron is not supported"
member = _members[0]
# only process payments for normal members (benefactor members get invoices done
# by hand)
person = member.entity
if isinstance(person, Person):
print(" person found", person)
else:
print(" IGNORING payment, member {} is not a person: {}".format(member, person))
continue
# if payment still doesn't have a number, add one to latest and save;
# in any case, use it
if not payment.invoice_number:
max_invoice_number += 1
payment.invoice_number = max_invoice_number
payment.invoice_spoint = settings.AFIP['selling_point']
payment.save()
print(" using new invoice number", payment.invoice_number)
else:
print(" using already stored invoice number", payment.invoice_number)
assert payment.invoice_spoint == settings.AFIP['selling_point']
payments_per_invoice[payment.invoice_number] = payment
record['invoice'] = payment.invoice_number
# we bill one item, for the whole amount: "3 quotas for $300", instead of billing
# 3 x "1 quota for $100", which would be problematic if the paid amount is
# not exactly 300
record['amount'] = payment.amount
record['quantity'] = 1
# get all billing data from the person
persons_per_invoice[payment.invoice_number] = person
record['dni'] = person.document_number
record['fullname'] = person.full_name
record['address'] = person.street_address
record['city'] = person.city
record['zip_code'] = person.zip_code
record['province'] = person.province
tstamp_argentina = payment.timestamp.astimezone(GMTminus3)
record['payment_comment'] = "Pago via {} ({:%Y-%m-%d %H:%M})".format(
payment.strategy.platform_name, tstamp_argentina)
# get quotas for the payment; we don't show the period in the description
# as there's a specific field for that
quotas = list(Quota.objects.filter(payment=payment).order_by('year', 'month').all())
assert quotas
if len(quotas) == 1:
description = "1 cuota social"
else:
description = "{} cuotas sociales".format(len(quotas))
record['description'] = description
from_quota = quotas[0]
from_day = datetime.date(from_quota.year, from_quota.month, 1)
to_quota = quotas[-1]
ny, nm = logic.increment_year_month(to_quota.year, to_quota.month)
to_day = datetime.date(ny, nm, 1) - datetime.timedelta(days=1)
record['service_date_from'] = from_day.strftime("%Y%m%d")
record['service_date_to'] = to_day.strftime("%Y%m%d")
print(" found {} quota(s) ({} - {})".format(
len(quotas), record['service_date_from'], record['service_date_to']))
try:
results = _afip.generate_invoices(records)
except Exception:
print("PROBLEMS generating invoices with records", records)
raise
# save the results for the generated ok invoices and send the proper mails
for invoice_number, result in sorted(results.items()):
print("Post-processing invoice {} at {}".format(
invoice_number, result.get('pdf_path')))
if not result['invoice_ok']:
print(" WARNING: invoice NOT authorized ok")
continue
payment = payments_per_invoice[invoice_number]
payment.invoice_ok = True
payment.save()
# upload the invoice to google drive
_gdrive.upload_invoice(result['pdf_path'], invoice_date)
print(" uploaded to gdrive OK")
# send the invoice by mail
person = persons_per_invoice[invoice_number]
_send_mail(payment.timestamp, person.email, result['pdf_path'])
print(" sent by mail OK")
# invoice uploaded to gdrive and sent ok, don't need it here anymore
os.remove(result['pdf_path'])
| [
"[email protected]"
]
| |
3cf43fa8092e181dca265178db23f042cb43c200 | 8e304f1291480db18b9727efa61647b369531172 | /csvkit/convert/js.py | 8757c65a2bfbc280bab8fc78a153f0f7dcf57e4c | [
"MIT"
]
| permissive | zviri/csvkit | 4439fff0e376d089f89420fabca245c25eb12dc5 | 39f5d3b6c7d6eaaf145e7e01fa247292763da16d | refs/heads/master | 2021-01-12T20:54:42.673449 | 2014-09-04T14:11:06 | 2014-09-04T14:11:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,879 | py | #!/usr/bin/env python
import json
import six
from csvkit import CSVKitWriter
def parse_object(obj, path=''):
"""
Recursively parse JSON objects and a dictionary of paths/keys and values.
Inspired by JSONPipe (https://github.com/dvxhouse/jsonpipe).
"""
if isinstance(obj, dict):
        iterator = six.iteritems(obj)  # works on both Python 2 and 3
elif isinstance(obj, (list, tuple)):
iterator = enumerate(obj)
else:
return { path.strip('/'): obj }
d = {}
for key, value in iterator:
key = six.text_type(key)
d.update(parse_object(value, path + key + '/'))
return d
def json2csv(f, key=None, **kwargs):
"""
Convert a JSON document into CSV format.
The top-level element of the input must be a list or a dictionary. If it is a dictionary, a key must be provided which is an item of the dictionary which contains a list.
"""
document = f.read()
js = json.loads(document)
if isinstance(js, dict):
if not key:
raise TypeError('When converting a JSON document with a top-level dictionary element, a key must be specified.')
js = js[key]
if not isinstance(js, list):
raise TypeError('Only JSON documents with a top-level list element are able to be converted (or a top-level dictionary if specifying a key).')
field_set = set()
flat = []
for obj in js:
flat.append(parse_object(obj))
for obj in flat:
field_set.update(obj.keys())
fields = sorted(list(field_set))
o = six.StringIO()
writer = CSVKitWriter(o)
writer.writerow(fields)
for i in flat:
row = []
for field in fields:
if field in i:
row.append(i[field])
else:
row.append(None)
writer.writerow(row)
output = o.getvalue()
o.close()
return output
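if __name__ == '__main__':
    # Minimal usage sketch (illustrative, not part of csvkit's public API):
    # nested keys are flattened into slash-delimited paths by parse_object.
    import io
    sample = io.StringIO('[{"a": 1, "b": {"c": 2}}]')
    print(json2csv(sample))  # header row: a,b/c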
| [
"[email protected]"
]
| |
6389cd069b984d4e989a8c114236bd598cef97a2 | a89dfda3732eb73863b3e2fb1ebb46f1cb40973a | /txweb/tests/test_util_basic_sanitize_render_output.py | 3ea1634b5077da8b2f699e5319c384c2d49cc0f1 | [
"MIT"
]
| permissive | devdave/txWeb | 543ccb7be0671a5e83959bb7cfc8e7804f04a74a | e447fbefd16134cb2f83323c04c20c41638d7da3 | refs/heads/master | 2022-12-15T18:11:50.880675 | 2021-03-24T18:48:16 | 2021-03-24T18:48:16 | 2,116,693 | 1 | 0 | MIT | 2022-12-08T04:28:41 | 2011-07-28T03:55:43 | Python | UTF-8 | Python | false | false | 568 | py |
import pytest
from txweb.util.basic import sanitize_render_output
from twisted.web.server import NOT_DONE_YET
from twisted.internet.defer import Deferred
def test_full_suite_coverage():
assert sanitize_render_output("Foo") == b"Foo"
assert sanitize_render_output(b"Foo") == b"Foo"
with pytest.raises(RuntimeError):
assert sanitize_render_output(("Foo",))
assert sanitize_render_output(NOT_DONE_YET) == NOT_DONE_YET
d = Deferred()
assert sanitize_render_output(d) == NOT_DONE_YET
    assert sanitize_render_output(123) == b"123"
| [
"[email protected]"
]
| |
c7a84219541a207b77a6abe222131259e8320dcf | 18a79067223932c2f7aa6ff6b81d0b3f36169db2 | /atcoder/abc178/A.py | fbe81ed981719c0a616b1b4817d553d4699e8bb1 | []
| no_license | aadiupadhyay/CodeForces | 894b0e5faef73bfd55a28c2058fb0ca6f43c69f9 | 76dac4aa29a2ea50a89b3492387febf6515cf43e | refs/heads/master | 2023-04-12T17:58:52.733861 | 2021-05-07T20:08:00 | 2021-05-11T20:07:11 | 330,149,645 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 337 | py | from sys import stdin,stdout
st=lambda:list(stdin.readline().strip())
li=lambda:list(map(int,stdin.readline().split()))
mp=lambda:map(int,stdin.readline().split())
inp=lambda:int(stdin.readline())
pr=lambda n: stdout.write(str(n)+"\n")
mod=1000000007
def solve():
n=inp()
print((n+1)%2)
for _ in range(1):
solve()
| [
"[email protected]"
]
| |
3f912421cf12848e776f7f30387961e82a570848 | ef32b87973a8dc08ba46bf03c5601548675de649 | /pytglib/api/functions/get_web_app_url.py | fd431d0848611e166908d7d79bd1b425fdebbbee | [
"MIT"
]
| permissive | iTeam-co/pytglib | 1a7580f0e0c9e317fbb0de1d3259c8c4cb90e721 | d3b52d7c74ee5d82f4c3e15e4aa8c9caa007b4b5 | refs/heads/master | 2022-07-26T09:17:08.622398 | 2022-07-14T11:24:22 | 2022-07-14T11:24:22 | 178,060,880 | 10 | 9 | null | null | null | null | UTF-8 | Python | false | false | 1,089 | py |
from ..utils import Object
class GetWebAppUrl(Object):
"""
Returns an HTTPS URL of a Web App to open after keyboardButtonTypeWebApp button is pressed
Attributes:
ID (:obj:`str`): ``GetWebAppUrl``
Args:
bot_user_id (:obj:`int`):
Identifier of the target bot
url (:obj:`str`):
The URL from the keyboardButtonTypeWebApp button
theme (:class:`telegram.api.types.themeParameters`):
Preferred Web App theme; pass null to use the default theme
Returns:
HttpUrl
Raises:
:class:`telegram.Error`
"""
ID = "getWebAppUrl"
def __init__(self, bot_user_id, url, theme, extra=None, **kwargs):
self.extra = extra
self.bot_user_id = bot_user_id # int
self.url = url # str
self.theme = theme # ThemeParameters
@staticmethod
def read(q: dict, *args) -> "GetWebAppUrl":
bot_user_id = q.get('bot_user_id')
url = q.get('url')
theme = Object.read(q.get('theme'))
return GetWebAppUrl(bot_user_id, url, theme)
| [
"[email protected]"
]
| |
ec8e7dafe20595ebc94fed5089fa5fc70c148552 | 6147d3d059a048be57aaabe3519551ed4bc305ec | /config/management/commands/fs2import.py | ed4cb19b50f25028e6c1954d36d1fe16bcc534f1 | [
"MIT"
]
| permissive | a-mere-peasant/MangAdventure | a8f7fdfddf5ae65e645b0e0e0d197f2b0033bc8d | afbcdb5ab68bfc801550c8383568f7265e70b5ab | refs/heads/master | 2020-08-06T13:38:59.062119 | 2019-10-05T12:22:53 | 2019-10-05T12:22:53 | 212,808,131 | 0 | 0 | MIT | 2019-10-04T13:07:47 | 2019-10-04T12:18:27 | null | UTF-8 | Python | false | false | 4,613 | py | from os.path import abspath, join
from xml.etree import cElementTree as et
from django.core.files import File
from django.core.management import BaseCommand
from groups.models import Group
from reader.models import Chapter, Page, Series
def _get_element(tables, name):
return list(filter(
lambda t: t.attrib['name'].endswith(name), tables
))
def _get_column(table, name):
text = table.find('column[@name="%s"]' % name).text
return text if text is not None else ''
def _sort_children(tables, name):
return sorted(tables, key=lambda p: _get_column(p, name))
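# The helpers above navigate the FS2 XML dump: _get_element picks <table>
# nodes whose name ends with a suffix, _get_column reads a named <column>
# child (empty string if missing), and _sort_children orders rows by a column.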
class Command(BaseCommand):
help = 'Imports data from FoolSlide2.'
def add_arguments(self, parser):
parser.add_argument(
'root', type=str,
help='The path to the root directory of the FS2 installation.'
)
parser.add_argument(
'data', type=str,
help="The path to FS2's exported data (in XML format)."
)
def handle(self, *args, **options):
root = abspath(options['root'])
data = abspath(options['data'])
tables = et.parse(data).findall('database/table')
content = join(root, 'content', 'comics')
directories = {'series': [], 'chapters': []}
elements = {
'series': _get_element(tables, 'comics'),
'chapters': _get_element(tables, 'chapters'),
'pages': _get_element(tables, 'pages'),
'groups': _get_element(tables, 'teams')
}
all_groups = []
for g in elements['groups']:
group = Group(
id=_get_column(g, 'id'),
name=_get_column(g, 'name'),
website=_get_column(g, 'url'),
twitter=_get_column(g, 'twitter'),
irc=_get_column(g, 'irc')
)
all_groups.append(group)
Group.objects.bulk_create(all_groups)
all_series = []
for s in elements['series']:
slug = _get_column(s, 'stub')
series = Series(
id=_get_column(s, 'id'), slug=slug,
title=_get_column(s, 'name'),
description=_get_column(s, 'description'),
)
thumb = _get_column(s, 'thumbnail')
series_dir = join(content, '%s_%s' % (
slug, _get_column(s, 'uniqid')
))
cover = join(series_dir, 'thumb_%s' % thumb)
with open(cover, 'rb') as f:
series.cover.save(thumb, File(f), save=False)
all_series.append(series)
directories['series'].append(
(_get_column(s, 'id'), series_dir)
)
Series.objects.bulk_create(all_series)
all_chapters = []
chapter_groups = []
groups_through = Chapter.groups.through
for c in elements['chapters']:
cid = _get_column(c, 'id')
sid = _get_column(c, 'comic_id')
number = float('%s.%s' % (
_get_column(c, 'chapter') or '0',
_get_column(c, 'subchapter') or '0'
))
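            # e.g. chapter '12' and subchapter '5' combine into 12.5; missing
            # values fall back to '0' so the float() call always succeeds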
volume = int(_get_column(c, 'volume') or '0')
chapter = Chapter(
id=cid, series_id=sid,
title=_get_column(c, 'name'),
volume=volume, number=number
)
gid = _get_column(c, 'team_id')
if gid:
chapter_groups.append(
groups_through(chapter_id=cid, group_id=gid)
)
_dir = next(d[1] for d in directories['series'] if d[0] == sid)
directories['chapters'].append((
cid, join(_dir, '%s_%s' % (
_get_column(c, 'stub'), _get_column(c, 'uniqid')
))
))
all_chapters.append(chapter)
Chapter.objects.bulk_create(all_chapters)
groups_through.objects.bulk_create(chapter_groups)
all_pages = []
page_numbers = {}
for p in _sort_children(elements['pages'], 'filename'):
pid = _get_column(p, 'id')
cid = _get_column(p, 'chapter_id')
page_numbers[cid] = page_numbers.get(cid, 0) + 1
page = Page(id=pid, chapter_id=cid, number=page_numbers[cid])
_dir = next(d[1] for d in directories['chapters'] if d[0] == cid)
fname = _get_column(p, 'filename')
with open(join(_dir, fname), 'rb') as f:
page.image.save(fname, File(f), save=False)
all_pages.append(page)
Page.objects.bulk_create(all_pages)
| [
"[email protected]"
]
| |
7bdb3032d0b87e6e58936035f17049cb25437466 | 6f05f7d5a67b6bb87956a22b988067ec772ba966 | /data/train/python/4fde7f3e48576985304dbb54c7ab85f5d1c4d4e9observer.py | 4fde7f3e48576985304dbb54c7ab85f5d1c4d4e9 | [
"MIT"
]
| permissive | harshp8l/deep-learning-lang-detection | 93b6d24a38081597c610ecf9b1f3b92c7d669be5 | 2a54293181c1c2b1a2b840ddee4d4d80177efb33 | refs/heads/master | 2020-04-07T18:07:00.697994 | 2018-11-29T23:21:23 | 2018-11-29T23:21:23 | 158,597,498 | 0 | 0 | MIT | 2018-11-21T19:36:42 | 2018-11-21T19:36:41 | null | UTF-8 | Python | false | false | 988 | py | from flask import Flask
from flask.ext.restful import reqparse, abort, Api, Resource
from utils import get_controllers
app = Flask(__name__)
api = Api(app)
def get_controller_by_id(controller_id):
try:
return controllers[controller_id]
except IndexError:
abort(404, message="Controller {} doesn't exist".format(controller_id))
class ControllerListResource(Resource):
def get(self):
return [controller.state_as_dict() for controller in controllers]
class ControllerResource(Resource):
def get(self, controller_id):
controller = get_controller_by_id(controller_id)
return controller.state_as_dict()
api.add_resource(ControllerListResource, '/controllers')
api.add_resource(ControllerResource, '/controllers/<int:controller_id>')
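# Illustrative requests once the app is running (default Flask port 5000):
#   GET /controllers      -> JSON list with each controller's state
#   GET /controllers/0    -> state of controller 0, or 404 if it doesn't exist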
if __name__ == '__main__':
controllers = get_controllers(read_only=True)
app.run(debug=True, use_reloader=False)
for controller in controllers:
controller.terminate()
| [
"[email protected]"
]
| |
a93ea63288eff967bb4d9195f3c82744bd638f54 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_fasting.py | 2a2b2ee81235039958aac103ee2d9541cc58f881 | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 228 | py |
from xai.brain.wordbase.verbs._fast import _FAST
#calss header
class _FASTING(_FAST, ):
def __init__(self,):
_FAST.__init__(self)
self.name = "FASTING"
self.specie = 'verbs'
self.basic = "fast"
self.jsondata = {}
| [
"[email protected]"
]
| |
90a9f66b973d50155b27452cee64bbbfb1800a9b | 4dd1d8fa59e20061e2c12e540fc52b1b305e575b | /source/sims-2/boltz-bgk/s5/plot-sol.py | ff8f54a4239c7054fea5d9ce39a198fdbe5120d1 | [
"MIT"
]
| permissive | ammarhakim/ammar-simjournal | f63521906a97d55ab290a5960d94758139944c89 | 5019f4723e20db80a20db6f2bd454c2fd3241412 | refs/heads/master | 2023-06-08T08:18:11.722779 | 2023-06-02T15:06:43 | 2023-06-02T15:06:43 | 204,050,516 | 3 | 3 | null | 2022-02-01T16:53:13 | 2019-08-23T18:28:44 | Lua | UTF-8 | Python | false | false | 3,194 | py | import gkedata
import gkedgbasis
from pylab import *
import pylab
import tables
import math
import numpy
import pylab
import numpy
from matplotlib import rcParams
import matplotlib.pyplot as plt
# customization for figure
rcParams['lines.linewidth'] = 2
rcParams['font.size'] = 18
rcParams['xtick.major.size'] = 8 # default is 4
rcParams['xtick.major.width'] = 3 # default is 0.5
rcParams['ytick.major.size'] = 8 # default is 4
rcParams['ytick.major.width'] = 3 # default is 0.5
rcParams['figure.facecolor'] = 'white'
#rcParams['figure.subplot.bottom'] = 0.125
#rcParams['figure.subplot.right'] = 0.85 # keep labels/ticks of colobar in figure
rcParams['image.interpolation'] = 'none'
rcParams['image.origin'] = 'lower'
rcParams['contour.negative_linestyle'] = 'solid'
rcParams['savefig.bbox'] = 'tight'
# Math/LaTex fonts:
# http://matplotlib.org/users/mathtext.html
# http://matplotlib.org/users/usetex.html
# Example: xlabel(r'$t \cdot l / V_{A,bc}$')
rcParams['mathtext.default'] = 'regular' # match the font used for regular text
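# IE computes the thermal (internal) energy density: half of the second
# moment E minus the bulk kinetic part nu**2/n (assuming E is the second
# velocity moment of the distribution function).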
def IE(n, nu, E):
return 0.5*(E-nu**2/n)
# density plot
d = gkedata.GkeData("../s5/s5-bgk-boltz_numDensity_5.h5")
dg1 = gkedgbasis.GkeDgLobatto1DPolyOrder2Basis(d)
Xc, n2 = dg1.project(0)
d = gkedata.GkeData("../s6/s6-bgk-boltz_numDensity_5.h5")
dg1 = gkedgbasis.GkeDgLobatto1DPolyOrder2Basis(d)
Xc, n3 = dg1.project(0)
nEul = loadtxt("../m2/m2-euler-shock-exact-density.txt")
figure(1)
plot(Xc, n2, '-r', label='Kn=1/100')
plot(Xc, n3, '-b', label='Kn=1/1000')
plot(nEul[:,0], nEul[:,1], 'k--')
xlabel('X')
ylabel('Density')
legend(loc='best')
savefig('jets-density-cmp.png', dpi=200)
# momentum plot
d = gkedata.GkeData("../s5/s5-bgk-boltz_momentum_5.h5")
dg1 = gkedgbasis.GkeDgLobatto1DPolyOrder2Basis(d)
Xc, nu2 = dg1.project(0)
d = gkedata.GkeData("../s6/s6-bgk-boltz_momentum_5.h5")
dg1 = gkedgbasis.GkeDgLobatto1DPolyOrder2Basis(d)
Xc, nu3 = dg1.project(0)
uEul = loadtxt("../m2/m2-euler-shock-exact-velocity.txt")
figure(2)
plot(Xc, nu2/n2, '-r', label='Kn=1/100')
plot(Xc, nu3/n3, '-b', label='Kn=1/1000')
plot(uEul[:,0], uEul[:,1], 'k--')
xlabel('X')
ylabel('Velocity')
legend(loc='best')
savefig('jets-velocity-cmp.png', dpi=200)
# internal energy plot
d = gkedata.GkeData("../s5/s5-bgk-boltz_ptclEnergy_5.h5")
dg1 = gkedgbasis.GkeDgLobatto1DPolyOrder2Basis(d)
Xc, E2 = dg1.project(0)
d = gkedata.GkeData("../s6/s6-bgk-boltz_ptclEnergy_5.h5")
dg1 = gkedgbasis.GkeDgLobatto1DPolyOrder2Basis(d)
Xc, E3 = dg1.project(0)
pEul = loadtxt("../m2/m2-euler-shock-exact-pressure.txt")
figure(3)
plot(Xc, IE(n2, nu2, E2), '-r', label='Kn=1/100')
plot(Xc, IE(n3, nu3, E3), '-b', label='Kn=1/1000')
plot(pEul[:,0], pEul[:,1]/(3-1), 'k--')
xlabel('X')
ylabel('Particle Energy')
legend(loc='best')
savefig('jets-ptclInternalEnergy-cmp.png', dpi=200)
figure(4)
plot(Xc, 0.5*E2, '-r', label='Kn=1/100')
plot(Xc, 0.5*E3, '-b', label='Kn=1/1000')
plot(pEul[:,0], 0.5*nEul[:,1]*uEul[:,1]**2+pEul[:,1]/(3-1), 'k--')
xlabel('X')
ylabel('Particle Energy')
legend(loc='best')
savefig('jets-ptclEnergy-cmp.png', dpi=200)
show()
| [
"[email protected]"
]
| |
c52bcba0f9d5a677631d2c20a62d28a6c84fd7d1 | 3afb516b7a759478a5b7181c94941934c48ef63e | /baselines/cifar/sngp.py | ab31eed8491da1bf18bec06e6979d462495fd4e7 | [
"Apache-2.0"
]
| permissive | barseghyanartur/uncertainty-baselines | e85848dd04db998b114254186eb46917395722fc | 982323e1e82699ff42292f53cd3bbf3cd180912c | refs/heads/master | 2023-02-09T00:12:07.632028 | 2021-01-04T09:48:46 | 2021-01-04T09:49:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,412 | py | # coding=utf-8
# Copyright 2020 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Wide ResNet 28-10 with SNGP on CIFAR-10.
Spectral-normalized neural GP (SNGP) [1] is a simple method to improve
a deterministic neural network's uncertainty by applying spectral
normalization to the hidden layers, and then replace the dense output layer
with a Gaussian process layer.
## Reproducibility Instruction for CIFAR-100:
When running this script on CIFAR-100, set base_learning_rate=0.08 and
gp_mean_field_factor=12.5 to reproduce the benchmark result.
## Combining with MC Dropout:
As a single-model method, SNGP can be combined with other classic
uncertainty techniques (e.g., Monte Carlo dropout, deep ensemble) to further
improve performance.
This script supports adding Monte Carlo dropout to
SNGP by setting `use_mc_dropout=True`, setting `num_dropout_samples=10`
(or any integer larger than 1). Additionally we recommend adjust
`gp_mean_field_factor` slightly, since averaging already calibrated
individual models (in this case single SNGPs) can sometimes lead to
under-confidence [3].
## References:
[1]: Jeremiah Liu et al. Simple and Principled Uncertainty Estimation with
Deterministic Deep Learning via Distance Awareness.
_arXiv preprint arXiv:2006.10108_, 2020.
https://arxiv.org/abs/2006.10108
[2]: Zhiyun Lu, Eugene Ie, Fei Sha. Uncertainty Estimation with Infinitesimal
Jackknife. _arXiv preprint arXiv:2006.07584_, 2020.
https://arxiv.org/abs/2006.07584
[3]: Rahul Rahaman, Alexandre H. Thiery. Uncertainty Quantification and Deep
Ensembles. _arXiv preprint arXiv:2007.08792_, 2020.
https://arxiv.org/abs/2007.08792
[4]: Hendrycks, Dan et al. AugMix: A Simple Data Processing Method to Improve
Robustness and Uncertainty. In _International Conference on Learning
Representations_, 2020.
https://arxiv.org/abs/1912.02781
[5]: Zhang, Hongyi et al. mixup: Beyond Empirical Risk Minimization. In
_International Conference on Learning Representations_, 2018.
https://arxiv.org/abs/1710.09412
"""
import functools
import os
import time
from absl import app
from absl import flags
from absl import logging
import edward2 as ed
import tensorflow as tf
import tensorflow_datasets as tfds
import uncertainty_baselines as ub
import data_utils # local file import
import utils # local file import
import uncertainty_metrics as um
flags.DEFINE_integer('seed', 42, 'Random seed.')
flags.DEFINE_integer('per_core_batch_size', 64, 'Batch size per TPU core/GPU.')
flags.DEFINE_float('base_learning_rate', 0.05,
'Base learning rate when total batch size is 128. It is '
'scaled by the ratio of the total batch size to 128.')
flags.DEFINE_integer('lr_warmup_epochs', 1,
'Number of epochs for a linear warmup to the initial '
'learning rate. Use 0 to do no warmup.')
flags.DEFINE_float('lr_decay_ratio', 0.2, 'Amount to decay learning rate.')
flags.DEFINE_list('lr_decay_epochs', ['60', '120', '160'],
'Epochs to decay learning rate by.')
flags.DEFINE_float('l2', 3e-4, 'L2 regularization coefficient.')
flags.DEFINE_float('train_proportion', 1.,
'Only a fraction (between 0 and 1) of the train set is used '
'for training. The remainder can be used for validation.')
flags.DEFINE_enum('dataset', 'cifar10',
enum_values=['cifar10', 'cifar100'],
help='Dataset.')
flags.DEFINE_string('cifar100_c_path', None,
'Path to the TFRecords files for CIFAR-100-C. Only valid '
'(and required) if dataset is cifar100 and corruptions.')
flags.DEFINE_integer('corruptions_interval', -1,
'Number of epochs between evaluating on the corrupted '
'test data. Use -1 to never evaluate.')
flags.DEFINE_integer(
'checkpoint_interval', -1,
'Number of epochs between saving checkpoints. Use -1 to '
'never save checkpoints.')
flags.DEFINE_integer('num_bins', 15, 'Number of bins for ECE.')
flags.DEFINE_string('output_dir', '/tmp/cifar', 'Output directory.')
flags.DEFINE_integer('train_epochs', 250, 'Number of training epochs.')
# Data Augmentation flags.
flags.DEFINE_bool('augmix', False,
'Whether to perform AugMix [4] on the input data.')
flags.DEFINE_integer('aug_count', 1,
'Number of augmentation operations in AugMix to perform '
                     'on the input image. In the single model context, it '
                     'should be 1. In the ensembles context, it should be '
                     'ensemble_size if we perform random_augment only; it '
                     'should be (ensemble_size - 1) if we perform augmix.')
flags.DEFINE_float('augmix_prob_coeff', 0.5, 'Augmix probability coefficient.')
flags.DEFINE_integer('augmix_depth', -1,
'Augmix depth, -1 meaning sampled depth. This corresponds'
'to line 7 in the Algorithm box in [4].')
flags.DEFINE_integer('augmix_width', 3,
'Augmix width. This corresponds to the k in line 5 in the'
'Algorithm box in [4].')
flags.DEFINE_float('mixup_alpha', 0., 'Mixup hyperparameter, 0. to diable.')
# Dropout flags
flags.DEFINE_bool('use_mc_dropout', False,
'Whether to use Monte Carlo dropout for the hidden layers.')
flags.DEFINE_bool('use_filterwise_dropout', True,
'Whether to use filterwise dropout for the hidden layers.')
flags.DEFINE_float('dropout_rate', 0.1, 'Dropout rate.')
flags.DEFINE_integer('num_dropout_samples', 1,
'Number of dropout samples to use for prediction.')
flags.DEFINE_integer('num_dropout_samples_training', 1,
'Number of dropout samples for training.')
# SNGP flags.
flags.DEFINE_bool('use_spec_norm', True,
'Whether to apply spectral normalization.')
flags.DEFINE_integer(
'spec_norm_iteration', 1,
'Number of power iterations to perform for estimating '
'the spectral norm of weight matrices.')
flags.DEFINE_float('spec_norm_bound', 6.,
'Upper bound to spectral norm of weight matrices.')
# Gaussian process flags.
flags.DEFINE_bool('use_gp_layer', True,
'Whether to use Gaussian process as the output layer.')
flags.DEFINE_float('gp_bias', 0., 'The bias term for GP layer.')
flags.DEFINE_float(
'gp_scale', 2.,
'The length-scale parameter for the RBF kernel of the GP layer.')
flags.DEFINE_integer(
'gp_input_dim', 128,
'The dimension to reduce the neural network input for the GP layer '
'(via random Gaussian projection which preserves distance by the '
' Johnson-Lindenstrauss lemma). If -1, no dimension reduction.')
flags.DEFINE_integer(
'gp_hidden_dim', 1024,
'The hidden dimension of the GP layer, which corresponds to the number of '
'random features used for the approximation.')
flags.DEFINE_bool(
'gp_input_normalization', True,
'Whether to normalize the input using LayerNorm for GP layer.'
'This is similar to automatic relevance determination (ARD) in the classic '
'GP learning.')
flags.DEFINE_string(
'gp_random_feature_type', 'orf',
'The type of random feature to use. One of "rff" (random fourier feature), '
'"orf" (orthogonal random feature).')
flags.DEFINE_float('gp_cov_ridge_penalty', 1.,
'Ridge penalty parameter for GP posterior covariance.')
flags.DEFINE_float(
'gp_cov_discount_factor', -1.,
'The discount factor to compute the moving average of precision matrix'
'across epochs. If -1 then compute the exact precision matrix within the '
'latest epoch.')
flags.DEFINE_float(
'gp_mean_field_factor', 25.,
'The tunable multiplicative factor used in the mean-field approximation '
'for the posterior mean of softmax Gaussian process. If -1 then use '
'posterior mode instead of posterior mean. See [2] for detail.')
# Accelerator flags.
flags.DEFINE_bool('use_gpu', False, 'Whether to run on GPU or otherwise TPU.')
flags.DEFINE_bool('use_bfloat16', False, 'Whether to use mixed precision.')
flags.DEFINE_integer('num_cores', 8, 'Number of TPU cores or number of GPUs.')
flags.DEFINE_string('tpu', None,
'Name of the TPU. Only used if use_gpu is False.')
FLAGS = flags.FLAGS
def main(argv):
del argv # unused arg
tf.io.gfile.makedirs(FLAGS.output_dir)
logging.info('Saving checkpoints at %s', FLAGS.output_dir)
tf.random.set_seed(FLAGS.seed)
if FLAGS.use_gpu:
logging.info('Use GPU')
strategy = tf.distribute.MirroredStrategy()
else:
logging.info('Use TPU at %s',
FLAGS.tpu if FLAGS.tpu is not None else 'local')
resolver = tf.distribute.cluster_resolver.TPUClusterResolver(tpu=FLAGS.tpu)
tf.config.experimental_connect_to_cluster(resolver)
tf.tpu.experimental.initialize_tpu_system(resolver)
strategy = tf.distribute.TPUStrategy(resolver)
ds_info = tfds.builder(FLAGS.dataset).info
batch_size = (FLAGS.per_core_batch_size * FLAGS.num_cores
// FLAGS.num_dropout_samples_training)
test_batch_size = FLAGS.per_core_batch_size * FLAGS.num_cores
num_classes = ds_info.features['label'].num_classes
aug_params = {
'augmix': FLAGS.augmix,
'aug_count': FLAGS.aug_count,
'augmix_depth': FLAGS.augmix_depth,
'augmix_prob_coeff': FLAGS.augmix_prob_coeff,
'augmix_width': FLAGS.augmix_width,
'ensemble_size': 1,
'mixup_alpha': FLAGS.mixup_alpha,
}
validation_proportion = 1. - FLAGS.train_proportion
use_validation_set = validation_proportion > 0.
train_dataset = data_utils.load_dataset(
split=tfds.Split.TRAIN,
name=FLAGS.dataset,
batch_size=batch_size,
use_bfloat16=FLAGS.use_bfloat16,
aug_params=aug_params,
validation_set=use_validation_set,
validation_proportion=validation_proportion)
train_sample_size = ds_info.splits[
'train'].num_examples * FLAGS.train_proportion
val_sample_size = ds_info.splits['train'].num_examples - train_sample_size
if use_validation_set:
validation_dataset = data_utils.load_dataset(
split=tfds.Split.VALIDATION,
name=FLAGS.dataset,
batch_size=batch_size,
use_bfloat16=FLAGS.use_bfloat16,
aug_params=aug_params,
validation_set=use_validation_set,
validation_proportion=validation_proportion)
validation_dataset = strategy.experimental_distribute_dataset(
validation_dataset)
    # Only the validation step count belongs here; steps_per_epoch is set below.
    steps_per_val = int(val_sample_size / batch_size)
clean_test_dataset = utils.load_dataset(
split=tfds.Split.TEST,
name=FLAGS.dataset,
batch_size=test_batch_size,
use_bfloat16=FLAGS.use_bfloat16)
  # train_sample_size (computed above) already honors FLAGS.train_proportion.
  steps_per_epoch = int(train_sample_size / batch_size)
steps_per_eval = ds_info.splits['test'].num_examples // batch_size
train_dataset = strategy.experimental_distribute_dataset(train_dataset)
test_datasets = {
'clean': strategy.experimental_distribute_dataset(clean_test_dataset),
}
if FLAGS.corruptions_interval > 0:
if FLAGS.dataset == 'cifar10':
load_c_dataset = utils.load_cifar10_c
else:
load_c_dataset = functools.partial(utils.load_cifar100_c,
path=FLAGS.cifar100_c_path)
corruption_types, max_intensity = utils.load_corrupted_test_info(
FLAGS.dataset)
for corruption in corruption_types:
for intensity in range(1, max_intensity + 1):
dataset = load_c_dataset(
corruption_name=corruption,
corruption_intensity=intensity,
batch_size=test_batch_size,
use_bfloat16=FLAGS.use_bfloat16)
test_datasets['{0}_{1}'.format(corruption, intensity)] = (
strategy.experimental_distribute_dataset(dataset))
if FLAGS.use_bfloat16:
policy = tf.keras.mixed_precision.experimental.Policy('mixed_bfloat16')
tf.keras.mixed_precision.experimental.set_policy(policy)
summary_writer = tf.summary.create_file_writer(
os.path.join(FLAGS.output_dir, 'summaries'))
with strategy.scope():
logging.info('Building ResNet model')
if FLAGS.use_spec_norm:
logging.info('Use Spectral Normalization with norm bound %.2f',
FLAGS.spec_norm_bound)
if FLAGS.use_gp_layer:
logging.info('Use GP layer with hidden units %d', FLAGS.gp_hidden_dim)
model = ub.models.wide_resnet_sngp(
input_shape=ds_info.features['image'].shape,
batch_size=batch_size,
depth=28,
width_multiplier=10,
num_classes=num_classes,
l2=FLAGS.l2,
use_mc_dropout=FLAGS.use_mc_dropout,
use_filterwise_dropout=FLAGS.use_filterwise_dropout,
dropout_rate=FLAGS.dropout_rate,
use_gp_layer=FLAGS.use_gp_layer,
gp_input_dim=FLAGS.gp_input_dim,
gp_hidden_dim=FLAGS.gp_hidden_dim,
gp_scale=FLAGS.gp_scale,
gp_bias=FLAGS.gp_bias,
gp_input_normalization=FLAGS.gp_input_normalization,
gp_random_feature_type=FLAGS.gp_random_feature_type,
gp_cov_discount_factor=FLAGS.gp_cov_discount_factor,
gp_cov_ridge_penalty=FLAGS.gp_cov_ridge_penalty,
use_spec_norm=FLAGS.use_spec_norm,
spec_norm_iteration=FLAGS.spec_norm_iteration,
spec_norm_bound=FLAGS.spec_norm_bound)
logging.info('Model input shape: %s', model.input_shape)
logging.info('Model output shape: %s', model.output_shape)
logging.info('Model number of weights: %s', model.count_params())
# Linearly scale learning rate and the decay epochs by vanilla settings.
base_lr = FLAGS.base_learning_rate * batch_size / 128
lr_decay_epochs = [(int(start_epoch_str) * FLAGS.train_epochs) // 200
for start_epoch_str in FLAGS.lr_decay_epochs]
lr_schedule = utils.LearningRateSchedule(
steps_per_epoch,
base_lr,
decay_ratio=FLAGS.lr_decay_ratio,
decay_epochs=lr_decay_epochs,
warmup_epochs=FLAGS.lr_warmup_epochs)
optimizer = tf.keras.optimizers.SGD(lr_schedule,
momentum=0.9,
nesterov=True)
metrics = {
'train/negative_log_likelihood': tf.keras.metrics.Mean(),
'train/accuracy': tf.keras.metrics.SparseCategoricalAccuracy(),
'train/loss': tf.keras.metrics.Mean(),
'train/ece': um.ExpectedCalibrationError(num_bins=FLAGS.num_bins),
'test/negative_log_likelihood': tf.keras.metrics.Mean(),
'test/accuracy': tf.keras.metrics.SparseCategoricalAccuracy(),
'test/ece': um.ExpectedCalibrationError(num_bins=FLAGS.num_bins),
'test/stddev': tf.keras.metrics.Mean(),
}
if use_validation_set:
metrics.update({
'val/negative_log_likelihood': tf.keras.metrics.Mean(),
'val/accuracy': tf.keras.metrics.SparseCategoricalAccuracy(),
'val/ece': um.ExpectedCalibrationError(num_bins=FLAGS.num_bins),
'val/stddev': tf.keras.metrics.Mean(),
})
if FLAGS.corruptions_interval > 0:
corrupt_metrics = {}
for intensity in range(1, max_intensity + 1):
for corruption in corruption_types:
dataset_name = '{0}_{1}'.format(corruption, intensity)
corrupt_metrics['test/nll_{}'.format(dataset_name)] = (
tf.keras.metrics.Mean())
corrupt_metrics['test/accuracy_{}'.format(dataset_name)] = (
tf.keras.metrics.SparseCategoricalAccuracy())
corrupt_metrics['test/ece_{}'.format(dataset_name)] = (
um.ExpectedCalibrationError(num_bins=FLAGS.num_bins))
corrupt_metrics['test/stddev_{}'.format(dataset_name)] = (
tf.keras.metrics.Mean())
checkpoint = tf.train.Checkpoint(model=model, optimizer=optimizer)
latest_checkpoint = tf.train.latest_checkpoint(FLAGS.output_dir)
initial_epoch = 0
if latest_checkpoint:
# checkpoint.restore must be within a strategy.scope() so that optimizer
# slot variables are mirrored.
checkpoint.restore(latest_checkpoint)
logging.info('Loaded checkpoint %s', latest_checkpoint)
initial_epoch = optimizer.iterations.numpy() // steps_per_epoch
@tf.function
def train_step(iterator, step):
"""Training StepFn."""
def step_fn(inputs, step):
"""Per-Replica StepFn."""
images, labels = inputs
if tf.equal(step, 0) and FLAGS.gp_cov_discount_factor < 0:
        # Reset the covariance estimator at the beginning of a new epoch.
model.layers[-1].reset_covariance_matrix()
if FLAGS.augmix and FLAGS.aug_count >= 1:
# Index 0 at augmix preprocessing is the unperturbed image.
images = images[:, 1, ...]
# This is for the case of combining AugMix and Mixup.
if FLAGS.mixup_alpha > 0:
labels = tf.split(labels, FLAGS.aug_count + 1, axis=0)[1]
images = tf.tile(images, [FLAGS.num_dropout_samples_training, 1, 1, 1])
if FLAGS.mixup_alpha > 0:
labels = tf.tile(labels, [FLAGS.num_dropout_samples_training, 1])
else:
labels = tf.tile(labels, [FLAGS.num_dropout_samples_training])
with tf.GradientTape() as tape:
logits = model(images, training=True)
if isinstance(logits, tuple):
# If model returns a tuple of (logits, covmat), extract logits
logits, _ = logits
if FLAGS.use_bfloat16:
logits = tf.cast(logits, tf.float32)
if FLAGS.mixup_alpha > 0:
negative_log_likelihood = tf.reduce_mean(
tf.keras.losses.categorical_crossentropy(labels,
logits,
from_logits=True))
else:
negative_log_likelihood = tf.reduce_mean(
tf.keras.losses.sparse_categorical_crossentropy(labels,
logits,
from_logits=True))
l2_loss = sum(model.losses)
loss = negative_log_likelihood + l2_loss
# Scale the loss given the TPUStrategy will reduce sum all gradients.
scaled_loss = loss / strategy.num_replicas_in_sync
grads = tape.gradient(scaled_loss, model.trainable_variables)
optimizer.apply_gradients(zip(grads, model.trainable_variables))
probs = tf.nn.softmax(logits)
if FLAGS.mixup_alpha > 0:
labels = tf.argmax(labels, axis=-1)
metrics['train/ece'].update_state(labels, probs)
metrics['train/loss'].update_state(loss)
metrics['train/negative_log_likelihood'].update_state(
negative_log_likelihood)
metrics['train/accuracy'].update_state(labels, logits)
strategy.run(step_fn, args=(next(iterator), step))
@tf.function
def test_step(iterator, dataset_name):
"""Evaluation StepFn."""
def step_fn(inputs):
"""Per-Replica StepFn."""
images, labels = inputs
logits_list = []
stddev_list = []
for _ in range(FLAGS.num_dropout_samples):
logits = model(images, training=False)
if isinstance(logits, tuple):
# If model returns a tuple of (logits, covmat), extract both
logits, covmat = logits
else:
covmat = tf.eye(FLAGS.per_core_batch_size)
if FLAGS.use_bfloat16:
logits = tf.cast(logits, tf.float32)
logits = ed.layers.utils.mean_field_logits(
logits, covmat, mean_field_factor=FLAGS.gp_mean_field_factor)
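        # mean_field_logits divides the logits by sqrt(1 + lambda * sigma^2),
        # where lambda is gp_mean_field_factor and sigma^2 is the diagonal of
        # the GP posterior covariance (reference [2] in the module docstring).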
stddev = tf.sqrt(tf.linalg.diag_part(covmat))
stddev_list.append(stddev)
logits_list.append(logits)
# Logits dimension is (num_samples, batch_size, num_classes).
logits_list = tf.stack(logits_list, axis=0)
stddev_list = tf.stack(stddev_list, axis=0)
stddev = tf.reduce_mean(stddev_list, axis=0)
probs_list = tf.nn.softmax(logits_list)
probs = tf.reduce_mean(probs_list, axis=0)
labels_broadcasted = tf.broadcast_to(
labels, [FLAGS.num_dropout_samples, labels.shape[0]])
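      # NLL of the dropout-sample-averaged predictive distribution,
      # -log((1/S) * sum_s p_s), computed stably via logsumexp over samples.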
log_likelihoods = -tf.keras.losses.sparse_categorical_crossentropy(
labels_broadcasted, logits_list, from_logits=True)
negative_log_likelihood = tf.reduce_mean(
-tf.reduce_logsumexp(log_likelihoods, axis=[0]) +
tf.math.log(float(FLAGS.num_dropout_samples)))
if dataset_name == 'clean':
metrics['test/negative_log_likelihood'].update_state(
negative_log_likelihood)
metrics['test/accuracy'].update_state(labels, probs)
metrics['test/ece'].update_state(labels, probs)
metrics['test/stddev'].update_state(stddev)
elif dataset_name == 'val':
metrics['val/negative_log_likelihood'].update_state(
negative_log_likelihood)
metrics['val/accuracy'].update_state(labels, probs)
metrics['val/ece'].update_state(labels, probs)
metrics['val/stddev'].update_state(stddev)
else:
corrupt_metrics['test/nll_{}'.format(dataset_name)].update_state(
negative_log_likelihood)
corrupt_metrics['test/accuracy_{}'.format(dataset_name)].update_state(
labels, probs)
corrupt_metrics['test/ece_{}'.format(dataset_name)].update_state(
labels, probs)
corrupt_metrics['test/stddev_{}'.format(dataset_name)].update_state(
stddev)
strategy.run(step_fn, args=(next(iterator),))
metrics.update({'test/ms_per_example': tf.keras.metrics.Mean()})
step_variable = tf.Variable(0, dtype=tf.int32)
train_iterator = iter(train_dataset)
start_time = time.time()
for epoch in range(initial_epoch, FLAGS.train_epochs):
logging.info('Starting to run epoch: %s', epoch)
for step in range(steps_per_epoch):
step_variable.assign(step)
# Pass `step` as a tf.Variable to train_step to prevent the tf.function
# train_step() re-compiling itself at each function call.
train_step(train_iterator, step_variable)
current_step = epoch * steps_per_epoch + (step + 1)
max_steps = steps_per_epoch * FLAGS.train_epochs
time_elapsed = time.time() - start_time
steps_per_sec = float(current_step) / time_elapsed
eta_seconds = (max_steps - current_step) / steps_per_sec
message = ('{:.1%} completion: epoch {:d}/{:d}. {:.1f} steps/s. '
'ETA: {:.0f} min. Time elapsed: {:.0f} min'.format(
current_step / max_steps,
epoch + 1,
FLAGS.train_epochs,
steps_per_sec,
eta_seconds / 60,
time_elapsed / 60))
if step % 20 == 0:
logging.info(message)
datasets_to_evaluate = {'clean': test_datasets['clean']}
if use_validation_set:
datasets_to_evaluate['val'] = validation_dataset
if (FLAGS.corruptions_interval > 0 and
(epoch + 1) % FLAGS.corruptions_interval == 0):
datasets_to_evaluate = test_datasets
for dataset_name, test_dataset in datasets_to_evaluate.items():
test_iterator = iter(test_dataset)
logging.info('Testing on dataset %s', dataset_name)
      # Use a local so the 'val' pass doesn't clobber steps_per_eval for the
      # datasets evaluated after it.
      num_eval_steps = steps_per_val if dataset_name == 'val' else steps_per_eval
      for step in range(num_eval_steps):
if step % 20 == 0:
logging.info('Starting to run eval step %s of epoch: %s', step,
epoch)
test_start_time = time.time()
test_step(test_iterator, dataset_name)
ms_per_example = (time.time() - test_start_time) * 1e6 / batch_size
metrics['test/ms_per_example'].update_state(ms_per_example)
logging.info('Done with testing on %s', dataset_name)
corrupt_results = {}
if (FLAGS.corruptions_interval > 0 and
(epoch + 1) % FLAGS.corruptions_interval == 0):
corrupt_results = utils.aggregate_corrupt_metrics(corrupt_metrics,
corruption_types,
max_intensity)
logging.info('Train Loss: %.4f, Accuracy: %.2f%%',
metrics['train/loss'].result(),
metrics['train/accuracy'].result() * 100)
if use_validation_set:
logging.info('Val NLL: %.4f, Accuracy: %.2f%%',
metrics['val/negative_log_likelihood'].result(),
metrics['val/accuracy'].result() * 100)
logging.info('Test NLL: %.4f, Accuracy: %.2f%%',
metrics['test/negative_log_likelihood'].result(),
metrics['test/accuracy'].result() * 100)
total_results = {name: metric.result() for name, metric in metrics.items()}
total_results.update(corrupt_results)
with summary_writer.as_default():
for name, result in total_results.items():
tf.summary.scalar(name, result, step=epoch + 1)
for metric in metrics.values():
metric.reset_states()
if (FLAGS.checkpoint_interval > 0 and
(epoch + 1) % FLAGS.checkpoint_interval == 0):
checkpoint_name = checkpoint.save(
os.path.join(FLAGS.output_dir, 'checkpoint'))
logging.info('Saved checkpoint to %s', checkpoint_name)
final_checkpoint_name = checkpoint.save(
os.path.join(FLAGS.output_dir, 'checkpoint'))
logging.info('Saved last checkpoint to %s', final_checkpoint_name)
final_save_name = os.path.join(FLAGS.output_dir, 'model')
model.save(final_save_name)
logging.info('Saved model to %s', final_save_name)
if __name__ == '__main__':
app.run(main)
| [
"[email protected]"
]
| |
78568ca4885a42d8f3f9605cd773cdac043a3fda | 27317b3adb1ccd99afa86cb931d2d14e23b9b175 | /bcs-app/backend/apps/cluster/migrations/0011_auto_20180514_1805.py | 7246c2cb4817e9db1fb1c09afade48a95c1a0502 | [
"BSD-3-Clause",
"LicenseRef-scancode-unicode",
"ICU",
"LicenseRef-scancode-unknown-license-reference",
"Artistic-2.0",
"Zlib",
"LicenseRef-scancode-openssl",
"NAIST-2003",
"ISC",
"NTP",
"BSL-1.0",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"MIT"
]
| permissive | freyzheng/bk-bcs-saas | cf5a6c4ab1c20959bda1362bc31de7884451acd7 | 96373cda9d87038aceb0b4858ce89e7873c8e149 | refs/heads/master | 2021-07-05T04:11:08.555930 | 2020-09-22T12:26:37 | 2020-09-22T12:26:37 | 201,279,048 | 0 | 1 | NOASSERTION | 2020-09-16T03:07:16 | 2019-08-08T14:48:27 | Python | UTF-8 | Python | false | false | 1,794 | py | # -*- coding: utf-8 -*-
#
# Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available.
# Copyright (C) 2017-2019 THL A29 Limited, a Tencent company. All rights reserved.
# Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://opensource.org/licenses/MIT
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# Generated by Django 1.11.5 on 2018-05-14 10:05
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cluster', '0010_auto_20180224_2058'),
]
operations = [
migrations.AlterField(
model_name='clusterinstalllog',
name='oper_type',
field=models.CharField(choices=[('initialize', '集群初始化'), ('reinstall', '集群重新初始化'), ('initial_check', '前置检查'), ('removing', '删除集群'), ('so_initial', 'SO 机器初始化')], default='initialize', max_length=16),
),
migrations.AlterField(
model_name='nodeupdatelog',
name='oper_type',
field=models.CharField(choices=[('initialize', '节点初始化'), ('reinstall', '节点重新初始化'), ('removing', '节点移除'), ('initial_check', '前置检查'), ('so_initial', 'SO 机器初始化')], default='initialize', max_length=16),
),
]
| [
"[email protected]"
]
| |
96e1b7d4ba508da9f2c0883b2ba7a362efde32d5 | b9878c92b857f73ff0452fc51c822cfc9fa4dc1c | /watson_machine_learning_client/libs/repo/util/base_singleton.py | 4a9943aa7d08aaa332d2f94d4ddaafc4ea0a0726 | []
| no_license | DavidCastilloAlvarado/WMLC_mod | 35f5d84990c59b623bfdd27369fe7461c500e0a5 | f2673b9c77bd93c0e017831ee4994f6d9789d9a1 | refs/heads/master | 2022-12-08T02:54:31.000267 | 2020-09-02T15:49:21 | 2020-09-02T15:49:21 | 292,322,284 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | class _Singleton(type):
""" A metaclass that creates a Singleton base class when called. """
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(_Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
class BaseSingleton(_Singleton('SingletonMeta', (object,), {})):
    pass
| [
"[email protected]"
]
| |
355a8a6a7493e09e033a44bc139d2aa4314b07e5 | f27c49458bde84048e6008da8c52ca0f1ae711ce | /code/05-interactive-code/m-n-m/guessinggame.py | f919ba7aeb09373c7ec432efe4f22638bff7f3d2 | [
"MIT"
]
| permissive | talkpython/python-for-absolute-beginners-course | 54b0f48b5edbf7755de6ca688a8e737ba16dc2fc | 1930dab0a91526863dc92c3e05fe3c7ec63480e1 | refs/heads/master | 2022-11-24T03:02:32.759177 | 2022-11-08T14:30:08 | 2022-11-08T14:30:08 | 225,979,578 | 2,287 | 1,059 | MIT | 2022-11-07T19:45:15 | 2019-12-05T00:02:31 | Python | UTF-8 | Python | false | false | 656 | py | import random
print("------------------------------")
print(" M&M guessing game!")
print("------------------------------")
print("Guess the number of M&Ms and you get lunch on the house!")
print()
mm_count = random.randint(1, 100)
attempt_limit = 5
attempts = 0
while attempts < attempt_limit:
guess_text = input("How many M&Ms are in the jar? ")
guess = int(guess_text)
attempts += 1
if mm_count == guess:
print(f"You got a free lunch! It was {guess}.")
break
elif guess < mm_count:
print("Sorry, that's too LOW!")
else:
print("That's too HIGH!")
print(f"Bye, you're done in {attempts}!")
| [
"[email protected]"
]
| |
5f347e6b6fc31503d5eb071e29e147c5e03c8963 | c94f888541c0c430331110818ed7f3d6b27b788a | /billing/python/antchain_sdk_billing/models.py | a84d6d9e51b2efd4b9c0729e85e28c8c46ab9f42 | [
"Apache-2.0",
"MIT"
]
| permissive | alipay/antchain-openapi-prod-sdk | 48534eb78878bd708a0c05f2fe280ba9c41d09ad | 5269b1f55f1fc19cf0584dc3ceea821d3f8f8632 | refs/heads/master | 2023-09-03T07:12:04.166131 | 2023-09-01T08:56:15 | 2023-09-01T08:56:15 | 275,521,177 | 9 | 10 | MIT | 2021-03-25T02:35:20 | 2020-06-28T06:22:14 | PHP | UTF-8 | Python | false | false | 55,021 | py | # -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.model import TeaModel
class Config(TeaModel):
"""
Model for initing client
"""
def __init__(
self,
access_key_id: str = None,
access_key_secret: str = None,
security_token: str = None,
protocol: str = None,
read_timeout: int = None,
connect_timeout: int = None,
http_proxy: str = None,
https_proxy: str = None,
endpoint: str = None,
no_proxy: str = None,
max_idle_conns: int = None,
user_agent: str = None,
socks_5proxy: str = None,
socks_5net_work: str = None,
max_idle_time_millis: int = None,
keep_alive_duration_millis: int = None,
max_requests: int = None,
max_requests_per_host: int = None,
):
# accesskey id
self.access_key_id = access_key_id
# accesskey secret
self.access_key_secret = access_key_secret
# security token
self.security_token = security_token
# http protocol
self.protocol = protocol
# read timeout
self.read_timeout = read_timeout
# connect timeout
self.connect_timeout = connect_timeout
# http proxy
self.http_proxy = http_proxy
# https proxy
self.https_proxy = https_proxy
# endpoint
self.endpoint = endpoint
# proxy white list
self.no_proxy = no_proxy
# max idle conns
self.max_idle_conns = max_idle_conns
# user agent
self.user_agent = user_agent
# socks5 proxy
self.socks_5proxy = socks_5proxy
# socks5 network
self.socks_5net_work = socks_5net_work
        # max idle time of keep-alive connections (milliseconds)
        self.max_idle_time_millis = max_idle_time_millis
        # max lifetime of keep-alive connections (milliseconds)
        self.keep_alive_duration_millis = keep_alive_duration_millis
        # max number of requests (total cap on keep-alive connections)
        self.max_requests = max_requests
        # max requests per target host (per-host cap on keep-alive connections)
        self.max_requests_per_host = max_requests_per_host
def validate(self):
pass
def to_map(self):
result = dict()
if self.access_key_id is not None:
result['accessKeyId'] = self.access_key_id
if self.access_key_secret is not None:
result['accessKeySecret'] = self.access_key_secret
if self.security_token is not None:
result['securityToken'] = self.security_token
if self.protocol is not None:
result['protocol'] = self.protocol
if self.read_timeout is not None:
result['readTimeout'] = self.read_timeout
if self.connect_timeout is not None:
result['connectTimeout'] = self.connect_timeout
if self.http_proxy is not None:
result['httpProxy'] = self.http_proxy
if self.https_proxy is not None:
result['httpsProxy'] = self.https_proxy
if self.endpoint is not None:
result['endpoint'] = self.endpoint
if self.no_proxy is not None:
result['noProxy'] = self.no_proxy
if self.max_idle_conns is not None:
result['maxIdleConns'] = self.max_idle_conns
if self.user_agent is not None:
result['userAgent'] = self.user_agent
if self.socks_5proxy is not None:
result['socks5Proxy'] = self.socks_5proxy
if self.socks_5net_work is not None:
result['socks5NetWork'] = self.socks_5net_work
if self.max_idle_time_millis is not None:
result['maxIdleTimeMillis'] = self.max_idle_time_millis
if self.keep_alive_duration_millis is not None:
result['keepAliveDurationMillis'] = self.keep_alive_duration_millis
if self.max_requests is not None:
result['maxRequests'] = self.max_requests
if self.max_requests_per_host is not None:
result['maxRequestsPerHost'] = self.max_requests_per_host
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('accessKeyId') is not None:
self.access_key_id = m.get('accessKeyId')
if m.get('accessKeySecret') is not None:
self.access_key_secret = m.get('accessKeySecret')
if m.get('securityToken') is not None:
self.security_token = m.get('securityToken')
if m.get('protocol') is not None:
self.protocol = m.get('protocol')
if m.get('readTimeout') is not None:
self.read_timeout = m.get('readTimeout')
if m.get('connectTimeout') is not None:
self.connect_timeout = m.get('connectTimeout')
if m.get('httpProxy') is not None:
self.http_proxy = m.get('httpProxy')
if m.get('httpsProxy') is not None:
self.https_proxy = m.get('httpsProxy')
if m.get('endpoint') is not None:
self.endpoint = m.get('endpoint')
if m.get('noProxy') is not None:
self.no_proxy = m.get('noProxy')
if m.get('maxIdleConns') is not None:
self.max_idle_conns = m.get('maxIdleConns')
if m.get('userAgent') is not None:
self.user_agent = m.get('userAgent')
if m.get('socks5Proxy') is not None:
self.socks_5proxy = m.get('socks5Proxy')
if m.get('socks5NetWork') is not None:
self.socks_5net_work = m.get('socks5NetWork')
if m.get('maxIdleTimeMillis') is not None:
self.max_idle_time_millis = m.get('maxIdleTimeMillis')
if m.get('keepAliveDurationMillis') is not None:
self.keep_alive_duration_millis = m.get('keepAliveDurationMillis')
if m.get('maxRequests') is not None:
self.max_requests = m.get('maxRequests')
if m.get('maxRequestsPerHost') is not None:
self.max_requests_per_host = m.get('maxRequestsPerHost')
return self
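# Illustrative round trip through the maps above (keys are the camelCase wire
# names; the values are placeholders, not real credentials):
#   cfg = Config().from_map({'accessKeyId': 'ak', 'accessKeySecret': 'sk'})
#   assert cfg.to_map()['accessKeyId'] == 'ak'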
class GetAccountBalanceRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
from_channel: str = None,
tenant: str = None,
):
        # authorization token under the OAuth mode
        self.auth_token = auth_token
        # channel source: DEFAULT official site, ANT_OPEN_SERVICE_MARKET open-platform service market
        self.from_channel = from_channel
        # tenant id: unique external user id, e.g. an Ant passport id
        self.tenant = tenant
def validate(self):
self.validate_required(self.from_channel, 'from_channel')
self.validate_required(self.tenant, 'tenant')
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.from_channel is not None:
result['from_channel'] = self.from_channel
if self.tenant is not None:
result['tenant'] = self.tenant
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('from_channel') is not None:
self.from_channel = m.get('from_channel')
if m.get('tenant') is not None:
self.tenant = m.get('tenant')
return self
class GetAccountBalanceResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
account_balance: str = None,
available_balance: str = None,
currency_value: str = None,
):
        # unique request ID, used for tracing and troubleshooting
        self.req_msg_id = req_msg_id
        # result code; OK generally means the call succeeded
        self.result_code = result_code
        # textual description of the error, if any
        self.result_msg = result_msg
        # account balance
        self.account_balance = account_balance
        # available balance
        self.available_balance = available_balance
        # currency
        self.currency_value = currency_value
def validate(self):
pass
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.account_balance is not None:
result['account_balance'] = self.account_balance
if self.available_balance is not None:
result['available_balance'] = self.available_balance
if self.currency_value is not None:
result['currency_value'] = self.currency_value
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('account_balance') is not None:
self.account_balance = m.get('account_balance')
if m.get('available_balance') is not None:
self.available_balance = m.get('available_balance')
if m.get('currency_value') is not None:
self.currency_value = m.get('currency_value')
return self
class ChargeAccountBalanceRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
charge_amt: str = None,
currency_value: str = None,
from_channel: str = None,
return_url: str = None,
):
        # Authorization token in OAuth mode
        self.auth_token = auth_token
        # "10.11" means 10.11 yuan; the smallest unit is one cent (two decimal places)
        self.charge_amt = charge_amt
        # Currency; the Alipay ecosystem usually stores numeric currency codes (156/840/...), where 156 is CNY
        self.currency_value = currency_value
        # Channel source: ANTCLOUD_OFFICIAL (official website), ANT_OPEN_SERVICE_MARKET (open platform - service market)
        self.from_channel = from_channel
        # Callback URL invoked after the cashier is brought up and the recharge completes
        self.return_url = return_url
def validate(self):
self.validate_required(self.charge_amt, 'charge_amt')
self.validate_required(self.currency_value, 'currency_value')
self.validate_required(self.from_channel, 'from_channel')
self.validate_required(self.return_url, 'return_url')
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.charge_amt is not None:
result['charge_amt'] = self.charge_amt
if self.currency_value is not None:
result['currency_value'] = self.currency_value
if self.from_channel is not None:
result['from_channel'] = self.from_channel
if self.return_url is not None:
result['return_url'] = self.return_url
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('charge_amt') is not None:
self.charge_amt = m.get('charge_amt')
if m.get('currency_value') is not None:
self.currency_value = m.get('currency_value')
if m.get('from_channel') is not None:
self.from_channel = m.get('from_channel')
if m.get('return_url') is not None:
self.return_url = m.get('return_url')
return self
class ChargeAccountBalanceResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
alipay_gateway: str = None,
input_charset: str = None,
notify_url: str = None,
out_trade_no: str = None,
partner: str = None,
payment_type: str = None,
return_url: str = None,
seller_id: str = None,
service: str = None,
sign: str = None,
sign_type: str = None,
subject: str = None,
total_fee: str = None,
):
        # Unique request ID, used for tracing and troubleshooting
        self.req_msg_id = req_msg_id
        # Result code; OK generally means the call succeeded
        self.result_code = result_code
        # Text description of the error, if any
        self.result_msg = result_msg
        # Alipay gateway
        self.alipay_gateway = alipay_gateway
        # Character encoding format (_input_charset)
        self.input_charset = input_charset
        # Path of the server-side asynchronous notification page
        self.notify_url = notify_url
        # Unique order number on the merchant website
        self.out_trade_no = out_trade_no
        # Payee PID
        self.partner = partner
        # Payment type
        self.payment_type = payment_type
        # Path of the synchronous notification page the cashier redirects back to
        self.return_url = return_url
        # Payee ID
        self.seller_id = seller_id
        # Name of the invoked interface
        self.service = service
        # Signature
        self.sign = sign
        # Signature type
        self.sign_type = sign_type
        # Item name
        self.subject = subject
        # Transaction amount
        self.total_fee = total_fee
def validate(self):
pass
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.alipay_gateway is not None:
result['alipay_gateway'] = self.alipay_gateway
if self.input_charset is not None:
result['input_charset'] = self.input_charset
if self.notify_url is not None:
result['notify_url'] = self.notify_url
if self.out_trade_no is not None:
result['out_trade_no'] = self.out_trade_no
if self.partner is not None:
result['partner'] = self.partner
if self.payment_type is not None:
result['payment_type'] = self.payment_type
if self.return_url is not None:
result['return_url'] = self.return_url
if self.seller_id is not None:
result['seller_id'] = self.seller_id
if self.service is not None:
result['service'] = self.service
if self.sign is not None:
result['sign'] = self.sign
if self.sign_type is not None:
result['sign_type'] = self.sign_type
if self.subject is not None:
result['subject'] = self.subject
if self.total_fee is not None:
result['total_fee'] = self.total_fee
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('alipay_gateway') is not None:
self.alipay_gateway = m.get('alipay_gateway')
if m.get('input_charset') is not None:
self.input_charset = m.get('input_charset')
if m.get('notify_url') is not None:
self.notify_url = m.get('notify_url')
if m.get('out_trade_no') is not None:
self.out_trade_no = m.get('out_trade_no')
if m.get('partner') is not None:
self.partner = m.get('partner')
if m.get('payment_type') is not None:
self.payment_type = m.get('payment_type')
if m.get('return_url') is not None:
self.return_url = m.get('return_url')
if m.get('seller_id') is not None:
self.seller_id = m.get('seller_id')
if m.get('service') is not None:
self.service = m.get('service')
if m.get('sign') is not None:
self.sign = m.get('sign')
if m.get('sign_type') is not None:
self.sign_type = m.get('sign_type')
if m.get('subject') is not None:
self.subject = m.get('subject')
if m.get('total_fee') is not None:
self.total_fee = m.get('total_fee')
return self
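# Illustrative sketch (assumption about the Tea runtime: TeaModel.validate_required
# raises when a required field is missing). Calling validate() before sending a
# charge request surfaces incomplete input early; the values below are made up.
def _demo_charge_request_validation():
    req = ChargeAccountBalanceRequest(charge_amt='10.11', currency_value='156')
    try:
        req.validate()  # from_channel and return_url are still None
    except Exception as exc:  # broad on purpose; the runtime's exception type may vary
        print('rejected before sending:', exc)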
class GetAccountUserRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
alipay_user_id: str = None,
):
        # Authorization token in OAuth mode
        self.auth_token = auth_token
        # Alipay hosted sub-account ID
        self.alipay_user_id = alipay_user_id
def validate(self):
self.validate_required(self.alipay_user_id, 'alipay_user_id')
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.alipay_user_id is not None:
result['alipay_user_id'] = self.alipay_user_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('alipay_user_id') is not None:
self.alipay_user_id = m.get('alipay_user_id')
return self
class GetAccountUserResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
tenant_id: str = None,
login_name: str = None,
real_name: str = None,
mobile_num: str = None,
firm_name: str = None,
user_type: str = None,
):
        # Unique request ID, used for tracing and troubleshooting
        self.req_msg_id = req_msg_id
        # Result code; OK generally means the call succeeded
        self.result_code = result_code
        # Text description of the error, if any
        self.result_msg = result_msg
        # Intelligent-technology platform user_id
        self.tenant_id = tenant_id
        # Login name
        self.login_name = login_name
        # The user's real name
        self.real_name = real_name
        # Mobile phone number
        self.mobile_num = mobile_num
        # Company name
        self.firm_name = firm_name
        # Account type, enterprise or individual:
        # ENTERPRISE("1", "enterprise account"),
        # INDIVIDUAL("2", "individual account")
        self.user_type = user_type
def validate(self):
pass
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.tenant_id is not None:
result['tenant_id'] = self.tenant_id
if self.login_name is not None:
result['login_name'] = self.login_name
if self.real_name is not None:
result['real_name'] = self.real_name
if self.mobile_num is not None:
result['mobile_num'] = self.mobile_num
if self.firm_name is not None:
result['firm_name'] = self.firm_name
if self.user_type is not None:
result['user_type'] = self.user_type
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('tenant_id') is not None:
self.tenant_id = m.get('tenant_id')
if m.get('login_name') is not None:
self.login_name = m.get('login_name')
if m.get('real_name') is not None:
self.real_name = m.get('real_name')
if m.get('mobile_num') is not None:
self.mobile_num = m.get('mobile_num')
if m.get('firm_name') is not None:
self.firm_name = m.get('firm_name')
if m.get('user_type') is not None:
self.user_type = m.get('user_type')
return self
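# Illustrative helper (not part of the generated SDK): user_type arrives as the code
# documented above ("1" enterprise, "2" individual), so callers typically map it to
# a label before display.
_USER_TYPE_LABELS = {'1': 'enterprise account', '2': 'individual account'}
def _describe_account_user(resp: GetAccountUserResponse) -> str:
    label = _USER_TYPE_LABELS.get(resp.user_type, 'unknown account type')
    return '{} ({})'.format(resp.login_name, label)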
class CreateAcfeewebInstanceRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
id: str = None,
domain_code: str = None,
domain_name: str = None,
domain_version: str = None,
product_code: str = None,
product_name: str = None,
service_code: str = None,
service_name: str = None,
offer_code: str = None,
offer_name: str = None,
oms_data: str = None,
oms_biz_no: str = None,
verification_cache: str = None,
verification_url: str = None,
bpms_id: str = None,
config_data: str = None,
status: str = None,
creator: str = None,
modifor: str = None,
):
        # Authorization token in OAuth mode
        self.auth_token = auth_token
        # Primary key ID of the billing verification record
        self.id = id
        # Metering domain code
        self.domain_code = domain_code
        # Metering domain name
        self.domain_name = domain_name
        # Metering domain version
        self.domain_version = domain_version
        # Business product code
        self.product_code = product_code
        # Business product name
        self.product_name = product_name
        # Channel product code
        self.service_code = service_code
        # Channel product name
        self.service_name = service_name
        # Offer code
        self.offer_code = offer_code
        # Offer name
        self.offer_name = offer_name
        # Metering data to verify
        self.oms_data = oms_data
        # Business idempotency key of the metering data
        self.oms_biz_no = oms_biz_no
        # Cached message payload
        self.verification_cache = verification_cache
        # Cache URL
        self.verification_url = verification_url
        # Approval flow ID
        self.bpms_id = bpms_id
        # Configuration payload
        self.config_data = config_data
        # Status
        self.status = status
        # Creator
        self.creator = creator
        # Last modifier
        self.modifor = modifor
def validate(self):
self.validate_required(self.id, 'id')
self.validate_required(self.domain_code, 'domain_code')
self.validate_required(self.domain_name, 'domain_name')
self.validate_required(self.domain_version, 'domain_version')
self.validate_required(self.product_code, 'product_code')
self.validate_required(self.product_name, 'product_name')
self.validate_required(self.service_code, 'service_code')
self.validate_required(self.service_name, 'service_name')
self.validate_required(self.offer_code, 'offer_code')
self.validate_required(self.offer_name, 'offer_name')
self.validate_required(self.oms_data, 'oms_data')
self.validate_required(self.oms_biz_no, 'oms_biz_no')
self.validate_required(self.verification_cache, 'verification_cache')
self.validate_required(self.verification_url, 'verification_url')
self.validate_required(self.bpms_id, 'bpms_id')
self.validate_required(self.config_data, 'config_data')
self.validate_required(self.status, 'status')
self.validate_required(self.creator, 'creator')
self.validate_required(self.modifor, 'modifor')
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.id is not None:
result['id'] = self.id
if self.domain_code is not None:
result['domain_code'] = self.domain_code
if self.domain_name is not None:
result['domain_name'] = self.domain_name
if self.domain_version is not None:
result['domain_version'] = self.domain_version
if self.product_code is not None:
result['product_code'] = self.product_code
if self.product_name is not None:
result['product_name'] = self.product_name
if self.service_code is not None:
result['service_code'] = self.service_code
if self.service_name is not None:
result['service_name'] = self.service_name
if self.offer_code is not None:
result['offer_code'] = self.offer_code
if self.offer_name is not None:
result['offer_name'] = self.offer_name
if self.oms_data is not None:
result['oms_data'] = self.oms_data
if self.oms_biz_no is not None:
result['oms_biz_no'] = self.oms_biz_no
if self.verification_cache is not None:
result['verification_cache'] = self.verification_cache
if self.verification_url is not None:
result['verification_url'] = self.verification_url
if self.bpms_id is not None:
result['bpms_id'] = self.bpms_id
if self.config_data is not None:
result['config_data'] = self.config_data
if self.status is not None:
result['status'] = self.status
if self.creator is not None:
result['creator'] = self.creator
if self.modifor is not None:
result['modifor'] = self.modifor
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('id') is not None:
self.id = m.get('id')
if m.get('domain_code') is not None:
self.domain_code = m.get('domain_code')
if m.get('domain_name') is not None:
self.domain_name = m.get('domain_name')
if m.get('domain_version') is not None:
self.domain_version = m.get('domain_version')
if m.get('product_code') is not None:
self.product_code = m.get('product_code')
if m.get('product_name') is not None:
self.product_name = m.get('product_name')
if m.get('service_code') is not None:
self.service_code = m.get('service_code')
if m.get('service_name') is not None:
self.service_name = m.get('service_name')
if m.get('offer_code') is not None:
self.offer_code = m.get('offer_code')
if m.get('offer_name') is not None:
self.offer_name = m.get('offer_name')
if m.get('oms_data') is not None:
self.oms_data = m.get('oms_data')
if m.get('oms_biz_no') is not None:
self.oms_biz_no = m.get('oms_biz_no')
if m.get('verification_cache') is not None:
self.verification_cache = m.get('verification_cache')
if m.get('verification_url') is not None:
self.verification_url = m.get('verification_url')
if m.get('bpms_id') is not None:
self.bpms_id = m.get('bpms_id')
if m.get('config_data') is not None:
self.config_data = m.get('config_data')
if m.get('status') is not None:
self.status = m.get('status')
if m.get('creator') is not None:
self.creator = m.get('creator')
if m.get('modifor') is not None:
self.modifor = m.get('modifor')
return self
class CreateAcfeewebInstanceResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
process_instance_id: str = None,
):
        # Unique request ID, used for tracing and troubleshooting
        self.req_msg_id = req_msg_id
        # Result code; OK generally means the call succeeded
        self.result_code = result_code
        # Text description of the error, if any
        self.result_msg = result_msg
        # Approval flow instance ID
        self.process_instance_id = process_instance_id
def validate(self):
pass
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.process_instance_id is not None:
result['process_instance_id'] = self.process_instance_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('process_instance_id') is not None:
self.process_instance_id = m.get('process_instance_id')
return self
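# Illustrative sketch (not part of the generated SDK): responses share the
# req_msg_id/result_code/result_msg envelope, so callers usually branch on
# result_code == 'OK' and keep result_msg for diagnostics.
def _extract_process_instance_id(resp: CreateAcfeewebInstanceResponse) -> str:
    if resp.result_code != 'OK':
        raise RuntimeError('call failed ({}): {}'.format(resp.result_code, resp.result_msg))
    return resp.process_instance_id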
class UpdateAcfeewebStateRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
id: str = None,
domain_code: str = None,
domain_name: str = None,
domain_version: str = None,
product_code: str = None,
product_name: str = None,
service_code: str = None,
service_name: str = None,
offer_code: str = None,
offer_name: str = None,
oms_data: str = None,
oms_biz_no: str = None,
verification_cache: str = None,
verification_url: str = None,
bpms_id: str = None,
config_data: str = None,
status: str = None,
creator: str = None,
modifor: str = None,
):
        # Authorization token in OAuth mode
        self.auth_token = auth_token
        # Primary key ID of the billing verification record
        self.id = id
        # Metering domain code
        self.domain_code = domain_code
        # Metering domain name
        self.domain_name = domain_name
        # Metering domain version
        self.domain_version = domain_version
        # Business product code
        self.product_code = product_code
        # Business product name
        self.product_name = product_name
        # Channel product code
        self.service_code = service_code
        # Channel product name
        self.service_name = service_name
        # Offer code
        self.offer_code = offer_code
        # Offer name
        self.offer_name = offer_name
        # Metering data to verify
        self.oms_data = oms_data
        # Business idempotency key of the metering data
        self.oms_biz_no = oms_biz_no
        # Cached message payload
        self.verification_cache = verification_cache
        # Cache URL
        self.verification_url = verification_url
        # Approval flow ID
        self.bpms_id = bpms_id
        # Configuration payload
        self.config_data = config_data
        # Status
        self.status = status
        # Creator
        self.creator = creator
        # Last modifier
        self.modifor = modifor
def validate(self):
self.validate_required(self.id, 'id')
self.validate_required(self.domain_code, 'domain_code')
self.validate_required(self.domain_name, 'domain_name')
self.validate_required(self.domain_version, 'domain_version')
self.validate_required(self.product_code, 'product_code')
self.validate_required(self.product_name, 'product_name')
self.validate_required(self.service_code, 'service_code')
self.validate_required(self.service_name, 'service_name')
self.validate_required(self.offer_code, 'offer_code')
self.validate_required(self.offer_name, 'offer_name')
self.validate_required(self.oms_data, 'oms_data')
self.validate_required(self.oms_biz_no, 'oms_biz_no')
self.validate_required(self.verification_cache, 'verification_cache')
self.validate_required(self.verification_url, 'verification_url')
self.validate_required(self.bpms_id, 'bpms_id')
self.validate_required(self.config_data, 'config_data')
self.validate_required(self.status, 'status')
self.validate_required(self.creator, 'creator')
self.validate_required(self.modifor, 'modifor')
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.id is not None:
result['id'] = self.id
if self.domain_code is not None:
result['domain_code'] = self.domain_code
if self.domain_name is not None:
result['domain_name'] = self.domain_name
if self.domain_version is not None:
result['domain_version'] = self.domain_version
if self.product_code is not None:
result['product_code'] = self.product_code
if self.product_name is not None:
result['product_name'] = self.product_name
if self.service_code is not None:
result['service_code'] = self.service_code
if self.service_name is not None:
result['service_name'] = self.service_name
if self.offer_code is not None:
result['offer_code'] = self.offer_code
if self.offer_name is not None:
result['offer_name'] = self.offer_name
if self.oms_data is not None:
result['oms_data'] = self.oms_data
if self.oms_biz_no is not None:
result['oms_biz_no'] = self.oms_biz_no
if self.verification_cache is not None:
result['verification_cache'] = self.verification_cache
if self.verification_url is not None:
result['verification_url'] = self.verification_url
if self.bpms_id is not None:
result['bpms_id'] = self.bpms_id
if self.config_data is not None:
result['config_data'] = self.config_data
if self.status is not None:
result['status'] = self.status
if self.creator is not None:
result['creator'] = self.creator
if self.modifor is not None:
result['modifor'] = self.modifor
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('id') is not None:
self.id = m.get('id')
if m.get('domain_code') is not None:
self.domain_code = m.get('domain_code')
if m.get('domain_name') is not None:
self.domain_name = m.get('domain_name')
if m.get('domain_version') is not None:
self.domain_version = m.get('domain_version')
if m.get('product_code') is not None:
self.product_code = m.get('product_code')
if m.get('product_name') is not None:
self.product_name = m.get('product_name')
if m.get('service_code') is not None:
self.service_code = m.get('service_code')
if m.get('service_name') is not None:
self.service_name = m.get('service_name')
if m.get('offer_code') is not None:
self.offer_code = m.get('offer_code')
if m.get('offer_name') is not None:
self.offer_name = m.get('offer_name')
if m.get('oms_data') is not None:
self.oms_data = m.get('oms_data')
if m.get('oms_biz_no') is not None:
self.oms_biz_no = m.get('oms_biz_no')
if m.get('verification_cache') is not None:
self.verification_cache = m.get('verification_cache')
if m.get('verification_url') is not None:
self.verification_url = m.get('verification_url')
if m.get('bpms_id') is not None:
self.bpms_id = m.get('bpms_id')
if m.get('config_data') is not None:
self.config_data = m.get('config_data')
if m.get('status') is not None:
self.status = m.get('status')
if m.get('creator') is not None:
self.creator = m.get('creator')
if m.get('modifor') is not None:
self.modifor = m.get('modifor')
return self
class UpdateAcfeewebStateResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
sync_result: bool = None,
):
        # Unique request ID, used for tracing and troubleshooting
        self.req_msg_id = req_msg_id
        # Result code; OK generally means the call succeeded
        self.result_code = result_code
        # Text description of the error, if any
        self.result_msg = result_msg
        # Success or failure
        self.sync_result = sync_result
def validate(self):
pass
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.sync_result is not None:
result['sync_result'] = self.sync_result
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('sync_result') is not None:
self.sync_result = m.get('sync_result')
return self
class CreateAccountVerificationRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
id: int = None,
domain_code: str = None,
domain_name: str = None,
domain_version: str = None,
product_code: str = None,
product_name: str = None,
service_code: str = None,
service_name: str = None,
offer_code: str = None,
offer_name: str = None,
oms_data: str = None,
oms_biz_no: str = None,
verification_cache: str = None,
verification_url: str = None,
bpms_id: str = None,
config_data: str = None,
status: str = None,
creator: str = None,
modifor: str = None,
):
        # Authorization token in OAuth mode
        self.auth_token = auth_token
        # Primary key ID of the billing verification record
        self.id = id
        # Metering domain code
        self.domain_code = domain_code
        # Metering domain name
        self.domain_name = domain_name
        # Metering domain version
        self.domain_version = domain_version
        # Business product code
        self.product_code = product_code
        # Business product name
        self.product_name = product_name
        # Channel product code
        self.service_code = service_code
        # Channel product name
        self.service_name = service_name
        # Offer code
        self.offer_code = offer_code
        # Offer name
        self.offer_name = offer_name
        # Metering data to verify
        self.oms_data = oms_data
        # Business idempotency key of the metering data
        self.oms_biz_no = oms_biz_no
        # Cached message payload
        self.verification_cache = verification_cache
        # Cache URL
        self.verification_url = verification_url
        # Approval flow ID
        self.bpms_id = bpms_id
        # Configuration payload
        self.config_data = config_data
        # Status
        self.status = status
        # Creator
        self.creator = creator
        # Last modifier
        self.modifor = modifor
def validate(self):
self.validate_required(self.id, 'id')
self.validate_required(self.domain_code, 'domain_code')
self.validate_required(self.domain_name, 'domain_name')
self.validate_required(self.domain_version, 'domain_version')
self.validate_required(self.product_code, 'product_code')
self.validate_required(self.product_name, 'product_name')
self.validate_required(self.service_code, 'service_code')
self.validate_required(self.service_name, 'service_name')
self.validate_required(self.offer_code, 'offer_code')
self.validate_required(self.offer_name, 'offer_name')
self.validate_required(self.oms_data, 'oms_data')
self.validate_required(self.oms_biz_no, 'oms_biz_no')
self.validate_required(self.verification_cache, 'verification_cache')
self.validate_required(self.verification_url, 'verification_url')
self.validate_required(self.bpms_id, 'bpms_id')
self.validate_required(self.config_data, 'config_data')
self.validate_required(self.status, 'status')
self.validate_required(self.creator, 'creator')
self.validate_required(self.modifor, 'modifor')
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.id is not None:
result['id'] = self.id
if self.domain_code is not None:
result['domain_code'] = self.domain_code
if self.domain_name is not None:
result['domain_name'] = self.domain_name
if self.domain_version is not None:
result['domain_version'] = self.domain_version
if self.product_code is not None:
result['product_code'] = self.product_code
if self.product_name is not None:
result['product_name'] = self.product_name
if self.service_code is not None:
result['service_code'] = self.service_code
if self.service_name is not None:
result['service_name'] = self.service_name
if self.offer_code is not None:
result['offer_code'] = self.offer_code
if self.offer_name is not None:
result['offer_name'] = self.offer_name
if self.oms_data is not None:
result['oms_data'] = self.oms_data
if self.oms_biz_no is not None:
result['oms_biz_no'] = self.oms_biz_no
if self.verification_cache is not None:
result['verification_cache'] = self.verification_cache
if self.verification_url is not None:
result['verification_url'] = self.verification_url
if self.bpms_id is not None:
result['bpms_id'] = self.bpms_id
if self.config_data is not None:
result['config_data'] = self.config_data
if self.status is not None:
result['status'] = self.status
if self.creator is not None:
result['creator'] = self.creator
if self.modifor is not None:
result['modifor'] = self.modifor
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('id') is not None:
self.id = m.get('id')
if m.get('domain_code') is not None:
self.domain_code = m.get('domain_code')
if m.get('domain_name') is not None:
self.domain_name = m.get('domain_name')
if m.get('domain_version') is not None:
self.domain_version = m.get('domain_version')
if m.get('product_code') is not None:
self.product_code = m.get('product_code')
if m.get('product_name') is not None:
self.product_name = m.get('product_name')
if m.get('service_code') is not None:
self.service_code = m.get('service_code')
if m.get('service_name') is not None:
self.service_name = m.get('service_name')
if m.get('offer_code') is not None:
self.offer_code = m.get('offer_code')
if m.get('offer_name') is not None:
self.offer_name = m.get('offer_name')
if m.get('oms_data') is not None:
self.oms_data = m.get('oms_data')
if m.get('oms_biz_no') is not None:
self.oms_biz_no = m.get('oms_biz_no')
if m.get('verification_cache') is not None:
self.verification_cache = m.get('verification_cache')
if m.get('verification_url') is not None:
self.verification_url = m.get('verification_url')
if m.get('bpms_id') is not None:
self.bpms_id = m.get('bpms_id')
if m.get('config_data') is not None:
self.config_data = m.get('config_data')
if m.get('status') is not None:
self.status = m.get('status')
if m.get('creator') is not None:
self.creator = m.get('creator')
if m.get('modifor') is not None:
self.modifor = m.get('modifor')
return self
class CreateAccountVerificationResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
process_instance_id: str = None,
):
        # Unique request ID, used for tracing and troubleshooting
        self.req_msg_id = req_msg_id
        # Result code; OK generally means the call succeeded
        self.result_code = result_code
        # Text description of the error, if any
        self.result_msg = result_msg
        # Approval flow instance ID
        self.process_instance_id = process_instance_id
def validate(self):
pass
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.process_instance_id is not None:
result['process_instance_id'] = self.process_instance_id
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('process_instance_id') is not None:
self.process_instance_id = m.get('process_instance_id')
return self
class SyncAccountVerificationRequest(TeaModel):
def __init__(
self,
auth_token: str = None,
id: int = None,
domain_code: str = None,
domain_name: str = None,
domain_version: str = None,
product_code: str = None,
product_name: str = None,
service_code: str = None,
service_name: str = None,
offer_code: str = None,
offer_name: str = None,
oms_data: str = None,
oms_biz_no: str = None,
verification_cache: str = None,
verification_url: str = None,
bpms_id: str = None,
config_data: str = None,
status: str = None,
creator: str = None,
modifor: str = None,
):
        # Authorization token in OAuth mode
        self.auth_token = auth_token
        # Primary key ID of the billing verification record
        self.id = id
        # Metering domain code
        self.domain_code = domain_code
        # Metering domain name
        self.domain_name = domain_name
        # Metering domain version
        self.domain_version = domain_version
        # Business product code
        self.product_code = product_code
        # Business product name
        self.product_name = product_name
        # Channel product code
        self.service_code = service_code
        # Channel product name
        self.service_name = service_name
        # Offer code
        self.offer_code = offer_code
        # Offer name
        self.offer_name = offer_name
        # Metering data to verify
        self.oms_data = oms_data
        # Business idempotency key of the metering data
        self.oms_biz_no = oms_biz_no
        # Cached message payload
        self.verification_cache = verification_cache
        # Cache URL
        self.verification_url = verification_url
        # Approval flow ID
        self.bpms_id = bpms_id
        # Configuration payload
        self.config_data = config_data
        # Status
        self.status = status
        # Creator
        self.creator = creator
        # Last modifier
        self.modifor = modifor
def validate(self):
self.validate_required(self.id, 'id')
self.validate_required(self.domain_code, 'domain_code')
self.validate_required(self.domain_name, 'domain_name')
self.validate_required(self.domain_version, 'domain_version')
self.validate_required(self.product_code, 'product_code')
self.validate_required(self.product_name, 'product_name')
self.validate_required(self.service_code, 'service_code')
self.validate_required(self.service_name, 'service_name')
self.validate_required(self.offer_code, 'offer_code')
self.validate_required(self.offer_name, 'offer_name')
self.validate_required(self.oms_data, 'oms_data')
self.validate_required(self.oms_biz_no, 'oms_biz_no')
self.validate_required(self.verification_cache, 'verification_cache')
self.validate_required(self.verification_url, 'verification_url')
self.validate_required(self.bpms_id, 'bpms_id')
self.validate_required(self.config_data, 'config_data')
self.validate_required(self.status, 'status')
self.validate_required(self.creator, 'creator')
self.validate_required(self.modifor, 'modifor')
def to_map(self):
result = dict()
if self.auth_token is not None:
result['auth_token'] = self.auth_token
if self.id is not None:
result['id'] = self.id
if self.domain_code is not None:
result['domain_code'] = self.domain_code
if self.domain_name is not None:
result['domain_name'] = self.domain_name
if self.domain_version is not None:
result['domain_version'] = self.domain_version
if self.product_code is not None:
result['product_code'] = self.product_code
if self.product_name is not None:
result['product_name'] = self.product_name
if self.service_code is not None:
result['service_code'] = self.service_code
if self.service_name is not None:
result['service_name'] = self.service_name
if self.offer_code is not None:
result['offer_code'] = self.offer_code
if self.offer_name is not None:
result['offer_name'] = self.offer_name
if self.oms_data is not None:
result['oms_data'] = self.oms_data
if self.oms_biz_no is not None:
result['oms_biz_no'] = self.oms_biz_no
if self.verification_cache is not None:
result['verification_cache'] = self.verification_cache
if self.verification_url is not None:
result['verification_url'] = self.verification_url
if self.bpms_id is not None:
result['bpms_id'] = self.bpms_id
if self.config_data is not None:
result['config_data'] = self.config_data
if self.status is not None:
result['status'] = self.status
if self.creator is not None:
result['creator'] = self.creator
if self.modifor is not None:
result['modifor'] = self.modifor
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('auth_token') is not None:
self.auth_token = m.get('auth_token')
if m.get('id') is not None:
self.id = m.get('id')
if m.get('domain_code') is not None:
self.domain_code = m.get('domain_code')
if m.get('domain_name') is not None:
self.domain_name = m.get('domain_name')
if m.get('domain_version') is not None:
self.domain_version = m.get('domain_version')
if m.get('product_code') is not None:
self.product_code = m.get('product_code')
if m.get('product_name') is not None:
self.product_name = m.get('product_name')
if m.get('service_code') is not None:
self.service_code = m.get('service_code')
if m.get('service_name') is not None:
self.service_name = m.get('service_name')
if m.get('offer_code') is not None:
self.offer_code = m.get('offer_code')
if m.get('offer_name') is not None:
self.offer_name = m.get('offer_name')
if m.get('oms_data') is not None:
self.oms_data = m.get('oms_data')
if m.get('oms_biz_no') is not None:
self.oms_biz_no = m.get('oms_biz_no')
if m.get('verification_cache') is not None:
self.verification_cache = m.get('verification_cache')
if m.get('verification_url') is not None:
self.verification_url = m.get('verification_url')
if m.get('bpms_id') is not None:
self.bpms_id = m.get('bpms_id')
if m.get('config_data') is not None:
self.config_data = m.get('config_data')
if m.get('status') is not None:
self.status = m.get('status')
if m.get('creator') is not None:
self.creator = m.get('creator')
if m.get('modifor') is not None:
self.modifor = m.get('modifor')
return self
class SyncAccountVerificationResponse(TeaModel):
def __init__(
self,
req_msg_id: str = None,
result_code: str = None,
result_msg: str = None,
sync_result: bool = None,
):
        # Unique request ID, used for tracing and troubleshooting
        self.req_msg_id = req_msg_id
        # Result code; OK generally means the call succeeded
        self.result_code = result_code
        # Text description of the error, if any
        self.result_msg = result_msg
        # Success or failure
        self.sync_result = sync_result
def validate(self):
pass
def to_map(self):
result = dict()
if self.req_msg_id is not None:
result['req_msg_id'] = self.req_msg_id
if self.result_code is not None:
result['result_code'] = self.result_code
if self.result_msg is not None:
result['result_msg'] = self.result_msg
if self.sync_result is not None:
result['sync_result'] = self.sync_result
return result
def from_map(self, m: dict = None):
m = m or dict()
if m.get('req_msg_id') is not None:
self.req_msg_id = m.get('req_msg_id')
if m.get('result_code') is not None:
self.result_code = m.get('result_code')
if m.get('result_msg') is not None:
self.result_msg = m.get('result_msg')
if m.get('sync_result') is not None:
self.sync_result = m.get('sync_result')
return self
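# Illustrative sketch (not part of the generated SDK): a raw gateway payload can be
# hydrated with from_map(); per the declaration above, sync_result is a bool. The
# payload below is made up.
def _demo_parse_sync_response():
    payload = {'req_msg_id': 'abc-123', 'result_code': 'OK', 'sync_result': True}
    resp = SyncAccountVerificationResponse().from_map(payload)
    return resp.result_code == 'OK' and resp.sync_result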
# ==== /test_word_break.py (repo: brigitteunger/katas, license: MIT) ====
import unittest
from typing import List
from data_word_break import s_2, wordDict_2
class Solution:
def wordBreak(self, s: str, wordDict: List[str]) -> bool:
        if not wordDict:
            return False
set_words = set(wordDict)
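        # dp[i] is True iff the prefix s[:i] can be split into words from set_words.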
dp = [False]*(len(s)+1)
dp[0] = True
for i in range(1, len(s)+1):
for j in range(0, i):
if dp[j] is True and s[j:i] in set_words:
dp[i] = True
break
return dp[-1]
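# Worked example for wordBreak: s = "leetcode", wordDict = ["leet", "code"].
# dp starts as [True] + [False] * 8 (dp[0] covers the empty prefix).
#   i = 4: dp[0] is True and s[0:4] == "leet" is in set_words -> dp[4] = True
#   i = 8: dp[4] is True and s[4:8] == "code" is in set_words -> dp[8] = True
# dp[-1] is True, so "leetcode" can be segmented.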
class TestFindWords(unittest.TestCase):
def setUp(self):
self.sol = Solution()
def testWordBreak_1(self):
s = "leetcode"
word_dict = ["leet", "code"]
segmented = self.sol.wordBreak(s, word_dict)
self.assertTrue(segmented)
def testWordBreak_2(self):
s = "applepenapple"
word_dict = ["apple", "pen"]
segmented = self.sol.wordBreak(s, word_dict)
self.assertTrue(segmented)
def testWordBreak_3(self):
s = "catsandog"
word_dict = ["cats", "dog", "sand", "and", "cat"]
segmented = self.sol.wordBreak(s, word_dict)
self.assertFalse(segmented)
def testWordBreak_4(self):
s = "goalspecial"
word_dict = ["go", "goal", "goals", "special"]
segmented = self.sol.wordBreak(s, word_dict)
self.assertTrue(segmented)
def testWordBreak_5(self):
s = s_2
word_dict = wordDict_2
segmented = self.sol.wordBreak(s, word_dict)
self.assertFalse(segmented)
if __name__ == "__main__":
unittest.main()
# ==== /memsource_cli/models/linguist_v2.py (repo: zerodayz/memsource-cli-client, license: Apache-2.0) ====
# coding: utf-8
"""
Memsource REST API
Welcome to Memsource's API documentation. To view our legacy APIs please [visit our documentation](https://wiki.memsource.com/wiki/Memsource_API) and for more information about our new APIs, [visit our blog](https://www.memsource.com/blog/2017/10/24/introducing-rest-apis-qa-with-the-memsource-api-team/). If you have any questions, please contact [Memsource Support](<mailto:[email protected]>). # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from memsource_cli.models.abstract_project_dto_v2 import AbstractProjectDtoV2 # noqa: F401,E501
from memsource_cli.models.domain_reference import DomainReference # noqa: F401,E501
from memsource_cli.models.reference_file_reference import ReferenceFileReference # noqa: F401,E501
from memsource_cli.models.sub_domain_reference import SubDomainReference # noqa: F401,E501
from memsource_cli.models.user_reference import UserReference # noqa: F401,E501
class LinguistV2(AbstractProjectDtoV2):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
}
attribute_map = {
}
def __init__(self): # noqa: E501
"""LinguistV2 - a model defined in Swagger""" # noqa: E501
self.discriminator = None
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(LinguistV2, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, LinguistV2):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
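# Illustrative sketch (not part of the generated client): LinguistV2 declares no
# fields of its own (empty swagger_types), so to_dict() yields an empty mapping and
# equality is driven entirely by __dict__ comparison.
def _demo_linguist_v2():
    a, b = LinguistV2(), LinguistV2()
    assert a.to_dict() == {}
    assert a == b and not (a != b)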
# ==== media/capture/capture.gyp (repo: qichanna/chromium, license: BSD-3-Clause) ====
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
'capture_sources': [
'capture_export.h',
'content/animated_content_sampler.cc',
'content/animated_content_sampler.h',
'content/capture_resolution_chooser.cc',
'content/capture_resolution_chooser.h',
'content/screen_capture_device_core.cc',
'content/screen_capture_device_core.h',
'content/smooth_event_sampler.cc',
'content/smooth_event_sampler.h',
'content/thread_safe_capture_oracle.cc',
'content/thread_safe_capture_oracle.h',
'content/video_capture_oracle.cc',
'content/video_capture_oracle.h',
'device_monitor_mac.h',
'device_monitor_mac.mm',
'system_message_window_win.cc',
'system_message_window_win.h',
'video/android/video_capture_device_android.cc',
'video/android/video_capture_device_android.h',
'video/android/video_capture_device_factory_android.cc',
'video/android/video_capture_device_factory_android.h',
'video/fake_video_capture_device.cc',
'video/fake_video_capture_device.h',
'video/fake_video_capture_device_factory.cc',
'video/fake_video_capture_device_factory.h',
'video/file_video_capture_device.cc',
'video/file_video_capture_device.h',
'video/file_video_capture_device_factory.cc',
'video/file_video_capture_device_factory.h',
'video/linux/v4l2_capture_delegate.cc',
'video/linux/v4l2_capture_delegate.h',
'video/linux/video_capture_device_chromeos.cc',
'video/linux/video_capture_device_chromeos.h',
'video/linux/video_capture_device_factory_linux.cc',
'video/linux/video_capture_device_factory_linux.h',
'video/linux/video_capture_device_linux.cc',
'video/linux/video_capture_device_linux.h',
'video/mac/video_capture_device_avfoundation_mac.h',
'video/mac/video_capture_device_avfoundation_mac.mm',
'video/mac/video_capture_device_decklink_mac.h',
'video/mac/video_capture_device_decklink_mac.mm',
'video/mac/video_capture_device_factory_mac.h',
'video/mac/video_capture_device_factory_mac.mm',
'video/mac/video_capture_device_mac.h',
'video/mac/video_capture_device_mac.mm',
'video/scoped_result_callback.h',
'video/video_capture_device.cc',
'video/video_capture_device.h',
'video/video_capture_device_factory.cc',
'video/video_capture_device_factory.h',
'video/video_capture_device_info.cc',
'video/video_capture_device_info.h',
'video/win/capability_list_win.cc',
'video/win/capability_list_win.h',
'video/win/filter_base_win.cc',
'video/win/filter_base_win.h',
'video/win/pin_base_win.cc',
'video/win/pin_base_win.h',
'video/win/sink_filter_observer_win.h',
'video/win/sink_filter_win.cc',
'video/win/sink_filter_win.h',
'video/win/sink_input_pin_win.cc',
'video/win/sink_input_pin_win.h',
'video/win/video_capture_device_factory_win.cc',
'video/win/video_capture_device_factory_win.h',
'video/win/video_capture_device_mf_win.cc',
'video/win/video_capture_device_mf_win.h',
'video/win/video_capture_device_win.cc',
'video/win/video_capture_device_win.h'
],
'capture_unittests_sources': [
'content/animated_content_sampler_unittest.cc',
'content/capture_resolution_chooser_unittest.cc',
'content/smooth_event_sampler_unittest.cc',
'content/video_capture_oracle_unittest.cc',
'system_message_window_win_unittest.cc',
'video/fake_video_capture_device_unittest.cc',
'video/mac/video_capture_device_factory_mac_unittest.mm',
'video/video_capture_device_unittest.cc'
],
# The following files lack appropriate platform suffixes.
'conditions': [
['OS=="linux" and use_udev==1', {
'capture_sources': [
'device_monitor_udev.cc',
'device_monitor_udev.h',
],
}],
],
},
'targets': [
{
# GN version: //media/capture
'target_name': 'capture',
'type': '<(component)',
'hard_dependency': 1,
'dependencies': [
'<(DEPTH)/base/base.gyp:base',
'<(DEPTH)/base/base.gyp:base_i18n',
'<(DEPTH)/media/media.gyp:media',
'<(DEPTH)/media/media.gyp:shared_memory_support', # For audio support.
'<(DEPTH)/media/mojo/interfaces/mojo_bindings.gyp:image_capture_mojo_bindings',
'<(DEPTH)/mojo/mojo_edk.gyp:mojo_system_impl',
'<(DEPTH)/mojo/mojo_public.gyp:mojo_cpp_bindings',
'<(DEPTH)/skia/skia.gyp:skia',
'<(DEPTH)/ui/gfx/gfx.gyp:gfx',
'<(DEPTH)/ui/gfx/gfx.gyp:gfx_geometry',
],
'defines': [
'CAPTURE_IMPLEMENTATION',
],
'include_dirs': [
'<(DEPTH)/',
],
'sources': [
'<@(capture_sources)'
],
'conditions': [
['OS=="android"', {
'dependencies': [
'capture_java',
'<(DEPTH)/media/capture/video/android'
],
}],
['OS=="mac"', {
'dependencies': [
'<(DEPTH)/third_party/decklink/decklink.gyp:decklink',
],
}],
['chromeos==1', {
'dependencies': [
'<(DEPTH)/ui/display/display.gyp:display',
],
}],
['OS=="linux" and use_udev==1', {
'dependencies': [
'<(DEPTH)/device/udev_linux/udev.gyp:udev_linux',
],
}],
['OS=="win"', {
'dependencies': [
'<(DEPTH)/media/media.gyp:mf_initializer',
],
# TODO(jschuh): http://crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [ 4267, ],
}],
],
},
{
# GN version: //media/capture:capture_unittests
'target_name': 'capture_unittests',
'type': '<(gtest_target_type)',
'include_dirs': [
'<(DEPTH)/',
],
'dependencies': [
'capture',
'<(DEPTH)/base/base.gyp:base',
'<(DEPTH)/base/base.gyp:run_all_unittests',
'<(DEPTH)/media/media.gyp:media',
'<(DEPTH)/media/mojo/interfaces/mojo_bindings.gyp:image_capture_mojo_bindings',
'<(DEPTH)/mojo/mojo_edk.gyp:mojo_system_impl',
'<(DEPTH)/mojo/mojo_public.gyp:mojo_cpp_bindings',
'<(DEPTH)/testing/gmock.gyp:gmock',
'<(DEPTH)/testing/gtest.gyp:gtest',
'<(DEPTH)/ui/gfx/gfx.gyp:gfx',
'<(DEPTH)/ui/gfx/gfx.gyp:gfx_geometry',
'<(DEPTH)/ui/gfx/gfx.gyp:gfx_test_support',
],
'sources': [
'<@(capture_unittests_sources)'
],
'conditions': [
['OS=="android"', {
'dependencies': [
'<(DEPTH)/testing/android/native_test.gyp:native_test_native_code',
],
}],
['OS=="win"', {
'dependencies': [
'<(DEPTH)/media/media.gyp:mf_initializer',
],
# TODO(jschuh): http://crbug.com/167187 fix size_t to int truncations.
'msvs_disabled_warnings': [ 4267, ],
}],
], # conditions
},
],
'conditions': [
['test_isolation_mode != "noop"', {
'targets': [
{
# There's no GN equivalent to this.
'target_name': 'capture_unittests_run',
'type': 'none',
'dependencies': [
'capture_unittests',
],
'includes': [
'../../build/isolate.gypi',
],
'sources': [
'capture_unittests.isolate',
]
}
]
}],
['OS=="android"', {
'targets': [
{
'target_name': 'capture_java',
'type': 'none',
'dependencies': [
          '<(DEPTH)/base/base.gyp:base',
'media_android_captureapitype',
'media_android_imageformat',
'video_capture_android_jni_headers',
],
'export_dependent_settings': [
          '<(DEPTH)/base/base.gyp:base',
],
'variables': {
'java_in_dir': 'video/android/java',
},
'includes': ['../../build/java.gypi'],
},
{
'target_name': 'media_android_captureapitype',
'type': 'none',
'variables': {
'source_file': 'video/video_capture_device.h',
},
'includes': [ '../../build/android/java_cpp_enum.gypi' ],
},
{
'target_name': 'media_android_imageformat',
'type': 'none',
'variables': {
'source_file': 'video/android/video_capture_device_android.h',
},
'includes': [ '../../build/android/java_cpp_enum.gypi' ],
},
{
'target_name': 'video_capture_android_jni_headers',
'type': 'none',
'sources': [
'video/android/java/src/org/chromium/media/VideoCapture.java',
'video/android/java/src/org/chromium/media/VideoCaptureFactory.java',
],
'variables': {
'jni_gen_package': 'media',
},
'includes': ['../../build/jni_generator.gypi'],
},
{
# There's no GN equivalent to this.
'target_name': 'capture_unittests_apk',
'type': 'none',
'dependencies': [
'capture_java',
'capture_unittests',
],
'variables': {
'test_suite_name': 'capture_unittests',
},
'includes': ['../../build/apk_test.gypi'],
},
],
'conditions': [
['test_isolation_mode != "noop"', {
'targets': [
{
'target_name': 'capture_unittests_apk_run',
'type': 'none',
'dependencies': [
'capture_unittests_apk',
],
'includes': [
'../../build/isolate.gypi',
],
'sources': [
'capture_unittests_apk.isolate',
],
},
],
}],
],
}],
],
}
# ==== tests/models/x_clip/test_modeling_x_clip.py (repo: huggingface/transformers, license: Apache-2.0) ====
# coding=utf-8
# Copyright 2022 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Testing suite for the PyTorch XCLIP model. """
import inspect
import os
import tempfile
import unittest
import numpy as np
from huggingface_hub import hf_hub_download
from transformers import XCLIPConfig, XCLIPTextConfig, XCLIPVisionConfig
from transformers.testing_utils import require_torch, require_torch_multi_gpu, require_vision, slow, torch_device
from transformers.utils import is_torch_available, is_vision_available
from ...test_configuration_common import ConfigTester
from ...test_modeling_common import (
ModelTesterMixin,
_config_zero_init,
floats_tensor,
ids_tensor,
random_attention_mask,
)
from ...test_pipeline_mixin import PipelineTesterMixin
if is_torch_available():
import torch
from torch import nn
from transformers import XCLIPModel, XCLIPTextModel, XCLIPVisionModel
from transformers.models.x_clip.modeling_x_clip import XCLIP_PRETRAINED_MODEL_ARCHIVE_LIST
if is_vision_available():
from transformers import XCLIPProcessor
class XCLIPVisionModelTester:
def __init__(
self,
parent,
batch_size=8,
image_size=30,
patch_size=2,
num_channels=3,
num_frames=8, # important; the batch size * time must be divisible by the number of frames
is_training=True,
hidden_size=32,
num_hidden_layers=2,
num_attention_heads=4,
intermediate_size=37,
mit_hidden_size=64,
dropout=0.1,
attention_dropout=0.1,
initializer_range=0.02,
scope=None,
):
self.parent = parent
self.batch_size = batch_size
self.image_size = image_size
self.patch_size = patch_size
self.num_channels = num_channels
self.num_frames = num_frames
self.is_training = is_training
self.hidden_size = hidden_size
self.num_hidden_layers = num_hidden_layers
self.num_attention_heads = num_attention_heads
self.intermediate_size = intermediate_size
self.mit_hidden_size = mit_hidden_size
self.dropout = dropout
self.attention_dropout = attention_dropout
self.initializer_range = initializer_range
self.scope = scope
# in ViT, the seq length equals the number of patches + 1 (we add 1 for the [CLS] token)
num_patches = (image_size // patch_size) ** 2
self.seq_length = num_patches + 1
def prepare_config_and_inputs(self):
pixel_values = floats_tensor(
[self.batch_size * self.num_frames, self.num_channels, self.image_size, self.image_size]
)
config = self.get_config()
return config, pixel_values
def get_config(self):
return XCLIPVisionConfig(
image_size=self.image_size,
patch_size=self.patch_size,
num_channels=self.num_channels,
num_frames=self.num_frames,
hidden_size=self.hidden_size,
num_hidden_layers=self.num_hidden_layers,
num_attention_heads=self.num_attention_heads,
intermediate_size=self.intermediate_size,
mit_hidden_size=self.mit_hidden_size,
dropout=self.dropout,
attention_dropout=self.attention_dropout,
initializer_range=self.initializer_range,
)
def create_and_check_model(self, config, pixel_values):
model = XCLIPVisionModel(config=config)
model.to(torch_device)
model.eval()
with torch.no_grad():
result = model(pixel_values)
# expected sequence length = num_patches + 1 (we add 1 for the [CLS] token)
image_size = (self.image_size, self.image_size)
patch_size = (self.patch_size, self.patch_size)
num_patches = (image_size[1] // patch_size[1]) * (image_size[0] // patch_size[0])
self.parent.assertEqual(
result.last_hidden_state.shape, (self.batch_size * self.num_frames, num_patches + 1, self.hidden_size)
)
self.parent.assertEqual(result.pooler_output.shape, (self.batch_size * self.num_frames, self.hidden_size))
def prepare_config_and_inputs_for_common(self):
config_and_inputs = self.prepare_config_and_inputs()
config, pixel_values = config_and_inputs
inputs_dict = {"pixel_values": pixel_values}
return config, inputs_dict
@require_torch
class XCLIPVisionModelTest(ModelTesterMixin, unittest.TestCase):
"""
Here we also overwrite some of the tests of test_modeling_common.py, as X-CLIP does not use input_ids, inputs_embeds,
attention_mask and seq_length.
"""
all_model_classes = (XCLIPVisionModel,) if is_torch_available() else ()
fx_compatible = False
test_pruning = False
test_resize_embeddings = False
test_head_masking = False
def setUp(self):
self.model_tester = XCLIPVisionModelTester(self)
self.config_tester = ConfigTester(
self, config_class=XCLIPVisionConfig, has_text_modality=False, hidden_size=37
)
def test_config(self):
self.config_tester.run_common_tests()
@unittest.skip(reason="X-CLIP does not use inputs_embeds")
def test_inputs_embeds(self):
pass
def test_model_common_attributes(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()
for model_class in self.all_model_classes:
model = model_class(config)
self.assertIsInstance(model.get_input_embeddings(), (nn.Module))
x = model.get_output_embeddings()
self.assertTrue(x is None or isinstance(x, nn.Linear))
def test_forward_signature(self):
config, _ = self.model_tester.prepare_config_and_inputs_for_common()
for model_class in self.all_model_classes:
model = model_class(config)
signature = inspect.signature(model.forward)
# signature.parameters is an OrderedDict => so arg_names order is deterministic
arg_names = [*signature.parameters.keys()]
expected_arg_names = ["pixel_values"]
self.assertListEqual(arg_names[:1], expected_arg_names)
def test_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_model(*config_and_inputs)
def test_training(self):
pass
def test_training_gradient_checkpointing(self):
pass
@unittest.skip(reason="XCLIPVisionModel has no base class and is not available in MODEL_MAPPING")
def test_save_load_fast_init_from_base(self):
pass
@unittest.skip(reason="XCLIPVisionModel has no base class and is not available in MODEL_MAPPING")
def test_save_load_fast_init_to_base(self):
pass
@slow
def test_model_from_pretrained(self):
for model_name in XCLIP_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
model = XCLIPVisionModel.from_pretrained(model_name)
self.assertIsNotNone(model)
def test_gradient_checkpointing_backward_compatibility(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
for model_class in self.all_model_classes:
if not model_class.supports_gradient_checkpointing:
continue
print("Model class:", model_class)
config.gradient_checkpointing = True
model = model_class(config)
self.assertTrue(model.is_gradient_checkpointing)
def test_attention_outputs(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
config.return_dict = True
# we add 1 here due to the special message token in X-CLIP's vision encoder
seq_len = getattr(self.model_tester, "seq_length", None) + 1
encoder_seq_length = getattr(self.model_tester, "encoder_seq_length", seq_len)
for model_class in self.all_model_classes:
inputs_dict["output_attentions"] = True
inputs_dict["output_hidden_states"] = False
config.return_dict = True
model = model_class(config)
model.to(torch_device)
model.eval()
with torch.no_grad():
outputs = model(**self._prepare_for_class(inputs_dict, model_class))
self.assertEqual(len(outputs.attentions), self.model_tester.num_hidden_layers)
# check that output_attentions also work using config
del inputs_dict["output_attentions"]
config.output_attentions = True
model = model_class(config)
model.to(torch_device)
model.eval()
with torch.no_grad():
outputs = model(**self._prepare_for_class(inputs_dict, model_class))
self.assertEqual(len(outputs.attentions), self.model_tester.num_hidden_layers)
self.assertListEqual(
list(outputs.attentions[0].shape[-3:]),
[self.model_tester.num_attention_heads, encoder_seq_length, encoder_seq_length],
)
out_len = len(outputs)
# Check attention is always last and order is fine
inputs_dict["output_attentions"] = True
inputs_dict["output_hidden_states"] = True
model = model_class(config)
model.to(torch_device)
model.eval()
with torch.no_grad():
outputs = model(**self._prepare_for_class(inputs_dict, model_class))
self.assertEqual(out_len + 1, len(outputs))
self_attentions = outputs.attentions
self.assertEqual(len(self_attentions), self.model_tester.num_hidden_layers)
self.assertListEqual(
list(self_attentions[0].shape[-3:]),
[self.model_tester.num_attention_heads, encoder_seq_length, encoder_seq_length],
)
@require_torch_multi_gpu
def test_multi_gpu_data_parallel_forward(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
# some params shouldn't be scattered by nn.DataParallel
# so just remove them if they are present.
blacklist_non_batched_params = ["head_mask", "decoder_head_mask", "cross_attn_head_mask"]
for k in blacklist_non_batched_params:
inputs_dict.pop(k, None)
        # move input tensors to cuda:0
for k, v in inputs_dict.items():
if torch.is_tensor(v):
inputs_dict[k] = v.to(0)
for model_class in self.all_model_classes:
model = model_class(config=config)
model.to(0)
model.eval()
# Wrap model in nn.DataParallel
model = nn.DataParallel(model)
with torch.no_grad():
test = self._prepare_for_class(inputs_dict, model_class)
for k, v in test.items():
if isinstance(v, torch.Tensor):
print(k, v.shape)
else:
print(k, v)
_ = model(**self._prepare_for_class(inputs_dict, model_class))
class XCLIPTextModelTester:
def __init__(
self,
parent,
batch_size=8,
seq_length=7,
is_training=True,
use_input_mask=True,
use_labels=True,
vocab_size=99,
hidden_size=32,
num_hidden_layers=2,
num_attention_heads=4,
intermediate_size=37,
dropout=0.1,
attention_dropout=0.1,
max_position_embeddings=512,
initializer_range=0.02,
scope=None,
):
self.parent = parent
self.batch_size = batch_size
self.seq_length = seq_length
self.is_training = is_training
self.use_input_mask = use_input_mask
self.use_labels = use_labels
self.vocab_size = vocab_size
self.hidden_size = hidden_size
self.num_hidden_layers = num_hidden_layers
self.num_attention_heads = num_attention_heads
self.intermediate_size = intermediate_size
self.dropout = dropout
self.attention_dropout = attention_dropout
self.max_position_embeddings = max_position_embeddings
self.initializer_range = initializer_range
self.scope = scope
def prepare_config_and_inputs(self):
input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)
input_mask = None
if self.use_input_mask:
input_mask = random_attention_mask([self.batch_size, self.seq_length])
if input_mask is not None:
batch_size, seq_length = input_mask.shape
rnd_start_indices = np.random.randint(1, seq_length - 1, size=(batch_size,))
for batch_idx, start_index in enumerate(rnd_start_indices):
input_mask[batch_idx, :start_index] = 1
input_mask[batch_idx, start_index:] = 0
config = self.get_config()
return config, input_ids, input_mask
def get_config(self):
return XCLIPTextConfig(
vocab_size=self.vocab_size,
hidden_size=self.hidden_size,
num_hidden_layers=self.num_hidden_layers,
num_attention_heads=self.num_attention_heads,
intermediate_size=self.intermediate_size,
dropout=self.dropout,
attention_dropout=self.attention_dropout,
max_position_embeddings=self.max_position_embeddings,
initializer_range=self.initializer_range,
)
def create_and_check_model(self, config, input_ids, input_mask):
model = XCLIPTextModel(config=config)
model.to(torch_device)
model.eval()
with torch.no_grad():
result = model(input_ids, attention_mask=input_mask)
result = model(input_ids)
self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))
self.parent.assertEqual(result.pooler_output.shape, (self.batch_size, self.hidden_size))
def prepare_config_and_inputs_for_common(self):
config_and_inputs = self.prepare_config_and_inputs()
config, input_ids, input_mask = config_and_inputs
inputs_dict = {"input_ids": input_ids, "attention_mask": input_mask}
return config, inputs_dict
@require_torch
class XCLIPTextModelTest(ModelTesterMixin, unittest.TestCase):
all_model_classes = (XCLIPTextModel,) if is_torch_available() else ()
fx_compatible = False
test_pruning = False
test_head_masking = False
def setUp(self):
self.model_tester = XCLIPTextModelTester(self)
self.config_tester = ConfigTester(self, config_class=XCLIPTextConfig, hidden_size=37)
def test_config(self):
self.config_tester.run_common_tests()
def test_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_model(*config_and_inputs)
def test_training(self):
pass
def test_training_gradient_checkpointing(self):
pass
@unittest.skip(reason="X-CLIP does not use inputs_embeds")
def test_inputs_embeds(self):
pass
@unittest.skip(reason="XCLIPTextModel has no base class and is not available in MODEL_MAPPING")
def test_save_load_fast_init_from_base(self):
pass
@unittest.skip(reason="XCLIPTextModel has no base class and is not available in MODEL_MAPPING")
def test_save_load_fast_init_to_base(self):
pass
@slow
def test_model_from_pretrained(self):
for model_name in XCLIP_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
model = XCLIPTextModel.from_pretrained(model_name)
self.assertIsNotNone(model)
class XCLIPModelTester:
def __init__(
self,
parent,
text_kwargs=None,
vision_kwargs=None,
projection_dim=64,
mit_hidden_size=64,
is_training=True,
):
if text_kwargs is None:
text_kwargs = {}
if vision_kwargs is None:
vision_kwargs = {}
self.parent = parent
self.projection_dim = projection_dim
self.mit_hidden_size = mit_hidden_size
self.text_model_tester = XCLIPTextModelTester(parent, **text_kwargs)
self.vision_model_tester = XCLIPVisionModelTester(parent, **vision_kwargs)
self.is_training = is_training
def prepare_config_and_inputs(self):
text_config, input_ids, attention_mask = self.text_model_tester.prepare_config_and_inputs()
vision_config, _ = self.vision_model_tester.prepare_config_and_inputs()
pixel_values = floats_tensor(
[
self.vision_model_tester.batch_size,
self.vision_model_tester.num_frames,
self.vision_model_tester.num_channels,
self.vision_model_tester.image_size,
self.vision_model_tester.image_size,
]
)
config = self.get_config()
return config, input_ids, attention_mask, pixel_values
def get_config(self):
return XCLIPConfig.from_text_vision_configs(
self.text_model_tester.get_config(),
self.vision_model_tester.get_config(),
projection_dim=self.projection_dim,
)
def create_and_check_model(self, config, input_ids, attention_mask, pixel_values):
model = XCLIPModel(config).to(torch_device).eval()
with torch.no_grad():
result = model(input_ids, pixel_values, attention_mask)
self.parent.assertEqual(
result.logits_per_video.shape,
(self.vision_model_tester.batch_size, self.text_model_tester.batch_size),
)
self.parent.assertEqual(
result.logits_per_text.shape,
(self.text_model_tester.batch_size, self.vision_model_tester.batch_size),
)
def prepare_config_and_inputs_for_common(self):
config_and_inputs = self.prepare_config_and_inputs()
config, input_ids, attention_mask, pixel_values = config_and_inputs
inputs_dict = {
"input_ids": input_ids,
"attention_mask": attention_mask,
"pixel_values": pixel_values,
"return_loss": True,
}
return config, inputs_dict
@require_torch
class XCLIPModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
all_model_classes = (XCLIPModel,) if is_torch_available() else ()
pipeline_model_mapping = {"feature-extraction": XCLIPModel} if is_torch_available() else {}
fx_compatible = False
test_head_masking = False
test_pruning = False
test_resize_embeddings = False
test_attention_outputs = False
test_torchscript = False
    maxDiff = None
def setUp(self):
self.model_tester = XCLIPModelTester(self)
def test_model(self):
config_and_inputs = self.model_tester.prepare_config_and_inputs()
self.model_tester.create_and_check_model(*config_and_inputs)
@unittest.skip(reason="Hidden_states is tested in individual model tests")
def test_hidden_states_output(self):
pass
@unittest.skip(reason="Inputs_embeds is tested in individual model tests")
def test_inputs_embeds(self):
pass
@unittest.skip(reason="Retain_grad is tested in individual model tests")
def test_retain_grad_hidden_states_attentions(self):
pass
@unittest.skip(reason="XCLIPModel does not have input/output embeddings")
def test_model_common_attributes(self):
pass
@unittest.skip(reason="XCLIPModel does not support feedforward chunking")
def test_feed_forward_chunking(self):
pass
# override as the `logit_scale`, `prompts_generator.alpha` parameters require special treatment
def test_initialization(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
configs_no_init = _config_zero_init(config)
for model_class in self.all_model_classes:
model = model_class(config=configs_no_init)
for name, param in model.named_parameters():
if param.requires_grad:
                    # check if `logit_scale` is initialized as per the original implementation
if name == "logit_scale":
self.assertAlmostEqual(
param.data.item(),
np.log(1 / 0.07),
delta=1e-3,
msg=f"Parameter {name} of model {model_class} seems not properly initialized",
)
elif name == "prompts_generator.alpha":
self.assertAlmostEqual(param.data.mean().item(), model.config.prompt_alpha)
else:
self.assertIn(
((param.data.mean() * 1e9).round() / 1e9).item(),
[0.0, 1.0],
msg=f"Parameter {name} of model {model_class} seems not properly initialized",
)
def _create_and_check_torchscript(self, config, inputs_dict):
if not self.test_torchscript:
return
configs_no_init = _config_zero_init(config) # To be sure we have no Nan
configs_no_init.torchscript = True
configs_no_init.return_dict = False
for model_class in self.all_model_classes:
model = model_class(config=configs_no_init)
model.to(torch_device)
model.eval()
try:
input_ids = inputs_dict["input_ids"]
pixel_values = inputs_dict["pixel_values"] # X-CLIP needs pixel_values
traced_model = torch.jit.trace(model, (input_ids, pixel_values))
except RuntimeError:
self.fail("Couldn't trace module.")
with tempfile.TemporaryDirectory() as tmp_dir_name:
pt_file_name = os.path.join(tmp_dir_name, "traced_model.pt")
try:
torch.jit.save(traced_model, pt_file_name)
except Exception:
self.fail("Couldn't save module.")
try:
loaded_model = torch.jit.load(pt_file_name)
except Exception:
self.fail("Couldn't load module.")
model.to(torch_device)
model.eval()
loaded_model.to(torch_device)
loaded_model.eval()
model_state_dict = model.state_dict()
loaded_model_state_dict = loaded_model.state_dict()
non_persistent_buffers = {}
for key in loaded_model_state_dict.keys():
if key not in model_state_dict.keys():
non_persistent_buffers[key] = loaded_model_state_dict[key]
loaded_model_state_dict = {
key: value for key, value in loaded_model_state_dict.items() if key not in non_persistent_buffers
}
self.assertEqual(set(model_state_dict.keys()), set(loaded_model_state_dict.keys()))
model_buffers = list(model.buffers())
for non_persistent_buffer in non_persistent_buffers.values():
found_buffer = False
for i, model_buffer in enumerate(model_buffers):
if torch.equal(non_persistent_buffer, model_buffer):
found_buffer = True
break
self.assertTrue(found_buffer)
model_buffers.pop(i)
models_equal = True
for layer_name, p1 in model_state_dict.items():
p2 = loaded_model_state_dict[layer_name]
if p1.data.ne(p2.data).sum() > 0:
models_equal = False
self.assertTrue(models_equal)
def test_load_vision_text_config(self):
config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
# Save XCLIPConfig and check if we can load XCLIPVisionConfig from it
with tempfile.TemporaryDirectory() as tmp_dir_name:
config.save_pretrained(tmp_dir_name)
vision_config = XCLIPVisionConfig.from_pretrained(tmp_dir_name)
self.assertDictEqual(config.vision_config.to_dict(), vision_config.to_dict())
# Save XCLIPConfig and check if we can load XCLIPTextConfig from it
with tempfile.TemporaryDirectory() as tmp_dir_name:
config.save_pretrained(tmp_dir_name)
text_config = XCLIPTextConfig.from_pretrained(tmp_dir_name)
self.assertDictEqual(config.text_config.to_dict(), text_config.to_dict())
@slow
def test_model_from_pretrained(self):
for model_name in XCLIP_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
model = XCLIPModel.from_pretrained(model_name)
self.assertIsNotNone(model)
# We will verify our results on a spaghetti video
def prepare_video():
file = hf_hub_download(
repo_id="hf-internal-testing/spaghetti-video", filename="eating_spaghetti_8_frames.npy", repo_type="dataset"
)
video = np.load(file)
return list(video)
@require_vision
@require_torch
class XCLIPModelIntegrationTest(unittest.TestCase):
@slow
def test_inference(self):
model_name = "microsoft/xclip-base-patch32"
model = XCLIPModel.from_pretrained(model_name).to(torch_device)
processor = XCLIPProcessor.from_pretrained(model_name)
video = prepare_video()
inputs = processor(
text=["playing sports", "eating spaghetti", "go shopping"], videos=video, return_tensors="pt", padding=True
).to(torch_device)
# forward pass
with torch.no_grad():
outputs = model(**inputs)
# verify the logits
self.assertEqual(
outputs.logits_per_video.shape,
torch.Size((inputs.pixel_values.shape[0], inputs.input_ids.shape[0])),
)
self.assertEqual(
outputs.logits_per_text.shape,
torch.Size((inputs.input_ids.shape[0], inputs.pixel_values.shape[0])),
)
expected_logits = torch.tensor([[14.0181, 20.2771, 14.4776]], device=torch_device)
self.assertTrue(torch.allclose(outputs.logits_per_video, expected_logits, atol=1e-3))
| [
"[email protected]"
]
| |
cc283b9b4b09f6bf5595826d7c51710a2bbd1948 | b72dbc51279d3e59cb6410367b671f8a956314c1 | /프로그래머스/그외/1844_게임맵 최단거리.py | 0aa107ad05fc1b98b72ecda8ab28d2ebc0eba2d7 | []
| no_license | ddobokki/coding-test-practice | 7b16d20403bb1714d97adfd1f47aa7d3ccd7ea4b | c88d981a1d43b986169f7884ff3ef1498e768fc8 | refs/heads/main | 2023-07-08T15:09:32.269059 | 2021-08-08T12:19:44 | 2021-08-08T12:19:44 | 344,116,013 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,399 | py | #https://programmers.co.kr/learn/courses/30/lessons/1844
from collections import deque
def solution(maps):
answer = 0
    dx = [1, -1, 0, 0]  # east, west, south, north, in this order
    dy = [0, 0, 1, -1]
    visit = [[-1] * len(maps[0]) for _ in range(len(maps))]  # visit array that tracks distance and whether a cell was visited
    visit[0][0] = 1  # -1 means not yet visited; initialized to -1 because we must return -1 if the goal is unreachable after the search
    q = deque([(0, 0)])  # start from (0, 0)
    while q:
        x, y = q.popleft()  # pop the current coordinates from the queue
        for i in range(4):
            nx, ny = x + dx[i], y + dy[i]  # neighbor coordinates: east, west, south, north
            if (0 <= nx < len(maps[0])) and (0 <= ny < len(maps)):  # check the neighbor stays inside the map on every step
                if (maps[ny][nx] == 1) and (visit[ny][nx] == -1):
                    # a cell is reachable if it is inside the map, not yet visited, and its map value is 1
                    visit[ny][nx] = visit[y][x] + 1  # visit stores the distance, so the next cell is the current distance plus 1
                    q.append((nx, ny))  # push the next coordinates onto the queue
return visit[-1][-1]
#map = [[1, 0, 1, 1, 1], [1, 0, 1, 0, 1], [1, 0, 1, 1, 1], [1, 1, 1, 0, 1], [0, 0, 0, 0, 1]]
#print(solution(map))
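# Illustrative self-check (an addition, not part of the original submission),
# using the example map commented out above; the expected answer is 11.
if __name__ == "__main__":
    example = [[1, 0, 1, 1, 1], [1, 0, 1, 0, 1], [1, 0, 1, 1, 1], [1, 1, 1, 0, 1], [0, 0, 0, 0, 1]]
    print(solution(example))  # expected output: 11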
| [
"[email protected]"
]
| |
9558078b495c9f41b5bcc8fde64f93bfb7668f33 | ec87c361be4a2f9f842695b6a6e8601ebd735e83 | /GuessNum.py | ee225ea9d692c0e4fc54bd50da1e7441a632581c | []
| no_license | heheddff/python2018 | 357d51bee7ea39f6a1df82101fb49c1568250e24 | 77a240dd155f679fffe33b62df57f689a8c85082 | refs/heads/master | 2020-03-27T23:13:38.789249 | 2018-12-08T14:55:21 | 2018-12-08T14:55:21 | 147,302,979 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | #GuessNum
import random as rd
target = rd.randint(1,1000)
count = 0
while True:
try:
        guess = int(input("Enter a guess (an integer from 1 to 1000): "))
    except ValueError:
        print('Invalid input, please try again; this attempt does not count!')
        continue
    count += 1
    if guess > target:
        print("Too high")
    elif guess < target:
        print("Too low")
    else:
        print("Correct!")
        break
print("Number of guesses this round: {}".format(count))
| [
"[email protected]"
]
| |
869f7c920d278ff777f63b8b1715c4a493ae22b4 | a3be3d5f13c7831d21b23c6fdc264d45fb9448a1 | /catkin_ws/src/action_server/src/sis_arm_pick_and_place_action.py | 6ab20c6d17240871da1fba020b564b80bcd3e0c3 | []
| no_license | VIMO-Robotics/mmbot | 8a70f7ee7b074c384883ec0200cf2b624c37dfe2 | dc004f2b870bf86798d9e1d06f1c8a0c47a4c060 | refs/heads/master | 2020-06-25T10:42:34.580709 | 2019-09-09T04:47:50 | 2019-09-09T04:47:50 | 199,286,782 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,032 | py | #!/usr/bin/env python
import sys
import copy
import rospy
import moveit_commander
import moveit_msgs.msg
from geometry_msgs.msg import PoseStamped, PointStamped
from std_msgs.msg import Float64, Bool
from action_server.msg import block_pick_and_placeAction, block_pick_and_placeResult, block_pick_and_placeFeedback
import actionlib
import tf
from ik_4dof import ik_solver
import numpy as np
class block_pick_and_place(object):
def __init__(self):
self.node_name = rospy.get_name()
# Initial
self.gripper_v = 1.23 # for cube
self.br = tf.TransformBroadcaster()
self.tf_listener = tf.TransformListener()
# Thread lock
moveit_commander.roscpp_initialize(sys.argv)
self.robot = moveit_commander.RobotCommander()
self.scene = moveit_commander.PlanningSceneInterface()
self.group = moveit_commander.MoveGroupCommander("arm")
display_trajectory_publisher = rospy.Publisher(
'/move_group/display_planned_path',
moveit_msgs.msg.DisplayTrajectory,
queue_size=1)
self.gripper_cmd = rospy.Publisher('/gripper_joint/command', Float64, queue_size=1)
self.pub_current_joint_state = rospy.Publisher('/pub_current_joint_state', Bool, queue_size=1)
self._as = actionlib.SimpleActionServer('block_pick_and_place_server', block_pick_and_placeAction, execute_cb=self.execute_cb, auto_start = False)
self._feedback = block_pick_and_placeFeedback()
self._result = block_pick_and_placeResult()
self._as.start()
# safe shutdown
rospy.on_shutdown(self.onShutdown)
rospy.loginfo("[%s] Initialized " %(rospy.get_name()))
self.group.allow_replanning(True)
self.group.set_pose_reference_frame("base_link")
self.pub_current_joints()
self.gripper_action(0)
self.home_pose()
def pub_current_joints(self):
msg = Bool(data=1)
self.pub_current_joint_state.publish(msg)
rospy.sleep(1)
msg.data = 0
self.pub_current_joint_state.publish(msg)
def execute_cb(self, goal):
rospy.loginfo("Goal Received !")
self.gripper_v = goal.object_size
if goal.mode == 0 or goal.mode == 1:
self.ready_pose(1)
print "========== ready_pose finished ==========\n"
self.stack_mode(goal.pick_pose, goal.place_pose)
elif goal.mode == 2:
self.classifier_mode(goal.pick_pose, goal.place_pose)
self.home_pose()
self._result.state = True
self._as.set_succeeded(self._result)
rospy.loginfo('block_pick_and_place: Succeeded')
def stack_mode(self,pick_pose, place_pose):
for i in range(len(pick_pose)):
print "========== pick_pose =========="
print [pick_pose[i].pose.position.x, pick_pose[i].pose.position.y, pick_pose[i].pose.position.z]
print "========== place_pose =========="
print [place_pose[i].pose.position.x, place_pose[i].pose.position.y, place_pose[i].pose.position.z]
self.pre_action_pose(copy.deepcopy(pick_pose[i]))
print "========== pre_action_pose finished =========="
self.action_pose(pick_pose[i])
print "========== Pick finished =========="
self.gripper_action(1)
print "========== grasp cube finished =========="
self.pre_action_pose(copy.deepcopy(pick_pose[i]))
print "========== pre_action_pose finished =========="
self.ready_pose(0)
print "========== ready_pose finished =========="
self.pre_action_pose(copy.deepcopy(place_pose[i]))
print "========== pre_action_pose finished =========="
self.action_pose(place_pose[i])
print "========== Place finished =========="
self.gripper_action(0)
print "========== Place cube finished =========="
self.pre_action_pose(copy.deepcopy(place_pose[i]))
print "========== pre_action_pose finished =========="
self.ready_pose(0)
print "========== ready_pose finished ==========\n"
def classifier_mode(self,pick_pose, place_pose):
for i in range(len(pick_pose)):
print "========== pick_pose =========="
print [pick_pose[i].pose.position.x, pick_pose[i].pose.position.y, pick_pose[i].pose.position.z]
print "========== place_pose =========="
print [place_pose[i].pose.position.x, place_pose[i].pose.position.y, place_pose[i].pose.position.z]
self.pre_action_pose(copy.deepcopy(pick_pose[i]))
print "========== pre_action_pose finished =========="
self.action_pose(pick_pose[i])
print "========== Pick finished =========="
self.gripper_action(1)
print "========== grasp cube finished =========="
self.pre_action_pose(copy.deepcopy(pick_pose[i]))
print "========== pre_action_pose finished =========="
self.ready_pose(0)
print "========== ready_pose finished =========="
self.pre_action_pose(copy.deepcopy(place_pose[i]))
print "========== pre_action_pose finished =========="
self.action_pose(place_pose[i])
print "========== Place finished =========="
self.gripper_action(0)
print "========== Place cube finished =========="
self.pre_action_pose(copy.deepcopy(place_pose[i]))
print "========== pre_action_pose finished =========="
self.ready_pose(2)
print "========== ready_pose finished ==========\n"
def home_pose(self):
self.execute_fk(0.7976700097005335, -2.1782527187976104, 2.188479257383515, 0.4601942363656924)
self.execute_fk(-0.02045307717180855, -2.162912910918754, 2.0657607943526637, 0.8)
def ready_pose(self, state):
if state == 0:
self.execute_fk(0,-0.3681553890925539,1.6055665579869711,1.6413594430376361)
elif state == 1:
# self.execute_fk(0.8130098175793898, -2.0197413707160945, 2.1782527187976104, 0.3425890426277932)
self.execute_fk(0.7976700097005335, -2.1782527187976104, 2.188479257383515, 0.4601942363656924)
self.execute_fk(0,-0.3681553890925539,1.6055665579869711,1.6413594430376361)
elif state == 2:
self.execute_fk(0,-0.3681553890925539,1.6055665579869711,1.6413594430376361)
self.execute_fk(0.7976700097005335, -2.1782527187976104, 2.188479257383515, 0.4601942363656924)
# rospy.sleep(1.5)
def pre_action_pose(self, pre_pose):
pre_pose.pose.position.z += 0.02
# print [pre_pose.pose.position.x, pre_pose.pose.position.y, pre_pose.pose.position.z]
self.find_ik_and_execute(pre_pose)
# rospy.sleep(1.5)
def action_pose(self, pose):
# print [pose.pose.position.x, pose.pose.position.y, pose.pose.position.z]
self.find_ik_and_execute(pose)
def execute_fk(self, theta_1, theta_2, theta_3, theta_4):
if rospy.is_shutdown():
            rospy.loginfo('%s: Finished' % self.node_name)
            self._as.set_preempted()
            return False
joint_values = self.group.get_current_joint_values()
joint_values[0] = theta_1
joint_values[1] = theta_2
joint_values[2] = theta_3
joint_values[3] = theta_4
self.group.set_joint_value_target(joint_values)
plan = self.group.plan()
return self.group.execute(plan,wait=True)
def gripper_action(self, state):
if state:
msg = Float64(data=self.gripper_v) #1.23
s_t = 1.5
else:
msg = Float64(data=0)
s_t = 2
self.gripper_cmd.publish(msg)
rospy.sleep(s_t)
def find_ik_and_execute(self, pose_transformed):
x = pose_transformed.pose.position.x
y = pose_transformed.pose.position.y
z = pose_transformed.pose.position.z
ik_candidate = ik_solver(x, y, z, -90)
# print "========== Find ",len(ik_candidate)," Plan =========="
if not np.isnan(ik_candidate.all()):
for theta_1, theta_2, theta_3, theta_4 in ik_candidate:
# while not rospy.is_shutdown():
try:
if self.execute_fk(theta_1, theta_2, theta_3, theta_4):
# rospy.loginfo("========== Execute Plan ==========")
# print [theta_1, theta_2, theta_3, theta_4]
break
except Exception as e:
# rospy.loginfo(e)
# print "------------- Failed -------------"
# print [theta_1, theta_2, theta_3, theta_4],"\n"
continue
else:
rospy.loginfo("========== Cannot Find Solution ==========")
self._result.state = False
self._as.set_aborted(self._result)
def onShutdown(self):
rospy.loginfo("[%s] Shutting down..." %self.node_name)
rospy.sleep(0.5) #To make sure that it gets published.
rospy.loginfo("[%s] Shutdown" %self.node_name)
if __name__ == '__main__':
rospy.init_node('block_pick_and_place',anonymous=False)
block_pick_and_place = block_pick_and_place()
rospy.on_shutdown(block_pick_and_place.onShutdown)
rospy.spin()
| [
"[email protected]"
]
| |
811c54e32c59559195243cf283c1baeaf6bea67e | 41ede4fd3bfba1bff0166bca7aee80dcf21434c6 | /ayhanyalcinsoy/Desktop/xfce/addon/thunar-archive-plugin/actions.py | 105c7d47862ca22da89c73e94b0087b806747df5 | []
| no_license | pisilinux/playground | a7db4b42559a21cc72fd4c8649e0231ab6a3eb3c | e4e12fff8a847ba210befc8db7e2af8556c3adf7 | refs/heads/master | 2022-08-12T23:03:27.609506 | 2022-08-11T18:28:19 | 2022-08-11T18:28:19 | 8,429,459 | 16 | 22 | null | 2022-08-11T18:28:20 | 2013-02-26T09:37:11 | Python | UTF-8 | Python | false | false | 683 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/copyleft/gpl.txt.
from pisi.actionsapi import autotools
from pisi.actionsapi import get
from pisi.actionsapi import pisitools
from pisi.actionsapi import shelltools
def setup():
shelltools.export("LDFLAGS", "%s -lgtk-x11-2.0 -lthunarx-2" % get.LDFLAGS())
autotools.configure("--disable-static \
--disable-dependency-tracking")
def build():
autotools.make()
def install():
autotools.rawInstall("DESTDIR=%s" % get.installDIR())
pisitools.dodoc("AUTHORS", "ChangeLog", "COPYING", "NEWS", "README")
| [
"[email protected]"
]
| |
e98d8dcea92717dc00bba19ef5d887b8d521e12e | ba949e02c0f4a7ea0395a80bdc31ed3e5f5fcd54 | /problems/dp/Solution115.py | aec8a435c4f15acf75a0391afcaed2b0f9481f66 | [
"MIT"
]
| permissive | akaliutau/cs-problems-python | 6bc0a74064f6e9687fe58b13763da1fdf2e1f626 | 9b1bd8e3932be62135a38a77f955ded9a766b654 | refs/heads/master | 2023-05-11T22:19:06.711001 | 2021-06-04T11:14:42 | 2021-06-04T11:14:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,170 | py | """ Given two strings s and t, return the number of distinct subsequences of s
which equals t. A string's subsequence is a new string formed from the
original string by deleting some (can be none) of the characters without
disturbing the relative positions of the remaining characters. (i.e., "ACE"
is a subsequence of "ABCDE" while "AEC" is not). It's guaranteed the answer
fits on a 32-bit signed integer.
Example 1: Input: s = "rabbbit", t = "rabbit" Output: 3
Explanation: As shown below, there are 3 ways you can
generate "rabbit" from S.
____ __
rabbbit
__ ____
rabbbit
___ ___
rabbbit
IDEA:
    let t = "ra", s = "ram"
if we have a sequence
[ra]
number of distinct subsequences, layer by layer:
i=0, t="", [""] ["r"] ["ra"] ["ram"]
\
i=1, t="r", [""]<-["r"]<-["r"]<- ["r"]
i=2, t="ra", [""]<-["r"]<-["r"]<- ["r"]
where
\ = use data from previous iteration, like this
["", r, a, ra] = [{"",r} + {"",r} * a]
<- = copy prev result-set
["", r, a, ra] --> ["", r, a, ra]
"""
class Solution115:
pass
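# Below is a minimal sketch of the layered DP described in the docstring; it is
# an illustrative addition (the original file left Solution115 empty), not the
# author's own implementation. dp[j] holds the number of distinct subsequences
# of the prefix of s processed so far that equal t[:j]; updating j from right to
# left lets a single 1-D table play the role of the layer-by-layer picture above.
def num_distinct(s, t):
    dp = [0] * (len(t) + 1)
    dp[0] = 1  # the empty target is matched exactly once by any prefix of s
    for ch in s:
        # reverse order so dp[j - 1] still refers to the previous layer
        for j in range(len(t), 0, -1):
            if t[j - 1] == ch:
                dp[j] += dp[j - 1]
    return dp[len(t)]
# e.g. num_distinct("rabbbit", "rabbit") == 3, matching the example above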
| [
"[email protected]"
]
| |
a92309f4c06a45e9fc8a12855d0fbe22d95c8feb | a0c53168a4bdcfb0aa917d6d2c602f0999443a10 | /projexui/widgets/xurlwidget.py | 40516a9749b83fb4e82b2ccb12a331191a731d1e | []
| no_license | kanooshka/DPS_PIPELINE | 8067154c59ca5c8c9c09740969bb6e8537021903 | df2fcdecda5bce98e4235ffddde1e99f334562cc | refs/heads/master | 2021-05-24T04:32:03.457648 | 2018-09-07T13:25:11 | 2018-09-07T13:25:11 | 29,938,064 | 3 | 2 | null | 2020-07-23T23:06:37 | 2015-01-27T22:26:01 | Python | UTF-8 | Python | false | false | 3,438 | py | """ Defines the XUrlWidget class """
# define authorship information
__authors__ = ['Eric Hulser']
__author__ = ','.join(__authors__)
__credits__ = []
__copyright__ = 'Copyright (c) 2011, Projex Software, LLC'
__license__ = 'LGPL'
__maintainer__ = 'Projex Software, LLC'
__email__ = '[email protected]'
import webbrowser
from projexui import qt
from PyQt4.QtCore import Qt
from PyQt4.QtGui import QWidget,\
QHBoxLayout,\
QIcon,\
QToolButton
from projexui.widgets.xlineedit import XLineEdit
from projexui import resources
class XUrlWidget(QWidget):
urlChanged = qt.Signal(str)
urlEdited = qt.Signal()
def __init__( self, parent ):
super(XUrlWidget, self).__init__(parent)
# define the interface
self._urlEdit = XLineEdit(self)
self._urlButton = QToolButton(self)
self._urlButton.setAutoRaise(True)
self._urlButton.setIcon(QIcon(resources.find('img/web.png')))
self._urlButton.setToolTip('Browse Link')
self._urlButton.setFocusPolicy(Qt.NoFocus)
self._urlEdit.setHint('http://')
layout = QHBoxLayout()
layout.setContentsMargins(0, 0, 0, 0)
layout.setSpacing(0)
layout.addWidget(self._urlEdit)
layout.addWidget(self._urlButton)
self.setLayout(layout)
self.setFocusPolicy(Qt.StrongFocus)
# create connections
self._urlEdit.textChanged.connect(self.urlChanged)
self._urlEdit.textEdited.connect(self.urlEdited)
self._urlButton.clicked.connect(self.browse)
def blockSignals( self, state ):
"""
Blocks the signals for this widget and its sub-parts.
:param state | <bool>
"""
super(XUrlWidget, self).blockSignals(state)
self._urlEdit.blockSignals(state)
self._urlButton.blockSignals(state)
def browse( self ):
"""
Brings up a web browser with the address in a Google map.
"""
webbrowser.open(self.url())
def hint( self ):
"""
Returns the hint associated with this widget.
:return <str>
"""
return self._urlEdit.hint()
def lineEdit( self ):
"""
Returns the line edit linked with this widget.
:return <XLineEdit>
"""
return self._urlEdit
def setFocus(self):
"""
Sets the focus for this widget on its line edit.
"""
self._urlEdit.setFocus()
@qt.Slot(str)
def setHint( self, hint ):
"""
Sets the hint associated with this widget.
:param hint | <str>
"""
self._urlEdit.setHint(hint)
@qt.Slot(str)
def setUrl( self, url ):
"""
Sets the url for this widget to the inputed url.
:param url | <str>
"""
self._urlEdit.setText(str(url))
def url( self ):
"""
Returns the current url from the edit.
:return <str>
"""
return str(self._urlEdit.text())
x_hint = qt.Property(str, hint, setHint)
x_url = qt.Property(str, url, setUrl)
__designer_plugins__ = [XUrlWidget] | [
"[email protected]"
]
| |
1401a17efdbfb7b2ff484178a6944d5e373dd1f7 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03488/s626861342.py | 65eae1c60abd7285830f846f3f83e1f7681f124f | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,207 | py | s=input()
X,Y=map(int,input().split())
from collections import defaultdict
def solve(da,dp,G):
for i in range(len(da)):
tmp=set()
for j in dp:
tmp.add(j-da[i])
tmp.add(j+da[i])
dp=tmp
if G in dp:
return True
else:
return False
"""amax=0
for i in a:
amax+=abs(i)
if amax==0:
if G==0:
return True
else:
return False
if a[0]==G:
return True
dp=[[0]*2*amax for _ in range(3)]
dp[0][a[0]]=1
for i in range(1,len(a)):
p=a[i]
for j in range(-amax,amax):
dp[i%3][j-p]+=dp[(i-1)%3][j]
dp[i%3][j+p]+=dp[(i-1)%3][j]
#print(dp)
if dp[i%3][G]>=1:
return True
return False"""
"""#print(a)
dp=set()
dp.add(a[0])
dp2=set()
for i in range(1,len(a)):
for j in dp:
dp2.add(j-a[i])
dp2.add(j+a[i])
dp=copy.deepcopy(dp2)
#print(dp)
if G in dp:
return True
return False"""
d=[len(x) for x in s.split("T")]
dx=d[2::2]
dy=d[1::2]
if solve(dx,{d[0]},X) and solve(dy,{0},Y):
print("Yes")
else:
print("No")
| [
"[email protected]"
]
| |
e87293f0dbca65cf9e8eb987d30bb7c5e8ed590e | 29b1b15e4fef90717ff7bf8b13ab9a23cdc17c51 | /finalproduct/testapp/migrations/0003_comments.py | 24bc341b3f011285e4bdec6c57d8b347120c9b42 | []
| no_license | deepawalekedar319/DjangoProjects | 93fe59812593a1e1b8f542c8c5b1642bc95f6da4 | 1780b703a3022ea17dc188ad98b0f17bb14fa12f | refs/heads/main | 2023-09-03T04:48:21.201822 | 2021-11-08T05:28:00 | 2021-11-08T05:28:00 | 425,706,071 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,135 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2020-10-31 13:23
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('testapp', '0002_auto_20201027_1439'),
]
operations = [
migrations.CreateModel(
name='Comments',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=32)),
('email', models.EmailField(max_length=254)),
('body', models.TextField()),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('active', models.BooleanField(default=True)),
('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='testapp.Post')),
],
options={
'ordering': ('-created',),
},
),
]
| [
"[email protected]"
]
| |
5631077333222d3476b68d4a173ce9d25f7396be | caf8cbcafd448a301997770165b323438d119f5e | /.history/mercari/mercari_search_20201124184144.py | 21031f8d0dc5f3408e41cd1680d9f13d31082892 | [
"MIT"
]
| permissive | KustomApe/nerdape | 03e0691f675f13ce2aefa46ee230111247e90c72 | aef6fb2d1f8c364b26d91bf8570b4487a24de69a | refs/heads/main | 2023-01-23T10:13:26.584386 | 2020-11-28T22:29:49 | 2020-11-28T22:29:49 | 309,897,105 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,776 | py | from selenium import webdriver
from selenium.webdriver.support.ui import Select
import pandas as pd
import re
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import PyQt5
import time
"""[Initial Settings]
初期設定
"""
options = webdriver.ChromeOptions()
options.add_argument('--headless')
options.add_argument('--disable-gpu')
options.add_argument('--lang=ja')
browser = webdriver.Chrome(chrome_options=options, executable_path='./chromedriver')
"""[CSS Selector Settings]
CSSセレクターの設定
"""
PAGER = "li.pager-next"
# Full pager link as it appears in the page markup:
# body > div.default-container > main > div.l-content > ul > li.pager-next.visible-pc > ul > li:nth-child(1) > a
word = input("Enter a keyword to search for: ")
n = 1
df_main = pd.DataFrame(columns=['SOLD', 'TITLE', 'PRICE', 'URL'])
df_graf = pd.DataFrame(columns=['SOLD', 'PRICE'])
while True:
    browser.get("https://www.mercari.com/jp/search/?page=" + str(n) + "&keyword=" + word)
    item_boxlist = browser.find_elements_by_css_selector(".items-box")
    for item_box in item_boxlist:
        try:
            if len(item_box.find_elements_by_css_selector(".item-sold-out-badge")) > 0:
                sold = "SOLD"
            else:
                sold = "NOT SOLD"
            sub_title = item_box.find_element_by_class_name("items-box-body")
            title = sub_title.find_element_by_tag_name("h3").text
            item_price = item_box.find_element_by_css_selector(".items-box-price")
            price_text = item_price.text
            price_text = re.sub(r",", "", price_text).lstrip("¥ ")
            price_text_int = int(price_text)
            print(price_text_int)
            url = item_box.find_element_by_tag_name("a").get_attribute("href")
            data = pd.Series([sold, title, price_text_int, url], index=df_main.columns)
            grdata = pd.Series([sold, price_text_int], index=df_graf.columns)
            df_main = df_main.append(data, ignore_index=True)
            df_graf = df_graf.append(grdata, ignore_index=True)
        except Exception as e:
            print(e)
    pager_links = browser.find_elements_by_css_selector(PAGER + " a")
    if not pager_links:
        print('No items anymore...')
        break
    n += 1
    print('next url:{}'.format(pager_links[0].get_attribute('href')))
    time.sleep(3)
    print('Moving to next page...')
print(df_main)
sns.stripplot(x='SOLD', y='PRICE', data=df_graf)
plt.show()
sns.pairplot(df_graf,hue="SOLD")
plt.show()
print('Writing out to CSV file...')
df_main.to_csv("pricedata.csv", encoding="utf_8_sig")
print("Done") | [
"[email protected]"
]
| |
64cfaf128c32f6121b1d5cd6194329ba27f2532b | c0792645c156cb9e20a1aa2b28c565150358bc6e | /apps/inmueble/migrations/0007_auto_20180413_2159.py | 67c3d2b9cf04f9cc969b9db636a4659a8eea6221 | []
| no_license | clioo/Praver | b22fd92886e0399845adb4366663cae6a7d7853b | 523f0d78e0a2039a5bae3e539c93e2c2415a0840 | refs/heads/master | 2020-03-11T12:38:54.272392 | 2018-06-28T18:24:21 | 2018-06-28T18:24:21 | 130,003,043 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 636 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2018-04-14 03:59
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('inmueble', '0006_imagenesinmbueble'),
]
operations = [
migrations.AlterField(
model_name='inmueble',
name='latitud',
field=models.CharField(blank=True, max_length=100),
),
migrations.AlterField(
model_name='inmueble',
name='longitud',
field=models.CharField(blank=True, max_length=100),
),
]
| [
"[email protected]"
]
|