Dataset schema (one record per source file; ⌀ marks nullable columns):
blob_id: string (length 40) | directory_id: string (length 40) | path: string (3 to 616) | content_id: string (length 40) | detected_licenses: sequence (0 to 112) | license_type: string (2 classes) | repo_name: string (5 to 115) | snapshot_id: string (length 40) | revision_id: string (length 40) | branch_name: string (777 classes) | visit_date: timestamp[us] (2015-08-06 10:31:46 to 2023-09-06 10:44:38) | revision_date: timestamp[us] (1970-01-01 02:38:32 to 2037-05-03 13:00:00) | committer_date: timestamp[us] (1970-01-01 02:38:32 to 2023-09-06 01:08:06) | github_id: int64 (4.92k to 681M, ⌀) | star_events_count: int64 (0 to 209k) | fork_events_count: int64 (0 to 110k) | gha_license_id: string (22 classes) | gha_event_created_at: timestamp[us] (2012-06-04 01:52:49 to 2023-09-14 21:59:50, ⌀) | gha_created_at: timestamp[us] (2008-05-22 07:58:19 to 2023-08-21 12:35:19, ⌀) | gha_language: string (149 classes) | src_encoding: string (26 classes) | language: string (1 class) | is_vendor: bool | is_generated: bool | length_bytes: int64 (3 to 10.2M) | extension: string (188 classes) | content: string (3 to 10.2M) | authors: sequence (length 1) | author_id: string (1 to 132)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a1c8c70909a8fc1c7ae9cd1d3c59271506fb61df | cb73fe89463892c8c147c6995e220f5b1635fabb | /AtCoder Beginner Contest 174/q3.py | ac39b97c6a5853fd19f555ce923192f306771a27 | [] | no_license | Haraboo0814/AtCoder | 244f6fd17e8f6beee2d46fbfaea6a8e798878920 | 7ad794fd85e8d22d4e35087ed38f453da3c573ca | refs/heads/master | 2023-06-15T20:08:37.348078 | 2021-07-17T09:31:30 | 2021-07-17T09:31:30 | 254,162,544 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,004 | py | import sys
from io import StringIO
import unittest
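# resolve() builds the numbers 7, 77, 777, ... incrementally via the
# recurrence x -> (10*x + 7) mod k, checking the remainder at each step.
# By the pigeonhole principle a remainder must repeat within k steps, so if
# no multiple of k is found after k iterations none exists and -1 is printed.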
def resolve():
k = int(input())
x = 7 % k
for i in range(1, k + 1):
if x == 0:
print(i)
return
x = (x * 10 + 7) % k
print(-1)
class TestClass(unittest.TestCase):
def assertIO(self, input, output):
stdout, stdin = sys.stdout, sys.stdin
sys.stdout, sys.stdin = StringIO(), StringIO(input)
resolve()
sys.stdout.seek(0)
out = sys.stdout.read()[:-1]
sys.stdout, sys.stdin = stdout, stdin
self.assertEqual(out, output)
def test_入力例_1(self):
input = """101"""
output = """4"""
self.assertIO(input, output)
def test_入力例_2(self):
input = """2"""
output = """-1"""
self.assertIO(input, output)
def test_入力例_3(self):
input = """999983"""
output = """999982"""
self.assertIO(input, output)
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
06aedfb3216e2eaac1c2235aa66ef85e9bef765f | f9318e10a48e9e87f7c1537c8f2e647dda677e4c | /octopus/modules/account/account.py | f4cf289eb6fd78bae031846b8ce19af629b05fda | [
"Apache-2.0"
] | permissive | JiscPER/magnificent-octopus | 250579020ead7a8d8424e96a8c3e811b12cc4d90 | e1be9a5f779a38122c5ad9d1d5365bb9a1dc2cf1 | refs/heads/master | 2023-07-20T00:27:26.018800 | 2016-01-22T16:07:24 | 2016-01-22T16:07:24 | 46,710,009 | 0 | 1 | NOASSERTION | 2023-07-10T11:22:43 | 2015-11-23T09:29:26 | Python | UTF-8 | Python | false | false | 10,571 | py | import uuid, json
from flask import Blueprint, request, url_for, flash, redirect, make_response
from flask import render_template, abort
from flask.ext.login import login_user, logout_user, current_user, login_required
from octopus.core import app
from octopus.lib.webapp import ssl_required, request_wants_json, flash_with_url, is_safe_url
from octopus.lib import mail
from octopus.modules.account.factory import AccountFactory
from octopus.modules.account import exceptions
blueprint = Blueprint('account', __name__)
@app.login_manager.user_loader
def load_account_for_login_manager(userid):
from octopus.modules.account.factory import AccountFactory
acc = AccountFactory.get_model().pull(userid)
return acc
def get_redirect_target(form=None):
form_target = ''
if form and hasattr(form, 'next') and getattr(form, 'next'):
form_target = form.next.data
for target in form_target, request.args.get('next', []):
if not target:
continue
        if is_safe_url(target):
return target
return url_for(app.config.get("ACCOUNT_LOGIN_REDIRECT_ROUTE", "index"))
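# Only a ?next= target that passes is_safe_url() is honoured, which keeps a
# crafted redirect parameter from turning the login flow into an open redirect.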
def _do_login(user):
return login_user(user, remember=True)
def _do_logout():
logout_user()
@blueprint.route('/login', methods=['GET', 'POST'])
@ssl_required
def login():
# current_info = {'next': request.args.get('next', '')}
fc = AccountFactory.get_login_formcontext(request.form)
if request.method == 'POST':
if fc.validate():
password = fc.form.password.data
email = fc.form.email.data
Account = AccountFactory.get_model()
try:
user = Account.pull_by_email(email)
except exceptions.NonUniqueAccountException:
flash("Permanent Error: unable to log you in with these credentials - please contact an administrator", "error")
return fc.render_template()
if user is not None:
if not user.can_log_in():
flash('Invalid credentials', 'error')
return fc.render_template()
if user.check_password(password):
inlog = _do_login(user)
if not inlog:
flash("Problem logging in", "error")
return fc.render_template()
else:
flash('Welcome back.', 'success')
return redirect(get_redirect_target(form=fc.form))
else:
flash('Incorrect username/password', 'error')
return fc.render_template()
else:
flash('Incorrect username/password', 'error')
return fc.render_template()
else:
flash('Invalid credentials', 'error')
return fc.render_template()
@blueprint.route('/logout')
@ssl_required
def logout():
_do_logout()
flash('You are now logged out', 'success')
return redirect(url_for(app.config.get("ACCOUNT_LOGOUT_REDIRECT_ROUTE", "index")))
@blueprint.route('/<username>', methods=['GET', 'POST', 'DELETE'])
@login_required
@ssl_required
def username(username):
Account = AccountFactory.get_model()
acc = Account.pull(username)
if acc is None:
try:
acc = Account.pull_by_email(username)
except exceptions.NonUniqueAccountException:
flash("Permanent Error: these user credentials are invalid - please contact an administrator", "error")
            return redirect(url_for("account.logout"))
if acc is None:
abort(404)
    # actions on this page are only available to the actual user, or a user with the edit-users role
    if current_user.id != acc.id and not current_user.has_role(app.config.get("ACCOUNT_EDIT_USERS_ROLE")):
abort(401)
# if this is a request for the user page, just render it
if request.method == "GET":
fc = AccountFactory.get_user_formcontext(acc)
return fc.render_template()
is_delete = request.method == "DELETE" or (request.method == "POST" and request.values.get("submit", False) == "Delete")
if is_delete:
# validate the delete
if not current_user.check_password(request.values.get("password")):
flash("Incorrect password", "error")
fc = AccountFactory.get_user_formcontext(acc=acc)
return fc.render_template()
# if the password validates, go ahead and do it
acc.remove() # Note we don't use the DAO's delete method - this allows the model to decide the delete behaviour
_do_logout()
flash('Account {x} deleted'.format(x=username), "success")
return redirect(url_for(app.config.get("ACCOUNT_LOGOUT_REDIRECT_ROUTE", "index")))
if request.method == "POST":
fc = AccountFactory.get_user_formcontext(acc=acc, form_data=request.form)
# attempt to validate the form
if not fc.validate():
flash("There was a problem when submitting the form", "error")
return fc.render_template()
# if the form validates, then check the legality of the submission
try:
fc.legal()
except exceptions.AccountException as e:
flash(e.message, "error")
return fc.render_template()
# if we get to here, then update the user record
fc.finalise()
# tell the user that everything is good
flash("Account updated", "success")
# end with a redirect because some details have changed
return redirect(url_for("account.username", username=fc.target.email))
@blueprint.route('/forgot', methods=['GET', 'POST'])
@ssl_required
def forgot():
if request.method == "GET":
fc = AccountFactory.get_forgot_formcontext()
return fc.render_template()
if request.method == 'POST':
fc = AccountFactory.get_forgot_formcontext(form_data=request.form)
# attempt to validate the form
if not fc.validate():
flash("There was a problem when submitting the form", "error")
return fc.render_template()
# call finalise on the context, to trigger the reset process
try:
fc.finalise()
except exceptions.NonUniqueAccountException:
flash("Permanent Error: cannot reset password for this account - please contact an administrator", "error")
return fc.render_template()
except exceptions.AccountNotFoundException:
flash('Your account email address is not recognised.', 'error')
return fc.render_template()
except exceptions.AccountException:
flash("Unable to reset the password for this account", "error")
return fc.render_template()
# if we get to here, reset was successful, so we should redirect the user
return redirect(url_for(app.config.get("ACCOUNT_FORGOT_REDIRECT_ROUTE", "account.forgot_pending")))
@blueprint.route("/forgot-pending", methods=["GET"])
@ssl_required
def forgot_pending():
return render_template("account/forgot_pending.html")
@blueprint.route("/reset/<reset_token>", methods=["GET", "POST"])
@ssl_required
def reset(reset_token):
Account = AccountFactory.get_model()
acc = Account.get_by_reset_token(reset_token)
if acc is None:
abort(404)
if not acc.can_log_in():
abort(404)
if request.method == "GET":
fc = AccountFactory.get_reset_formcontext(acc)
return fc.render_template()
elif request.method == "POST":
fc = AccountFactory.get_reset_formcontext(acc, request.form)
if not fc.validate():
flash("There was a problem with your form", "error")
return fc.render_template()
# if the form is good, finalise the user's password change
fc.finalise()
# log the user in
_do_login(acc)
flash("Password has been reset and you have been logged in", "success")
return redirect(url_for(app.config.get("ACCOUNT_LOGIN_REDIRECT_ROUTE", "index")))
@blueprint.route('/')
@login_required
@ssl_required
def index():
if not app.config.get("ACCOUNT_LIST_USERS", False):
abort(404)
if not current_user.has_role(app.config.get("ACCOUNT_LIST_USERS_ROLE", "list_users")):
abort(401)
return render_template('account/users.html')
@blueprint.route('/register', methods=['GET', 'POST'])
@ssl_required
def register():
# access to registration may not be for the public
if current_user.is_anonymous() and not app.config.get("ACCOUNT_ALLOW_REGISTER", False):
abort(404)
if request.method == "GET":
fc = AccountFactory.get_register_formcontext()
return fc.render_template()
elif request.method == "POST":
fc = AccountFactory.get_register_formcontext(request.form)
if not fc.validate():
flash("There was a problem with your form", "error")
return fc.render_template()
# if the form validates, then check the legality of the submission
try:
fc.legal()
except exceptions.AccountException as e:
flash(e.message, "error")
return fc.render_template()
# if we get to here, then create the user record
fc.finalise()
# tell the user that everything is good
flash("Account created - activation token sent", "success")
# redirect to the appropriate next page
return redirect(url_for(app.config.get("ACCOUNT_REGISTER_REDIECT_ROUTE")))
@blueprint.route("/activate/<activation_token>", methods=["GET", "POST"])
@ssl_required
def activate(activation_token):
account = AccountFactory.get_model().get_by_activation_token(activation_token)
if account is None:
abort(404)
if not account.can_log_in():
abort(404)
if request.method == "GET":
fc = AccountFactory.get_activate_formcontext(account)
return fc.render_template()
elif request.method == "POST":
fc = AccountFactory.get_activate_formcontext(account, request.form)
if not fc.validate():
flash("There was a problem with your form", "error")
return fc.render_template()
# if the form is good, finalise the user's password change
fc.finalise()
# log the user in
_do_login(account)
flash("Your account has been activated and you have been logged in", "success")
return redirect(url_for(app.config.get("ACCOUNT_LOGIN_REDIRECT_ROUTE", "index")))
| [
"[email protected]"
] | |
f480fc8f2b9e68eea63991f2c1e899917ba3f613 | c5148bc364dac753c0872bd5676027a30b260486 | /build/lib/biosteam/units/decorators/_design.py | bd20530d6c47562af76bc4bada9d063ee6920593 | [
"MIT",
"NCSA",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Ecoent/biosteam | 86f47c713a2cae5d6261b6c2c7734ccf7a90fb4e | f1371386d089df3aa8ce041175f210c0318c1fe0 | refs/heads/master | 2021-02-24T14:10:23.158984 | 2020-03-05T03:43:17 | 2020-03-05T03:43:17 | 245,433,768 | 1 | 0 | NOASSERTION | 2020-03-06T13:59:27 | 2020-03-06T13:59:26 | null | UTF-8 | Python | false | false | 4,647 | py | # -*- coding: utf-8 -*-
"""
Created on Mon May 6 17:19:41 2019
@author: yoelr
"""
from thermosteam.base import stream_units_of_measure

__all__ = ('design',)
# %% Design Center class
def _design(self):
D = self.design_results
U = self._units
for i, j in self._design_basis_: D[i] = j(self, U[i])
class DesignCenter:
"""Create a DesignCenter object that manages all design basis functions. When called, it returns a Unit class decorator that adds a design item to the given Unit class."""
__slots__ = ('design_basis_functions',)
def __init__(self):
self.design_basis_functions = {}
def define(self, design_basis):
"""Define a new design basis.
Parameters
----------
design_basis : function
Should accept the unit_object and the units_of_measure and return design basis value.
.. Note::
Design basis is registered with the name of the design basis function.
"""
name = design_basis.__name__.replace('_', ' ').capitalize()
functions = self.design_basis_functions
if name in functions: raise ValueError(f"design basis '{name}' already implemented")
functions[name] = design_basis
return design_basis
def __call__(self, name, units, fsize=None):
"""Return a Unit class decorator that adds a size/design requirement to the class.
Parameters
----------
name : str
Name of design item.
units : str
Units of measure of design item.
fsize : function
Should return design item given the Unit object. If None, defaults to function predefined for given name and units.
"""
return lambda cls: self._add_design2cls(cls, name, units, fsize)
def _add_design2cls(self, cls, name, units, fsize):
"""Add size/design requirement to class.
Parameters
----------
cls : Unit class.
name : str
Name of design item.
units : str
Units of measure of design item.
fsize : function
Should return design item given the Unit object. If None, defaults to function predefined for given name and units.
Examples
--------
:doc:`Unit decorators`
"""
f = fsize or self.design_basis_functions[name.capitalize()]
# Make sure new _units dictionary is defined
if not cls._units:
cls._units = {}
elif '_units' not in cls.__dict__:
cls._units = cls._units.copy()
# Make sure design basis is not defined
if name in cls._units:
raise RuntimeError(f"design basis '{name}' already defined in class")
else:
cls._units[name] = units
# Add design basis
if cls._design is _design:
cls._design_basis_.append((name, f))
elif '_design' in cls.__dict__:
raise RuntimeError("'_design' method already implemented")
else:
cls._design_basis_ = [(name, f)]
cls._design = _design
return cls
    # DesignCenter declares __slots__, so instances have no __dict__;
    # membership tests and iteration must go through the registry itself.
    def __contains__(self, basis):
        return basis in self.design_basis_functions

    def __iter__(self):
        yield from self.design_basis_functions
def __repr__(self):
return f"<{type(self).__name__}: {', '.join(self)}>"
# %% Design factories
design = DesignCenter() #: Used to decorate classes with new design item
@design.define
def flow_rate(self, units):
if self._N_ins == 1:
return self._ins[0].get_total_flow(units)
elif self._N_outs == 1:
return self._outs[0].get_total_flow(units)
elif self._N_ins < self._N_outs:
return sum([i.get_total_flow(units) for i in self._ins])
else:
return sum([i.get_total_flow(units) for i in self._outs])
H_units = stream_units_of_measure['H']
@design.define
def duty(self, units):
self._duty = duty = self.H_out - self.H_in
self.heat_utilities[0](duty, self.ins[0].T, self.outs[0].T)
return H_units.conversion_factor(units) * duty
@design.define
def dry_flow_rate(self, units):
ins = self._ins
flow_in = sum([i.get_total_flow(units) for i in ins])
moisture = sum([i.get_flow(units, IDs='7732-18-5') for i in ins])
return flow_in - moisture
del flow_rate, duty, dry_flow_rate
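# A minimal usage sketch (the Mixer class below is hypothetical, not part of
# this module): decorating a Unit subclass registers a design requirement that
# the predefined 'Flow rate' basis computes during simulation.
#
#     from biosteam import Unit
#
#     @design('Flow rate', 'kg/hr')
#     class Mixer(Unit):
#         _N_ins = 2
#         _N_outs = 1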
| [
"[email protected]"
] | |
a54031a091dc8ef18fbd886a0d9a5de58b59c0fa | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03326/s432961114.py | 004e333b7354de53b77847c25af19fb0cad2aa12 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 595 | py | n,m = map(int, input().split())
ab = [list(map(int, input().split())) for _ in range(n)]
import itertools
nums = [0,1]
ll = list(itertools.product(nums,repeat=3))
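# Each of the 2^3 tuples in ll fixes a sign for the x, y and z components.
# For a fixed sign pattern, scoring every item by (+-x) + (+-y) + (+-z) and
# summing the m largest scores gives |sum x| + |sum y| + |sum z| for the best
# choice of m items under that pattern; taking the max over all 8 patterns
# yields the answer.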
res = 0
for i in ll:
temp =[]
for j in range(n):
if i[0]==0:
x = ab[j][0]
else:
x = -ab[j][0]
if i[1]==0:
y = ab[j][1]
else:
y = -ab[j][1]
if i[2] ==0:
z = ab[j][2]
else:
z = -ab[j][2]
temp.append(x+y+z)
tempp =list(sorted(temp,reverse=True))
res = max(res,sum(tempp[:m]))
print(res)
| [
"[email protected]"
] | |
6f0fb9c49884e780a516de1d650e4a68ef9638bb | 8937c4d452c98699610923f76a395a2247f576df | /demo/download_demo_data.py | 849791f18789882a992f77b515e00bad9d872f31 | [] | no_license | mistycheney/MouseBrainAtlas | 812b204af06ed303f3c12d5c81edef50c8d9d1ed | bffbaa1ede9297084e64fc197716e63d5cb54275 | refs/heads/master | 2020-04-11T13:44:09.632311 | 2018-11-20T22:32:15 | 2018-11-20T22:32:15 | 20,377,173 | 3 | 9 | null | 2017-03-15T19:39:27 | 2014-06-01T12:42:08 | Jupyter Notebook | UTF-8 | Python | false | false | 3,974 | py | #! /usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.environ['REPO_DIR'], 'utilities'))
from utilities2015 import *
from metadata import *
from data_manager import *
import argparse
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description='This script downloads input data for demo.')
parser.add_argument("-d", "--demo_data_dir", type=str, help="Directory to store demo input data", default='demo_data')
args = parser.parse_args()
# demo_data_dir = '/home/yuncong/Brain/demo_data/'
def download_to_demo(fp):
demo_data_dir = args.demo_data_dir
s3_http_prefix = 'https://s3-us-west-1.amazonaws.com/mousebrainatlas-data/'
url = s3_http_prefix + fp
demo_fp = os.path.join(demo_data_dir, fp)
execute_command('wget -N -P \"%s\" \"%s\"' % (os.path.dirname(demo_fp), url))
return demo_fp
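# wget's -N flag enables timestamping: a file already present locally is only
# re-downloaded when the copy on S3 is newer, so repeated runs are cheap.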
##### For registration demo. #####
fp = DataManager.get_sorted_filenames_filename(stack='DEMO999')
rel_fp = relative_to_local(fp, local_root=DATA_ROOTDIR)
download_to_demo(rel_fp)
fp = DataManager.get_anchor_filename_filename(stack='DEMO999')
rel_fp = relative_to_local(fp, local_root=DATA_ROOTDIR)
anchor_fp_demo = download_to_demo(rel_fp)
anchor_fn = DataManager.load_data(anchor_fp_demo, filetype='anchor')
fp = DataManager.get_section_limits_filename_v2(stack='DEMO999', anchor_fn=anchor_fn)
rel_fp = relative_to_local(fp, local_root=DATA_ROOTDIR)
download_to_demo(rel_fp)
fp = DataManager.get_cropbox_filename_v2(stack='DEMO999', prep_id=2, anchor_fn=anchor_fn)
rel_fp = relative_to_local(fp, local_root=DATA_ROOTDIR)
download_to_demo(rel_fp)
download_to_demo(os.path.join('CSHL_simple_global_registration', 'DEMO999_T_atlas_wrt_canonicalAtlasSpace_subject_wrt_wholebrain_atlasResol.bp'))
# Download subject detection maps
for name_s in ['3N_R', '4N_R', '12N']:
fp = DataManager.get_score_volume_filepath_v3(stack_spec={'name':'DEMO999', 'detector_id':799, 'resolution':'10.0um', 'vol_type':'score'}, structure=name_s)
rel_fp = relative_to_local(fp, local_root=ROOT_DIR)
download_to_demo(rel_fp)
fp = DataManager.get_score_volume_origin_filepath_v3(stack_spec={'name':'DEMO999', 'detector_id':799, 'resolution':'10.0um', 'vol_type':'score'}, structure=name_s, wrt='wholebrain')
rel_fp = relative_to_local(fp, local_root=ROOT_DIR)
download_to_demo(rel_fp)
# Download atlas
for name_s in ['3N_R', '4N_R', '3N_R_surround_200um', '4N_R_surround_200um','12N', '12N_surround_200um']:
fp = DataManager.get_score_volume_filepath_v3(stack_spec={'name':'atlasV7', 'resolution':'10.0um', 'vol_type':'score'}, structure=name_s)
rel_fp = relative_to_local(fp, local_root=ROOT_DIR)
download_to_demo(rel_fp)
fp = DataManager.get_score_volume_origin_filepath_v3(stack_spec={'name':'atlasV7', 'resolution':'10.0um', 'vol_type':'score'}, structure=name_s, wrt='canonicalAtlasSpace')
rel_fp = relative_to_local(fp, local_root=ROOT_DIR)
download_to_demo(rel_fp)
##### For visualization demo. #####
# Download images
for sec in range(221, 238):
fp = DataManager.get_image_filepath_v2(stack='DEMO999', prep_id=2, resol='raw', version='NtbNormalizedAdaptiveInvertedGammaJpeg', section=sec)
rel_fp = relative_to_local(fp, local_root=DATA_ROOTDIR)
download_to_demo(rel_fp)
fp = DataManager.get_original_volume_filepath_v2(stack_spec={'name':'DEMO999', 'resolution':'10.0um', 'vol_type':'intensity', 'prep_id':'wholebrainWithMargin'}, structure=None)
rel_fp = relative_to_local(fp, local_root=ROOT_DIR)
download_to_demo(rel_fp)
fp = DataManager.get_original_volume_origin_filepath_v3(stack_spec={'name':'DEMO999', 'resolution':'10.0um', 'vol_type':'intensity', 'prep_id':'wholebrainWithMargin'}, structure=None)
rel_fp = relative_to_local(fp, local_root=ROOT_DIR)
download_to_demo(rel_fp)
download_to_demo(os.path.join('CSHL_simple_global_registration', 'DEMO999_registered_atlas_structures_wrt_wholebrainXYcropped_xysecTwoCorners.json'))
| [
"[email protected]"
] | |
e5d11a77ce1c989514ae0215922f88d8f5fb9851 | 137b2969323f7fb20cf4b72bf24ff98339a410c8 | /tutorial/2/cal_pi_python.py | c9c4f802a962d65871cd0ef97a9fb7dd63ca74cd | [] | no_license | kangheeyong/STUDY-make-lda-lib | c91b6d9a5bff7b25a0c00b63e61f93fbd60d2a8e | 0f658032cf63922a0a5fc2fb5f34b8f22e97c94f | refs/heads/master | 2021-02-12T06:27:27.900312 | 2020-04-03T10:56:21 | 2020-04-03T10:56:21 | 244,568,658 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 234 | py | # calc_pi.py
def recip_square(i):
return 1. / i ** 2
def approx_pi(n=10000000):
val = 0.
for k in range(1, n + 1):
val += recip_square(k)
return (6 * val) ** .5
if __name__ == '__main__':
approx_pi()
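# approx_pi() sums the Basel series sum(1/k^2) = pi^2/6, so (6*val)**0.5
# converges to pi. This pure-Python version is the tutorial's baseline; one
# way to see where the time goes (standard library only):
#     python -m cProfile -s cumulative cal_pi_python.py
# Nearly all of it is spent in the ten million recip_square() calls, which is
# exactly what a typed Cython rewrite eliminates.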
| [
"[email protected]"
] | |
879b16bbb01c016b6e984839fa4a918be7adb5cf | f8e8e365c9cf58b61d72655bc2340baeaed5baff | /Leetcode/Python Solutions/Stack/largestRectangleinHistogram.py | 39ee39bff0455b2a950049c9ecd1446682f4e05b | [
"MIT"
] | permissive | Mostofa-Najmus-Sakib/Applied-Algorithm | 39a69f6b9ed113efe4a420d19cad79e0aa317637 | bc656fd655617407856e0ce45b68585fa81c5035 | refs/heads/master | 2023-08-31T19:54:34.242559 | 2021-11-05T03:43:35 | 2021-11-05T03:43:35 | 412,263,430 | 0 | 0 | MIT | 2021-09-30T23:45:29 | 2021-09-30T23:45:25 | null | UTF-8 | Python | false | false | 641 | py | """
LeetCode Problem: 84. Largest Rectangle in Histogram
Link: https://leetcode.com/problems/largest-rectangle-in-histogram/
Language: Python
Written by: Mostofa Adib Shakib
Time Complexity: O(N)
Space Complexity: O(N)
"""
from typing import List

class Solution:
def largestRectangleArea(self, heights: List[int]) -> int:
heights.append(0)
stack = [-1]
ans = 0
for i in range(len(heights)):
while heights[i] < heights[stack[-1]]:
h = heights[stack.pop()]
w = i - stack[-1] - 1
ans = max(ans, h * w)
stack.append(i)
return ans | [
"[email protected]"
] | |
7c0f136ffcd56de3056adde9a1237a0f37c7128a | 9cd180fc7594eb018c41f0bf0b54548741fd33ba | /sdk/python/pulumi_azure_nextgen/storagesync/v20190601/cloud_endpoint.py | 5c78675b69b623e9ab26c75f703f023c90e7a642 | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | MisinformedDNA/pulumi-azure-nextgen | c71971359450d03f13a53645171f621e200fe82d | f0022686b655c2b0744a9f47915aadaa183eed3b | refs/heads/master | 2022-12-17T22:27:37.916546 | 2020-09-28T16:03:59 | 2020-09-28T16:03:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,839 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = ['CloudEndpoint']
class CloudEndpoint(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
azure_file_share_name: Optional[pulumi.Input[str]] = None,
cloud_endpoint_name: Optional[pulumi.Input[str]] = None,
friendly_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
storage_account_resource_id: Optional[pulumi.Input[str]] = None,
storage_account_tenant_id: Optional[pulumi.Input[str]] = None,
storage_sync_service_name: Optional[pulumi.Input[str]] = None,
sync_group_name: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
Cloud Endpoint object.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] azure_file_share_name: Azure file share name
:param pulumi.Input[str] cloud_endpoint_name: Name of Cloud Endpoint object.
:param pulumi.Input[str] friendly_name: Friendly Name
:param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
:param pulumi.Input[str] storage_account_resource_id: Storage Account Resource Id
:param pulumi.Input[str] storage_account_tenant_id: Storage Account Tenant Id
:param pulumi.Input[str] storage_sync_service_name: Name of Storage Sync Service resource.
:param pulumi.Input[str] sync_group_name: Name of Sync Group resource.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['azure_file_share_name'] = azure_file_share_name
if cloud_endpoint_name is None:
raise TypeError("Missing required property 'cloud_endpoint_name'")
__props__['cloud_endpoint_name'] = cloud_endpoint_name
__props__['friendly_name'] = friendly_name
if resource_group_name is None:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['storage_account_resource_id'] = storage_account_resource_id
__props__['storage_account_tenant_id'] = storage_account_tenant_id
if storage_sync_service_name is None:
raise TypeError("Missing required property 'storage_sync_service_name'")
__props__['storage_sync_service_name'] = storage_sync_service_name
if sync_group_name is None:
raise TypeError("Missing required property 'sync_group_name'")
__props__['sync_group_name'] = sync_group_name
__props__['backup_enabled'] = None
__props__['last_operation_name'] = None
__props__['last_workflow_id'] = None
__props__['name'] = None
__props__['partnership_id'] = None
__props__['provisioning_state'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:storagesync/latest:CloudEndpoint"), pulumi.Alias(type_="azure-nextgen:storagesync/v20170605preview:CloudEndpoint"), pulumi.Alias(type_="azure-nextgen:storagesync/v20180402:CloudEndpoint"), pulumi.Alias(type_="azure-nextgen:storagesync/v20180701:CloudEndpoint"), pulumi.Alias(type_="azure-nextgen:storagesync/v20181001:CloudEndpoint"), pulumi.Alias(type_="azure-nextgen:storagesync/v20190201:CloudEndpoint"), pulumi.Alias(type_="azure-nextgen:storagesync/v20190301:CloudEndpoint"), pulumi.Alias(type_="azure-nextgen:storagesync/v20191001:CloudEndpoint"), pulumi.Alias(type_="azure-nextgen:storagesync/v20200301:CloudEndpoint")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(CloudEndpoint, __self__).__init__(
'azure-nextgen:storagesync/v20190601:CloudEndpoint',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'CloudEndpoint':
"""
Get an existing CloudEndpoint resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return CloudEndpoint(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="azureFileShareName")
def azure_file_share_name(self) -> pulumi.Output[Optional[str]]:
"""
Azure file share name
"""
return pulumi.get(self, "azure_file_share_name")
@property
@pulumi.getter(name="backupEnabled")
def backup_enabled(self) -> pulumi.Output[str]:
"""
Backup Enabled
"""
return pulumi.get(self, "backup_enabled")
@property
@pulumi.getter(name="friendlyName")
def friendly_name(self) -> pulumi.Output[Optional[str]]:
"""
Friendly Name
"""
return pulumi.get(self, "friendly_name")
@property
@pulumi.getter(name="lastOperationName")
def last_operation_name(self) -> pulumi.Output[Optional[str]]:
"""
Resource Last Operation Name
"""
return pulumi.get(self, "last_operation_name")
@property
@pulumi.getter(name="lastWorkflowId")
def last_workflow_id(self) -> pulumi.Output[Optional[str]]:
"""
CloudEndpoint lastWorkflowId
"""
return pulumi.get(self, "last_workflow_id")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="partnershipId")
def partnership_id(self) -> pulumi.Output[Optional[str]]:
"""
Partnership Id
"""
return pulumi.get(self, "partnership_id")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[Optional[str]]:
"""
CloudEndpoint Provisioning State
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="storageAccountResourceId")
def storage_account_resource_id(self) -> pulumi.Output[Optional[str]]:
"""
Storage Account Resource Id
"""
return pulumi.get(self, "storage_account_resource_id")
@property
@pulumi.getter(name="storageAccountTenantId")
def storage_account_tenant_id(self) -> pulumi.Output[Optional[str]]:
"""
Storage Account Tenant Id
"""
return pulumi.get(self, "storage_account_tenant_id")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the resource. Ex- Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.
"""
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| [
"[email protected]"
] | |
d21803cc3e6025eb6eac8884a3604a3dfc0f2cfe | b0d7d91ccb7e388829abddb31b4aa04a2f9365cd | /archive-20200922/binary-search/first_bad_version2.py | 62556ed0f8ac0ef51c7ea5298bc5dd50a4ec3b3f | [] | no_license | clarkngo/python-projects | fe0e0aa02896debe82d1e9de84b1ae7d00932607 | 139a20063476f9847652b334a8495b7df1e80e27 | refs/heads/master | 2021-07-02T10:45:31.242041 | 2020-10-25T08:59:23 | 2020-10-25T08:59:23 | 188,570,684 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,351 | py | # https://leetcode.com/problems/first-bad-version/submissions/
# You are a product manager and currently leading a team to develop a new product. Unfortunately, the latest version of your product fails the quality check. Since each version is developed based on the previous version, all the versions after a bad version are also bad.
# Suppose you have n versions [1, 2, ..., n] and you want to find out the first bad one, which causes all the following ones to be bad.
# You are given an API bool isBadVersion(version) which will return whether version is bad. Implement a function to find the first bad version. You should minimize the number of calls to the API.
# Example:
# Given n = 5, and version = 4 is the first bad version.
# call isBadVersion(3) -> false
# call isBadVersion(5) -> true
# call isBadVersion(4) -> true
# Then 4 is the first bad version.
# The isBadVersion API is already defined for you.
# @param version, an integer
# @return a bool
# def isBadVersion(version):
import bisect
class Solution:
    # Expose the judge's isBadVersion API as item access so bisect can treat
    # this object as a virtual sorted sequence of booleans (False for good
    # versions, True for bad ones); bisect_left(self, True, 1, n) then finds
    # the first bad version in O(log n) API calls. (The original assigned
    # self.__getitem__ on the instance, which only worked for Python 2
    # old-style classes; Python 3 looks special methods up on the class.)
    def __getitem__(self, version):
        return isBadVersion(version)

    def firstBadVersion(self, n):
        """
        :type n: int
        :rtype: int
        """
        return bisect.bisect_left(self, True, 1, n)
import unittest

# A stub for the isBadVersion API so this file runs standalone (on LeetCode
# the judge provides it): here every version from 4 onwards is bad.
def isBadVersion(version):
    return version >= 4

a = Solution()
class Test(unittest.TestCase):
    def test(self):
        self.assertEqual(a.firstBadVersion(5), 4)
| [
"[email protected]"
] | |
21418667a8fe05896963725a6a68685b6845f11a | 468a20df682ba43484f1953797f343011f77d7c1 | /app.py | 31669bddb87e95ebaf2f6c422b3575cae13d671e | [
"MIT"
] | permissive | hchen13/capshelper | 447006d363420e87a1ca4389ab1388b496495cdd | aea799c07064369642c3db557e939634c6c5da70 | refs/heads/master | 2020-03-21T21:09:30.904144 | 2018-06-29T03:20:30 | 2018-06-29T03:20:30 | 139,049,101 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,922 | py | import json
import os
from butler import Butler
from butler.visualize import *
from downloader import *
from settings import ROOT_DIR
butler = Butler()
downloader = Downloader()
main_coins = 'USDT BTC ETH'.split(" ")
def update_watchlist():
watchlist = []
for counter in main_coins:
coins = downloader.get_top_coins(counter, limit=20)
watchlist += coins
return list(set(watchlist))
def get_watchlist(from_cache=True):
cache_file = "watchlist.json"
cache_path = os.path.join(ROOT_DIR, cache_file)
if from_cache and os.path.exists(cache_path):
return json.load(open(cache_path, 'r'))
watchlist = update_watchlist()
json.dump(watchlist, open(cache_path, 'w'))
return watchlist
def collect(base, counter):
base = base.upper()
counter = counter.upper()
ts = butler.latest_timestamp(base, counter)
if ts is None:
ts = datetime(2017, 2, 1, 0, 0).timestamp()
data = downloader.get_candlesticks(base, counter, start=ts)
if len(data):
butler.save_candlesticks(data)
butler.update_indicators(base, counter)
def single_run():
watchlist = get_watchlist(from_cache=True)
for counter in main_coins:
for base in watchlist:
counter = counter.upper()
base = base.upper()
if base == counter:
continue
collect(base, counter)
def prepare_train_data(path):
train_end = datetime(2018, 6, 1, 23, 59).timestamp()
valid_end = datetime(2018, 6, 10, 23, 59).timestamp()
test_end = datetime.now().timestamp()
butler.generate_train_files(path, 'train', end=train_end)
butler.generate_train_files(path, 'valid', start=train_end + 1, end=valid_end)
butler.generate_train_files(path, 'test', start=valid_end + 1, end=test_end)
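# prepare_train_data() splits chronologically (train up to 2018-06-01,
# validation to 2018-06-10, test to the present) so the model is always
# evaluated on candles later than those it was fitted on, with no
# look-ahead leakage.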
if __name__ == '__main__':
# get_watchlist(False)
# single_run()
prepare_train_data('data/')
| [
"[email protected]"
] | |
edfd582480b72d10d09c194a3b7b3e81e62b6e35 | 14b5679d88afa782dc5d6b35878ab043089a060a | /students/Siyang Liu/voicecontrol_changechoth/voice.py | 7236b542f70ca123741e24926ba00368fad76e9a | [] | no_license | mutiangua/EIS2020 | c541ef32623f67f9277945cd39cff3c02f06e4dd | 92aa2711b763a2c93be238825c445bf2db8da391 | refs/heads/master | 2022-11-18T05:21:47.567342 | 2020-07-11T10:11:21 | 2020-07-11T10:11:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,374 | py | import wave
from aip import AipSpeech
from xpinyin import Pinyin
import pyaudio
import os
import cv2
CHUNK = 1024
FORMAT = pyaudio.paInt16
CHANNELS = 2
RATE = 16000  # Baidu ASR (dev_pid 1537) expects 16 kHz; must match the rate passed to client.asr below
RECORD_SECONDS = 5
WAVE_OUTPUT_FILENAME = "audio.wav"
APP_ID = '19165946'
API_KEY = 'D7BqfDPOj9ORbG85FL5jOQjh'
SECRET_KEY = 'skL4Mag0dGquseo08RaVsDgM1ABMxGN7'
client = AipSpeech(APP_ID, API_KEY, SECRET_KEY)
STATE = 0
TIME_START = 0
TIME_END = 0
num = 0
x = 0
y = 0
w = 0
h = 0
face_cascade = cv2.CascadeClassifier('haarcascade_frontalface_default.xml')
pd = cv2.imread('white.jpg')
pd1 = cv2.imread('yellow.jpg')
pd2 = cv2.imread('black.jpg')
img = cv2.imread('freedom.jpg')
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(gray, 1.3, 5)
for (x, y, w, h) in faces:
pass
head = img[y:(y + h), x:(x + w)]
head = cv2.resize(head, (130, 130), interpolation=cv2.INTER_CUBIC)
cv2.namedWindow('result')
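# Module-level setup: detect faces in freedom.jpg, keep the last detection,
# crop it to 130x130 and store it as `head`; pic() later pastes this face
# onto whichever outfit image (white/yellow/black) the voice command selects.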
def readFile(fileName):
with open(fileName, 'rb') as fp:
return fp.read()
def writeFile(fileName, result):
with open(fileName, 'wb') as fp:
fp.write(result)
def getBaiduText():
p = pyaudio.PyAudio()
stream = p.open(format=FORMAT,
channels=CHANNELS,
rate=RATE,
input=True,
frames_per_buffer=CHUNK)
stream.start_stream()
print("* 开始录音......")
frames = []
for i in range(0, int(RATE / CHUNK * RECORD_SECONDS)):
data = stream.read(CHUNK)
frames.append(data)
stream.stop_stream()
wf = wave.open(WAVE_OUTPUT_FILENAME, 'wb')
wf.setnchannels(CHANNELS)
wf.setsampwidth(p.get_sample_size(FORMAT))
wf.setframerate(RATE)
wf.writeframes(b''.join(frames))
print("* 正在识别......")
result = client.asr(readFile('audio.wav'), 'wav', 16000, {
'dev_pid': 1537,
})
if result["err_no"] == 0:
for t in result["result"]:
return t
else:
print("没有识别到语音\n")
return ""
def getBaiduVoice(text):
result = client.synthesis(text, 'zh', 6, {'vol': 5, 'per': 4, 'spd': 5})
if not isinstance(result, dict):
writeFile("back.mp3", result)
os.system("back.mp3")
def getPinYin(result):
pin = Pinyin()
return pin.get_pinyin(result)
def pic(pd4):
cv2.destroyWindow('result')
pd4[185:315, 315:445] = head[0:130, 0:130]
cv2.imshow('result', pd4)
def wakeUp(pinyin):
    # The phrases inside getPinYin() must stay in Chinese: their pinyin is
    # matched against the pinyin of the recognized speech.
    if getPinYin("白色衣服") in pinyin:    # "white clothes"
        print("OK, white clothes")
        pic(pd)
    elif getPinYin("黄色衣服") in pinyin:  # "yellow clothes"
        print("OK, yellow clothes")
        pic(pd1)
    elif getPinYin("黑色衣服") in pinyin:  # "black clothes"
        print("OK, black clothes")
        pic(pd2)
def main():
pic(pd)
    if cv2.waitKey(10) & 0xFF == ord('q'):
return
while True:
result = getBaiduText()
pinyin = getPinYin(result)
print("等待唤醒")
print(result)
wakeUp(pinyin)
        if cv2.waitKey(10) & 0xFF == ord('q'):
break
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
os.system("back.mp3")
os.system("audio.wav")
os.system("rmdir /s/q __pycache__")
| [
"[email protected]"
] | |
dc2dd5439cb5bc87b057d15c4313f5adde3c50df | cebf2e5276e6d064d0ec86beaf1129fe0d0fd582 | /days051-060/day059/capstone/blog.py | 5a570cc0b88eb25417e462a8cc83d88159821495 | [] | no_license | SheikhFahimFayasalSowrav/100days | 532a71c5c790bc28b9fd93c936126a082bc415f5 | 0af9f2f16044facc0ee6bce96ae5e1b5f88977bc | refs/heads/master | 2023-06-14T06:18:44.109685 | 2021-07-08T16:58:13 | 2021-07-08T16:58:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 562 | py | import requests
from flask import Flask, render_template
app = Flask(__name__)
posts = requests.get('https://api.npoint.io/a6ff5a040e0baf25233b').json()
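# Note: posts are fetched from the npoint endpoint once, at import time, so
# the app must be restarted to pick up newly published posts.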
@app.route('/')
def home():
return render_template('index.html', posts=posts)
@app.route('/about')
def about():
return render_template('about.html')
@app.route('/contact')
def contact():
return render_template('contact.html')
@app.route('/post/<int:index>')
def post(index):
return render_template('post.html', post=posts[index])
if __name__ == '__main__':
app.run(debug=True)
| [
"[email protected]"
] | |
4eaa041d0256e39539ed200f4a816597c3d3edad | ccbb5c8f53448af1a4721dbbfd06fc1ee72f58a9 | /setup.py | 4d756203887506c35187f9a7a08ac108a4b197af | [
"BSD-2-Clause"
] | permissive | jorisvandenbossche/spatialpandas | ed7c05e2d3e2c2223fdcbeaa78279edf200c5a80 | b63ebe619b8b8692fe282662725d23a50007acd9 | refs/heads/master | 2020-09-07T22:55:03.581677 | 2019-11-04T22:50:48 | 2019-11-04T22:50:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py | from setuptools import setup, find_packages
setup(name='spatialpandas',
packages=find_packages(exclude=('tests',)),
install_requires=['pandas', 'dask', 'numba', 'numpy'],
tests_require=['pytest', 'hypothesis'])
| [
"[email protected]"
] | |
7fb0fdcff4227dc74d10c6bffc07eb836805e31f | f2889a13368b59d8b82f7def1a31a6277b6518b7 | /661.py | 020da6437c8f01ec4483274e593f6429a27683c4 | [] | no_license | htl1126/leetcode | dacde03de5c9c967e527c4c3b29a4547154e11b3 | c33559dc5e0bf6879bb3462ab65a9446a66d19f6 | refs/heads/master | 2023-09-01T14:57:57.302544 | 2023-08-25T15:50:56 | 2023-08-25T15:50:56 | 29,514,867 | 7 | 1 | null | null | null | null | UTF-8 | Python | false | false | 548 | py | class Solution:
    # Built-in generics (Python 3.9+) avoid the missing `from typing import List`.
    def imageSmoother(self, img: list[list[int]]) -> list[list[int]]:
r, c = len(img), len(img[0])
ans = [[0] * c for _ in range(r)]
for i in range(r):
for j in range(c):
t = size = 0
for x in range(-1, 2, 1):
for y in range(-1, 2, 1):
if 0 <= i + x < r and 0 <= j + y < c:
t += img[i + x][j + y]
size += 1
ans[i][j] = t // size
return ans
| [
"[email protected]"
] | |
ec4463cf20a2f83c632703cc8a7283095b474336 | 993f18c21402d7a4ff21ddb7ff2ec6c80e466f20 | /onnx/reference/ops/aionnxml/__init__.py | e3f999338b6c99221d365ccc5f53f06fb9824265 | [
"Apache-2.0"
] | permissive | onnx/onnx | 10d3916803c7babff89ec0fa9045127bcccad376 | 8a475b34cb3875df311a46f57571646498f5bda7 | refs/heads/main | 2023-08-18T18:50:03.388353 | 2023-08-16T22:18:46 | 2023-08-16T22:18:46 | 102,692,863 | 16,164 | 4,150 | Apache-2.0 | 2023-09-14T17:10:38 | 2017-09-07T04:53:45 | Python | UTF-8 | Python | false | false | 96 | py | # SPDX-License-Identifier: Apache-2.0
from onnx.reference.ops.aionnxml._op_list import load_op
| [
"[email protected]"
] | |
13009baf812cd8747ff405145799588fa9fb1406 | 9ae6ce54bf9a2a86201961fdbd5e7b0ec913ff56 | /google/ads/googleads/v9/errors/types/query_error.py | 8758e1728e679b9b1207402920a2e48a6a25d5ba | [
"Apache-2.0"
] | permissive | GerhardusM/google-ads-python | 73b275a06e5401e6b951a6cd99af98c247e34aa3 | 676ac5fcb5bec0d9b5897f4c950049dac5647555 | refs/heads/master | 2022-07-06T19:05:50.932553 | 2022-06-17T20:41:17 | 2022-06-17T20:41:17 | 207,535,443 | 0 | 0 | Apache-2.0 | 2019-09-10T10:58:55 | 2019-09-10T10:58:55 | null | UTF-8 | Python | false | false | 3,253 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v9.errors",
marshal="google.ads.googleads.v9",
manifest={"QueryErrorEnum",},
)
class QueryErrorEnum(proto.Message):
r"""Container for enum describing possible query errors.
"""
class QueryError(proto.Enum):
r"""Enum describing possible query errors."""
UNSPECIFIED = 0
UNKNOWN = 1
QUERY_ERROR = 50
BAD_ENUM_CONSTANT = 18
BAD_ESCAPE_SEQUENCE = 7
BAD_FIELD_NAME = 12
BAD_LIMIT_VALUE = 15
BAD_NUMBER = 5
BAD_OPERATOR = 3
BAD_PARAMETER_NAME = 61
BAD_PARAMETER_VALUE = 62
BAD_RESOURCE_TYPE_IN_FROM_CLAUSE = 45
BAD_SYMBOL = 2
BAD_VALUE = 4
DATE_RANGE_TOO_WIDE = 36
DATE_RANGE_TOO_NARROW = 60
EXPECTED_AND = 30
EXPECTED_BY = 14
EXPECTED_DIMENSION_FIELD_IN_SELECT_CLAUSE = 37
EXPECTED_FILTERS_ON_DATE_RANGE = 55
EXPECTED_FROM = 44
EXPECTED_LIST = 41
EXPECTED_REFERENCED_FIELD_IN_SELECT_CLAUSE = 16
EXPECTED_SELECT = 13
EXPECTED_SINGLE_VALUE = 42
EXPECTED_VALUE_WITH_BETWEEN_OPERATOR = 29
INVALID_DATE_FORMAT = 38
MISALIGNED_DATE_FOR_FILTER = 64
INVALID_STRING_VALUE = 57
INVALID_VALUE_WITH_BETWEEN_OPERATOR = 26
INVALID_VALUE_WITH_DURING_OPERATOR = 22
INVALID_VALUE_WITH_LIKE_OPERATOR = 56
OPERATOR_FIELD_MISMATCH = 35
PROHIBITED_EMPTY_LIST_IN_CONDITION = 28
PROHIBITED_ENUM_CONSTANT = 54
PROHIBITED_FIELD_COMBINATION_IN_SELECT_CLAUSE = 31
PROHIBITED_FIELD_IN_ORDER_BY_CLAUSE = 40
PROHIBITED_FIELD_IN_SELECT_CLAUSE = 23
PROHIBITED_FIELD_IN_WHERE_CLAUSE = 24
PROHIBITED_RESOURCE_TYPE_IN_FROM_CLAUSE = 43
PROHIBITED_RESOURCE_TYPE_IN_SELECT_CLAUSE = 48
PROHIBITED_RESOURCE_TYPE_IN_WHERE_CLAUSE = 58
PROHIBITED_METRIC_IN_SELECT_OR_WHERE_CLAUSE = 49
PROHIBITED_SEGMENT_IN_SELECT_OR_WHERE_CLAUSE = 51
PROHIBITED_SEGMENT_WITH_METRIC_IN_SELECT_OR_WHERE_CLAUSE = 53
LIMIT_VALUE_TOO_LOW = 25
PROHIBITED_NEWLINE_IN_STRING = 8
PROHIBITED_VALUE_COMBINATION_IN_LIST = 10
PROHIBITED_VALUE_COMBINATION_WITH_BETWEEN_OPERATOR = 21
STRING_NOT_TERMINATED = 6
TOO_MANY_SEGMENTS = 34
UNEXPECTED_END_OF_QUERY = 9
UNEXPECTED_FROM_CLAUSE = 47
UNRECOGNIZED_FIELD = 32
UNEXPECTED_INPUT = 11
REQUESTED_METRICS_FOR_MANAGER = 59
FILTER_HAS_TOO_MANY_VALUES = 63
__all__ = tuple(sorted(__protobuf__.manifest))
| [
"[email protected]"
] | |
b67319a271d923790927e483e58fe947902af3ae | a50fc03c5de39fb321f07016ef64e13d98fa7c50 | /MyDB/data/make_labels/work_flow.py | 287d933be1953d53aad6cc87108bf26781f58287 | [
"Apache-2.0"
] | permissive | PKQ1688/text_detection | a94c435b3e2ee962b7489a094438ad052d7e7655 | e306b003f2e8eb9f8d07fc95d2d9def14fa8b38c | refs/heads/master | 2022-11-21T18:09:49.430313 | 2020-07-29T08:44:50 | 2020-07-29T08:44:50 | 246,490,664 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,440 | py | # -*- coding:utf-8 -*-
# @author :adolf
from data.make_labels.make_border_map import MakeBorderMap
from data.make_labels.make_shrink_map import MakeShrinkMap
import numpy as np
from data.data_utils.clockwise_points import order_points_clockwise
import cv2
import os
# img_path = '/home/shizai/data2/ocr_data/rctw/imgs/rctw_image_3890.jpg'
# gt_path = '/home/shizai/data2/ocr_data/rctw/gts/rctw_image_3890.txt'
def get_annotation(gt_path, ignore_tags=['*', '###']):
boxes = list()
texts = list()
ignores = list()
with open(gt_path, encoding='utf-8', mode='r') as f:
for line in f.readlines():
params = line.strip().strip('\ufeff').strip('\xef\xbb\xbf').split(',')
# print(params)
try:
box = order_points_clockwise(np.array(list(map(float, params[:8]))).reshape(-1, 2))
# print(box)
if cv2.contourArea(box) > 0:
boxes.append(box)
texts.append(params[8])
ignores.append(params[8] in ignore_tags)
except Exception as e:
print(e)
                print('failed to read annotation file {}'.format(gt_path))
data = {'text_polys': np.array(boxes),
'texts': texts,
'ignore_tags': ignores}
return data
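# Each gt file lists one text region per line: eight comma-separated corner
# coordinates followed by the transcription, with '*' or '###' marking regions
# to ignore. The maps built below (shrink map/mask, threshold map/mask) are
# the probability and border labels used to train a DB-style text detector.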
# data = get_annotation(gt_path)
# img = cv2.imread(img_path)
# img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
# data['img'] = img
# print(data['ignore_tags'])
# data = MakeShrinkMap()(data)
# cv2.imwrite('images_result/ori_img.png', img)
# print(data['shrink_map'])
# data = MakeBorderMap()(data)
# print(data.keys())
# cv2.imwrite('images_result/shrink_map.png', (data['shrink_map'] * 255).astype(np.uint8))
# cv2.imwrite('images_result/shrink_mask.png', (data['shrink_mask'] * 255).astype(np.uint8))
# cv2.imwrite('images_result/threshold_map.png', (data['threshold_map'] * 255).astype(np.uint8))
# cv2.imwrite('images_result/threshold_mask.png', (data['threshold_mask'] * 255).astype(np.uint8))
def make_use_label(file_path, img_name):
img_path = os.path.join(file_path, 'imgs', img_name)
gt_name = 'gt_' + img_name.replace('png', 'txt').replace('jpg', 'txt').replace('jpeg', 'txt')
gt_path = os.path.join(file_path, 'gts', gt_name)
data = get_annotation(gt_path)
img = cv2.imread(img_path)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
data['img'] = img
data = MakeShrinkMap()(data)
data = MakeBorderMap()(data)
    # Scale the float-valued maps (range [0, 1]) to 0-255 before saving, as in
    # the commented reference block above; otherwise the written PNGs are black.
    cv2.imwrite(os.path.join(file_path, 'shrink_map', img_name), (data['shrink_map'] * 255).astype(np.uint8))
    cv2.imwrite(os.path.join(file_path, 'shrink_mask', img_name), (data['shrink_mask'] * 255).astype(np.uint8))
    cv2.imwrite(os.path.join(file_path, 'threshold_map', img_name), (data['threshold_map'] * 255).astype(np.uint8))
    cv2.imwrite(os.path.join(file_path, 'threshold_mask', img_name), (data['threshold_mask'] * 255).astype(np.uint8))
rctw_path = "/home/shizai/data2/ocr_data/rctw"
rctw_list = os.listdir(os.path.join(rctw_path, 'imgs'))
# print('begin...')
# for rctw_img in rctw_list:
# make_use_label(rctw_path, rctw_img)
#
# third_path = "/home/shizai/data2/ocr_data/third_data"
# third_list = os.listdir(os.path.join(third_path, 'imgs'))
#
# for third_img in third_list:
# make_use_label(third_path, third_img)
# print('end...')
icdar_path = "/home/shizai/data2/ocr_data/icdar2015/train/"
icdar_list = os.listdir(os.path.join(icdar_path, 'imgs'))
for icdar_img in icdar_list:
make_use_label(icdar_path, icdar_img)
| [
"[email protected]"
] | |
7ea5ff8329c041e70598b5838b3b5ef768c4e455 | 41f98451dffd4a8719a39ec5c0291e4812e89e59 | /td_cours/td_2_structures_iteratives/td_2_ex_8.py | 2dc44faf6fa8722ada0faf27830904a25f5637b1 | [] | no_license | mbagrel1/isn | 742926de4989ea28b9c4f1b6538ac04d6f34eef0 | 480e60efa51d886810c7c7616c839ad9f3b2ec14 | refs/heads/master | 2020-05-18T19:53:29.186118 | 2019-05-12T11:14:00 | 2019-05-12T11:14:00 | 184,618,541 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 75 | py |
# Print a 10x10 square of stars (modernized from the Python 2 print statement).
for j in range(10):
    for i in range(10):
        print("*", end=" ")
    print("")
| [
"[email protected]"
] | |
c970668ee9d13930e662701f9264a1f3549c7dbb | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/77/usersdata/232/41242/submittedfiles/exercicio24.py | 3eac611f36a389948a2e0b1b66783af935b9b375 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 296 | py | # -*- coding: utf-8 -*-
import math
a = int(input('Enter the value of a: '))
b = int(input('Enter the value of b: '))
# Print every common divisor of a and b; the range must reach min(a, b)
# inclusive, otherwise min(a, b) itself is never tested.
if a > b:
    for i in range(1, b + 1):
        if (a % i) == 0 and (b % i) == 0:
            print(i)
else:
    for i in range(1, a + 1):
        if (b % i) == 0 and (a % i) == 0:
            print(i)
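# The largest value printed is gcd(a, b); since math is already imported,
# math.gcd(a, b) would return it directly.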
| [
"[email protected]"
] | |
bbdba525d199606ba340c0a5217998203f805593 | df6c141f5fb53c093b75da13275576728d40cb6c | /tests/core/parse/test_parse_delimited.py | 0259f7b44c53a32886d111e2a8987d9e7d35ef1b | [
"MIT"
] | permissive | conormancone-cimpress/mygrations | 3adee758dc5b9f8c0abb3e097a7d7146042696bf | 30d1d568ca7d6c38dbc5211834dd2d04c0bcf078 | refs/heads/master | 2022-04-03T20:55:54.892085 | 2020-02-18T11:31:24 | 2020-02-18T11:31:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,632 | py | import unittest
from mygrations.core.parse.rule_delimited import rule_delimited
class test_parse_delimited(unittest.TestCase):
def get_rule(self, name, separator, quote, literal):
return rule_delimited(
False, {
'name': name,
'separator': separator,
'quote': quote
}, {
'type': 'literal',
'value': literal
}
)
def test_name_required(self):
with self.assertRaises(ValueError):
self.get_rule('', ',', '`', 'asdf')
def test_separator_required(self):
with self.assertRaises(ValueError):
self.get_rule('bob', '', '', 'asdf')
def test_no_multi_character_separator(self):
with self.assertRaises(ValueError):
self.get_rule('bob', 'as', '', 'asdf')
def test_no_multi_character_quote(self):
with self.assertRaises(ValueError):
self.get_rule('bob', ',', 'as', 'asdf')
def test_literal_required(self):
with self.assertRaises(ValueError):
rule_delimited(False, {'name': 'bob', 'separator': ',', 'quote': '`'}, {})
def test_can_init_with_name_and_separator(self):
rule = self.get_rule('bob', ',', '', 'asdf')
self.assertEquals(rule.name, 'bob')
self.assertEquals(rule.separator, ',')
def test_parse_without_quote(self):
rule = self.get_rule('bob', ',', '', ')')
self.assertTrue(rule.parse('1,2,3,4)'))
self.assertEquals(['1', '2', '3', '4'], rule.result)
self.assertEquals(')', rule.leftovers)
def test_parse_optional_quotes(self):
rule = self.get_rule('bob', ',', '`', ')')
self.assertTrue(rule.parse('asdf,`bob`,huh,`okay`) sup'))
self.assertEquals(['asdf', 'bob', 'huh', 'okay'], rule.result)
self.assertEquals(') sup', rule.leftovers)
def test_syntax_error_missing_quote(self):
with self.assertRaises(SyntaxError):
rule = self.get_rule('bob', ',', '`', ')')
rule.parse('asdf,`bob)')
def test_separator_in_quotes(self):
rule = self.get_rule('bob', ',', '`', ')')
self.assertTrue(rule.parse('asdf,`bob,`,huh,`okay`) sup'))
self.assertEquals(['asdf', 'bob,', 'huh', 'okay'], rule.result)
self.assertEquals(') sup', rule.leftovers)
def test_alternate_characters(self):
rule = self.get_rule('bob', 'X', '<', 'asdf')
self.assertTrue(rule.parse('<hey<X<sup<asdf'))
self.assertEquals(['hey', 'sup'], rule.result)
self.assertEquals('asdf', rule.leftovers)
| [
"[email protected]"
] | |
f2371f9dde81ea8197eea23b216630c67d818e85 | e984812ab56f50a14979d72222e3a6e011789324 | /python/LambPlot/plotConfiguration/WH_SS/Full2017nanov6/aliases.py | bc1ec4edbc864bc2f4c9a200c17c2cae143dbd7c | [] | no_license | LambdaFramework/LambdaNano | b3348addb299d48d60a0f5a8516acdd732c6f5c1 | fa50e69dace802dcc07fa3e0f977f49e879a8809 | refs/heads/master | 2021-07-06T07:17:06.130973 | 2020-12-07T21:30:40 | 2020-12-07T21:30:40 | 208,898,103 | 0 | 2 | null | 2019-10-04T08:16:01 | 2019-09-16T21:06:42 | Python | UTF-8 | Python | false | false | 10,866 | py | import os
import copy
import inspect
from samples import samples
configurations = os.path.realpath(inspect.getfile(inspect.currentframe())) # this file
configurations = os.path.dirname(configurations) # ggH2016
configurations = os.path.dirname(configurations) # Differential
configurations = os.path.dirname(configurations) # Configurations
aliases = {}
# imported from samples.py:
# samples, signals
mc = [skey for skey in samples if skey not in ('Fake', 'DATA')]
eleWP = 'mvaFall17V1Iso_WP90_tthmva_70'
muWP = 'cut_Tight_HWWW_tthmva_80'
eleWP_old = 'mvaFall17V1Iso_WP90'
muWP_old = 'cut_Tight_HWWW'
aliases['LepWPCut'] = {
'expr' : 'LepCut2l__ele_mvaFall17V1Iso_WP90__mu_cut_Tight_HWWW',
}
aliases['LepWPCut_tthmva'] = {
'expr': 'LepCut2l__ele_mvaFall17V1Iso_WP90__mu_cut_Tight_HWWW*( (abs(Lepton_pdgId[0])==11 || Muon_mvaTTH[Lepton_muonIdx[0]]>0.8) && (abs(Lepton_pdgId[1])==11 || Muon_mvaTTH[Lepton_muonIdx[1]]>0.8) && (abs(Lepton_pdgId[0])==13 || Electron_mvaTTH[Lepton_electronIdx[0]]>0.70) && (abs(Lepton_pdgId[1])==13 || Electron_mvaTTH[Lepton_electronIdx[1]]>0.70))',
'samples': mc + ['DATA']
}
aliases['gstarLow'] = {
'expr': 'Gen_ZGstar_mass >0 && Gen_ZGstar_mass < 4',
'samples': 'WgS'
}
aliases['gstarHigh'] = {
'expr': 'Gen_ZGstar_mass <0 || Gen_ZGstar_mass > 4',
'samples': 'WZ'
}
# Fake leptons transfer factor
aliases['fakeW'] = {
'expr': 'fakeW2l_ele_'+eleWP+'_mu_'+muWP,
'samples': ['Fake']
}
# And variations - already divided by central values in formulas !
aliases['fakeWEleUp'] = {
'expr': 'fakeW2l_ele_'+eleWP+'_mu_'+muWP+'_EleUp',
'samples': ['Fake']
}
aliases['fakeWEleDown'] = {
'expr': 'fakeW2l_ele_'+eleWP+'_mu_'+muWP+'_EleDown',
'samples': ['Fake']
}
aliases['fakeWMuUp'] = {
'expr': 'fakeW2l_ele_'+eleWP+'_mu_'+muWP+'_MuUp',
'samples': ['Fake']
}
aliases['fakeWMuDown'] = {
'expr': 'fakeW2l_ele_'+eleWP+'_mu_'+muWP+'_MuDown',
'samples': ['Fake']
}
aliases['fakeWStatEleUp'] = {
'expr': 'fakeW2l_ele_'+eleWP+'_mu_'+muWP+'_statEleUp',
'samples': ['Fake']
}
aliases['fakeWStatEleDown'] = {
'expr': 'fakeW2l_ele_'+eleWP+'_mu_'+muWP+'_statEleDown',
'samples': ['Fake']
}
aliases['fakeWStatMuUp'] = {
'expr': 'fakeW2l_ele_'+eleWP+'_mu_'+muWP+'_statMuUp',
'samples': ['Fake']
}
aliases['fakeWStatMuDown'] = {
'expr': 'fakeW2l_ele_'+eleWP+'_mu_'+muWP+'_statMuDown',
'samples': ['Fake']
}
# gen-matching to prompt only (GenLepMatch2l matches to *any* gen lepton)
aliases['PromptGenLepMatch2l'] = {
'expr': '(Lepton_promptgenmatched[0]*Lepton_promptgenmatched[1])',
'samples': mc
}
aliases['Top_pTrw'] = {
'expr': '(topGenPt * antitopGenPt > 0.) * (TMath::Sqrt(TMath::Exp(0.0615 - 0.0005 * topGenPt) * TMath::Exp(0.0615 - 0.0005 * antitopGenPt))) + (topGenPt * antitopGenPt <= 0.)',
'samples': ['top']
}
# Jet bins
# using Alt$(CleanJet_pt[n], 0) instead of Sum$(CleanJet_pt >= 30) because jet pt ordering is not strictly followed in JES-varied samples
# No jet with pt > 30 GeV
aliases['zeroJet'] = {
'expr': 'Alt$(CleanJet_pt[0], 0) < 30.'
}
aliases['oneJet'] = {
'expr': 'Alt$(CleanJet_pt[0], 0) > 30.'
}
aliases['multiJet'] = {
'expr': 'Alt$(CleanJet_pt[1], 0) > 30.'
}
# B tagging
aliases['bVeto'] = {
'expr': 'Sum$(CleanJet_pt > 20. && abs(CleanJet_eta) < 2.5 && Jet_btagDeepB[CleanJet_jetIdx] > 0.1522) == 0'
}
aliases['bReq'] = {
'expr': 'Sum$(CleanJet_pt > 30. && abs(CleanJet_eta) < 2.5 && Jet_btagDeepB[CleanJet_jetIdx] > 0.1522) >= 1'
}
# CR definitions
aliases['topcr'] = {
'expr': 'mtw2>30 && mll>50 && ((zeroJet && !bVeto) || bReq)'
}
aliases['dycr'] = {
'expr': 'mth<60 && mll>40 && mll<80 && bVeto'
}
aliases['wwcr'] = {
'expr': 'mth>60 && mtw2>30 && mll>100 && bVeto'
}
# SR definition
aliases['sr'] = {
'expr': 'mth>60 && mtw2>30 && bVeto'
}
# B tag scale factors
aliases['bVetoSF'] = {
'expr': 'TMath::Exp(Sum$(TMath::Log((CleanJet_pt>20 && abs(CleanJet_eta)<2.5)*Jet_btagSF_shape[CleanJet_jetIdx]+1*(CleanJet_pt<20 || abs(CleanJet_eta)>2.5))))',
'samples': mc
}
aliases['bReqSF'] = {
'expr': 'TMath::Exp(Sum$(TMath::Log((CleanJet_pt>30 && abs(CleanJet_eta)<2.5)*Jet_btagSF_shape[CleanJet_jetIdx]+1*(CleanJet_pt<30 || abs(CleanJet_eta)>2.5))))',
'samples': mc
}
aliases['btagSF'] = {
'expr': '(bVeto || (topcr && zeroJet))*bVetoSF + (topcr && !zeroJet)*bReqSF',
'samples': mc
}
for shift in ['jes', 'lf', 'hf', 'lfstats1', 'lfstats2', 'hfstats1', 'hfstats2', 'cferr1', 'cferr2']:
for targ in ['bVeto', 'bReq']:
alias = aliases['%sSF%sup' % (targ, shift)] = copy.deepcopy(aliases['%sSF' % targ])
alias['expr'] = alias['expr'].replace('btagSF_shape', 'btagSF_shape_up_%s' % shift)
alias = aliases['%sSF%sdown' % (targ, shift)] = copy.deepcopy(aliases['%sSF' % targ])
alias['expr'] = alias['expr'].replace('btagSF_shape', 'btagSF_shape_down_%s' % shift)
aliases['btagSF%sup' % shift] = {
'expr': aliases['btagSF']['expr'].replace('SF', 'SF' + shift + 'up'),
'samples': mc
}
aliases['btagSF%sdown' % shift] = {
'expr': aliases['btagSF']['expr'].replace('SF', 'SF' + shift + 'down'),
'samples': mc
}
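# Note (illustrative, not from the original file): the loop above builds the
# shifted alias names by string concatenation, e.g. 'bVetoSFjesup',
# 'bReqSFhfdown' and 'btagSFcferr1up', each pointing at the corresponding
# up/down-varied 'btagSF_shape_*' branch.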
'''
puidSFSource = '%s/src/LatinoAnalysis/NanoGardener/python/data/JetPUID_effcyandSF.root' % os.getenv('CMSSW_BASE')
aliases['PUJetIdSF'] = {
'linesToAdd': [
'gSystem->AddIncludePath("-I%s/src");' % os.getenv('CMSSW_BASE'),
'.L %s/patches/pujetidsf_event.cc+' % configurations
],
'class': 'PUJetIdEventSF',
'args': (puidSFSource, '2017', 'loose'),
'samples': mc
}
'''
## data/MC scale factors
#aliases['SFweight'] = {
# 'expr': ' * '.join(['SFweight2l', 'LepSF2l__ele_' + eleWP_old + '__mu_' + muWP_old, 'LepWPCut', 'btagSF', 'PrefireWeight','PUJetIdSF']),
# 'samples': mc
#}
## variations
aliases['SFweightEleUp'] = {
'expr': 'LepSF2l__ele_'+eleWP_old+'__Up',
'samples': mc
}
aliases['SFweightEleDown'] = {
'expr': 'LepSF2l__ele_'+eleWP_old+'__Do',
'samples': mc
}
aliases['SFweightMuUp'] = {
'expr': 'LepSF2l__mu_'+muWP_old+'__Up',
'samples': mc
}
aliases['SFweightMuDown'] = {
'expr': 'LepSF2l__mu_'+muWP_old+'__Do',
'samples': mc
}
#############################################
### Total SFs, i.e. ttHMVA+old lepton SFs ###
#############################################
#aliases['ttHMVA_SF_2l'] = {'linesToAdd': ['.L %s/patches/compute_SF_BETA.C+' % configurations],
# 'class': 'compute_SF',
# 'args' : ('2017', 2, 'total_SF'),
# 'samples': mc
# }
############################################################
### Up/Down variations for single leptons in order of Pt ###
############################################################
aliases['ttHMVA_SF_Up_0'] = {'linesToAdd': ['.L %s/patches/compute_SF_BETA.C+' % configurations],
'class': 'compute_SF',
'args' : ('2017', 4, 'single_SF_up', 0),
'samples': mc
}
aliases['ttHMVA_SF_Up_1'] = {'linesToAdd': ['.L %s/patches/compute_SF_BETA.C+' % configurations],
'class': 'compute_SF',
'args' : ('2017', 4, 'single_SF_up', 1),
'samples': mc
}
aliases['ttHMVA_SF_Down_0'] = {'linesToAdd': ['.L %s/patches/compute_SF_BETA.C+' % configurations],
'class': 'compute_SF',
'args' : ('2017', 4, 'single_SF_down', 0),
'samples': mc
}
aliases['ttHMVA_SF_Down_1'] = {'linesToAdd': ['.L %s/patches/compute_SF_BETA.C+' % configurations],
'class': 'compute_SF',
'args' : ('2017', 4, 'single_SF_down', 1),
'samples': mc
}
##############################################################################
### Up/Down variations for electrons, i.e. LepSF2l__ele_'+eleWP+'__Up/Down ###
##############################################################################
aliases['ttHMVA_2l_ele_SF_Up'] = {'expr' : '(ttHMVA_SF_Up_0*(TMath::Abs(Lepton_pdgId[0]) == 11) + (TMath::Abs(Lepton_pdgId[0]) == 13)) *\
(ttHMVA_SF_Up_1*(TMath::Abs(Lepton_pdgId[1]) == 11) + (TMath::Abs(Lepton_pdgId[1]) == 13))',
'samples': mc
}
aliases['ttHMVA_2l_ele_SF_Down'] = {'expr' : '(ttHMVA_SF_Down_0*(TMath::Abs(Lepton_pdgId[0]) == 11) + (TMath::Abs(Lepton_pdgId[0]) == 13)) *\
(ttHMVA_SF_Down_1*(TMath::Abs(Lepton_pdgId[1]) == 11) + (TMath::Abs(Lepton_pdgId[1]) == 13))',
'samples': mc
}
########################################################################
### Up/Down variations for muons, i.e. LepSF2l__mu_'+muWP+'__Up/Down ###
########################################################################
aliases['ttHMVA_2l_mu_SF_Up'] = {'expr' : '(ttHMVA_SF_Up_0*(TMath::Abs(Lepton_pdgId[0]) == 13) + (TMath::Abs(Lepton_pdgId[0]) == 11)) *\
(ttHMVA_SF_Up_1*(TMath::Abs(Lepton_pdgId[1]) == 13) + (TMath::Abs(Lepton_pdgId[1]) == 11))',
'samples': mc
}
aliases['ttHMVA_2l_mu_SF_Down'] = {'expr' : '(ttHMVA_SF_Down_0*(TMath::Abs(Lepton_pdgId[0]) == 13) + (TMath::Abs(Lepton_pdgId[0]) == 11)) *\
(ttHMVA_SF_Down_1*(TMath::Abs(Lepton_pdgId[1]) == 13) + (TMath::Abs(Lepton_pdgId[1]) == 11))',
'samples': mc
}
# data/MC scale factors
#aliases['SFweight'] = {
# 'expr': ' * '.join(['SFweight2l', 'ttHMVA_SF_2l', 'LepWPCut', 'btagSF', 'PrefireWeight','PUJetIdSF']),
# 'samples': mc
#}
aliases['SFweight'] = {
'expr' : 'SFweight2l*LepSF2l__ele_mvaFall17V1Iso_WP90__mu_cut_Tight_HWWW*LepWPCut*PrefireWeight*PUJetIdSF',
'samples' : mc
}
aliases['SFweight_tthmva'] = {
'expr' : 'SFweight2l*ttHMVA_SF_2l*LepWPCut_tthmva*PrefireWeight*PUJetIdSF',
'samples' : mc
}
'''
# GGHUncertaintyProducer wasn't run for 2017 nAODv5 non-private
thus = [
'ggH_mu',
'ggH_res',
'ggH_mig01',
'ggH_mig12',
'ggH_VBF2j',
'ggH_VBF3j',
'ggH_pT60',
'ggH_pT120',
'ggH_qmtop'
]
for thu in thus:
aliases[thu] = {
'linesToAdd': ['.L %s/Differential/gghuncertainty.cc+' % configurations],
'class': 'GGHUncertainty',
'args': (thu,),
'samples': ['ggH_hww']
}
'''
| [
"[email protected]"
] | |
c0eda41227ed6365f7984d500065a66cfb9ad2b5 | fe20c7d7589c2eab11b5c04468a5035421b292cc | /pipenv/vendor/tomlkit/items.py | 781e2e984322a3656460516be735af5ba0fead2b | [
"MIT",
"BSD-3-Clause"
] | permissive | Anthonymcqueen21/pipenv | 0043181731f49b4cbb1b5e37b9820d9902ca9aae | 3ce8d7c6bb8007f93f011bd0a7c32b3e977d379f | refs/heads/master | 2020-04-04T22:32:57.421827 | 2018-11-05T14:41:05 | 2018-11-05T14:41:05 | 156,327,730 | 2 | 0 | MIT | 2018-11-06T04:53:11 | 2018-11-06T04:53:11 | null | UTF-8 | Python | false | false | 29,080 | py | from __future__ import unicode_literals
import re
import string
from datetime import date
from datetime import datetime
from datetime import time
import sys
if sys.version_info >= (3, 4):
from enum import Enum
else:
from pipenv.vendor.backports.enum import Enum
from ._compat import PY2
from ._compat import decode
from ._compat import unicode
from ._utils import escape_string
if PY2:
from pipenv.vendor.backports.functools_lru_cache import lru_cache
else:
from functools import lru_cache
def item(value, _parent=None):
from .container import Container
if isinstance(value, Item):
return value
if isinstance(value, bool):
return Bool(value, Trivia())
elif isinstance(value, int):
return Integer(value, Trivia(), str(value))
elif isinstance(value, float):
return Float(value, Trivia(), str(value))
elif isinstance(value, dict):
val = Table(Container(), Trivia(), False)
for k, v in sorted(value.items(), key=lambda i: (isinstance(i[1], dict), i[0])):
val[k] = item(v, _parent=val)
return val
elif isinstance(value, list):
if value and isinstance(value[0], dict):
a = AoT([])
else:
a = Array([], Trivia())
for v in value:
if isinstance(v, dict):
table = Table(Container(), Trivia(), True)
for k, _v in sorted(
v.items(), key=lambda i: (isinstance(i[1], dict), i[0])
):
i = item(_v)
if isinstance(table, InlineTable):
i.trivia.trail = ""
table[k] = item(i)
v = table
a.append(v)
return a
elif isinstance(value, (str, unicode)):
escaped = escape_string(value)
return String(StringType.SLB, value, escaped, Trivia())
elif isinstance(value, datetime):
return DateTime(value, Trivia(), value.isoformat().replace("+00:00", "Z"))
elif isinstance(value, date):
return Date(value, Trivia(), value.isoformat())
elif isinstance(value, time):
return Time(value, Trivia(), value.isoformat())
raise ValueError("Invalid type {}".format(type(value)))
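# Illustrative usage (a sketch, not part of the vendored module):
#   item(42).as_string()       # -> '42'
#   item(True).as_string()     # -> 'true'
#   item("hi").as_string()     # -> '"hi"'
#   item([1, 2]).as_string()   # -> '[1, 2]'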
class StringType(Enum):
# Single Line Basic
SLB = '"'
# Multi Line Basic
MLB = '"""'
# Single Line Literal
SLL = "'"
# Multi Line Literal
MLL = "'''"
@property
@lru_cache(maxsize=None)
def unit(self): # type: () -> str
return self.value[0]
@lru_cache(maxsize=None)
def is_basic(self): # type: () -> bool
return self in {StringType.SLB, StringType.MLB}
@lru_cache(maxsize=None)
def is_literal(self): # type: () -> bool
return self in {StringType.SLL, StringType.MLL}
@lru_cache(maxsize=None)
def is_singleline(self): # type: () -> bool
return self in {StringType.SLB, StringType.SLL}
@lru_cache(maxsize=None)
def is_multiline(self): # type: () -> bool
return self in {StringType.MLB, StringType.MLL}
@lru_cache(maxsize=None)
def toggle(self): # type: () -> StringType
return {
StringType.SLB: StringType.MLB,
StringType.MLB: StringType.SLB,
StringType.SLL: StringType.MLL,
StringType.MLL: StringType.SLL,
}[self]
class Trivia:
"""
Trivia information (aka metadata).
"""
def __init__(
self, indent=None, comment_ws=None, comment=None, trail=None
): # type: (str, str, str, str) -> None
# Whitespace before a value.
self.indent = indent or ""
# Whitespace after a value, but before a comment.
self.comment_ws = comment_ws or ""
# Comment, starting with # character, or empty string if no comment.
self.comment = comment or ""
# Trailing newline.
if trail is None:
trail = "\n"
self.trail = trail
class KeyType(Enum):
"""
The type of a Key.
Keys can be bare (unquoted), or quoted using basic ("), or literal (')
quotes following the same escaping rules as single-line StringType.
"""
Bare = ""
Basic = '"'
Literal = "'"
class Key:
"""
A key value.
"""
def __init__(self, k, t=None, sep=None, dotted=False): # type: (str) -> None
if t is None:
if any(
[c not in string.ascii_letters + string.digits + "-" + "_" for c in k]
):
t = KeyType.Basic
else:
t = KeyType.Bare
self.t = t
if sep is None:
sep = " = "
self.sep = sep
self.key = k
self._dotted = dotted
@property
def delimiter(self): # type: () -> str
return self.t.value
def is_dotted(self): # type: () -> bool
return self._dotted
def as_string(self): # type: () -> str
return "{}{}{}".format(self.delimiter, self.key, self.delimiter)
def __hash__(self): # type: () -> int
return hash(self.key)
def __eq__(self, other): # type: (Key) -> bool
if isinstance(other, Key):
return self.key == other.key
return self.key == other
def __str__(self): # type: () -> str
return self.as_string()
def __repr__(self): # type: () -> str
return "<Key {}>".format(self.as_string())
class Item(object):
"""
An item within a TOML document.
"""
def __init__(self, trivia): # type: (Trivia) -> None
self._trivia = trivia
@property
def trivia(self): # type: () -> Trivia
return self._trivia
@property
def discriminant(self): # type: () -> int
raise NotImplementedError()
def as_string(self): # type: () -> str
raise NotImplementedError()
# Helpers
def comment(self, comment): # type: (str) -> Item
if not comment.strip().startswith("#"):
comment = "# " + comment
self._trivia.comment_ws = " "
self._trivia.comment = comment
return self
def indent(self, indent): # type: (int) -> Item
if self._trivia.indent.startswith("\n"):
self._trivia.indent = "\n" + " " * indent
else:
self._trivia.indent = " " * indent
return self
def _getstate(self, protocol=3):
return (self._trivia,)
def __reduce__(self):
return self.__reduce_ex__(2)
def __reduce_ex__(self, protocol):
return self.__class__, self._getstate(protocol)
class Whitespace(Item):
"""
A whitespace literal.
"""
def __init__(self, s, fixed=False): # type: (str, bool) -> None
self._s = s
self._fixed = fixed
@property
def s(self): # type: () -> str
return self._s
@property
def value(self): # type: () -> str
return self._s
@property
def trivia(self): # type: () -> Trivia
raise RuntimeError("Called trivia on a Whitespace variant.")
@property
def discriminant(self): # type: () -> int
return 0
def is_fixed(self): # type: () -> bool
return self._fixed
def as_string(self): # type: () -> str
return self._s
def __repr__(self): # type: () -> str
return "<{} {}>".format(self.__class__.__name__, repr(self._s))
def _getstate(self, protocol=3):
return self._s, self._fixed
class Comment(Item):
"""
A comment literal.
"""
@property
def discriminant(self): # type: () -> int
return 1
def as_string(self): # type: () -> str
return "{}{}{}".format(
self._trivia.indent, decode(self._trivia.comment), self._trivia.trail
)
def __str__(self): # type: () -> str
return "{}{}".format(self._trivia.indent, decode(self._trivia.comment))
class Integer(int, Item):
"""
An integer literal.
"""
def __new__(cls, value, trivia, raw): # type: (int, Trivia, str) -> Integer
return super(Integer, cls).__new__(cls, value)
def __init__(self, _, trivia, raw): # type: (int, Trivia, str) -> None
super(Integer, self).__init__(trivia)
self._raw = raw
self._sign = False
if re.match(r"^[+\-]\d+$", raw):
self._sign = True
@property
def discriminant(self): # type: () -> int
return 2
@property
def value(self): # type: () -> int
return self
def as_string(self): # type: () -> str
return self._raw
def __add__(self, other):
result = super(Integer, self).__add__(other)
return self._new(result)
def __radd__(self, other):
result = super(Integer, self).__radd__(other)
if isinstance(other, Integer):
return self._new(result)
return result
def __sub__(self, other):
result = super(Integer, self).__sub__(other)
return self._new(result)
def __rsub__(self, other):
result = super(Integer, self).__rsub__(other)
if isinstance(other, Integer):
return self._new(result)
return result
def _new(self, result):
raw = str(result)
if self._sign:
sign = "+" if result >= 0 else "-"
raw = sign + raw
return Integer(result, self._trivia, raw)
def _getstate(self, protocol=3):
return int(self), self._trivia, self._raw
class Float(float, Item):
"""
A float literal.
"""
def __new__(cls, value, trivia, raw): # type: (float, Trivia, str) -> Integer
return super(Float, cls).__new__(cls, value)
def __init__(self, _, trivia, raw): # type: (float, Trivia, str) -> None
super(Float, self).__init__(trivia)
self._raw = raw
self._sign = False
if re.match(r"^[+\-].+$", raw):
self._sign = True
@property
def discriminant(self): # type: () -> int
return 3
@property
def value(self): # type: () -> float
return self
def as_string(self): # type: () -> str
return self._raw
def __add__(self, other):
result = super(Float, self).__add__(other)
return self._new(result)
def __radd__(self, other):
result = super(Float, self).__radd__(other)
if isinstance(other, Float):
return self._new(result)
return result
def __sub__(self, other):
result = super(Float, self).__sub__(other)
return self._new(result)
def __rsub__(self, other):
result = super(Float, self).__rsub__(other)
if isinstance(other, Float):
return self._new(result)
return result
def _new(self, result):
raw = str(result)
if self._sign:
sign = "+" if result >= 0 else "-"
raw = sign + raw
return Float(result, self._trivia, raw)
def _getstate(self, protocol=3):
return float(self), self._trivia, self._raw
class Bool(Item):
"""
A boolean literal.
"""
def __init__(self, value, trivia): # type: (float, Trivia) -> None
super(Bool, self).__init__(trivia)
self._value = value
@property
def discriminant(self): # type: () -> int
return 4
@property
def value(self): # type: () -> bool
return self._value
def as_string(self): # type: () -> str
return str(self._value).lower()
def _getstate(self, protocol=3):
return self._value, self._trivia
class DateTime(Item, datetime):
"""
A datetime literal.
"""
def __new__(cls, value, *_): # type: (..., datetime, ...) -> datetime
return datetime.__new__(
cls,
value.year,
value.month,
value.day,
value.hour,
value.minute,
value.second,
value.microsecond,
tzinfo=value.tzinfo,
)
def __init__(self, _, trivia, raw): # type: (datetime, Trivia, str) -> None
super(DateTime, self).__init__(trivia)
self._raw = raw
@property
def discriminant(self): # type: () -> int
return 5
@property
def value(self): # type: () -> datetime
return self
def as_string(self): # type: () -> str
return self._raw
def __add__(self, other):
result = super(DateTime, self).__add__(other)
return self._new(result)
def __sub__(self, other):
result = super(DateTime, self).__sub__(other)
return self._new(result)
def _new(self, result):
raw = result.isoformat()
return DateTime(result, self._trivia, raw)
def _getstate(self, protocol=3):
return (
datetime(
self.year,
self.month,
self.day,
self.hour,
self.minute,
self.second,
self.microsecond,
self.tzinfo,
),
self._trivia,
self._raw,
)
class Date(Item, date):
"""
A date literal.
"""
def __new__(cls, value, *_): # type: (..., date, ...) -> date
return date.__new__(cls, value.year, value.month, value.day)
def __init__(self, _, trivia, raw): # type: (date, Trivia, str) -> None
super(Date, self).__init__(trivia)
self._raw = raw
@property
def discriminant(self): # type: () -> int
return 6
@property
def value(self): # type: () -> date
return self
def as_string(self): # type: () -> str
return self._raw
def __add__(self, other):
result = super(Date, self).__add__(other)
return self._new(result)
def __sub__(self, other):
result = super(Date, self).__sub__(other)
return self._new(result)
def _new(self, result):
raw = result.isoformat()
return Date(result, self._trivia, raw)
def _getstate(self, protocol=3):
return (datetime(self.year, self.month, self.day), self._trivia, self._raw)
class Time(Item, time):
"""
A time literal.
"""
def __new__(cls, value, *_): # type: (time, ...) -> time
return time.__new__(
cls, value.hour, value.minute, value.second, value.microsecond
)
def __init__(self, _, trivia, raw): # type: (time, Trivia, str) -> None
super(Time, self).__init__(trivia)
self._raw = raw
@property
def discriminant(self): # type: () -> int
return 7
@property
def value(self): # type: () -> time
return self
def as_string(self): # type: () -> str
return self._raw
def _getstate(self, protocol=3):
return (
time(self.hour, self.minute, self.second, self.microsecond, self.tzinfo),
self._trivia,
self._raw,
)
class Array(Item, list):
"""
An array literal
"""
def __init__(self, value, trivia): # type: (list, Trivia) -> None
super(Array, self).__init__(trivia)
list.__init__(
self, [v.value for v in value if not isinstance(v, (Whitespace, Comment))]
)
self._value = value
@property
def discriminant(self): # type: () -> int
return 8
@property
def value(self): # type: () -> list
return self
def is_homogeneous(self): # type: () -> bool
if not self:
return True
discriminants = [
i.discriminant
for i in self._value
if not isinstance(i, (Whitespace, Comment))
]
return len(set(discriminants)) == 1
def as_string(self): # type: () -> str
return "[{}]".format("".join(v.as_string() for v in self._value))
def append(self, _item): # type: () -> None
if self._value:
self._value.append(Whitespace(", "))
it = item(_item)
super(Array, self).append(it.value)
self._value.append(it)
if not self.is_homogeneous():
raise ValueError("Array has mixed types elements")
if not PY2:
def clear(self):
super(Array, self).clear()
self._value.clear()
def __iadd__(self, other): # type: (list) -> Array
if not isinstance(other, list):
return NotImplemented
for v in other:
self.append(v)
return self
def __delitem__(self, key):
super(Array, self).__delitem__(key)
j = 0 if key >= 0 else -1
for i, v in enumerate(self._value if key >= 0 else reversed(self._value)):
if key < 0:
i = -i - 1
if isinstance(v, (Comment, Whitespace)):
continue
if j == key:
del self._value[i]
if i < 0 and abs(i) > len(self._value):
i += 1
if i < len(self._value) - 1 and isinstance(self._value[i], Whitespace):
del self._value[i]
break
j += 1 if key >= 0 else -1
def __str__(self):
return str(
[v.value for v in self._value if not isinstance(v, (Whitespace, Comment))]
)
def __repr__(self):
return str(self)
def _getstate(self, protocol=3):
return self._value, self._trivia
class Table(Item, dict):
"""
A table literal.
"""
def __init__(
self,
value,
trivia,
is_aot_element,
is_super_table=False,
name=None,
display_name=None,
): # type: (tomlkit.container.Container, Trivia, bool, ...) -> None
super(Table, self).__init__(trivia)
self.name = name
self.display_name = display_name
self._value = value
self._is_aot_element = is_aot_element
self._is_super_table = is_super_table
for k, v in self._value.body:
if k is not None:
super(Table, self).__setitem__(k.key, v)
@property
def value(self): # type: () -> tomlkit.container.Container
return self._value
@property
def discriminant(self): # type: () -> int
return 9
def add(self, key, item=None): # type: (Union[Key, Item, str], Any) -> Item
if item is None:
if not isinstance(key, (Comment, Whitespace)):
raise ValueError(
"Non comment/whitespace items must have an associated key"
)
key, item = None, key
return self.append(key, item)
def append(self, key, _item): # type: (Union[Key, str], Any) -> Table
"""
Appends a (key, item) to the table.
"""
if not isinstance(_item, Item):
_item = item(_item)
self._value.append(key, _item)
if isinstance(key, Key):
key = key.key
if key is not None:
super(Table, self).__setitem__(key, _item)
m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
if not m:
return self
indent = m.group(1)
if not isinstance(_item, Whitespace):
m = re.match("(?s)^([^ ]*)(.*)$", _item.trivia.indent)
if not m:
_item.trivia.indent = indent
else:
_item.trivia.indent = m.group(1) + indent + m.group(2)
return self
def remove(self, key): # type: (Union[Key, str]) -> Table
self._value.remove(key)
if isinstance(key, Key):
key = key.key
if key is not None:
super(Table, self).__delitem__(key)
return self
def is_aot_element(self): # type: () -> bool
return self._is_aot_element
def is_super_table(self): # type: () -> bool
return self._is_super_table
def as_string(self, prefix=None): # type: () -> str
return self._value.as_string(prefix=prefix)
# Helpers
def indent(self, indent): # type: (int) -> Table
super(Table, self).indent(indent)
m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
if not m:
indent = ""
else:
indent = m.group(1)
for k, item in self._value.body:
if not isinstance(item, Whitespace):
item.trivia.indent = indent + item.trivia.indent
return self
def keys(self): # type: () -> Generator[str]
for k in self._value.keys():
yield k
def values(self): # type: () -> Generator[Item]
for v in self._value.values():
yield v
def items(self): # type: () -> Generator[Item]
for k, v in self._value.items():
yield k, v
def update(self, other): # type: (Dict) -> None
for k, v in other.items():
self[k] = v
def __contains__(self, key): # type: (Union[Key, str]) -> bool
return key in self._value
def __getitem__(self, key): # type: (Union[Key, str]) -> Item
return self._value[key]
def __setitem__(self, key, value): # type: (Union[Key, str], Any) -> None
if not isinstance(value, Item):
value = item(value)
self._value[key] = value
if key is not None:
super(Table, self).__setitem__(key, value)
m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
if not m:
return
indent = m.group(1)
if not isinstance(value, Whitespace):
m = re.match("(?s)^([^ ]*)(.*)$", value.trivia.indent)
if not m:
value.trivia.indent = indent
else:
value.trivia.indent = m.group(1) + indent + m.group(2)
def __delitem__(self, key): # type: (Union[Key, str]) -> None
self.remove(key)
def __repr__(self):
return super(Table, self).__repr__()
def _getstate(self, protocol=3):
return (
self._value,
self._trivia,
self._is_aot_element,
self._is_super_table,
self.name,
self.display_name,
)
class InlineTable(Item, dict):
"""
An inline table literal.
"""
def __init__(
self, value, trivia
): # type: (tomlkit.container.Container, Trivia) -> None
super(InlineTable, self).__init__(trivia)
self._value = value
for k, v in self._value.body:
if k is not None:
super(InlineTable, self).__setitem__(k.key, v)
@property
def discriminant(self): # type: () -> int
return 10
@property
def value(self): # type: () -> Dict
return self._value
def append(self, key, _item): # type: (Union[Key, str], Any) -> InlineTable
"""
Appends a (key, item) to the table.
"""
if not isinstance(_item, Item):
_item = item(_item)
if not isinstance(_item, (Whitespace, Comment)):
if not _item.trivia.indent and len(self._value) > 0:
_item.trivia.indent = " "
self._value.append(key, _item)
if isinstance(key, Key):
key = key.key
if key is not None:
super(InlineTable, self).__setitem__(key, _item)
return self
def remove(self, key): # type: (Union[Key, str]) -> InlineTable
self._value.remove(key)
if isinstance(key, Key):
key = key.key
if key is not None:
super(InlineTable, self).__delitem__(key)
return self
def as_string(self): # type: () -> str
buf = "{"
for i, (k, v) in enumerate(self._value.body):
if k is None:
if i == len(self._value.body) - 1:
buf = buf.rstrip(",")
buf += v.as_string()
continue
buf += "{}{}{}{}{}{}".format(
v.trivia.indent,
k.as_string(),
k.sep,
v.as_string(),
v.trivia.comment,
v.trivia.trail.replace("\n", ""),
)
if i != len(self._value.body) - 1:
buf += ","
buf += "}"
return buf
def keys(self): # type: () -> Generator[str]
for k in self._value.keys():
yield k
def values(self): # type: () -> Generator[Item]
for v in self._value.values():
yield v
def items(self): # type: () -> Generator[Item]
for k, v in self._value.items():
yield k, v
def update(self, other): # type: (Dict) -> None
for k, v in other.items():
self[k] = v
def __contains__(self, key): # type: (Union[Key, str]) -> bool
return key in self._value
def __getitem__(self, key): # type: (Union[Key, str]) -> Item
return self._value[key]
def __setitem__(self, key, value): # type: (Union[Key, str], Any) -> None
if not isinstance(value, Item):
value = item(value)
self._value[key] = value
if key is not None:
super(InlineTable, self).__setitem__(key, value)
m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
if not m:
return
indent = m.group(1)
if not isinstance(value, Whitespace):
m = re.match("(?s)^([^ ]*)(.*)$", value.trivia.indent)
if not m:
value.trivia.indent = indent
else:
value.trivia.indent = m.group(1) + indent + m.group(2)
def __delitem__(self, key): # type: (Union[Key, str]) -> None
self.remove(key)
def __repr__(self):
return super(InlineTable, self).__repr__()
def _getstate(self, protocol=3):
return (self._value, self._trivia)
class String(unicode, Item):
"""
A string literal.
"""
def __new__(cls, t, value, original, trivia):
return super(String, cls).__new__(cls, value)
def __init__(
self, t, _, original, trivia
): # type: (StringType, str, original, Trivia) -> None
super(String, self).__init__(trivia)
self._t = t
self._original = original
@property
def discriminant(self): # type: () -> int
return 11
@property
def value(self): # type: () -> str
return self
def as_string(self): # type: () -> str
return "{}{}{}".format(self._t.value, decode(self._original), self._t.value)
def __add__(self, other):
result = super(String, self).__add__(other)
return self._new(result)
def __sub__(self, other):
result = super(String, self).__sub__(other)
return self._new(result)
def _new(self, result):
return String(self._t, result, result, self._trivia)
def _getstate(self, protocol=3):
return self._t, unicode(self), self._original, self._trivia
class AoT(Item, list):
"""
An array of table literal
"""
def __init__(
self, body, name=None, parsed=False
): # type: (List[Table], Optional[str]) -> None
self.name = name
self._body = []
self._parsed = parsed
super(AoT, self).__init__(Trivia(trail=""))
for table in body:
self.append(table)
@property
def body(self): # type: () -> List[Table]
return self._body
@property
def discriminant(self): # type: () -> int
return 12
@property
def value(self): # type: () -> List[Dict[Any, Any]]
return [v.value for v in self._body]
def append(self, table): # type: (Table) -> Table
m = re.match("(?s)^[^ ]*([ ]+).*$", self._trivia.indent)
if m:
indent = m.group(1)
m = re.match("(?s)^([^ ]*)(.*)$", table.trivia.indent)
if not m:
table.trivia.indent = indent
else:
table.trivia.indent = m.group(1) + indent + m.group(2)
if not self._parsed and "\n" not in table.trivia.indent and self._body:
table.trivia.indent = "\n" + table.trivia.indent
self._body.append(table)
super(AoT, self).append(table)
return table
def as_string(self): # type: () -> str
b = ""
for table in self._body:
b += table.as_string(prefix=self.name)
return b
def __repr__(self): # type: () -> str
return "<AoT {}>".format(self.value)
def _getstate(self, protocol=3):
return self._body, self.name, self._parsed
class Null(Item):
"""
A null item.
"""
def __init__(self): # type: () -> None
pass
@property
def discriminant(self): # type: () -> int
return -1
@property
def value(self): # type: () -> None
return None
def as_string(self): # type: () -> str
return ""
def _getstate(self, protocol=3):
return tuple()
| [
"[email protected]"
] | |
96e148bc4a0214e66c46be3fb70e8b07f9f28a1b | 52b2e3470cd4b91975b2e1caed8d1c93c20e5d05 | /tools/misc/dedup.py | b45b5fdcaa724397e39049fcdfd692a60aaaf159 | [] | no_license | xprime480/projects | c2f9a82bbe91e00859568dc27ae17c3b5dd873e3 | 3c5eb2d53bd7fa198edbe27d842ee5b5ff56e226 | refs/heads/master | 2020-04-27T03:51:29.456979 | 2019-04-12T14:34:39 | 2019-04-12T14:34:39 | 174,037,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,003 | py | #!/usr/bin/python
class Deduper(object) :
def __init__(self) :
self.lines = {}
self.count = 0
def addLine(self, line) :
self.count += 1
self.lines[line] = self.count
def getLines(self) :
lineListWithIndex = [(index,line) for line,index in self.lines.items()]
lineListWithIndex.sort()
linesSortedByIndex = [line for index,line in lineListWithIndex]
return linesSortedByIndex
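# Usage sketch (illustrative, not part of the original script):
#   d = Deduper()
#   for line in ['a', 'b', 'a']:
#       d.addLine(line)
#   d.getLines()  # -> ['b', 'a']: a repeated line keeps the index of its
#                 #    most recent occurrence, so output is ordered by last appearance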
class FileDeduper(object) :
def __init__(self, fileName) :
deduper = Deduper()
with open(fileName) as fileHandle :
for line in fileHandle.readlines() :
deduper.addLine(line[:-1])
self.lines = deduper.getLines()
def getLines(self) :
return self.lines
def dedupFile(fileName) :
deduper = FileDeduper(fileName)
for line in deduper.getLines() :
        print(line)
if __name__ == '__main__' :
import sys
for fileName in sys.argv[1:] :
dedupFile(fileName)
| [
"[email protected]"
] | |
481366b4ed79ce490dd3f6c4e8e0913f760fd9bb | b96d4479c86b971a23d20854000aecd6e1f8ce0a | /audit/mixins.py | 1c261b7a7072d808856ef448bae880fab709c7f9 | [] | no_license | dbsiavichay/invenco | 0eb3d74e8403dbed9d4d9459bd25c8ae107368fe | 11e06d1ae694f9ffc158400fc63f4b81f1807875 | refs/heads/master | 2022-11-29T21:08:05.075194 | 2019-07-23T14:06:51 | 2019-07-23T14:06:51 | 92,068,624 | 1 | 0 | null | 2022-11-22T01:15:08 | 2017-05-22T15:22:30 | JavaScript | UTF-8 | Python | false | false | 864 | py | # -*- coding: utf-8 -*-
from django.contrib.contenttypes.models import ContentType
from django.contrib.admin.models import LogEntry, ADDITION, CHANGE, DELETION
from django.utils.encoding import force_unicode
class AuditMixin(object):
def save_log(self, user, message, ACTION):
log = LogEntry.objects.create(
user_id = user.id,
content_type_id = ContentType.objects.get_for_model(self).id,
object_id = self.id,
object_repr = force_unicode(self),
action_flag = ACTION,
change_message = message
)
def save_addition(self, user):
message = '[{"añadidos": {}}]'
self.save_log(user, message, ADDITION)
def save_edition(self, user):
self.save_log(user, '[{"cambiados": {"fields": []}}]', CHANGE)
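    # Usage sketch (illustrative, not from the original file; assumes a model
    # that mixes in AuditMixin and a request.user available in the view):
    #   obj = SomeModel.objects.create(...)
    #   obj.save_addition(request.user)   # writes an ADDITION LogEntry
    #   obj.save_edition(request.user)    # writes a CHANGE LogEntry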
def save_deletion(self, user):
self.save_log(user, '[{"eliminados": {}}]', DELETION) | [
"[email protected]"
] | |
808d060c64c007cbf5ccbed2a10e6f19c169a93e | ff6248be9573caec94bea0fa2b1e4b6bf0aa682b | /StudentProblem/10.21.9.56/3/1569573341.py | 7861b0d7864d83874341994b8d4a4c9183986b35 | [] | no_license | LennartElbe/codeEvo | 0e41b1a7705204e934ef71a5a28c047366c10f71 | e89b329bc9edd37d5d9986f07ca8a63d50686882 | refs/heads/master | 2020-12-21T17:28:25.150352 | 2020-03-26T10:22:35 | 2020-03-26T10:22:35 | 236,498,032 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 645 | py | import functools
import typing
import string
import random
import pytest
def leap(jahrzahl: int) -> typing.Optional[bool]:
    'return True if the given year is a Schaltjahr (leap year) and False if not'
    if jahrzahl > 1582:
        # Gregorian rule: leap if divisible by 4, unless it is a century
        # year not divisible by 400 (e.g. 1900 is not a leap year, 2000 is).
        return jahrzahl % 4 == 0 and (jahrzahl % 100 != 0 or jahrzahl % 400 == 0)
    else:
        print('jahrzahl ist kleiner als 1582')
        return None
######################################################################
## Solution Part 2 (Tests)
def test_leap():
    assert leap(155) is None      # years before 1582 are rejected
    assert leap(2000) == True     # divisible by 400 -> leap year
    assert leap(1900) == False    # century not divisible by 400
    assert leap(2004) == True     # ordinary leap year
######################################################################
| [
"[email protected]"
] | |
cc3f3bdd7b7d3c7b6073fc600dab76adaa827007 | 0dd881b86146eff46a99e3100a12addcb5b1bde9 | /No701 Insert into a Binary Search Tree.py | bf808a212bff8626fffb83d0862a468224faf119 | [] | no_license | BaijingML/leetcode | 8b04599ba6f1f9cf12fbb2726f6a1463a42f0a70 | 0ba37ea32ad71d9467f73da6f9e71971911f1d4c | refs/heads/master | 2020-03-22T05:07:17.884441 | 2020-01-10T12:13:54 | 2020-01-10T12:13:54 | 138,399,745 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 628 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
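# Usage sketch (illustrative; assumes the TreeNode class above is defined):
#   root = TreeNode(4); root.left = TreeNode(2); root.right = TreeNode(7)
#   Solution().insertIntoBST(root, 5)   # 5 becomes the left child of 7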
class Solution:
def insertIntoBST(self, root: TreeNode, val: int) -> TreeNode:
        if not root:
            # Empty position reached: the new value becomes a node here.
            return TreeNode(val)
if val < root.val:
if root.left:
self.insertIntoBST(root.left, val)
else:
root.left = TreeNode(val)
else:
if root.right:
self.insertIntoBST(root.right, val)
else:
root.right = TreeNode(val)
return root | [
"[email protected]"
] | |
ab2368e353ecfb086908d5635656a7bd22fb9cbb | 5f67c696967456c063e5f8a0d14cf18cf845ad38 | /archiv/_python/py4inf/xml1.py | e71260c5a8b2930fed02fa39bbc048217dc8cb67 | [] | no_license | wuxi20/Pythonista | 3f2abf8c40fd6554a4d7596982c510e6ba3d6d38 | acf12d264615749f605a0a6b6ea7ab72442e049c | refs/heads/master | 2020-04-02T01:17:39.264328 | 2019-04-16T18:26:59 | 2019-04-16T18:26:59 | 153,848,116 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 281 | py | import xml.etree.ElementTree as ET
data = '''
<person>
<name>Chuck</name>
<phone type="intl">
+1 734 303 4456
</phone>
<email hide="yes"/>
</person>'''
tree = ET.fromstring(data)
print('Name:',tree.find('name').text)
print('Attr:',tree.find('email').get('hide'))
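# Further lookups on the same tree (illustrative, not in the original script):
# print('Phone:', tree.find('phone').text.strip())  # -> +1 734 303 4456
# print('Type:', tree.find('phone').get('type'))    # -> intl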
| [
"[email protected]"
] | |
7923e0d4e6524bc8d6329971c1e892fc33f5efa7 | ea99544eef7572b194c2d3607fa7121cb1e45872 | /apps/support/migrations/0003_auto_20190407_1007.py | abf7098fd524718ce25631fdde2aa89a1d5d749a | [] | no_license | ash018/FFTracker | 4ab55d504a9d8ba9e541a8b682bc821f112a0866 | 11be165f85cda0ffe7a237d011de562d3dc64135 | refs/heads/master | 2022-12-02T15:04:58.543382 | 2019-10-05T12:54:27 | 2019-10-05T12:54:27 | 212,999,035 | 0 | 0 | null | 2022-11-22T03:58:29 | 2019-10-05T12:53:26 | Python | UTF-8 | Python | false | false | 470 | py | # Generated by Django 2.2 on 2019-04-07 10:07
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('support', '0002_customersupport_user'),
]
operations = [
migrations.AlterField(
model_name='customersupport',
name='status',
field=models.PositiveSmallIntegerField(choices=[(2, 'Resolved'), (1, 'In progress'), (0, 'Pending')], default=0),
),
]
| [
"[email protected]"
] | |
301498f89b89d64d5a6b050f084e33d3e27e9569 | 53784d3746eccb6d8fca540be9087a12f3713d1c | /res/packages/scripts/scripts/client/gui/Scaleform/daapi/view/lobby/cyberSport/CyberSportUnitsListView.py | 37f7802efad2cd66a095df4f913fd0d67f3ec9e6 | [] | no_license | webiumsk/WOT-0.9.17.1-CT | 736666d53cbd0da6745b970e90a8bac6ea80813d | d7c3cf340ae40318933e7205bf9a17c7e53bac52 | refs/heads/master | 2021-01-09T06:00:33.898009 | 2017-02-03T21:40:17 | 2017-02-03T21:40:17 | 80,870,824 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 5,519 | py | # 2017.02.03 21:49:44 Střední Evropa (běžný čas)
# Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/cyberSport/CyberSportUnitsListView.py
from UnitBase import UNIT_BROWSER_TYPE
from gui.Scaleform.daapi.view.lobby.rally.rally_dps import ManualSearchDataProvider
from gui.Scaleform.daapi.view.meta.CyberSportUnitsListMeta import CyberSportUnitsListMeta
from gui.Scaleform.genConsts.CYBER_SPORT_ALIASES import CYBER_SPORT_ALIASES
from gui.Scaleform.locale.CYBERSPORT import CYBERSPORT
from gui.Scaleform.locale.RES_ICONS import RES_ICONS
from gui.prb_control.settings import REQUEST_TYPE
from gui.shared import events
from gui.shared.view_helpers import CooldownHelper
from helpers import int2roman
from helpers.i18n import makeString as _ms
from gui.shared.formatters import text_styles
class CyberSportUnitsListView(CyberSportUnitsListMeta):
def __init__(self):
super(CyberSportUnitsListView, self).__init__()
self._unitTypeFlags = UNIT_BROWSER_TYPE.ALL
self._cooldown = CooldownHelper(self.getCoolDownRequests(), self._onCooldownHandle, events.CoolDownEvent.PREBATTLE)
self.__currentEmblem = None
return
def getPyDataProvider(self):
return ManualSearchDataProvider()
def getCoolDownRequests(self):
return [REQUEST_TYPE.UNITS_LIST]
def loadPrevious(self):
listReq = self.prbEntity.getBrowser()
if listReq:
listReq.request(req=REQUEST_TYPE.UNITS_NAV_LEFT)
def loadNext(self):
listReq = self.prbEntity.getBrowser()
if listReq:
listReq.request(req=REQUEST_TYPE.UNITS_NAV_RIGHT)
def refreshTeams(self):
listReq = self.prbEntity.getBrowser()
if listReq:
listReq.request(req=REQUEST_TYPE.UNITS_REFRESH)
def getRallyDetails(self, index):
if index != self._searchDP.selectedRallyIndex:
self.__currentEmblem = None
cfdUnitID, vo = self._searchDP.getRally(index)
listReq = self.prbEntity.getBrowser()
if listReq:
listReq.setSelectedID(cfdUnitID)
self.__setDetails(vo)
return
def onPrbEntitySwitching(self):
browser = self.prbEntity.getBrowser()
if browser:
browser.stop()
def _populate(self):
super(CyberSportUnitsListView, self)._populate()
self._cooldown.start()
self.prbEntity.getBrowser().start(self.__onUnitsListUpdated)
self.as_setHeaderS({'title': text_styles.promoTitle(CYBERSPORT.WINDOW_UNITLISTVIEW_TITLE),
'createBtnLabel': CYBERSPORT.WINDOW_UNITLISTVIEW_CREATE_BTN,
'createBtnTooltip': None,
'createBtnEnabled': True,
'columnHeaders': self.__getColumnHeaders()})
return
def _dispose(self):
self._cooldown.stop()
self._cooldown = None
super(CyberSportUnitsListView, self)._dispose()
return
def _onUserActionReceived(self, _, user):
self.__updateView(user)
def _doEnableNavButtons(self, isEnabled):
self.as_updateNavigationBlockS({'previousVisible': True,
'previousEnabled': isEnabled,
'nextVisible': True,
'nextEnabled': isEnabled})
def _onCooldownHandle(self, isInCooldown):
self._doEnableNavButtons(not isInCooldown)
def __getColumnHeaders(self):
return [self.__createHedader('', 82, 'center', RES_ICONS.MAPS_ICONS_STATISTIC_RATING24),
self.__createHedader(CYBERSPORT.WINDOW_UNIT_UNITLISTVIEW_COMMANDER, 152),
self.__createHedader(CYBERSPORT.WINDOW_UNIT_UNITLISTVIEW_DESCRIPTION, 220),
self.__createHedader(CYBERSPORT.WINDOW_UNIT_UNITLISTVIEW_PLAYERS, 76)]
def __createHedader(self, label, buttonWidth, position = 'left', iconSource = None):
return {'label': label,
'buttonWidth': buttonWidth,
'iconSource': iconSource,
'enabled': False,
'textAlign': position}
def __updateVehicleLabel(self):
settings = self.prbEntity.getRosterSettings()
self._updateVehiclesLabel(int2roman(settings.getMinLevel()), int2roman(settings.getMaxLevel()))
def __onUnitsListUpdated(self, selectedID, isFullUpdate, isReqInCoolDown, units):
if isFullUpdate:
selectedIdx = self._searchDP.rebuildList(selectedID, units)
self._doEnableNavButtons(not isReqInCoolDown)
else:
selectedIdx = self._searchDP.updateList(selectedID, units)
if selectedIdx is not None:
self.as_selectByIndexS(selectedIdx)
return
def __setDetails(self, vo):
linkage = CYBER_SPORT_ALIASES.COMMNAD_DETAILS_LINKAGE_JOIN_TO_NONSTATIC
self.as_setDetailsS({'viewLinkage': linkage,
'data': vo})
self.__updateVehicleLabel()
def __refreshDetails(self, idx):
_, vo = self._searchDP.getRally(idx)
self.__setDetails(vo)
def __updateView(self, user):
self._searchDP.updateListItem(user.getID())
self.__refreshDetails(self._searchDP.selectedRallyIndex)
def __recenterList(self):
listReq = self.prbEntity.getBrowser()
if listReq:
listReq.request(req=REQUEST_TYPE.UNITS_RECENTER, unitTypeFlags=self._unitTypeFlags)
# okay decompyling c:\Users\PC\wotsources\files\originals\res\packages\scripts\scripts\client\gui\Scaleform\daapi\view\lobby\cyberSport\CyberSportUnitsListView.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2017.02.03 21:49:44 Střední Evropa (běžný čas)
| [
"[email protected]"
] | |
410054c7d49eb4fa18c33fc8e799b007a006b702 | 873257c67e1bb2756053f88c4f9331d14c00424b | /NGC5257/RotationCurve/Bbarolo/12CO21/test2/run3/pyscript.py | fabc02448b4e0be15da32c400bb493e108e642fd | [] | no_license | heh15/Arp240 | 64058dd9c84653e3a7035e5ee088c55a6b4119e3 | cae7bf59ebaaa9f69d2204a1be522f4c0b76d7f7 | refs/heads/master | 2020-08-27T12:14:36.002105 | 2020-06-19T00:42:34 | 2020-06-19T00:42:34 | 156,797,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,701 | py | import numpy as np
import os
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from matplotlib.colorbar import ColorbarBase
from astropy.io import fits
from astropy.visualization import LinearStretch, PowerStretch
from astropy.visualization.mpl_normalize import ImageNormalize
from astropy.visualization import PercentileInterval
matplotlib.rc('xtick',direction='in')
matplotlib.rc('ytick',direction='in')
# PARAMETERS: plotting the fit parameters
gname = 'NGC_5257'
outfolder = './run3/'
file1 = outfolder+'ringlog1.txt'
file2 = outfolder+'ringlog2.txt'
filesb= outfolder+'densprof.txt'
twostage=False
plotmask=False
rad,vrot,disp,inc,pa,z0,xpos,ypos,vsys,vrad = np.genfromtxt(file1,skip_header=1,usecols=(1,2,3,4,5,7,9,10,11,12),unpack=True)
err1_l, err1_h = np.zeros(shape=(10,len(rad))), np.zeros(shape=(10,len(rad)))
color=color2='#B22222'
max_vrot,max_vdisp,max_inc,max_pa=np.max(vrot),np.max(disp),np.max(inc),np.max(pa)
max_z0,max_xpos,max_ypos,max_vsys=np.max(z0),np.max(xpos),np.max(ypos),np.max(vsys)
max_rad = 1.1*np.max(rad)
if twostage:
rad2, vrot2,disp2,inc2,pa2,z02,xpos2,ypos2,vsys2, vrad2 = np.genfromtxt(file2,skip_header=1,usecols=(1,2,3,4,5,7,9,10,11,12),unpack=True)
err2_l, err2_h = np.zeros(shape=(10,len(rad2))), np.zeros(shape=(10,len(rad2)))
color='#A0A0A0'
max_vrot,max_vdisp,max_inc,max_pa=np.maximum(max_vrot,np.max(vrot2)),np.maximum(max_vdisp,np.max(disp2)),np.maximum(max_inc,np.max(inc2)),np.maximum(max_pa,np.max(pa2))
max_z0,max_xpos,max_ypos,max_vsys=np.maximum(max_z0,np.max(z02)),np.maximum(max_xpos,np.max(xpos2)),np.maximum(max_ypos,np.max(ypos2)),np.maximum(max_vsys,np.max(vsys2))
rad_sd, surfdens, sd_err = np.genfromtxt(filesb, usecols=(0,3,4),unpack=True)
# Opening maps and retrieving intensity map units
f0 = fits.open(outfolder+'/maps/NGC_5257_0mom.fits')
f1 = fits.open(outfolder+'/maps/NGC_5257_1mom.fits')
f2 = fits.open(outfolder+'/maps/NGC_5257_2mom.fits')
bunit = f0[0].header['BUNIT']
fig1=plt.figure(figsize=(11.69,8.27), dpi=150)
plt.rc('font',family='sans-serif',serif='Helvetica',size=10)
params = {'text.usetex': False, 'mathtext.fontset': 'cm', 'mathtext.default': 'regular', 'errorbar.capsize': 0}
plt.rcParams.update(params)
fig_ratio = 11.69/8.27
nrows, ncols = 3,3
x_axis_length, y_axis_length = 0.27, 0.13
x_sep, y_sep = 0.07,0.015
ax, bottom_corner = [], [0.1,0.7]
for i in range (nrows):
bottom_corner[0], axcol, ylen = 0.1, [], y_axis_length
if i==0: ylen *= 1.8
for j in range (ncols):
axcol.append(fig1.add_axes([bottom_corner[0],bottom_corner[1],x_axis_length,ylen*fig_ratio]))
bottom_corner[0]+=x_axis_length+x_sep
ax.append(axcol)
bottom_corner[1]-=(y_axis_length+y_sep)*fig_ratio
axis=ax[0][0]
axis.tick_params(axis='both',which='both',bottom='on',top='on',labelbottom='off',labelleft='on')
axis.set_xlim(0,max_rad)
axis.set_ylim(0,1.2*max_vrot)
axis.set_ylabel('v$_\mathrm{rot}$ (km/s)', fontsize=14)
axis.errorbar(rad,vrot, yerr=[err1_l[0],-err1_h[0]],fmt='o', color=color)
if twostage: axis.errorbar(rad2,vrot2, yerr=[err2_l[0],-err2_h[0]],fmt='o', color=color2)
axis=ax[1][0]
axis.set_xlim(0,max_rad)
axis.set_ylabel('i (deg)', fontsize=14)
axis.tick_params(axis='both',which='both',bottom='on',top='on',labelbottom='off',labelleft='on')
axis.errorbar(rad,inc, yerr=[err1_l[4],-err1_h[4]],fmt='o', color=color)
if twostage: axis.errorbar(rad2,inc2,yerr=[err2_l[4],-err2_h[4]], fmt='o-', color=color2)
axis=ax[2][0]
axis.set_xlim(0,max_rad)
axis.set_ylabel('$\phi$ (deg)', fontsize=14)
axis.set_xlabel('Radius (arcsec)', fontsize=14, labelpad=10)
axis.tick_params(axis='both',which='both',bottom='on',top='on',labelbottom='on',labelleft='on')
axis.errorbar(rad,pa, yerr=[err1_l[5],-err1_h[5]],fmt='o', color=color)
if twostage: axis.errorbar(rad2,pa2,yerr=[err2_l[5],-err2_h[5]], fmt='o-', color=color2)
axis=ax[0][1]
axis.set_xlim(0,max_rad)
axis.set_ylim(0,1.2*max_vdisp)
axis.set_ylabel('$\sigma_\mathrm{gas}$ (km/s)', fontsize=14)
axis.tick_params(axis='both',which='both',bottom='on',top='on',labelbottom='off',labelleft='on')
axis.errorbar(rad,disp, yerr=[err1_l[1],-err1_h[1]],fmt='o', color=color)
if twostage: axis.errorbar(rad2,disp2, yerr=[err2_l[1],-err2_h[1]],fmt='o', color=color2)
axis=ax[1][1]
axis.set_xlim(0,max_rad)
axis.set_ylabel('x$_0$ (pix)', fontsize=14)
axis.tick_params(axis='both',which='both',bottom='on',top='on',labelbottom='off',labelleft='on')
axis.errorbar(rad,xpos, yerr=[err1_l[6],-err1_h[6]],fmt='o', color=color)
if twostage: axis.errorbar(rad2,xpos2,yerr=[err2_l[6],-err2_h[6]],fmt='o-', color=color2)
axis=ax[2][1]
axis.set_xlim(0,max_rad)
axis.set_ylabel('y$_0$ (pix)', fontsize=14)
axis.set_xlabel('Radius (arcsec)', fontsize=14, labelpad=10)
axis.tick_params(axis='both',which='both',bottom='on',top='on',labelbottom='on',labelleft='on')
axis.errorbar(rad,ypos, yerr=[err1_l[7],-err1_h[7]],fmt='o', color=color)
if twostage: axis.errorbar(rad2,ypos2, yerr=[err2_l[7],-err2_h[7]],fmt='o-', color=color2)
axis=ax[0][2]
axis.set_xlim(0,max_rad)
axis.set_ylabel('$\Sigma}$ ('+bunit+')', fontsize=14)
axis.tick_params(axis='both',which='both',bottom='on',top='on',labelbottom='off',labelleft='on')
axis.errorbar(rad_sd,surfdens, yerr=sd_err,fmt='o', color=color2)
axis=ax[1][2]
axis.set_xlim(0,max_rad)
axis.set_ylabel('V$_\mathrm{rad}$ (km/s)', fontsize=14)
axis.tick_params(axis='both',which='both',bottom='off',top='on',labelbottom='off',labelleft='on')
axis.errorbar(rad,vrad, yerr=[err1_l[9],-err1_h[9]],fmt='o', color=color)
if twostage==True: axis.errorbar(rad2,vrad2,yerr=[err2_l[9],-err2_h[9]],fmt='o', color=color2)
axis=ax[2][2]
axis.set_xlim(0,max_rad)
axis.set_ylabel('v$_\mathrm{sys}$ (km/s)', fontsize=14)
axis.set_xlabel('Radius (arcsec)', fontsize=14, labelpad=10)
axis.tick_params(axis='both',which='both',bottom='on',top='on',labelbottom='on',labelleft='on')
axis.errorbar(rad,vsys, yerr=[err1_l[8],-err1_h[8]],fmt='o', color=color)
if twostage==True: axis.errorbar(rad2,vsys2,yerr=[err2_l[8],-err2_h[8]],fmt='o', color=color2)
plt.savefig(outfolder+'plot_parameters.pdf', orientation = 'landscape', format = 'pdf',bbox_inches='tight')
# CHANNEL MAPS: Setting all the needed variables
image = fits.open('image/NGC5257_12CO21_pbcor_cube_masked.fits')
image_mas = fits.open(outfolder+'mask.fits')
xmin, xmax = 345, 615
ymin, ymax = 347, 617
zmin, zmax = 0, 69
data = image[0].data[zmin:zmax+1,ymin:ymax+1,xmin:xmax+1]
data_mas = image_mas[0].data[zmin:zmax+1,ymin:ymax+1,xmin:xmax+1]
head = image[0].header
zsize=data[:,0,0].size
cdeltsp=0.1
cont = 0.0182647
v = np.array([1,2,4,8,16,32,64])*cont
v_neg = [-cont]
interval = PercentileInterval(99.5)
vmax = interval.get_limits(data)[1]
norm = ImageNormalize(vmin=cont, vmax=vmax, stretch=PowerStretch(0.5))
files_mod, typ = [], []
for thisFile in os.listdir(outfolder):
if 'mod_azim.fits' in thisFile: files_mod.append(thisFile)
if len(files_mod)==1: typ.append('AZIM')
for thisFile in os.listdir(outfolder):
if 'mod_local.fits' in thisFile: files_mod.append(thisFile)
if len(files_mod)==2: typ.append('LOCAL')
elif (len(files_mod)==1 and len(typ)==0): typ.append('LOCAL')
elif (len(files_mod)==len(typ)==0): exit()
# Beginning channel map plot
xcen, ycen, phi = [np.nanmean(xpos)-xmin,np.nanmean(ypos)-ymin,np.nanmean(pa)]
if twostage==True: xcen, ycen, phi = [np.nanmean(xpos2)-xmin,np.nanmean(ypos2)-ymin,np.nanmean(pa2)]
for k in range (len(files_mod)):
image_mod = fits.open(outfolder+files_mod[k])
data_mod = image_mod[0].data[zmin:zmax+1,ymin:ymax+1,xmin:xmax+1]
plt.figure(figsize=(8.27, 11.69), dpi=100)
grid = [gridspec.GridSpec(2,5),gridspec.GridSpec(2,5),gridspec.GridSpec(2,5)]
grid[0].update(top=0.90, bottom=0.645, left=0.05, right=0.95, wspace=0.0, hspace=0.0)
grid[1].update(top=0.60, bottom=0.345, left=0.05, right=0.95, wspace=0.0, hspace=0.0)
grid[2].update(top=0.30, bottom=0.045, left=0.05, right=0.95, wspace=0.0, hspace=0.0)
matplotlib.rcParams['contour.negative_linestyle'] = 'solid'
num = 0
for j in range (0,3):
for i in range (0,5):
chan = int(num*(zsize)/15)
z = data[chan,:,:]
z_mod = data_mod[chan,:,:]
#New matplotlib draws wrong contours when no contours are found. This is a workaround.
if np.all(z_mod<v[0]): z_mod[:,:] =0
velo_kms = (chan+1-1)*10.0016+-299.85
velo = ' v = ' + str(int(velo_kms)) + ' km/s'
ax = plt.subplot(grid[j][0,i])
ax.tick_params(axis='both',which='both',bottom='on',top='on',labelbottom='off',labelleft='off')
ax.set_title(velo, fontsize=10,loc='left')
ax.imshow(z,origin='lower',cmap = matplotlib.cm.Greys,norm=norm,aspect='auto',interpolation='none')
ax.contour(z,v,origin='lower',linewidths=0.7,colors='#00008B')
ax.contour(z,v_neg,origin='lower',linewidths=0.1,colors='gray')
ax.plot(xcen,ycen,'x',color='#0FB05A',markersize=7,mew=2)
if plotmask:
ax.contour(data_mas[chan],[1],origin='lower',linewidths=2,colors='k')
if (j==i==0):
ax.text(0, 1.4, gname, transform=ax.transAxes,fontsize=15,va='center')
lbar = 0.5*(xmax-xmin)*cdeltsp
ltex = "%.0f'' "%lbar if lbar>10 else "%.2f'' "%lbar
if lbar>600: ltex = "%.0f' "%(lbar/60.)
ax.annotate('', xy=(4.5, 1.4), xycoords='axes fraction', xytext=(5, 1.4),arrowprops=dict(arrowstyle='<->', color='k'))
ax.text(4.75,1.50,ltex,transform=ax.transAxes,fontsize=11, ha='center')
bmaj, bmin, bpa = 10.1879/float(xmax-xmin), 5.21832/float(ymax-ymin),115.549
beam = matplotlib.patches.Ellipse((3.5, 1.4), bmaj, bmin, bpa, color='#5605D0', clip_on=False, transform=ax.transAxes, alpha=0.2)
ax.add_artist(beam)
ax.text(3.6+bmaj/1.8,1.4,'Beam',transform=ax.transAxes,fontsize=11, ha='left',va='center')
ax = plt.subplot(grid[j][1,i])
ax.tick_params(axis='both',which='both',bottom='on',top='on',labelbottom='off',labelleft='off')
ax.imshow(z_mod,origin='lower',cmap = matplotlib.cm.Greys,norm=norm,aspect='auto',interpolation='none')
ax.contour(z_mod,v,origin='lower',linewidths=0.7,colors='#B22222')
ax.plot(xcen,ycen,'x',color='#0FB05A',markersize=7,mew=2)
if (i==0 and j==2):
clab = 'Contour levels at 2$^n \, c_{min}$, where $c_{min}$ = %s beam-1 Jy and n = 0,1,..,8 '%cont
ax.text(0.01,-0.16,clab,transform=ax.transAxes,fontsize=11, ha='left',va='center')
num = num+1
outfile = 'plot_chanmaps.pdf'
if (typ[k]=='AZIM'): outfile = 'plot_chanmaps_azim.pdf'
if (typ[k]=='LOCAL'): outfile = 'plot_chanmaps_local.pdf'
plt.savefig(outfolder+outfile, orientation = 'portrait', format = 'pdf')
image_mod.close()
image.close()
# Now plotting the position-velocity diagrams
files_pva_mod, files_pvb_mod = [], []
for thisFile in os.listdir(outfolder+'pvs/'):
if 'pv_a_azim.fits' in thisFile: files_pva_mod.append(thisFile)
if 'pv_b_azim.fits' in thisFile: files_pvb_mod.append(thisFile)
for thisFile in os.listdir(outfolder+'pvs/'):
if 'pv_a_local.fits' in thisFile: files_pva_mod.append(thisFile)
if 'pv_b_local.fits' in thisFile: files_pvb_mod.append(thisFile)
image_maj = fits.open(outfolder+'pvs/'+gname+'_pv_a.fits')
image_min = fits.open(outfolder+'pvs/'+gname+'_pv_b.fits')
image_mas_maj = fits.open(outfolder+'pvs/'+gname+'mask_pv_a.fits')
image_mas_min = fits.open(outfolder+'pvs/'+gname+'mask_pv_b.fits')
head = [image_maj[0].header,image_min[0].header]
crpixpv = np.array([head[0]['CRPIX1'],head[1]['CRPIX1']])
cdeltpv = np.array([head[0]['CDELT1'],head[1]['CDELT1']])
crvalpv = np.array([head[0]['CRVAL1'],head[1]['CRVAL1']])
xminpv, xmaxpv = np.floor(crpixpv-1-135), np.ceil(crpixpv-1 +135)
if xminpv[0]<0: xminpv[0]=0
if xminpv[1]<0: xminpv[1]=0
if xmaxpv[0]>=head[0]['NAXIS1']: xmaxpv[0]=head[0]['NAXIS1']-1
if xmaxpv[1]>=head[1]['NAXIS1']: xmaxpv[1]=head[1]['NAXIS1']-1
data_maj = image_maj[0].data[zmin:zmax+1,int(xminpv[0]):int(xmaxpv[0])+1]
data_min = image_min[0].data[zmin:zmax+1,int(xminpv[1]):int(xmaxpv[1])+1]
data_mas_maj = image_mas_maj[0].data[zmin:zmax+1,int(xminpv[0]):int(xmaxpv[0])+1]
data_mas_min = image_mas_min[0].data[zmin:zmax+1,int(xminpv[1]):int(xmaxpv[1])+1]
xmin_wcs = ((xminpv+1-crpixpv)*cdeltpv+crvalpv)*3600
xmax_wcs = ((xmaxpv+1-crpixpv)*cdeltpv+crvalpv)*3600
zmin_wcs, zmax_wcs = -299.85, 390.254
radius = np.concatenate((rad,-rad))
vrotation, inclin, vsystem, posang = vrot, inc, vsys, pa
if twostage==True:
radius, vrotation, inclin, vsystem, posang = np.concatenate((rad2,-rad2)), vrot2, inc2, vsys2, pa2
vlos1 = vrotation*np.sin(np.deg2rad(inclin))+vsystem
vlos2 = vsystem-vrotation*np.sin(np.deg2rad(inclin))
reverse = True
if reverse==True: vlos1, vlos2 = vlos2, vlos1
vlos = np.concatenate((vlos1,vlos2))
vsysmean, pamean = np.nanmean(vsystem), np.nanmean(posang)
ext = [[xmin_wcs[0],xmax_wcs[0],zmin_wcs-vsysmean,zmax_wcs-vsysmean],\
[xmin_wcs[1],xmax_wcs[1],zmin_wcs-vsysmean,zmax_wcs-vsysmean]]
labsize = 15
palab = ['$\phi = $107$^\circ$', '$\phi = $197$^\circ$']
# Beginning PV plot
for k in range (len(files_mod)):
image_mod_maj = fits.open(outfolder+'pvs/'+files_pva_mod[k])
image_mod_min = fits.open(outfolder+'pvs/'+files_pvb_mod[k])
data_mod_maj = image_mod_maj[0].data[zmin:zmax+1,int(xminpv[0]):int(xmaxpv[0])+1]
data_mod_min = image_mod_min[0].data[zmin:zmax+1,int(xminpv[1]):int(xmaxpv[1])+1]
toplot = [[data_maj,data_min],[data_mod_maj,data_mod_min],[data_mas_maj,data_mas_min]]
fig=plt.figure(figsize=(11.69,8.27), dpi=150)
fig_ratio = 11.69/8.27
x_len, y_len, y_sep = 0.6, 0.42, 0.08
ax, bottom_corner = [], [0.1,0.7]
for i in range (2):
bottom_corner[0], axcol = 0.1, []
ax.append(fig.add_axes([bottom_corner[0],bottom_corner[1],x_len,y_len*fig_ratio]))
bottom_corner[1]-=(y_len+y_sep)*fig_ratio
for i in range (2):
axis = ax[i]
axis.tick_params(which='major',length=8, labelsize=labsize)
axis.set_xlabel('Offset (arcsec)',fontsize=labsize+2)
axis.set_ylabel('$\mathrm{\Delta V_{LOS}}$ (km/s)',fontsize=labsize+2)
axis.text(1, 1.02,palab[i],ha='right',transform=axis.transAxes,fontsize=labsize+4)
axis2 = axis.twinx()
axis2.set_xlim([ext[i][0],ext[i][1]])
axis2.set_ylim([ext[i][2]+vsysmean,ext[i][3]+vsysmean])
axis2.tick_params(which='major',length=8, labelsize=labsize)
axis2.set_ylabel('$\mathrm{V_{LOS}}$ (km/s)',fontsize=labsize+2)
axis.imshow(toplot[0][i],origin='lower',cmap = matplotlib.cm.Greys,norm=norm,extent=ext[i],aspect='auto')
axis.contour(toplot[0][i],v,origin='lower',linewidths=0.7,colors='#00008B',extent=ext[i])
axis.contour(toplot[0][i],v_neg,origin='lower',linewidths=0.1,colors='gray',extent=ext[i])
axis.contour(toplot[1][i],v,origin='lower',linewidths=1,colors='#B22222',extent=ext[i])
axis.axhline(y=0,color='black')
axis.axvline(x=0,color='black')
if plotmask:
axis.contour(toplot[2][i],[1],origin='lower',linewidths=2,colors='k',extent=ext[i])
if i==0 :
axis2.plot(radius,vlos,'yo')
axis.text(0, 1.1, gname, transform=axis.transAxes,fontsize=22)
outfile = 'plot_pv.pdf'
if (typ[k]=='AZIM'): outfile = 'plot_pv_azim.pdf'
if (typ[k]=='LOCAL'): outfile = 'plot_pv_local.pdf'
plt.savefig(outfolder+outfile, bbox_inches='tight')
image_mod_maj.close()
image_mod_min.close()
image_maj.close()
image_min.close()
# Now plotting moment maps
mom0 = f0[0].data[ymin:ymax+1,xmin:xmax+1]
mom1 = f1[0].data[ymin:ymax+1,xmin:xmax+1]
mom2 = f2[0].data[ymin:ymax+1,xmin:xmax+1]
files_mod0, files_mod1, files_mod2 = [], [], []
for thisFile in os.listdir(outfolder+'/maps/'):
if 'azim_0mom.fits' in thisFile: files_mod0.append(thisFile)
if 'azim_1mom.fits' in thisFile: files_mod1.append(thisFile)
if 'azim_2mom.fits' in thisFile: files_mod2.append(thisFile)
for thisFile in os.listdir(outfolder+'maps/'):
if 'local_0mom.fits' in thisFile: files_mod0.append(thisFile)
if 'local_1mom.fits' in thisFile: files_mod1.append(thisFile)
if 'local_2mom.fits' in thisFile: files_mod2.append(thisFile)
norm0 = ImageNormalize(vmin=interval.get_limits(mom0)[0],vmax=interval.get_limits(mom0)[1], stretch=LinearStretch())
norm1 = ImageNormalize(vmin=interval.get_limits(mom1)[0],vmax=interval.get_limits(mom1)[1], stretch=LinearStretch())
norm2 = ImageNormalize(vmin=interval.get_limits(mom2)[0],vmax=interval.get_limits(mom2)[1], stretch=LinearStretch())
norm = [norm0, norm1, norm2]
cmaps = [matplotlib.cm.jet,matplotlib.cm.jet,matplotlib.cm.jet]
barlab = ['Intensity ('+bunit+')', 'V$_\mathrm{LOS}$ (km/s)', '$\sigma$ (km/s)']
titles = ['DATA', 'MODEL']
mapname = ['INTENSITY', 'VELOCITY', 'DISPERSION']
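# Line tracing the major axis (position angle phi) through the kinematic
# centre (xcen, ycen); it is overlaid on the velocity map below.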
x = np.arange(0,xmax-xmin,0.1)
y = np.tan(np.radians(phi-90))*(x-xcen)+ycen
ext = [0,xmax-xmin,0, ymax-ymin]
for k in range (len(files_mod0)):
mom0_mod = fits.open(outfolder+'/maps/'+files_mod0[k])[0].data[ymin:ymax+1,xmin:xmax+1]
mom1_mod = fits.open(outfolder+'/maps/'+files_mod1[k])[0].data[ymin:ymax+1,xmin:xmax+1]
mom2_mod = fits.open(outfolder+'/maps/'+files_mod2[k])[0].data[ymin:ymax+1,xmin:xmax+1]
to_plot = [[mom0,mom1,mom2],[mom0_mod,mom1_mod,mom2_mod]]
fig=plt.figure(figsize=(11.69,8.27), dpi=150)
fig_ratio = 11.69/8.27
nrows, ncols = 3, 2
x_len, y_len = 0.2, 0.2
x_sep, y_sep = 0.00,0.02
ax, ax_cb, bottom_corner = [], [], [0.1,0.7]
for i in range (nrows):
bottom_corner[0], axcol = 0.1, []
for j in range (ncols):
axcol.append(fig.add_axes([bottom_corner[0],bottom_corner[1],x_len,y_len*fig_ratio]))
bottom_corner[0]+=x_len+x_sep
ax.append(axcol)
ax_cb.append(fig.add_axes([bottom_corner[0]+0.01,bottom_corner[1],0.02,y_len*fig_ratio]))
bottom_corner[1]-=(y_len+y_sep)*fig_ratio
for i in range (nrows):
for j in range (ncols):
axis = ax[i][j]
axis.tick_params(labelbottom='off',labelleft='off')
axis.set_xlim(ext[0],ext[1])
axis.set_ylim(ext[2],ext[3])
axis.imshow(to_plot[j][i],origin='lower',cmap=cmaps[i],norm=norm[i],aspect='auto',extent=ext)
axis.plot(xcen,ycen,'x',color='#000000',markersize=7,mew=1.5)
cb = ColorbarBase(ax_cb[i], orientation='vertical', cmap=cmaps[i], norm=norm[i])
cb.solids.set_edgecolor('face')
cb.set_label(barlab[i],fontsize=13)
if i==0: axis.text(0.5,1.05,titles[j],ha='center',transform=axis.transAxes,fontsize=15)
elif i==1: axis.plot(x,y,color='#808080',linewidth=2)
if j==0: axis.text(-0.1,0.5,mapname[i],va='center',rotation=90,transform=axis.transAxes,fontsize=15)
if (typ[k]=='AZIM'): outfile = 'plot_maps_azim.pdf'
if (typ[k]=='LOCAL'): outfile = 'plot_maps_local.pdf'
plt.savefig(outfolder+outfile, bbox_inches = 'tight')
| [
"[email protected]"
] | |
f00fe9f24b7f590fb1a7de3a9fae4f9da18bf2ff | 21e1d00c48c1732cc44af077572299831b93ffc2 | /1000_PROBLEMS/SimplePythonPrograms/Problem-13.py | 414ab5620f03a8d4311ccd9c1a6e407e2cce8267 | [] | no_license | GolamRabbani20/PYTHON-A2Z | bd297858051042613739819ed70c535901569079 | 27ec4094be785810074be8b16ef84c85048065b5 | refs/heads/master | 2023-05-09T00:43:03.012963 | 2021-05-26T07:56:56 | 2021-05-26T07:56:56 | 317,953,879 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | #Python Program to Count the Number of Digits in a Number
x = abs(int(input("Enter a number:")))   # sign does not change the digit count
s = 0
while x >= 1:
    s += 1
    x = x // 10
if s == 0:   # the loop never runs for x == 0, which still has one digit
    s = 1
print("Total number of digits:", s)
"[email protected]"
] | |
a2dce625a2f9f2cd0e30411bdbfc7ec2277792d5 | 601b8aa76cc86c159c2736107d0779e31a2a7c56 | /datacube/utils/__init__.py | c3cf7fd4d9f2b1a4b74d75f432eb6ed7d1a339a6 | [
"Apache-2.0"
] | permissive | PhilipeRLeal/datacube-core | 531b7156b777fa4b631b6af163f65473055a58d9 | 81bed714f2e5cb30a2492f1b0cf3397b79141c3a | refs/heads/develop | 2022-12-13T20:36:52.188166 | 2019-10-16T01:08:03 | 2019-10-23T02:45:40 | 217,332,524 | 2 | 0 | Apache-2.0 | 2022-12-08T01:08:59 | 2019-10-24T15:29:47 | null | UTF-8 | Python | false | false | 1,712 | py | """
Utility functions
"""
from .dates import datetime_to_seconds_since_1970, parse_time
from .py import cached_property, ignore_exceptions_if, import_function
from .serialise import jsonify_document
from .uris import is_url, uri_to_local_path, get_part_from_uri, mk_part_uri
from .io import slurp, check_write_path, write_user_secret_file
from .documents import (
InvalidDocException,
SimpleDocNav,
DocReader,
is_supported_document_type,
read_strings_from_netcdf,
read_documents,
validate_document,
NoDatesSafeLoader,
get_doc_offset,
get_doc_offset_safe,
netcdf_extract_string,
without_lineage_sources,
schema_validated,
_readable_offset,
)
from .math import (
unsqueeze_data_array,
iter_slices,
unsqueeze_dataset,
data_resolution_and_offset,
)
from ._misc import (
DatacubeException,
gen_password,
)
__all__ = (
"datetime_to_seconds_since_1970",
"parse_time",
"cached_property",
"ignore_exceptions_if",
"import_function",
"jsonify_document",
"is_url",
"uri_to_local_path",
"get_part_from_uri",
"mk_part_uri",
"InvalidDocException",
"SimpleDocNav",
"DocReader",
"is_supported_document_type",
"read_strings_from_netcdf",
"read_documents",
"validate_document",
"NoDatesSafeLoader",
"get_doc_offset",
"get_doc_offset_safe",
"netcdf_extract_string",
"without_lineage_sources",
"unsqueeze_data_array",
"iter_slices",
"unsqueeze_dataset",
"data_resolution_and_offset",
"DatacubeException",
"schema_validated",
"write_user_secret_file",
"slurp",
"check_write_path",
"gen_password",
"_readable_offset",
)
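# With the re-exports above, callers can import helpers directly from this
# package, e.g.:
#   from datacube.utils import read_documents, jsonify_document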
| [
"37929162+mergify[bot]@users.noreply.github.com"
] | 37929162+mergify[bot]@users.noreply.github.com |
4d00a92665d503a391ba2c6d9695fc807d185ad4 | a97fb0584709e292a475defc8506eeb85bb24339 | /source code/code/ch1712.py | 6fe76d7f548707cc2b35f37252628da0f72d23fc | [] | no_license | AAQ6291/PYCATCH | bd297858051042613739819ed70c535901569079 | 27ec4094be785810074be8b16ef84c85048065b5 | refs/heads/master | 2020-03-26T13:54:57.051016 | 2018-08-17T09:05:19 | 2018-08-17T09:05:19 | 144,963,014 | 0 | 0 | null | null | null | null | BIG5 | Python | false | false | 1,116 | py | #!/usr/bin/env python
# -*- coding: cp950 -*-
# Load the wx module
import wx
class myApp(wx.App):
def OnInit(self):
frame = myFrame()
frame.Show()
return True
# Define myFrame, inheriting from wx.Frame
class myFrame(wx.Frame):
def __init__(self):
wx.Frame.__init__(
self,
None,
-1,
            'Up/Down Button',
size=(300, 150))
        # Create the panel
panel = wx.Panel(self, -1)
        # Create an up/down (spin) control
spinctrl = wx.SpinCtrl(
panel,
-1,
pos=(10, 20),
size=(60, -1))
        # Set the minimum and maximum values
spinctrl.SetRange(0, 100)
        # Set the initial value
spinctrl.SetValue(10)
        # Create a second up/down control, spelling out all keyword arguments
spinctrl1 = wx.SpinCtrl(
panel,
id = -1,
value = wx.EmptyString,
pos = (10, 50),
size = wx.DefaultSize,
style = wx.SP_ARROW_KEYS|wx.SP_WRAP,
min = 0,
max = 100,
initial = 0,
name = "mySpinCtrl")
def main():
app = myApp()
app.MainLoop()
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
a4b4cf7d907ae028a1c2e6372fe13bc2ba30a25d | 6a58240cdfcacec18fbfc2a08d75288092cc6da1 | /data/HASOC/utils.py | 2c38470bf232fc56e074adaf1ac1d1e25942c2f5 | [] | no_license | airKlizz/germeval2021toxic | 132ae9de11bb85c79acbff3a756f8608e32a385a | 1be57a15509a76b1551c871e73619241499257fe | refs/heads/main | 2023-08-18T04:25:42.387320 | 2021-09-14T12:10:18 | 2021-09-14T12:10:18 | 369,182,623 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 567 | py | import pandas as pd
DATA_FILES = [
"data/HASOC/english_dataset.tsv",
"data/HASOC/german_dataset.tsv",
"data/HASOC/hindi_dataset.tsv",
]
df = pd.concat([pd.read_csv(DATA_FILE, sep="\t") for DATA_FILE in DATA_FILES])
print(df)
TEXT_COLUMN = "text"
LABEL_COLUMN = "task_1"
texts = df[TEXT_COLUMN].values.tolist()
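# task_1 is binary here: rows labelled "NOT" map to 0, everything else (the
# hateful/offensive class) maps to 1.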
labels = [0 if l.replace(" ", "").lower() == "not" else 1 for l in df[LABEL_COLUMN].values.tolist()]
data = list(zip(texts, labels))
df = pd.DataFrame(data=data, columns=["comment_text", "hf"])
print(df)
df.to_csv("data/HASOC/train.csv")
| [
"[email protected]"
] | |
fbee8c798cd7d44f148b6dfc4cb1800c034eff07 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_sobriety.py | 5a609ca90c82239903ee659ab33783e0970f8b7f | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 328 | py |
# class header
class _SOBRIETY():
    def __init__(self):
        self.name = "SOBRIETY"
        self.definitions = [u'the state of being sober: ', u'seriousness: ']
        self.parents = []
        self.childen = []
        self.properties = []
        self.jsondata = {}
        self.specie = 'nouns'

    def run(self, obj1=None, obj2=None):
        return self.jsondata
| [
"[email protected]"
] | |
29577dd61fc0b2ab4e46b2f21f32670e2dc1af00 | 1bc7053e7582e43bdd4b943c5700677e07449d3c | /pytsite/tpl/_error.py | 4bb04363d32ffdb9ef93b8c458e31aa365c0ac61 | [
"MIT"
] | permissive | pytsite/pytsite | f92eaa041d85b245fbfcdff44224b24da5d9b73a | e4896722709607bda88b4a69400dcde4bf7e5f0a | refs/heads/master | 2021-01-18T01:06:12.357397 | 2019-08-03T02:56:48 | 2019-08-03T02:56:48 | 34,899,242 | 12 | 2 | null | null | null | null | UTF-8 | Python | false | false | 208 | py | """PytSite Templates Support Errors
"""
__author__ = 'Oleksandr Shepetko'
__email__ = '[email protected]'
__license__ = 'MIT'
import jinja2 as _jinja
class TemplateNotFound(_jinja.TemplateNotFound):
pass
| [
"[email protected]"
] | |
cf5162cac66fc297d23a734d6bb7a8848f53e50b | 9e780f17eb49171d1f234944563225ca22b3c286 | /postgresqleu/confsponsor/management/commands/sponsor_generate_discount_invoices.py | ab716d6862d691c73452bd021d108291aeca7354 | [
"MIT"
] | permissive | pgeu/pgeu-system | e5216d5e90eec6c72770b88a5af4b3fd565cda59 | 885cfdcdadd4a721f72b699a39f26c94d1f636e0 | refs/heads/master | 2023-08-06T13:03:55.606562 | 2023-08-03T12:47:37 | 2023-08-03T12:47:37 | 161,434,221 | 15 | 27 | MIT | 2023-05-30T11:21:24 | 2018-12-12T04:48:14 | Python | UTF-8 | Python | false | false | 6,001 | py | # Generate invoices for discount codes. That is, sponsors that have ordered discount codes,
# whose codes have now either expired or been fully used.
#
from django.core.management.base import BaseCommand
from django.utils import timezone
from django.db import transaction
from django.conf import settings
from datetime import timedelta, time
from django.db.models import Q, F, Count
from postgresqleu.confreg.models import DiscountCode
from postgresqleu.confreg.util import send_conference_mail
from postgresqleu.confsponsor.util import send_conference_sponsor_notification, send_sponsor_manager_email
from postgresqleu.invoices.util import InvoiceManager, InvoiceWrapper
from postgresqleu.util.time import today_global
class Command(BaseCommand):
help = 'Generate invoices for discount codes'
class ScheduledJob:
scheduled_times = [time(5, 19), ]
internal = True
@classmethod
def should_run(self):
return DiscountCode.objects.filter(sponsor__isnull=False, is_invoiced=False).exists()
@transaction.atomic
def handle(self, *args, **options):
# We're always going to process all conferences, since most will not have any
# open discount codes.
filt = Q(sponsor__isnull=False, is_invoiced=False) & (Q(validuntil__lte=today_global()) | Q(num_uses__gte=F('maxuses')))
codes = DiscountCode.objects.annotate(num_uses=Count('registrations')).filter(filt)
for code in codes:
# Either the code has expired, or it is fully used by now. Time to generate the invoice. We'll also
# send an email to the sponsor (and the admins) to inform them of what's happening.
            # The invoice will be a one-off one; we don't need a registered manager for it since the
# discounts have already been given out.
if code.count == 0:
                # The code was never used, so there is nothing to invoice. Notify the
                # sponsor and mark the code as invoiced so we don't try again.
code.is_invoiced = True
code.save()
send_conference_sponsor_notification(
code.conference,
"[{0}] Discount code expired".format(code.conference),
"Discount code {0} has expired without any uses.".format(code.code),
)
send_sponsor_manager_email(
code.sponsor,
"Discount code {0} expired".format(code.code),
'confsponsor/mail/discount_expired.txt',
{
'code': code,
'sponsor': code.sponsor,
'conference': code.conference,
},
)
else:
# At least one use, so we generate the invoice
invoicerows = []
for r in code.registrations.all():
if code.discountamount:
# Fixed amount discount. Always apply
discountvalue = code.discountamount
else:
                        # Percentage discount, so calculate it. Ordered discount codes
                        # only support registration-only discounts, so count the
                        # percentage against the registration cost alone.
discountvalue = r.regtype.cost * code.discountpercentage / 100
invoicerows.append(['Attendee "{0}"'.format(r.fullname), 1, discountvalue, r.conference.vat_registrations])
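                    # Illustration with hypothetical numbers: a 20% code on a
                    # 100 EUR registration adds a row of 100 * 20 / 100 = 20.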
# All invoices are always due immediately
manager = InvoiceManager()
code.invoice = manager.create_invoice(
code.sponsor_rep,
code.sponsor_rep.email,
"{0} {1}".format(code.sponsor_rep.first_name, code.sponsor_rep.last_name),
'%s\n%s' % (code.sponsor.name, code.sponsor.invoiceaddr),
'{0} discount code {1}'.format(code.conference, code.code),
timezone.now(),
timezone.now() + timedelta(days=1),
invoicerows,
accounting_account=settings.ACCOUNTING_CONFREG_ACCOUNT,
accounting_object=code.conference.accounting_object,
paymentmethods=code.conference.paymentmethods.all(),
)
code.invoice.save()
code.is_invoiced = True
code.save()
wrapper = InvoiceWrapper(code.invoice)
wrapper.email_invoice()
# Now also fire off emails, both to the admins and to all the managers of the sponsor
# (so they know where the invoice was sent).
send_conference_sponsor_notification(
code.conference,
"[{0}] Discount code {1} has been invoiced".format(code.conference, code.code),
"The discount code {0} has been closed,\nand an invoice has been sent to {1}.\n\nA total of {2} registrations used this code, and the total amount was {3}.\n".format(
code.code,
code.sponsor,
len(invoicerows),
code.invoice.total_amount,
),
)
send_sponsor_manager_email(
code.sponsor,
"Discount code {0} has been invoiced".format(code.code),
'confsponsor/mail/discount_invoiced.txt',
{
'code': code,
'conference': code.conference,
'sponsor': code.sponsor,
'invoice': code.invoice,
'curr': settings.CURRENCY_ABBREV,
'expired_time': code.validuntil < today_global(),
},
)
| [
"[email protected]"
] | |
4a9de25ca11e00c67d838a2589d66d6418e577d4 | 900b8dbfbd8a9a7899b3da3a0b24c03721b1ac49 | /daopilot/daophot.py | 280b7078aad4aa18760038ee3bb987b5c8993b98 | [
"BSD-3-Clause"
] | permissive | jonathansick/daopilot | e9735956b43428fe38db0656b06e246546e09efc | d1757f3df61b715606d2027bea0d71c85b8fab07 | refs/heads/master | 2021-01-10T18:40:17.811411 | 2014-05-30T23:39:30 | 2014-05-30T23:39:30 | 6,196,022 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 15,474 | py | #!/usr/bin/env python
# encoding: utf-8
"""
Class wrapper to daophot.
2012-05-05 - Created by Jonathan Sick
"""
import os
import sys
import pexpect
class Daophot(object):
"""Object-oriented interface to drive daophot.
:param inputImagePath: is the path to the FITS image that will be measured.
This is a real (filesystem) path. All paths should be supplied, and
will be returned to the user as filesystem paths. The class internally
converts these into shortened (symlinked) paths.
:type inputImagePath: str
:param shell: name of the shell that `daophot` will run in
:type shell: str (optional)
:param cmd: name of the `daophot` executable
    :type cmd: str (optional)
"""
def __init__(self, inputImagePath, shell="/bin/zsh", cmd="daophot"):
super(Daophot, self).__init__()
self.inputImagePath = inputImagePath
self.cmd = cmd
self.shell = shell
self._workDir = os.path.dirname(self.inputImagePath)
        # a pexpect process running daophot; None when shut down
self._daophot = None
# Cache for paths; two levels of dictionaries. First level is keyed
# to the types of files (represented by file extension strings). Second
# level is keyed by the path names themselves
self._pathCache = {'fits': {},
'coo': {}, 'lst': {}, 'ap': {}, 'psf': {}, 'nei': {}}
self._pathCache['fits']['input_image'] \
= os.path.basename(self.inputImagePath)
self._pathCache['fits']['last'] \
= os.path.basename(self.inputImagePath)
self._startup()
def _startup(self):
"""Start a daophot session and attaches the inputImagePath's image.
Automatically called by :meth:`__init__`.
"""
# We start daophot from the working directory (the directory of the
# input image.) All output will be placed in this directory. From
# the user's perspective, the returned paths will still be relative
# to the pipeline's base directory.
        # honour the configured shell and command rather than hard-coding them
        startupCommand = '%s -c "cd %s; %s"' % (self.shell, self._workDir, self.cmd)
self._daophot = pexpect.spawn(startupCommand)
self._daophot.logfile = sys.stdout # DEBUG
self._daophot.expect("Command:")
# print self._daophot.before
self.set_option("WA", "-2") # turn off extraneous printing
self.attach('input_image')
def shutdown(self):
"""Shutdown the daophot process."""
self._daophot.sendline("exit")
self._daophot = None
def set_option(self, name, value):
"""Set the named option in daophot to a given value."""
self._daophot.sendline("OPTION")
self._daophot.expect(":") # asks for the file with parameter values
self._daophot.sendline("") # accept the defaults
self._daophot.expect("OPT>")
self._daophot.sendline("%s=%s" % (name, value))
self._daophot.expect("OPT>")
self._daophot.sendline("")
self._daophot.expect("Command:")
print self._daophot.before
def attach(self, image):
"""Attaches the given image to daophot. *image* will be resolved
either as a name in the imageCache, or as a path. (Runs daophot
*ATTACH*)
By default, the attached image will be the last one attached (or the
inputImagePath on the first run). But if *image* is specified, then
it will be resolved in two steps
1. If a name in the imageCache, that path will be used
2. If not in the imageCache, then it will be used as a path itself
"""
imagePath = self._resolve_path(image, 'fits')
self._set_last_path(imagePath, 'fits')
command = "ATTACH %s" % imagePath
self._daophot.sendline(command)
self._daophot.expect("Command:")
def find(self, nAvg=1, nSum=1, cooName=None, cooPath=None):
"""Runs the *FIND* command on the previously attached image.
:param cooName: Set to have the coordinate path cached under this name.
:type cooName: str (optional)
:param cooPath: Set as the filepath for the output coordinate file,
otherwise a default path is made.
:type cooPath: str (optional)
"""
cooPath = self._make_output_path(cooPath, cooName, "coo")
self._name_path(cooName, cooPath, 'coo')
self._set_last_path(cooPath, 'coo')
self._daophot.sendline("FIND")
# asks 'Number of frames averaged, summed:'
self._daophot.expect(":")
self._daophot.sendline("%i,%i" % (nAvg, nSum))
# asks 'File for positions (default ???.coo):'
self._daophot.expect(":")
self._daophot.sendline(cooPath)
self._daophot.expect("Are you happy with this?", timeout=60 * 20)
# print self._daophot.before
self._daophot.sendline("Y")
self._daophot.expect("Command:")
def apphot(self, coordinates, apRadPath=None, photOutputPath=None,
photOutputName=None, options=None):
"""Run aperture photometry routine *PHOTOMETRY* in daophot.
:param coordinates: refers to coordinates of stars from the find
method; it is a string to be resolved either into a name in the
path cache, a filepath itself.
:type coordinates: str
        :param apRadPath: is the path to the aperture radii options file. This file
            must be in the working directory. If None, then the default of
'photo.opt' is assumed.
:type apRadPath: str (optional)
:param photOutputPath: Set as the filepath of output .ap file.
:param photOutputName: Set to have .ap path cached.
:param options: Sequence of `(optionName, optionValue)` pairs (both str
values) passed to the PHOTOMETRY sub routine.
"""
self._daophot.sendline("PHOTOMETRY")
# asks for 'File with aperture radii (default photo.opt)'
self._daophot.expect(":")
if apRadPath is not None:
self._daophot.sendline(os.path.basename(apRadPath))
else:
self._daophot.sendline("") # assume default photo.opt file
self._daophot.expect("PHO>")
if options is not None:
for optionName, optionValue in options.iteritems():
self._daophot.sendline(optionName + "=" + optionValue)
self._daophot.expect("PHO>")
self._daophot.sendline("")
# asks 'Input position file (default source/sky28k.coo):'
self._daophot.expect(":")
cooPath = self._resolve_path(coordinates, 'coo')
self._daophot.sendline(cooPath)
# asks 'Output file (default source/sky28k.ap):'
self._daophot.expect(":")
photOutputPath = self._make_output_path(photOutputPath,
photOutputName, "ap")
self._name_path(photOutputName, photOutputPath, 'ap')
self._set_last_path(photOutputPath, 'ap')
self._daophot.sendline(photOutputPath)
self._daophot.expect("Command:", timeout=60 * 20)
def pick_psf_stars(self, nStars, apPhot, starListPath=None,
starListName=None, magLimit=99):
"""Picks *nStars* number of stars from the aperture photometry list
that will be used as prototypes for making a PSF model;
runs daophot *PICK*.
:param apPhot: points to the aperture photometry list (made by
            apphot()). It is resolved either as a name in the ap cache or as
            a filepath to the .ap file.
:param nStars: is the number of stars to select, can be a str or int.
        :param starListPath: and starListName are the path/name that may
            be specified for the .lst file that lists the prototype psf stars.
:param magLimit: is the limiting instrumental magnitude that can be
used as a PSF prototype. Can be a str object.
"""
magLimit = str(magLimit)
nStars = str(int(nStars))
apPhotPath = self._resolve_path(apPhot, 'ap')
starListPath = self._make_output_path(starListPath,
starListName, 'lst')
self._name_path(starListName, starListPath, 'lst')
self._set_last_path(starListPath, 'lst')
self._daophot.sendline("PICK")
# ask for input file name to .ap file
self._daophot.expect(":")
self._daophot.sendline(apPhotPath)
# asks for 'Desired number of stars, faintest magnitude:'
self._daophot.expect(":")
self._daophot.sendline(",".join((nStars, magLimit)))
        # asks for output file path, .lst
        self._daophot.expect(":")
        # _make_output_path already removed any existing file at this path
        self._daophot.sendline(starListPath)
self._daophot.expect("Command:", timeout=60 * 10)
def make_psf(self, apPhot, starList, psfPath=None, psfName=None):
"""Computes a PSF model with the daophot *PSF* command.
:param apPhot: points to the aperture photometry list (made by
            apphot()). It is resolved either as a name in the ap cache or as
            a filepath to the .ap file.
:param starList: points to the psf prototype star list.
:return: text output of fitting routine, path to the psf file and path
to the neighbours file
"""
apPhotPath = self._resolve_path(apPhot, 'ap')
starListPath = self._resolve_path(starList, 'lst')
psfPath = self._make_output_path(psfPath, psfName, 'psf')
self._name_path(psfName, psfPath, 'psf')
self._set_last_path(psfPath, 'psf')
# make with the neighbours file name (.nei).
# It always follows this form:
fileRoot = os.path.splitext(psfPath)[0]
neiPath = ".".join((fileRoot, 'nei'))
self._set_last_path(neiPath, 'nei')
if os.path.exists(neiPath):
os.remove(neiPath)
self._daophot.sendline("PSF")
# asks for file with aperture phot results
self._daophot.expect(":")
self._daophot.sendline(apPhotPath)
# asks for file with PSF prototype star list
self._daophot.expect(":")
self._daophot.sendline(starListPath)
# asks for file for the psf output file
self._daophot.expect(":")
self._daophot.sendline(psfPath)
# funny hack; pexpect has trouble here, but works
# self._daophot.expect(".nei", timeout=120)
# send a CR to make sure we're clean before leaving
# self._daophot.sendline("")
result = self._daophot.expect(["nei", "Failed to converge.",
"Command:"], timeout=60 * 10)
# save daophot's output of fit quality
fittingText = self._daophot.before
if result == 1 or result == 2:
# failed to converge
print "didn't converge. now what?"
# raise PSFNotConverged
return None, None, None
# otherwise we should have good convergence
print result,
print "Ok convergence?"
self._daophot.sendline("")
self._daophot.expect("Command:")
return fittingText, os.path.join(self._workDir, psfPath), \
os.path.join(self._workDir, neiPath)
def substar(self, substarList, psf, outputPath, keepers=None):
"""Subtracts stars in `substarList` from the attached image using the
`psf` model.
:param substarList: is a **path* to a photometry file of all stars
that should be subtracted out of the image.
:type substarList: str
:param psf: is a name/path resolved into a path to a PSF model.
:param outputPath: is a **path** where the star-subtracted FITS image
will be placed. Any existing file will be deleted.
        :param keepers: is a **path** (not a resolvable file name) to a file
            listing stars that should be kept in the subtracted image.
If `None`, then no stars are kept.
:return: outputPath, relative to the pipeline.
"""
psfPath = self._resolve_path(psf, 'psf')
if os.path.exists(outputPath):
os.remove(outputPath)
self._daophot.sendline("SUBSTAR")
self._daophot.expect(":") # File with the PSF (*)
self._daophot.sendline(os.path.basename(psfPath))
self._daophot.expect(":") # File with photometry (*)
self._daophot.sendline(os.path.basename(substarList))
# print "send substarList"
# print self._daophot.interact()
self._daophot.expect("in\?") # Do you have stars to leave in
if keepers is not None:
self._daophot.sendline("Y")
# print self._daophot.before
self._daophot.expect(":") # File with star list (*)
self._daophot.sendline(os.path.basename(keepers))
else:
self._daophot.sendline("N")
self._daophot.expect(":") # Name for subtracted image (*)
self._daophot.sendline(os.path.basename(outputPath))
self._daophot.expect("Command:", timeout=60 * 10)
return outputPath
def get_path(self, name, ext):
"""Returns the named path of type ext. The path will be relative
to the pipeline's base... as the user would expect."""
return os.path.join(self._workDir, self._resolve_path(name, ext))
def _resolve_path(self, path, ext):
"""Resolves path into a path to the given type (via ext extension)
of file if it is name. Or if it is a path already, that
path will be passed through. The returned path is relative to the
workDir (working directory) of this Daophot.
"""
print path,
print ext
try:
resolvedPath = self._pathCache[ext][path]
except:
print "This is a path"
print path
resolvedPath = os.path.basename(path)
return resolvedPath
def _name_path(self, name, path, ext):
"""Adds the path of type ext(ention) to its cache under given name,
if the name is not None.
"""
if name is not None:
self._pathCache[ext][name] = path
def _set_last_path(self, path, ext):
"""Makes the path be filed under 'last' in its type's cache."""
self._pathCache[ext]['last'] = path
def _make_output_path(self, path, name, ext):
"""Forms an output file path. If path is None, then a path is made
using the name. If both path and name are None, then a path is formed
from the inputImagePath and the filename extension *ext*.
        The path is forced to be relative to `workDir`.
"""
if path is None:
# make a default ap photometry output file path
fileRoot = os.path.splitext(
os.path.basename(self.inputImagePath))[0]
if name is not None:
fileRoot = "_".join((fileRoot, name))
path = ".".join((fileRoot, ext))
else:
path = os.path.basename(path)
fullpath = os.path.join(self._workDir, path)
if os.path.exists(fullpath):
print "removing existing %s" % fullpath
os.remove(fullpath)
return path
| [
"[email protected]"
] | |
3fd932279804d8d778f6c54d1e6481c85aea76df | 9cd180fc7594eb018c41f0bf0b54548741fd33ba | /sdk/python/pulumi_azure_nextgen/containerservice/latest/outputs.py | 98aae238eabc64dcdf8cf187997aef870b01307b | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | MisinformedDNA/pulumi-azure-nextgen | c71971359450d03f13a53645171f621e200fe82d | f0022686b655c2b0744a9f47915aadaa183eed3b | refs/heads/master | 2022-12-17T22:27:37.916546 | 2020-09-28T16:03:59 | 2020-09-28T16:03:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 98,356 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
__all__ = [
'AgentPoolUpgradeSettingsResponse',
'ContainerServiceAgentPoolProfileResponse',
'ContainerServiceCustomProfileResponse',
'ContainerServiceDiagnosticsProfileResponse',
'ContainerServiceLinuxProfileResponse',
'ContainerServiceMasterProfileResponse',
'ContainerServiceNetworkProfileResponse',
'ContainerServiceOrchestratorProfileResponse',
'ContainerServiceServicePrincipalProfileResponse',
'ContainerServiceSshConfigurationResponse',
'ContainerServiceSshPublicKeyResponse',
'ContainerServiceVMDiagnosticsResponse',
'ContainerServiceWindowsProfileResponse',
'CredentialResultResponseResult',
'KeyVaultSecretRefResponse',
'ManagedClusterAADProfileResponse',
'ManagedClusterAPIServerAccessProfileResponse',
'ManagedClusterAddonProfileResponse',
'ManagedClusterAddonProfileResponseIdentity',
'ManagedClusterAgentPoolProfileResponse',
'ManagedClusterIdentityResponse',
'ManagedClusterIdentityResponseUserAssignedIdentities',
'ManagedClusterLoadBalancerProfileResponse',
'ManagedClusterLoadBalancerProfileResponseManagedOutboundIPs',
'ManagedClusterLoadBalancerProfileResponseOutboundIPPrefixes',
'ManagedClusterLoadBalancerProfileResponseOutboundIPs',
'ManagedClusterPropertiesResponseAutoScalerProfile',
'ManagedClusterPropertiesResponseIdentityProfile',
'ManagedClusterSKUResponse',
'ManagedClusterServicePrincipalProfileResponse',
'ManagedClusterWindowsProfileResponse',
'NetworkProfileResponse',
'OpenShiftManagedClusterAADIdentityProviderResponse',
'OpenShiftManagedClusterAgentPoolProfileResponse',
'OpenShiftManagedClusterAuthProfileResponse',
'OpenShiftManagedClusterIdentityProviderResponse',
'OpenShiftManagedClusterMasterPoolProfileResponse',
'OpenShiftRouterProfileResponse',
'PowerStateResponse',
'PrivateEndpointResponse',
'PrivateLinkServiceConnectionStateResponse',
'PurchasePlanResponse',
'ResourceReferenceResponse',
]
@pulumi.output_type
class AgentPoolUpgradeSettingsResponse(dict):
"""
Settings for upgrading an agentpool
"""
def __init__(__self__, *,
max_surge: Optional[str] = None):
"""
Settings for upgrading an agentpool
:param str max_surge: Count or percentage of additional nodes to be added during upgrade. If empty uses AKS default
"""
if max_surge is not None:
pulumi.set(__self__, "max_surge", max_surge)
@property
@pulumi.getter(name="maxSurge")
def max_surge(self) -> Optional[str]:
"""
Count or percentage of additional nodes to be added during upgrade. If empty uses AKS default
"""
return pulumi.get(self, "max_surge")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
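# Illustrative (hypothetical) values for AgentPoolUpgradeSettingsResponse.max_surge:
# either a node count such as "5" or a percentage such as "33%".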
@pulumi.output_type
class ContainerServiceAgentPoolProfileResponse(dict):
"""
Profile for the container service agent pool.
"""
def __init__(__self__, *,
fqdn: str,
name: str,
vm_size: str,
count: Optional[int] = None,
dns_prefix: Optional[str] = None,
os_disk_size_gb: Optional[int] = None,
os_type: Optional[str] = None,
ports: Optional[Sequence[int]] = None,
storage_profile: Optional[str] = None,
vnet_subnet_id: Optional[str] = None):
"""
Profile for the container service agent pool.
:param str fqdn: FQDN for the agent pool.
:param str name: Unique name of the agent pool profile in the context of the subscription and resource group.
:param str vm_size: Size of agent VMs.
:param int count: Number of agents (VMs) to host docker containers. Allowed values must be in the range of 1 to 100 (inclusive). The default value is 1.
:param str dns_prefix: DNS prefix to be used to create the FQDN for the agent pool.
:param int os_disk_size_gb: OS Disk Size in GB to be used to specify the disk size for every machine in this master/agent pool. If you specify 0, it will apply the default osDisk size according to the vmSize specified.
:param str os_type: OsType to be used to specify os type. Choose from Linux and Windows. Default to Linux.
:param Sequence[int] ports: Ports number array used to expose on this agent pool. The default opened ports are different based on your choice of orchestrator.
:param str storage_profile: Storage profile specifies what kind of storage used. Choose from StorageAccount and ManagedDisks. Leave it empty, we will choose for you based on the orchestrator choice.
:param str vnet_subnet_id: VNet SubnetID specifies the VNet's subnet identifier.
"""
pulumi.set(__self__, "fqdn", fqdn)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "vm_size", vm_size)
if count is not None:
pulumi.set(__self__, "count", count)
if dns_prefix is not None:
pulumi.set(__self__, "dns_prefix", dns_prefix)
if os_disk_size_gb is not None:
pulumi.set(__self__, "os_disk_size_gb", os_disk_size_gb)
if os_type is not None:
pulumi.set(__self__, "os_type", os_type)
if ports is not None:
pulumi.set(__self__, "ports", ports)
if storage_profile is not None:
pulumi.set(__self__, "storage_profile", storage_profile)
if vnet_subnet_id is not None:
pulumi.set(__self__, "vnet_subnet_id", vnet_subnet_id)
@property
@pulumi.getter
def fqdn(self) -> str:
"""
FQDN for the agent pool.
"""
return pulumi.get(self, "fqdn")
@property
@pulumi.getter
def name(self) -> str:
"""
Unique name of the agent pool profile in the context of the subscription and resource group.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="vmSize")
def vm_size(self) -> str:
"""
Size of agent VMs.
"""
return pulumi.get(self, "vm_size")
@property
@pulumi.getter
def count(self) -> Optional[int]:
"""
Number of agents (VMs) to host docker containers. Allowed values must be in the range of 1 to 100 (inclusive). The default value is 1.
"""
return pulumi.get(self, "count")
@property
@pulumi.getter(name="dnsPrefix")
def dns_prefix(self) -> Optional[str]:
"""
DNS prefix to be used to create the FQDN for the agent pool.
"""
return pulumi.get(self, "dns_prefix")
@property
@pulumi.getter(name="osDiskSizeGB")
def os_disk_size_gb(self) -> Optional[int]:
"""
OS Disk Size in GB to be used to specify the disk size for every machine in this master/agent pool. If you specify 0, it will apply the default osDisk size according to the vmSize specified.
"""
return pulumi.get(self, "os_disk_size_gb")
@property
@pulumi.getter(name="osType")
def os_type(self) -> Optional[str]:
"""
OsType to be used to specify os type. Choose from Linux and Windows. Default to Linux.
"""
return pulumi.get(self, "os_type")
@property
@pulumi.getter
def ports(self) -> Optional[Sequence[int]]:
"""
Ports number array used to expose on this agent pool. The default opened ports are different based on your choice of orchestrator.
"""
return pulumi.get(self, "ports")
@property
@pulumi.getter(name="storageProfile")
def storage_profile(self) -> Optional[str]:
"""
Storage profile specifies what kind of storage used. Choose from StorageAccount and ManagedDisks. Leave it empty, we will choose for you based on the orchestrator choice.
"""
return pulumi.get(self, "storage_profile")
@property
@pulumi.getter(name="vnetSubnetID")
def vnet_subnet_id(self) -> Optional[str]:
"""
VNet SubnetID specifies the VNet's subnet identifier.
"""
return pulumi.get(self, "vnet_subnet_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ContainerServiceCustomProfileResponse(dict):
"""
Properties to configure a custom container service cluster.
"""
def __init__(__self__, *,
orchestrator: str):
"""
Properties to configure a custom container service cluster.
:param str orchestrator: The name of the custom orchestrator to use.
"""
pulumi.set(__self__, "orchestrator", orchestrator)
@property
@pulumi.getter
def orchestrator(self) -> str:
"""
The name of the custom orchestrator to use.
"""
return pulumi.get(self, "orchestrator")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ContainerServiceDiagnosticsProfileResponse(dict):
"""
Profile for diagnostics on the container service cluster.
"""
def __init__(__self__, *,
vm_diagnostics: 'outputs.ContainerServiceVMDiagnosticsResponse'):
"""
Profile for diagnostics on the container service cluster.
:param 'ContainerServiceVMDiagnosticsResponseArgs' vm_diagnostics: Profile for diagnostics on the container service VMs.
"""
pulumi.set(__self__, "vm_diagnostics", vm_diagnostics)
@property
@pulumi.getter(name="vmDiagnostics")
def vm_diagnostics(self) -> 'outputs.ContainerServiceVMDiagnosticsResponse':
"""
Profile for diagnostics on the container service VMs.
"""
return pulumi.get(self, "vm_diagnostics")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ContainerServiceLinuxProfileResponse(dict):
"""
Profile for Linux VMs in the container service cluster.
"""
def __init__(__self__, *,
admin_username: str,
ssh: 'outputs.ContainerServiceSshConfigurationResponse'):
"""
Profile for Linux VMs in the container service cluster.
:param str admin_username: The administrator username to use for Linux VMs.
:param 'ContainerServiceSshConfigurationResponseArgs' ssh: SSH configuration for Linux-based VMs running on Azure.
"""
pulumi.set(__self__, "admin_username", admin_username)
pulumi.set(__self__, "ssh", ssh)
@property
@pulumi.getter(name="adminUsername")
def admin_username(self) -> str:
"""
The administrator username to use for Linux VMs.
"""
return pulumi.get(self, "admin_username")
@property
@pulumi.getter
def ssh(self) -> 'outputs.ContainerServiceSshConfigurationResponse':
"""
SSH configuration for Linux-based VMs running on Azure.
"""
return pulumi.get(self, "ssh")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ContainerServiceMasterProfileResponse(dict):
"""
Profile for the container service master.
"""
def __init__(__self__, *,
dns_prefix: str,
fqdn: str,
vm_size: str,
count: Optional[int] = None,
first_consecutive_static_ip: Optional[str] = None,
os_disk_size_gb: Optional[int] = None,
storage_profile: Optional[str] = None,
vnet_subnet_id: Optional[str] = None):
"""
Profile for the container service master.
:param str dns_prefix: DNS prefix to be used to create the FQDN for the master pool.
:param str fqdn: FQDN for the master pool.
:param str vm_size: Size of agent VMs.
:param int count: Number of masters (VMs) in the container service cluster. Allowed values are 1, 3, and 5. The default value is 1.
:param str first_consecutive_static_ip: FirstConsecutiveStaticIP used to specify the first static ip of masters.
:param int os_disk_size_gb: OS Disk Size in GB to be used to specify the disk size for every machine in this master/agent pool. If you specify 0, it will apply the default osDisk size according to the vmSize specified.
:param str storage_profile: Storage profile specifies what kind of storage used. Choose from StorageAccount and ManagedDisks. Leave it empty, we will choose for you based on the orchestrator choice.
:param str vnet_subnet_id: VNet SubnetID specifies the VNet's subnet identifier.
"""
pulumi.set(__self__, "dns_prefix", dns_prefix)
pulumi.set(__self__, "fqdn", fqdn)
pulumi.set(__self__, "vm_size", vm_size)
if count is not None:
pulumi.set(__self__, "count", count)
if first_consecutive_static_ip is not None:
pulumi.set(__self__, "first_consecutive_static_ip", first_consecutive_static_ip)
if os_disk_size_gb is not None:
pulumi.set(__self__, "os_disk_size_gb", os_disk_size_gb)
if storage_profile is not None:
pulumi.set(__self__, "storage_profile", storage_profile)
if vnet_subnet_id is not None:
pulumi.set(__self__, "vnet_subnet_id", vnet_subnet_id)
@property
@pulumi.getter(name="dnsPrefix")
def dns_prefix(self) -> str:
"""
DNS prefix to be used to create the FQDN for the master pool.
"""
return pulumi.get(self, "dns_prefix")
@property
@pulumi.getter
def fqdn(self) -> str:
"""
FQDN for the master pool.
"""
return pulumi.get(self, "fqdn")
@property
@pulumi.getter(name="vmSize")
def vm_size(self) -> str:
"""
Size of agent VMs.
"""
return pulumi.get(self, "vm_size")
@property
@pulumi.getter
def count(self) -> Optional[int]:
"""
Number of masters (VMs) in the container service cluster. Allowed values are 1, 3, and 5. The default value is 1.
"""
return pulumi.get(self, "count")
@property
@pulumi.getter(name="firstConsecutiveStaticIP")
def first_consecutive_static_ip(self) -> Optional[str]:
"""
FirstConsecutiveStaticIP used to specify the first static ip of masters.
"""
return pulumi.get(self, "first_consecutive_static_ip")
@property
@pulumi.getter(name="osDiskSizeGB")
def os_disk_size_gb(self) -> Optional[int]:
"""
OS Disk Size in GB to be used to specify the disk size for every machine in this master/agent pool. If you specify 0, it will apply the default osDisk size according to the vmSize specified.
"""
return pulumi.get(self, "os_disk_size_gb")
@property
@pulumi.getter(name="storageProfile")
def storage_profile(self) -> Optional[str]:
"""
Storage profile specifies what kind of storage used. Choose from StorageAccount and ManagedDisks. Leave it empty, we will choose for you based on the orchestrator choice.
"""
return pulumi.get(self, "storage_profile")
@property
@pulumi.getter(name="vnetSubnetID")
def vnet_subnet_id(self) -> Optional[str]:
"""
VNet SubnetID specifies the VNet's subnet identifier.
"""
return pulumi.get(self, "vnet_subnet_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ContainerServiceNetworkProfileResponse(dict):
"""
Profile of network configuration.
"""
def __init__(__self__, *,
dns_service_ip: Optional[str] = None,
docker_bridge_cidr: Optional[str] = None,
load_balancer_profile: Optional['outputs.ManagedClusterLoadBalancerProfileResponse'] = None,
load_balancer_sku: Optional[str] = None,
network_mode: Optional[str] = None,
network_plugin: Optional[str] = None,
network_policy: Optional[str] = None,
outbound_type: Optional[str] = None,
pod_cidr: Optional[str] = None,
service_cidr: Optional[str] = None):
"""
Profile of network configuration.
:param str dns_service_ip: An IP address assigned to the Kubernetes DNS service. It must be within the Kubernetes service address range specified in serviceCidr.
:param str docker_bridge_cidr: A CIDR notation IP range assigned to the Docker bridge network. It must not overlap with any Subnet IP ranges or the Kubernetes service address range.
:param 'ManagedClusterLoadBalancerProfileResponseArgs' load_balancer_profile: Profile of the cluster load balancer.
:param str load_balancer_sku: The load balancer sku for the managed cluster.
:param str network_mode: Network mode used for building Kubernetes network.
:param str network_plugin: Network plugin used for building Kubernetes network.
:param str network_policy: Network policy used for building Kubernetes network.
:param str outbound_type: The outbound (egress) routing method.
:param str pod_cidr: A CIDR notation IP range from which to assign pod IPs when kubenet is used.
:param str service_cidr: A CIDR notation IP range from which to assign service cluster IPs. It must not overlap with any Subnet IP ranges.
"""
if dns_service_ip is not None:
pulumi.set(__self__, "dns_service_ip", dns_service_ip)
if docker_bridge_cidr is not None:
pulumi.set(__self__, "docker_bridge_cidr", docker_bridge_cidr)
if load_balancer_profile is not None:
pulumi.set(__self__, "load_balancer_profile", load_balancer_profile)
if load_balancer_sku is not None:
pulumi.set(__self__, "load_balancer_sku", load_balancer_sku)
if network_mode is not None:
pulumi.set(__self__, "network_mode", network_mode)
if network_plugin is not None:
pulumi.set(__self__, "network_plugin", network_plugin)
if network_policy is not None:
pulumi.set(__self__, "network_policy", network_policy)
if outbound_type is not None:
pulumi.set(__self__, "outbound_type", outbound_type)
if pod_cidr is not None:
pulumi.set(__self__, "pod_cidr", pod_cidr)
if service_cidr is not None:
pulumi.set(__self__, "service_cidr", service_cidr)
@property
@pulumi.getter(name="dnsServiceIP")
def dns_service_ip(self) -> Optional[str]:
"""
An IP address assigned to the Kubernetes DNS service. It must be within the Kubernetes service address range specified in serviceCidr.
"""
return pulumi.get(self, "dns_service_ip")
@property
@pulumi.getter(name="dockerBridgeCidr")
def docker_bridge_cidr(self) -> Optional[str]:
"""
A CIDR notation IP range assigned to the Docker bridge network. It must not overlap with any Subnet IP ranges or the Kubernetes service address range.
"""
return pulumi.get(self, "docker_bridge_cidr")
@property
@pulumi.getter(name="loadBalancerProfile")
def load_balancer_profile(self) -> Optional['outputs.ManagedClusterLoadBalancerProfileResponse']:
"""
Profile of the cluster load balancer.
"""
return pulumi.get(self, "load_balancer_profile")
@property
@pulumi.getter(name="loadBalancerSku")
def load_balancer_sku(self) -> Optional[str]:
"""
The load balancer sku for the managed cluster.
"""
return pulumi.get(self, "load_balancer_sku")
@property
@pulumi.getter(name="networkMode")
def network_mode(self) -> Optional[str]:
"""
Network mode used for building Kubernetes network.
"""
return pulumi.get(self, "network_mode")
@property
@pulumi.getter(name="networkPlugin")
def network_plugin(self) -> Optional[str]:
"""
Network plugin used for building Kubernetes network.
"""
return pulumi.get(self, "network_plugin")
@property
@pulumi.getter(name="networkPolicy")
def network_policy(self) -> Optional[str]:
"""
Network policy used for building Kubernetes network.
"""
return pulumi.get(self, "network_policy")
@property
@pulumi.getter(name="outboundType")
def outbound_type(self) -> Optional[str]:
"""
The outbound (egress) routing method.
"""
return pulumi.get(self, "outbound_type")
@property
@pulumi.getter(name="podCidr")
def pod_cidr(self) -> Optional[str]:
"""
A CIDR notation IP range from which to assign pod IPs when kubenet is used.
"""
return pulumi.get(self, "pod_cidr")
@property
@pulumi.getter(name="serviceCidr")
def service_cidr(self) -> Optional[str]:
"""
A CIDR notation IP range from which to assign service cluster IPs. It must not overlap with any Subnet IP ranges.
"""
return pulumi.get(self, "service_cidr")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ContainerServiceOrchestratorProfileResponse(dict):
"""
Profile for the container service orchestrator.
"""
def __init__(__self__, *,
orchestrator_type: str,
orchestrator_version: Optional[str] = None):
"""
Profile for the container service orchestrator.
:param str orchestrator_type: The orchestrator to use to manage container service cluster resources. Valid values are Kubernetes, Swarm, DCOS, DockerCE and Custom.
:param str orchestrator_version: The version of the orchestrator to use. You can specify the major.minor.patch part of the actual version.For example, you can specify version as "1.6.11".
"""
pulumi.set(__self__, "orchestrator_type", orchestrator_type)
if orchestrator_version is not None:
pulumi.set(__self__, "orchestrator_version", orchestrator_version)
@property
@pulumi.getter(name="orchestratorType")
def orchestrator_type(self) -> str:
"""
The orchestrator to use to manage container service cluster resources. Valid values are Kubernetes, Swarm, DCOS, DockerCE and Custom.
"""
return pulumi.get(self, "orchestrator_type")
@property
@pulumi.getter(name="orchestratorVersion")
def orchestrator_version(self) -> Optional[str]:
"""
The version of the orchestrator to use. You can specify the major.minor.patch part of the actual version.For example, you can specify version as "1.6.11".
"""
return pulumi.get(self, "orchestrator_version")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ContainerServiceServicePrincipalProfileResponse(dict):
"""
Information about a service principal identity for the cluster to use for manipulating Azure APIs. Either secret or keyVaultSecretRef must be specified.
"""
def __init__(__self__, *,
client_id: str,
key_vault_secret_ref: Optional['outputs.KeyVaultSecretRefResponse'] = None,
secret: Optional[str] = None):
"""
Information about a service principal identity for the cluster to use for manipulating Azure APIs. Either secret or keyVaultSecretRef must be specified.
:param str client_id: The ID for the service principal.
:param 'KeyVaultSecretRefResponseArgs' key_vault_secret_ref: Reference to a secret stored in Azure Key Vault.
:param str secret: The secret password associated with the service principal in plain text.
"""
pulumi.set(__self__, "client_id", client_id)
if key_vault_secret_ref is not None:
pulumi.set(__self__, "key_vault_secret_ref", key_vault_secret_ref)
if secret is not None:
pulumi.set(__self__, "secret", secret)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> str:
"""
The ID for the service principal.
"""
return pulumi.get(self, "client_id")
@property
@pulumi.getter(name="keyVaultSecretRef")
def key_vault_secret_ref(self) -> Optional['outputs.KeyVaultSecretRefResponse']:
"""
Reference to a secret stored in Azure Key Vault.
"""
return pulumi.get(self, "key_vault_secret_ref")
@property
@pulumi.getter
def secret(self) -> Optional[str]:
"""
The secret password associated with the service principal in plain text.
"""
return pulumi.get(self, "secret")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ContainerServiceSshConfigurationResponse(dict):
"""
SSH configuration for Linux-based VMs running on Azure.
"""
def __init__(__self__, *,
public_keys: Sequence['outputs.ContainerServiceSshPublicKeyResponse']):
"""
SSH configuration for Linux-based VMs running on Azure.
:param Sequence['ContainerServiceSshPublicKeyResponseArgs'] public_keys: The list of SSH public keys used to authenticate with Linux-based VMs. Only expect one key specified.
"""
pulumi.set(__self__, "public_keys", public_keys)
@property
@pulumi.getter(name="publicKeys")
def public_keys(self) -> Sequence['outputs.ContainerServiceSshPublicKeyResponse']:
"""
The list of SSH public keys used to authenticate with Linux-based VMs. Only expect one key specified.
"""
return pulumi.get(self, "public_keys")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ContainerServiceSshPublicKeyResponse(dict):
"""
Contains information about SSH certificate public key data.
"""
def __init__(__self__, *,
key_data: str):
"""
Contains information about SSH certificate public key data.
:param str key_data: Certificate public key used to authenticate with VMs through SSH. The certificate must be in PEM format with or without headers.
"""
pulumi.set(__self__, "key_data", key_data)
@property
@pulumi.getter(name="keyData")
def key_data(self) -> str:
"""
Certificate public key used to authenticate with VMs through SSH. The certificate must be in PEM format with or without headers.
"""
return pulumi.get(self, "key_data")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ContainerServiceVMDiagnosticsResponse(dict):
"""
Profile for diagnostics on the container service VMs.
"""
def __init__(__self__, *,
enabled: bool,
storage_uri: str):
"""
Profile for diagnostics on the container service VMs.
:param bool enabled: Whether the VM diagnostic agent is provisioned on the VM.
:param str storage_uri: The URI of the storage account where diagnostics are stored.
"""
pulumi.set(__self__, "enabled", enabled)
pulumi.set(__self__, "storage_uri", storage_uri)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
Whether the VM diagnostic agent is provisioned on the VM.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter(name="storageUri")
def storage_uri(self) -> str:
"""
The URI of the storage account where diagnostics are stored.
"""
return pulumi.get(self, "storage_uri")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ContainerServiceWindowsProfileResponse(dict):
"""
Profile for Windows VMs in the container service cluster.
"""
def __init__(__self__, *,
admin_password: str,
admin_username: str):
"""
Profile for Windows VMs in the container service cluster.
:param str admin_password: The administrator password to use for Windows VMs.
:param str admin_username: The administrator username to use for Windows VMs.
"""
pulumi.set(__self__, "admin_password", admin_password)
pulumi.set(__self__, "admin_username", admin_username)
@property
@pulumi.getter(name="adminPassword")
def admin_password(self) -> str:
"""
The administrator password to use for Windows VMs.
"""
return pulumi.get(self, "admin_password")
@property
@pulumi.getter(name="adminUsername")
def admin_username(self) -> str:
"""
The administrator username to use for Windows VMs.
"""
return pulumi.get(self, "admin_username")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class CredentialResultResponseResult(dict):
"""
The credential result response.
"""
def __init__(__self__, *,
name: str,
value: str):
"""
The credential result response.
:param str name: The name of the credential.
:param str value: Base64-encoded Kubernetes configuration file.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the credential.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def value(self) -> str:
"""
Base64-encoded Kubernetes configuration file.
"""
return pulumi.get(self, "value")
@pulumi.output_type
class KeyVaultSecretRefResponse(dict):
"""
Reference to a secret stored in Azure Key Vault.
"""
def __init__(__self__, *,
secret_name: str,
vault_id: str,
version: Optional[str] = None):
"""
Reference to a secret stored in Azure Key Vault.
:param str secret_name: The secret name.
:param str vault_id: Key vault identifier.
:param str version: The secret version.
"""
pulumi.set(__self__, "secret_name", secret_name)
pulumi.set(__self__, "vault_id", vault_id)
if version is not None:
pulumi.set(__self__, "version", version)
@property
@pulumi.getter(name="secretName")
def secret_name(self) -> str:
"""
The secret name.
"""
return pulumi.get(self, "secret_name")
@property
@pulumi.getter(name="vaultID")
def vault_id(self) -> str:
"""
Key vault identifier.
"""
return pulumi.get(self, "vault_id")
@property
@pulumi.getter
def version(self) -> Optional[str]:
"""
The secret version.
"""
return pulumi.get(self, "version")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ManagedClusterAADProfileResponse(dict):
"""
AADProfile specifies attributes for Azure Active Directory integration.
"""
def __init__(__self__, *,
admin_group_object_ids: Optional[Sequence[str]] = None,
client_app_id: Optional[str] = None,
enable_azure_rbac: Optional[bool] = None,
managed: Optional[bool] = None,
server_app_id: Optional[str] = None,
server_app_secret: Optional[str] = None,
tenant_id: Optional[str] = None):
"""
AADProfile specifies attributes for Azure Active Directory integration.
:param Sequence[str] admin_group_object_ids: AAD group object IDs that will have admin role of the cluster.
:param str client_app_id: The client AAD application ID.
:param bool enable_azure_rbac: Whether to enable Azure RBAC for Kubernetes authorization.
:param bool managed: Whether to enable managed AAD.
:param str server_app_id: The server AAD application ID.
:param str server_app_secret: The server AAD application secret.
:param str tenant_id: The AAD tenant ID to use for authentication. If not specified, will use the tenant of the deployment subscription.
"""
if admin_group_object_ids is not None:
pulumi.set(__self__, "admin_group_object_ids", admin_group_object_ids)
if client_app_id is not None:
pulumi.set(__self__, "client_app_id", client_app_id)
if enable_azure_rbac is not None:
pulumi.set(__self__, "enable_azure_rbac", enable_azure_rbac)
if managed is not None:
pulumi.set(__self__, "managed", managed)
if server_app_id is not None:
pulumi.set(__self__, "server_app_id", server_app_id)
if server_app_secret is not None:
pulumi.set(__self__, "server_app_secret", server_app_secret)
if tenant_id is not None:
pulumi.set(__self__, "tenant_id", tenant_id)
@property
@pulumi.getter(name="adminGroupObjectIDs")
def admin_group_object_ids(self) -> Optional[Sequence[str]]:
"""
AAD group object IDs that will have admin role of the cluster.
"""
return pulumi.get(self, "admin_group_object_ids")
@property
@pulumi.getter(name="clientAppID")
def client_app_id(self) -> Optional[str]:
"""
The client AAD application ID.
"""
return pulumi.get(self, "client_app_id")
@property
@pulumi.getter(name="enableAzureRBAC")
def enable_azure_rbac(self) -> Optional[bool]:
"""
Whether to enable Azure RBAC for Kubernetes authorization.
"""
return pulumi.get(self, "enable_azure_rbac")
@property
@pulumi.getter
def managed(self) -> Optional[bool]:
"""
Whether to enable managed AAD.
"""
return pulumi.get(self, "managed")
@property
@pulumi.getter(name="serverAppID")
def server_app_id(self) -> Optional[str]:
"""
The server AAD application ID.
"""
return pulumi.get(self, "server_app_id")
@property
@pulumi.getter(name="serverAppSecret")
def server_app_secret(self) -> Optional[str]:
"""
The server AAD application secret.
"""
return pulumi.get(self, "server_app_secret")
@property
@pulumi.getter(name="tenantID")
def tenant_id(self) -> Optional[str]:
"""
The AAD tenant ID to use for authentication. If not specified, will use the tenant of the deployment subscription.
"""
return pulumi.get(self, "tenant_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
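# Illustrative sketch, not generated SDK code: these response types are normally
# populated by the Pulumi engine from the Azure API payload, but constructing one
# by hand shows the managed-AAD shape. The group object ID is a made-up placeholder.
def _example_aad_profile() -> ManagedClusterAADProfileResponse:
    # Managed AAD with Azure RBAC enabled; the legacy client/server app
    # fields are only needed for the non-managed integration.
    return ManagedClusterAADProfileResponse(
        managed=True,
        enable_azure_rbac=True,
        admin_group_object_ids=["00000000-0000-0000-0000-000000000000"])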
@pulumi.output_type
class ManagedClusterAPIServerAccessProfileResponse(dict):
"""
Access profile for managed cluster API server.
"""
def __init__(__self__, *,
authorized_ip_ranges: Optional[Sequence[str]] = None,
enable_private_cluster: Optional[bool] = None):
"""
Access profile for managed cluster API server.
:param Sequence[str] authorized_ip_ranges: Authorized IP Ranges to kubernetes API server.
:param bool enable_private_cluster: Whether to create the cluster as a private cluster or not.
"""
if authorized_ip_ranges is not None:
pulumi.set(__self__, "authorized_ip_ranges", authorized_ip_ranges)
if enable_private_cluster is not None:
pulumi.set(__self__, "enable_private_cluster", enable_private_cluster)
@property
@pulumi.getter(name="authorizedIPRanges")
def authorized_ip_ranges(self) -> Optional[Sequence[str]]:
"""
Authorized IP Ranges to kubernetes API server.
"""
return pulumi.get(self, "authorized_ip_ranges")
@property
@pulumi.getter(name="enablePrivateCluster")
def enable_private_cluster(self) -> Optional[bool]:
"""
Whether to create the cluster as a private cluster or not.
"""
return pulumi.get(self, "enable_private_cluster")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
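# Illustrative sketch, not generated SDK code: an API-server access profile for a
# public cluster whose control plane only accepts the one authorized CIDR below
# (a documentation-range placeholder).
def _example_api_server_access_profile() -> ManagedClusterAPIServerAccessProfileResponse:
    return ManagedClusterAPIServerAccessProfileResponse(
        authorized_ip_ranges=["203.0.113.0/24"],  # surfaced as authorizedIPRanges on the wire
        enable_private_cluster=False)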
@pulumi.output_type
class ManagedClusterAddonProfileResponse(dict):
"""
A Kubernetes add-on profile for a managed cluster.
"""
def __init__(__self__, *,
enabled: bool,
identity: 'outputs.ManagedClusterAddonProfileResponseIdentity',
config: Optional[Mapping[str, str]] = None):
"""
A Kubernetes add-on profile for a managed cluster.
:param bool enabled: Whether the add-on is enabled or not.
:param 'ManagedClusterAddonProfileResponseIdentityArgs' identity: Information of user assigned identity used by this add-on.
:param Mapping[str, str] config: Key-value pairs for configuring an add-on.
"""
pulumi.set(__self__, "enabled", enabled)
pulumi.set(__self__, "identity", identity)
if config is not None:
pulumi.set(__self__, "config", config)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
Whether the add-on is enabled or not.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter
def identity(self) -> 'outputs.ManagedClusterAddonProfileResponseIdentity':
"""
Information of user assigned identity used by this add-on.
"""
return pulumi.get(self, "identity")
@property
@pulumi.getter
def config(self) -> Optional[Mapping[str, str]]:
"""
Key-value pairs for configuring an add-on.
"""
return pulumi.get(self, "config")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ManagedClusterAddonProfileResponseIdentity(dict):
"""
Information of user assigned identity used by this add-on.
"""
def __init__(__self__, *,
client_id: Optional[str] = None,
object_id: Optional[str] = None,
resource_id: Optional[str] = None):
"""
Information of user assigned identity used by this add-on.
:param str client_id: The client id of the user assigned identity.
:param str object_id: The object id of the user assigned identity.
:param str resource_id: The resource id of the user assigned identity.
"""
if client_id is not None:
pulumi.set(__self__, "client_id", client_id)
if object_id is not None:
pulumi.set(__self__, "object_id", object_id)
if resource_id is not None:
pulumi.set(__self__, "resource_id", resource_id)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> Optional[str]:
"""
The client id of the user assigned identity.
"""
return pulumi.get(self, "client_id")
@property
@pulumi.getter(name="objectId")
def object_id(self) -> Optional[str]:
"""
The object id of the user assigned identity.
"""
return pulumi.get(self, "object_id")
@property
@pulumi.getter(name="resourceId")
def resource_id(self) -> Optional[str]:
"""
The resource id of the user assigned identity.
"""
return pulumi.get(self, "resource_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
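# Illustrative sketch, not generated SDK code: an enabled add-on carrying its
# user-assigned identity. Config keys are add-on specific; the key and all IDs
# below are made-up placeholders.
def _example_addon_profile() -> ManagedClusterAddonProfileResponse:
    identity = ManagedClusterAddonProfileResponseIdentity(
        client_id="11111111-1111-1111-1111-111111111111",
        object_id="22222222-2222-2222-2222-222222222222",
        resource_id=("/subscriptions/00000000-0000-0000-0000-000000000000"
                     "/resourceGroups/example-rg/providers"
                     "/Microsoft.ManagedIdentity/userAssignedIdentities/example-identity"))
    return ManagedClusterAddonProfileResponse(
        enabled=True,
        identity=identity,
        config={"exampleSetting": "example-value"})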
@pulumi.output_type
class ManagedClusterAgentPoolProfileResponse(dict):
"""
Profile for the container service agent pool.
"""
def __init__(__self__, *,
name: str,
node_image_version: str,
power_state: 'outputs.PowerStateResponse',
provisioning_state: str,
availability_zones: Optional[Sequence[str]] = None,
count: Optional[int] = None,
enable_auto_scaling: Optional[bool] = None,
enable_node_public_ip: Optional[bool] = None,
max_count: Optional[int] = None,
max_pods: Optional[int] = None,
min_count: Optional[int] = None,
mode: Optional[str] = None,
node_labels: Optional[Mapping[str, str]] = None,
node_taints: Optional[Sequence[str]] = None,
orchestrator_version: Optional[str] = None,
os_disk_size_gb: Optional[int] = None,
os_disk_type: Optional[str] = None,
os_type: Optional[str] = None,
proximity_placement_group_id: Optional[str] = None,
scale_set_eviction_policy: Optional[str] = None,
scale_set_priority: Optional[str] = None,
spot_max_price: Optional[float] = None,
tags: Optional[Mapping[str, str]] = None,
type: Optional[str] = None,
upgrade_settings: Optional['outputs.AgentPoolUpgradeSettingsResponse'] = None,
vm_size: Optional[str] = None,
vnet_subnet_id: Optional[str] = None):
"""
Profile for the container service agent pool.
:param str name: Unique name of the agent pool profile in the context of the subscription and resource group.
:param str node_image_version: Version of node image
:param 'PowerStateResponseArgs' power_state: Describes whether the Agent Pool is Running or Stopped
:param str provisioning_state: The current deployment or provisioning state, which only appears in the response.
:param Sequence[str] availability_zones: Availability zones for nodes. Must use VirtualMachineScaleSets AgentPoolType.
:param int count: Number of agents (VMs) to host docker containers. Allowed values must be in the range of 0 to 100 (inclusive) for user pools and in the range of 1 to 100 (inclusive) for system pools. The default value is 1.
:param bool enable_auto_scaling: Whether to enable auto-scaler
:param bool enable_node_public_ip: Enable public IP for nodes
:param int max_count: Maximum number of nodes for auto-scaling
:param int max_pods: Maximum number of pods that can run on a node.
:param int min_count: Minimum number of nodes for auto-scaling
:param str mode: AgentPoolMode represents mode of an agent pool
:param Mapping[str, str] node_labels: Agent pool node labels to be persisted across all nodes in agent pool.
:param Sequence[str] node_taints: Taints added to new nodes during node pool create and scale. For example, key=value:NoSchedule.
:param str orchestrator_version: Version of orchestrator specified when creating the managed cluster.
:param int os_disk_size_gb: OS Disk Size in GB to be used to specify the disk size for every machine in this master/agent pool. If you specify 0, it will apply the default osDisk size according to the vmSize specified.
:param str os_disk_type: OS disk type to be used for machines in a given agent pool. Allowed values are 'Ephemeral' and 'Managed'. Defaults to 'Managed'. May not be changed after creation.
:param str os_type: OsType to be used to specify os type. Choose from Linux and Windows. Default to Linux.
:param str proximity_placement_group_id: The ID for Proximity Placement Group.
:param str scale_set_eviction_policy: ScaleSetEvictionPolicy to be used to specify eviction policy for Spot virtual machine scale set. Default to Delete.
:param str scale_set_priority: ScaleSetPriority to be used to specify virtual machine scale set priority. Default to regular.
        :param float spot_max_price: SpotMaxPrice to be used to specify the maximum price you are willing to pay in US Dollars. Possible values are any decimal value greater than zero, or -1 to indicate that the price defaults to the current on-demand price.
:param Mapping[str, str] tags: Agent pool tags to be persisted on the agent pool virtual machine scale set.
:param str type: AgentPoolType represents types of an agent pool
:param 'AgentPoolUpgradeSettingsResponseArgs' upgrade_settings: Settings for upgrading the agentpool
:param str vm_size: Size of agent VMs.
:param str vnet_subnet_id: VNet SubnetID specifies the VNet's subnet identifier.
"""
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "node_image_version", node_image_version)
pulumi.set(__self__, "power_state", power_state)
pulumi.set(__self__, "provisioning_state", provisioning_state)
if availability_zones is not None:
pulumi.set(__self__, "availability_zones", availability_zones)
if count is not None:
pulumi.set(__self__, "count", count)
if enable_auto_scaling is not None:
pulumi.set(__self__, "enable_auto_scaling", enable_auto_scaling)
if enable_node_public_ip is not None:
pulumi.set(__self__, "enable_node_public_ip", enable_node_public_ip)
if max_count is not None:
pulumi.set(__self__, "max_count", max_count)
if max_pods is not None:
pulumi.set(__self__, "max_pods", max_pods)
if min_count is not None:
pulumi.set(__self__, "min_count", min_count)
if mode is not None:
pulumi.set(__self__, "mode", mode)
if node_labels is not None:
pulumi.set(__self__, "node_labels", node_labels)
if node_taints is not None:
pulumi.set(__self__, "node_taints", node_taints)
if orchestrator_version is not None:
pulumi.set(__self__, "orchestrator_version", orchestrator_version)
if os_disk_size_gb is not None:
pulumi.set(__self__, "os_disk_size_gb", os_disk_size_gb)
if os_disk_type is not None:
pulumi.set(__self__, "os_disk_type", os_disk_type)
if os_type is not None:
pulumi.set(__self__, "os_type", os_type)
if proximity_placement_group_id is not None:
pulumi.set(__self__, "proximity_placement_group_id", proximity_placement_group_id)
if scale_set_eviction_policy is not None:
pulumi.set(__self__, "scale_set_eviction_policy", scale_set_eviction_policy)
if scale_set_priority is not None:
pulumi.set(__self__, "scale_set_priority", scale_set_priority)
if spot_max_price is not None:
pulumi.set(__self__, "spot_max_price", spot_max_price)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if type is not None:
pulumi.set(__self__, "type", type)
if upgrade_settings is not None:
pulumi.set(__self__, "upgrade_settings", upgrade_settings)
if vm_size is not None:
pulumi.set(__self__, "vm_size", vm_size)
if vnet_subnet_id is not None:
pulumi.set(__self__, "vnet_subnet_id", vnet_subnet_id)
@property
@pulumi.getter
def name(self) -> str:
"""
Unique name of the agent pool profile in the context of the subscription and resource group.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="nodeImageVersion")
def node_image_version(self) -> str:
"""
Version of node image
"""
return pulumi.get(self, "node_image_version")
@property
@pulumi.getter(name="powerState")
def power_state(self) -> 'outputs.PowerStateResponse':
"""
Describes whether the Agent Pool is Running or Stopped
"""
return pulumi.get(self, "power_state")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The current deployment or provisioning state, which only appears in the response.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="availabilityZones")
def availability_zones(self) -> Optional[Sequence[str]]:
"""
Availability zones for nodes. Must use VirtualMachineScaleSets AgentPoolType.
"""
return pulumi.get(self, "availability_zones")
@property
@pulumi.getter
def count(self) -> Optional[int]:
"""
Number of agents (VMs) to host docker containers. Allowed values must be in the range of 0 to 100 (inclusive) for user pools and in the range of 1 to 100 (inclusive) for system pools. The default value is 1.
"""
return pulumi.get(self, "count")
@property
@pulumi.getter(name="enableAutoScaling")
def enable_auto_scaling(self) -> Optional[bool]:
"""
Whether to enable auto-scaler
"""
return pulumi.get(self, "enable_auto_scaling")
@property
@pulumi.getter(name="enableNodePublicIP")
def enable_node_public_ip(self) -> Optional[bool]:
"""
Enable public IP for nodes
"""
return pulumi.get(self, "enable_node_public_ip")
@property
@pulumi.getter(name="maxCount")
def max_count(self) -> Optional[int]:
"""
Maximum number of nodes for auto-scaling
"""
return pulumi.get(self, "max_count")
@property
@pulumi.getter(name="maxPods")
def max_pods(self) -> Optional[int]:
"""
Maximum number of pods that can run on a node.
"""
return pulumi.get(self, "max_pods")
@property
@pulumi.getter(name="minCount")
def min_count(self) -> Optional[int]:
"""
Minimum number of nodes for auto-scaling
"""
return pulumi.get(self, "min_count")
@property
@pulumi.getter
def mode(self) -> Optional[str]:
"""
AgentPoolMode represents mode of an agent pool
"""
return pulumi.get(self, "mode")
@property
@pulumi.getter(name="nodeLabels")
def node_labels(self) -> Optional[Mapping[str, str]]:
"""
Agent pool node labels to be persisted across all nodes in agent pool.
"""
return pulumi.get(self, "node_labels")
@property
@pulumi.getter(name="nodeTaints")
def node_taints(self) -> Optional[Sequence[str]]:
"""
Taints added to new nodes during node pool create and scale. For example, key=value:NoSchedule.
"""
return pulumi.get(self, "node_taints")
@property
@pulumi.getter(name="orchestratorVersion")
def orchestrator_version(self) -> Optional[str]:
"""
Version of orchestrator specified when creating the managed cluster.
"""
return pulumi.get(self, "orchestrator_version")
@property
@pulumi.getter(name="osDiskSizeGB")
def os_disk_size_gb(self) -> Optional[int]:
"""
OS Disk Size in GB to be used to specify the disk size for every machine in this master/agent pool. If you specify 0, it will apply the default osDisk size according to the vmSize specified.
"""
return pulumi.get(self, "os_disk_size_gb")
@property
@pulumi.getter(name="osDiskType")
def os_disk_type(self) -> Optional[str]:
"""
OS disk type to be used for machines in a given agent pool. Allowed values are 'Ephemeral' and 'Managed'. Defaults to 'Managed'. May not be changed after creation.
"""
return pulumi.get(self, "os_disk_type")
@property
@pulumi.getter(name="osType")
def os_type(self) -> Optional[str]:
"""
OsType to be used to specify os type. Choose from Linux and Windows. Default to Linux.
"""
return pulumi.get(self, "os_type")
@property
@pulumi.getter(name="proximityPlacementGroupID")
def proximity_placement_group_id(self) -> Optional[str]:
"""
The ID for Proximity Placement Group.
"""
return pulumi.get(self, "proximity_placement_group_id")
@property
@pulumi.getter(name="scaleSetEvictionPolicy")
def scale_set_eviction_policy(self) -> Optional[str]:
"""
ScaleSetEvictionPolicy to be used to specify eviction policy for Spot virtual machine scale set. Default to Delete.
"""
return pulumi.get(self, "scale_set_eviction_policy")
@property
@pulumi.getter(name="scaleSetPriority")
def scale_set_priority(self) -> Optional[str]:
"""
ScaleSetPriority to be used to specify virtual machine scale set priority. Default to regular.
"""
return pulumi.get(self, "scale_set_priority")
@property
@pulumi.getter(name="spotMaxPrice")
def spot_max_price(self) -> Optional[float]:
"""
        SpotMaxPrice to be used to specify the maximum price you are willing to pay in US Dollars. Possible values are any decimal value greater than zero, or -1 to indicate that the price defaults to the current on-demand price.
"""
return pulumi.get(self, "spot_max_price")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Agent pool tags to be persisted on the agent pool virtual machine scale set.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> Optional[str]:
"""
AgentPoolType represents types of an agent pool
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="upgradeSettings")
def upgrade_settings(self) -> Optional['outputs.AgentPoolUpgradeSettingsResponse']:
"""
Settings for upgrading the agentpool
"""
return pulumi.get(self, "upgrade_settings")
@property
@pulumi.getter(name="vmSize")
def vm_size(self) -> Optional[str]:
"""
Size of agent VMs.
"""
return pulumi.get(self, "vm_size")
@property
@pulumi.getter(name="vnetSubnetID")
def vnet_subnet_id(self) -> Optional[str]:
"""
VNet SubnetID specifies the VNet's subnet identifier.
"""
return pulumi.get(self, "vnet_subnet_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
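# Illustrative sketch, not generated SDK code: a system-mode node pool with the
# autoscaler trio (enable_auto_scaling plus min/max bounds). The image version
# and VM size are made-up placeholders; PowerStateResponse is defined later in
# this module, which is fine because it is only resolved when the function runs.
def _example_agent_pool_profile() -> ManagedClusterAgentPoolProfileResponse:
    return ManagedClusterAgentPoolProfileResponse(
        name="nodepool1",
        node_image_version="AKSUbuntu-1804-2020.10.28",  # placeholder value
        power_state=PowerStateResponse(code="Running"),
        provisioning_state="Succeeded",
        mode="System",
        count=3,
        enable_auto_scaling=True,
        min_count=1,
        max_count=5,
        vm_size="Standard_DS2_v2")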
@pulumi.output_type
class ManagedClusterIdentityResponse(dict):
"""
Identity for the managed cluster.
"""
def __init__(__self__, *,
principal_id: str,
tenant_id: str,
type: Optional[str] = None,
user_assigned_identities: Optional[Mapping[str, 'outputs.ManagedClusterIdentityResponseUserAssignedIdentities']] = None):
"""
Identity for the managed cluster.
:param str principal_id: The principal id of the system assigned identity which is used by master components.
:param str tenant_id: The tenant id of the system assigned identity which is used by master components.
:param str type: The type of identity used for the managed cluster. Type 'SystemAssigned' will use an implicitly created identity in master components and an auto-created user assigned identity in MC_ resource group in agent nodes. Type 'None' will not use MSI for the managed cluster, service principal will be used instead.
:param Mapping[str, 'ManagedClusterIdentityResponseUserAssignedIdentitiesArgs'] user_assigned_identities: The user identity associated with the managed cluster. This identity will be used in control plane and only one user assigned identity is allowed. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
"""
pulumi.set(__self__, "principal_id", principal_id)
pulumi.set(__self__, "tenant_id", tenant_id)
if type is not None:
pulumi.set(__self__, "type", type)
if user_assigned_identities is not None:
pulumi.set(__self__, "user_assigned_identities", user_assigned_identities)
@property
@pulumi.getter(name="principalId")
def principal_id(self) -> str:
"""
The principal id of the system assigned identity which is used by master components.
"""
return pulumi.get(self, "principal_id")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> str:
"""
The tenant id of the system assigned identity which is used by master components.
"""
return pulumi.get(self, "tenant_id")
@property
@pulumi.getter
def type(self) -> Optional[str]:
"""
The type of identity used for the managed cluster. Type 'SystemAssigned' will use an implicitly created identity in master components and an auto-created user assigned identity in MC_ resource group in agent nodes. Type 'None' will not use MSI for the managed cluster, service principal will be used instead.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="userAssignedIdentities")
def user_assigned_identities(self) -> Optional[Mapping[str, 'outputs.ManagedClusterIdentityResponseUserAssignedIdentities']]:
"""
The user identity associated with the managed cluster. This identity will be used in control plane and only one user assigned identity is allowed. The user identity dictionary key references will be ARM resource ids in the form: '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{identityName}'.
"""
return pulumi.get(self, "user_assigned_identities")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ManagedClusterIdentityResponseUserAssignedIdentities(dict):
def __init__(__self__, *,
client_id: str,
principal_id: str):
"""
:param str client_id: The client id of user assigned identity.
:param str principal_id: The principal id of user assigned identity.
"""
pulumi.set(__self__, "client_id", client_id)
pulumi.set(__self__, "principal_id", principal_id)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> str:
"""
The client id of user assigned identity.
"""
return pulumi.get(self, "client_id")
@property
@pulumi.getter(name="principalId")
def principal_id(self) -> str:
"""
The principal id of user assigned identity.
"""
return pulumi.get(self, "principal_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
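# Illustrative sketch, not generated SDK code: a user-assigned cluster identity.
# As documented above, the dictionary key must be the identity's ARM resource id;
# every id below is a made-up placeholder.
def _example_cluster_identity() -> ManagedClusterIdentityResponse:
    identity_arm_id = ("/subscriptions/00000000-0000-0000-0000-000000000000"
                       "/resourceGroups/example-rg/providers"
                       "/Microsoft.ManagedIdentity/userAssignedIdentities/example-identity")
    return ManagedClusterIdentityResponse(
        principal_id="",  # the system-assigned fields are typically empty for UserAssigned
        tenant_id="",
        type="UserAssigned",
        user_assigned_identities={
            identity_arm_id: ManagedClusterIdentityResponseUserAssignedIdentities(
                client_id="33333333-3333-3333-3333-333333333333",
                principal_id="44444444-4444-4444-4444-444444444444")})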
@pulumi.output_type
class ManagedClusterLoadBalancerProfileResponse(dict):
"""
Profile of the managed cluster load balancer.
"""
def __init__(__self__, *,
allocated_outbound_ports: Optional[int] = None,
effective_outbound_ips: Optional[Sequence['outputs.ResourceReferenceResponse']] = None,
idle_timeout_in_minutes: Optional[int] = None,
managed_outbound_ips: Optional['outputs.ManagedClusterLoadBalancerProfileResponseManagedOutboundIPs'] = None,
outbound_ip_prefixes: Optional['outputs.ManagedClusterLoadBalancerProfileResponseOutboundIPPrefixes'] = None,
outbound_ips: Optional['outputs.ManagedClusterLoadBalancerProfileResponseOutboundIPs'] = None):
"""
Profile of the managed cluster load balancer.
:param int allocated_outbound_ports: Desired number of allocated SNAT ports per VM. Allowed values must be in the range of 0 to 64000 (inclusive). The default value is 0 which results in Azure dynamically allocating ports.
:param Sequence['ResourceReferenceResponseArgs'] effective_outbound_ips: The effective outbound IP resources of the cluster load balancer.
:param int idle_timeout_in_minutes: Desired outbound flow idle timeout in minutes. Allowed values must be in the range of 4 to 120 (inclusive). The default value is 30 minutes.
:param 'ManagedClusterLoadBalancerProfileResponseManagedOutboundIPsArgs' managed_outbound_ips: Desired managed outbound IPs for the cluster load balancer.
:param 'ManagedClusterLoadBalancerProfileResponseOutboundIPPrefixesArgs' outbound_ip_prefixes: Desired outbound IP Prefix resources for the cluster load balancer.
:param 'ManagedClusterLoadBalancerProfileResponseOutboundIPsArgs' outbound_ips: Desired outbound IP resources for the cluster load balancer.
"""
if allocated_outbound_ports is not None:
pulumi.set(__self__, "allocated_outbound_ports", allocated_outbound_ports)
if effective_outbound_ips is not None:
pulumi.set(__self__, "effective_outbound_ips", effective_outbound_ips)
if idle_timeout_in_minutes is not None:
pulumi.set(__self__, "idle_timeout_in_minutes", idle_timeout_in_minutes)
if managed_outbound_ips is not None:
pulumi.set(__self__, "managed_outbound_ips", managed_outbound_ips)
if outbound_ip_prefixes is not None:
pulumi.set(__self__, "outbound_ip_prefixes", outbound_ip_prefixes)
if outbound_ips is not None:
pulumi.set(__self__, "outbound_ips", outbound_ips)
@property
@pulumi.getter(name="allocatedOutboundPorts")
def allocated_outbound_ports(self) -> Optional[int]:
"""
Desired number of allocated SNAT ports per VM. Allowed values must be in the range of 0 to 64000 (inclusive). The default value is 0 which results in Azure dynamically allocating ports.
"""
return pulumi.get(self, "allocated_outbound_ports")
@property
@pulumi.getter(name="effectiveOutboundIPs")
def effective_outbound_ips(self) -> Optional[Sequence['outputs.ResourceReferenceResponse']]:
"""
The effective outbound IP resources of the cluster load balancer.
"""
return pulumi.get(self, "effective_outbound_ips")
@property
@pulumi.getter(name="idleTimeoutInMinutes")
def idle_timeout_in_minutes(self) -> Optional[int]:
"""
Desired outbound flow idle timeout in minutes. Allowed values must be in the range of 4 to 120 (inclusive). The default value is 30 minutes.
"""
return pulumi.get(self, "idle_timeout_in_minutes")
@property
@pulumi.getter(name="managedOutboundIPs")
def managed_outbound_ips(self) -> Optional['outputs.ManagedClusterLoadBalancerProfileResponseManagedOutboundIPs']:
"""
Desired managed outbound IPs for the cluster load balancer.
"""
return pulumi.get(self, "managed_outbound_ips")
@property
@pulumi.getter(name="outboundIPPrefixes")
def outbound_ip_prefixes(self) -> Optional['outputs.ManagedClusterLoadBalancerProfileResponseOutboundIPPrefixes']:
"""
Desired outbound IP Prefix resources for the cluster load balancer.
"""
return pulumi.get(self, "outbound_ip_prefixes")
@property
@pulumi.getter(name="outboundIPs")
def outbound_ips(self) -> Optional['outputs.ManagedClusterLoadBalancerProfileResponseOutboundIPs']:
"""
Desired outbound IP resources for the cluster load balancer.
"""
return pulumi.get(self, "outbound_ips")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ManagedClusterLoadBalancerProfileResponseManagedOutboundIPs(dict):
"""
Desired managed outbound IPs for the cluster load balancer.
"""
def __init__(__self__, *,
count: Optional[int] = None):
"""
Desired managed outbound IPs for the cluster load balancer.
:param int count: Desired number of outbound IP created/managed by Azure for the cluster load balancer. Allowed values must be in the range of 1 to 100 (inclusive). The default value is 1.
"""
if count is not None:
pulumi.set(__self__, "count", count)
@property
@pulumi.getter
def count(self) -> Optional[int]:
"""
Desired number of outbound IP created/managed by Azure for the cluster load balancer. Allowed values must be in the range of 1 to 100 (inclusive). The default value is 1.
"""
return pulumi.get(self, "count")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ManagedClusterLoadBalancerProfileResponseOutboundIPPrefixes(dict):
"""
Desired outbound IP Prefix resources for the cluster load balancer.
"""
def __init__(__self__, *,
public_ip_prefixes: Optional[Sequence['outputs.ResourceReferenceResponse']] = None):
"""
Desired outbound IP Prefix resources for the cluster load balancer.
:param Sequence['ResourceReferenceResponseArgs'] public_ip_prefixes: A list of public IP prefix resources.
"""
if public_ip_prefixes is not None:
pulumi.set(__self__, "public_ip_prefixes", public_ip_prefixes)
@property
@pulumi.getter(name="publicIPPrefixes")
def public_ip_prefixes(self) -> Optional[Sequence['outputs.ResourceReferenceResponse']]:
"""
A list of public IP prefix resources.
"""
return pulumi.get(self, "public_ip_prefixes")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ManagedClusterLoadBalancerProfileResponseOutboundIPs(dict):
"""
Desired outbound IP resources for the cluster load balancer.
"""
def __init__(__self__, *,
public_ips: Optional[Sequence['outputs.ResourceReferenceResponse']] = None):
"""
Desired outbound IP resources for the cluster load balancer.
:param Sequence['ResourceReferenceResponseArgs'] public_ips: A list of public IP resources.
"""
if public_ips is not None:
pulumi.set(__self__, "public_ips", public_ips)
@property
@pulumi.getter(name="publicIPs")
def public_ips(self) -> Optional[Sequence['outputs.ResourceReferenceResponse']]:
"""
A list of public IP resources.
"""
return pulumi.get(self, "public_ips")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
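# Illustrative sketch, not generated SDK code: composing the nested load-balancer
# types, here two Azure-managed outbound IPs and a longer idle timeout.
def _example_load_balancer_profile() -> ManagedClusterLoadBalancerProfileResponse:
    return ManagedClusterLoadBalancerProfileResponse(
        managed_outbound_ips=ManagedClusterLoadBalancerProfileResponseManagedOutboundIPs(count=2),
        idle_timeout_in_minutes=45)  # allowed range is 4-120 minutes, default 30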
@pulumi.output_type
class ManagedClusterPropertiesResponseAutoScalerProfile(dict):
"""
Parameters to be applied to the cluster-autoscaler when enabled
"""
def __init__(__self__, *,
balance_similar_node_groups: Optional[str] = None,
expander: Optional[str] = None,
max_empty_bulk_delete: Optional[str] = None,
max_graceful_termination_sec: Optional[str] = None,
max_total_unready_percentage: Optional[str] = None,
new_pod_scale_up_delay: Optional[str] = None,
ok_total_unready_count: Optional[str] = None,
scale_down_delay_after_add: Optional[str] = None,
scale_down_delay_after_delete: Optional[str] = None,
scale_down_delay_after_failure: Optional[str] = None,
scale_down_unneeded_time: Optional[str] = None,
scale_down_unready_time: Optional[str] = None,
scale_down_utilization_threshold: Optional[str] = None,
scan_interval: Optional[str] = None,
skip_nodes_with_local_storage: Optional[str] = None,
skip_nodes_with_system_pods: Optional[str] = None):
"""
Parameters to be applied to the cluster-autoscaler when enabled
"""
if balance_similar_node_groups is not None:
pulumi.set(__self__, "balance_similar_node_groups", balance_similar_node_groups)
if expander is not None:
pulumi.set(__self__, "expander", expander)
if max_empty_bulk_delete is not None:
pulumi.set(__self__, "max_empty_bulk_delete", max_empty_bulk_delete)
if max_graceful_termination_sec is not None:
pulumi.set(__self__, "max_graceful_termination_sec", max_graceful_termination_sec)
if max_total_unready_percentage is not None:
pulumi.set(__self__, "max_total_unready_percentage", max_total_unready_percentage)
if new_pod_scale_up_delay is not None:
pulumi.set(__self__, "new_pod_scale_up_delay", new_pod_scale_up_delay)
if ok_total_unready_count is not None:
pulumi.set(__self__, "ok_total_unready_count", ok_total_unready_count)
if scale_down_delay_after_add is not None:
pulumi.set(__self__, "scale_down_delay_after_add", scale_down_delay_after_add)
if scale_down_delay_after_delete is not None:
pulumi.set(__self__, "scale_down_delay_after_delete", scale_down_delay_after_delete)
if scale_down_delay_after_failure is not None:
pulumi.set(__self__, "scale_down_delay_after_failure", scale_down_delay_after_failure)
if scale_down_unneeded_time is not None:
pulumi.set(__self__, "scale_down_unneeded_time", scale_down_unneeded_time)
if scale_down_unready_time is not None:
pulumi.set(__self__, "scale_down_unready_time", scale_down_unready_time)
if scale_down_utilization_threshold is not None:
pulumi.set(__self__, "scale_down_utilization_threshold", scale_down_utilization_threshold)
if scan_interval is not None:
pulumi.set(__self__, "scan_interval", scan_interval)
if skip_nodes_with_local_storage is not None:
pulumi.set(__self__, "skip_nodes_with_local_storage", skip_nodes_with_local_storage)
if skip_nodes_with_system_pods is not None:
pulumi.set(__self__, "skip_nodes_with_system_pods", skip_nodes_with_system_pods)
@property
@pulumi.getter(name="balanceSimilarNodeGroups")
def balance_similar_node_groups(self) -> Optional[str]:
return pulumi.get(self, "balance_similar_node_groups")
@property
@pulumi.getter
def expander(self) -> Optional[str]:
return pulumi.get(self, "expander")
@property
@pulumi.getter(name="maxEmptyBulkDelete")
def max_empty_bulk_delete(self) -> Optional[str]:
return pulumi.get(self, "max_empty_bulk_delete")
@property
@pulumi.getter(name="maxGracefulTerminationSec")
def max_graceful_termination_sec(self) -> Optional[str]:
return pulumi.get(self, "max_graceful_termination_sec")
@property
@pulumi.getter(name="maxTotalUnreadyPercentage")
def max_total_unready_percentage(self) -> Optional[str]:
return pulumi.get(self, "max_total_unready_percentage")
@property
@pulumi.getter(name="newPodScaleUpDelay")
def new_pod_scale_up_delay(self) -> Optional[str]:
return pulumi.get(self, "new_pod_scale_up_delay")
@property
@pulumi.getter(name="okTotalUnreadyCount")
def ok_total_unready_count(self) -> Optional[str]:
return pulumi.get(self, "ok_total_unready_count")
@property
@pulumi.getter(name="scaleDownDelayAfterAdd")
def scale_down_delay_after_add(self) -> Optional[str]:
return pulumi.get(self, "scale_down_delay_after_add")
@property
@pulumi.getter(name="scaleDownDelayAfterDelete")
def scale_down_delay_after_delete(self) -> Optional[str]:
return pulumi.get(self, "scale_down_delay_after_delete")
@property
@pulumi.getter(name="scaleDownDelayAfterFailure")
def scale_down_delay_after_failure(self) -> Optional[str]:
return pulumi.get(self, "scale_down_delay_after_failure")
@property
@pulumi.getter(name="scaleDownUnneededTime")
def scale_down_unneeded_time(self) -> Optional[str]:
return pulumi.get(self, "scale_down_unneeded_time")
@property
@pulumi.getter(name="scaleDownUnreadyTime")
def scale_down_unready_time(self) -> Optional[str]:
return pulumi.get(self, "scale_down_unready_time")
@property
@pulumi.getter(name="scaleDownUtilizationThreshold")
def scale_down_utilization_threshold(self) -> Optional[str]:
return pulumi.get(self, "scale_down_utilization_threshold")
@property
@pulumi.getter(name="scanInterval")
def scan_interval(self) -> Optional[str]:
return pulumi.get(self, "scan_interval")
@property
@pulumi.getter(name="skipNodesWithLocalStorage")
def skip_nodes_with_local_storage(self) -> Optional[str]:
return pulumi.get(self, "skip_nodes_with_local_storage")
@property
@pulumi.getter(name="skipNodesWithSystemPods")
def skip_nodes_with_system_pods(self) -> Optional[str]:
return pulumi.get(self, "skip_nodes_with_system_pods")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
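# Illustrative sketch, not generated SDK code: every autoscaler knob in this API
# version is string-typed, so durations and ratios are passed as strings that
# mirror cluster-autoscaler flag syntax (the exact values below are examples).
def _example_auto_scaler_profile() -> ManagedClusterPropertiesResponseAutoScalerProfile:
    return ManagedClusterPropertiesResponseAutoScalerProfile(
        scan_interval="10s",
        scale_down_unneeded_time="10m",
        scale_down_utilization_threshold="0.5",
        balance_similar_node_groups="true")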
@pulumi.output_type
class ManagedClusterPropertiesResponseIdentityProfile(dict):
def __init__(__self__, *,
client_id: Optional[str] = None,
object_id: Optional[str] = None,
resource_id: Optional[str] = None):
"""
:param str client_id: The client id of the user assigned identity.
:param str object_id: The object id of the user assigned identity.
:param str resource_id: The resource id of the user assigned identity.
"""
if client_id is not None:
pulumi.set(__self__, "client_id", client_id)
if object_id is not None:
pulumi.set(__self__, "object_id", object_id)
if resource_id is not None:
pulumi.set(__self__, "resource_id", resource_id)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> Optional[str]:
"""
The client id of the user assigned identity.
"""
return pulumi.get(self, "client_id")
@property
@pulumi.getter(name="objectId")
def object_id(self) -> Optional[str]:
"""
The object id of the user assigned identity.
"""
return pulumi.get(self, "object_id")
@property
@pulumi.getter(name="resourceId")
def resource_id(self) -> Optional[str]:
"""
The resource id of the user assigned identity.
"""
return pulumi.get(self, "resource_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ManagedClusterSKUResponse(dict):
def __init__(__self__, *,
name: Optional[str] = None,
tier: Optional[str] = None):
"""
:param str name: Name of a managed cluster SKU.
:param str tier: Tier of a managed cluster SKU.
"""
if name is not None:
pulumi.set(__self__, "name", name)
if tier is not None:
pulumi.set(__self__, "tier", tier)
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of a managed cluster SKU.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def tier(self) -> Optional[str]:
"""
Tier of a managed cluster SKU.
"""
return pulumi.get(self, "tier")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ManagedClusterServicePrincipalProfileResponse(dict):
"""
Information about a service principal identity for the cluster to use for manipulating Azure APIs.
"""
def __init__(__self__, *,
client_id: str,
secret: Optional[str] = None):
"""
Information about a service principal identity for the cluster to use for manipulating Azure APIs.
:param str client_id: The ID for the service principal.
:param str secret: The secret password associated with the service principal in plain text.
"""
pulumi.set(__self__, "client_id", client_id)
if secret is not None:
pulumi.set(__self__, "secret", secret)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> str:
"""
The ID for the service principal.
"""
return pulumi.get(self, "client_id")
@property
@pulumi.getter
def secret(self) -> Optional[str]:
"""
The secret password associated with the service principal in plain text.
"""
return pulumi.get(self, "secret")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ManagedClusterWindowsProfileResponse(dict):
"""
Profile for Windows VMs in the container service cluster.
"""
def __init__(__self__, *,
admin_username: str,
admin_password: Optional[str] = None,
license_type: Optional[str] = None):
"""
Profile for Windows VMs in the container service cluster.
:param str admin_username: The administrator username to use for Windows VMs.
:param str admin_password: The administrator password to use for Windows VMs.
:param str license_type: The licenseType to use for Windows VMs. Windows_Server is used to enable Azure Hybrid User Benefits for Windows VMs.
"""
pulumi.set(__self__, "admin_username", admin_username)
if admin_password is not None:
pulumi.set(__self__, "admin_password", admin_password)
if license_type is not None:
pulumi.set(__self__, "license_type", license_type)
@property
@pulumi.getter(name="adminUsername")
def admin_username(self) -> str:
"""
The administrator username to use for Windows VMs.
"""
return pulumi.get(self, "admin_username")
@property
@pulumi.getter(name="adminPassword")
def admin_password(self) -> Optional[str]:
"""
The administrator password to use for Windows VMs.
"""
return pulumi.get(self, "admin_password")
@property
@pulumi.getter(name="licenseType")
def license_type(self) -> Optional[str]:
"""
The licenseType to use for Windows VMs. Windows_Server is used to enable Azure Hybrid User Benefits for Windows VMs.
"""
return pulumi.get(self, "license_type")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
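# Illustrative sketch, not generated SDK code: a Windows profile opting into
# Azure Hybrid Benefit via license_type, as described above. A real program
# should pull the password from a secret store rather than hard-coding it.
def _example_windows_profile() -> ManagedClusterWindowsProfileResponse:
    return ManagedClusterWindowsProfileResponse(
        admin_username="azureuser",
        admin_password="<replace-with-a-secret>",
        license_type="Windows_Server")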
@pulumi.output_type
class NetworkProfileResponse(dict):
"""
Represents the OpenShift networking configuration
"""
def __init__(__self__, *,
peer_vnet_id: Optional[str] = None,
vnet_cidr: Optional[str] = None,
vnet_id: Optional[str] = None):
"""
Represents the OpenShift networking configuration
        :param str peer_vnet_id: ID of the Vnet to peer.
:param str vnet_cidr: CIDR for the OpenShift Vnet.
:param str vnet_id: ID of the Vnet created for OSA cluster.
"""
if peer_vnet_id is not None:
pulumi.set(__self__, "peer_vnet_id", peer_vnet_id)
if vnet_cidr is not None:
pulumi.set(__self__, "vnet_cidr", vnet_cidr)
if vnet_id is not None:
pulumi.set(__self__, "vnet_id", vnet_id)
@property
@pulumi.getter(name="peerVnetId")
def peer_vnet_id(self) -> Optional[str]:
"""
        ID of the Vnet to peer.
"""
return pulumi.get(self, "peer_vnet_id")
@property
@pulumi.getter(name="vnetCidr")
def vnet_cidr(self) -> Optional[str]:
"""
CIDR for the OpenShift Vnet.
"""
return pulumi.get(self, "vnet_cidr")
@property
@pulumi.getter(name="vnetId")
def vnet_id(self) -> Optional[str]:
"""
ID of the Vnet created for OSA cluster.
"""
return pulumi.get(self, "vnet_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class OpenShiftManagedClusterAADIdentityProviderResponse(dict):
"""
Defines the Identity provider for MS AAD.
"""
def __init__(__self__, *,
kind: str,
client_id: Optional[str] = None,
customer_admin_group_id: Optional[str] = None,
secret: Optional[str] = None,
tenant_id: Optional[str] = None):
"""
Defines the Identity provider for MS AAD.
:param str kind: The kind of the provider.
        :param str client_id: The clientId associated with the provider.
:param str customer_admin_group_id: The groupId to be granted cluster admin role.
:param str secret: The secret password associated with the provider.
:param str tenant_id: The tenantId associated with the provider.
"""
pulumi.set(__self__, "kind", 'AADIdentityProvider')
if client_id is not None:
pulumi.set(__self__, "client_id", client_id)
if customer_admin_group_id is not None:
pulumi.set(__self__, "customer_admin_group_id", customer_admin_group_id)
if secret is not None:
pulumi.set(__self__, "secret", secret)
if tenant_id is not None:
pulumi.set(__self__, "tenant_id", tenant_id)
@property
@pulumi.getter
def kind(self) -> str:
"""
The kind of the provider.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter(name="clientId")
def client_id(self) -> Optional[str]:
"""
        The clientId associated with the provider.
"""
return pulumi.get(self, "client_id")
@property
@pulumi.getter(name="customerAdminGroupId")
def customer_admin_group_id(self) -> Optional[str]:
"""
The groupId to be granted cluster admin role.
"""
return pulumi.get(self, "customer_admin_group_id")
@property
@pulumi.getter
def secret(self) -> Optional[str]:
"""
The secret password associated with the provider.
"""
return pulumi.get(self, "secret")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> Optional[str]:
"""
The tenantId associated with the provider.
"""
return pulumi.get(self, "tenant_id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
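# Illustrative sketch, not generated SDK code. Note from __init__ above that the
# kind discriminator is hard-coded to 'AADIdentityProvider' whatever value is
# passed, so callers effectively only choose the AAD fields (placeholders here).
def _example_aad_identity_provider() -> OpenShiftManagedClusterAADIdentityProviderResponse:
    return OpenShiftManagedClusterAADIdentityProviderResponse(
        kind="AADIdentityProvider",  # required by the signature, then overwritten with the same literal
        client_id="55555555-5555-5555-5555-555555555555",
        tenant_id="66666666-6666-6666-6666-666666666666",
        customer_admin_group_id="77777777-7777-7777-7777-777777777777")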
@pulumi.output_type
class OpenShiftManagedClusterAgentPoolProfileResponse(dict):
"""
Defines the configuration of the OpenShift cluster VMs.
"""
def __init__(__self__, *,
count: int,
name: str,
vm_size: str,
os_type: Optional[str] = None,
role: Optional[str] = None,
subnet_cidr: Optional[str] = None):
"""
Defines the configuration of the OpenShift cluster VMs.
:param int count: Number of agents (VMs) to host docker containers.
:param str name: Unique name of the pool profile in the context of the subscription and resource group.
:param str vm_size: Size of agent VMs.
:param str os_type: OsType to be used to specify os type. Choose from Linux and Windows. Default to Linux.
:param str role: Define the role of the AgentPoolProfile.
:param str subnet_cidr: Subnet CIDR for the peering.
"""
pulumi.set(__self__, "count", count)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "vm_size", vm_size)
if os_type is not None:
pulumi.set(__self__, "os_type", os_type)
if role is not None:
pulumi.set(__self__, "role", role)
if subnet_cidr is not None:
pulumi.set(__self__, "subnet_cidr", subnet_cidr)
@property
@pulumi.getter
def count(self) -> int:
"""
Number of agents (VMs) to host docker containers.
"""
return pulumi.get(self, "count")
@property
@pulumi.getter
def name(self) -> str:
"""
Unique name of the pool profile in the context of the subscription and resource group.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="vmSize")
def vm_size(self) -> str:
"""
Size of agent VMs.
"""
return pulumi.get(self, "vm_size")
@property
@pulumi.getter(name="osType")
def os_type(self) -> Optional[str]:
"""
OsType to be used to specify os type. Choose from Linux and Windows. Default to Linux.
"""
return pulumi.get(self, "os_type")
@property
@pulumi.getter
def role(self) -> Optional[str]:
"""
Define the role of the AgentPoolProfile.
"""
return pulumi.get(self, "role")
@property
@pulumi.getter(name="subnetCidr")
def subnet_cidr(self) -> Optional[str]:
"""
Subnet CIDR for the peering.
"""
return pulumi.get(self, "subnet_cidr")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class OpenShiftManagedClusterAuthProfileResponse(dict):
"""
Defines all possible authentication profiles for the OpenShift cluster.
"""
def __init__(__self__, *,
identity_providers: Optional[Sequence['outputs.OpenShiftManagedClusterIdentityProviderResponse']] = None):
"""
Defines all possible authentication profiles for the OpenShift cluster.
:param Sequence['OpenShiftManagedClusterIdentityProviderResponseArgs'] identity_providers: Type of authentication profile to use.
"""
if identity_providers is not None:
pulumi.set(__self__, "identity_providers", identity_providers)
@property
@pulumi.getter(name="identityProviders")
def identity_providers(self) -> Optional[Sequence['outputs.OpenShiftManagedClusterIdentityProviderResponse']]:
"""
Type of authentication profile to use.
"""
return pulumi.get(self, "identity_providers")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class OpenShiftManagedClusterIdentityProviderResponse(dict):
"""
Defines the configuration of the identity providers to be used in the OpenShift cluster.
"""
def __init__(__self__, *,
name: Optional[str] = None,
provider: Optional['outputs.OpenShiftManagedClusterAADIdentityProviderResponse'] = None):
"""
Defines the configuration of the identity providers to be used in the OpenShift cluster.
:param str name: Name of the provider.
:param 'OpenShiftManagedClusterAADIdentityProviderResponseArgs' provider: Configuration of the provider.
"""
if name is not None:
pulumi.set(__self__, "name", name)
if provider is not None:
pulumi.set(__self__, "provider", provider)
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of the provider.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def provider(self) -> Optional['outputs.OpenShiftManagedClusterAADIdentityProviderResponse']:
"""
Configuration of the provider.
"""
return pulumi.get(self, "provider")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class OpenShiftManagedClusterMasterPoolProfileResponse(dict):
"""
    OpenShiftManagedClusterMasterPoolProfile contains configuration for OpenShift master VMs.
"""
def __init__(__self__, *,
count: int,
vm_size: str,
name: Optional[str] = None,
os_type: Optional[str] = None,
subnet_cidr: Optional[str] = None):
"""
        OpenShiftManagedClusterMasterPoolProfile contains configuration for OpenShift master VMs.
:param int count: Number of masters (VMs) to host docker containers. The default value is 3.
:param str vm_size: Size of agent VMs.
:param str name: Unique name of the master pool profile in the context of the subscription and resource group.
:param str os_type: OsType to be used to specify os type. Choose from Linux and Windows. Default to Linux.
:param str subnet_cidr: Subnet CIDR for the peering.
"""
pulumi.set(__self__, "count", count)
pulumi.set(__self__, "vm_size", vm_size)
if name is not None:
pulumi.set(__self__, "name", name)
if os_type is not None:
pulumi.set(__self__, "os_type", os_type)
if subnet_cidr is not None:
pulumi.set(__self__, "subnet_cidr", subnet_cidr)
@property
@pulumi.getter
def count(self) -> int:
"""
Number of masters (VMs) to host docker containers. The default value is 3.
"""
return pulumi.get(self, "count")
@property
@pulumi.getter(name="vmSize")
def vm_size(self) -> str:
"""
Size of agent VMs.
"""
return pulumi.get(self, "vm_size")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Unique name of the master pool profile in the context of the subscription and resource group.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="osType")
def os_type(self) -> Optional[str]:
"""
OsType to be used to specify os type. Choose from Linux and Windows. Default to Linux.
"""
return pulumi.get(self, "os_type")
@property
@pulumi.getter(name="subnetCidr")
def subnet_cidr(self) -> Optional[str]:
"""
Subnet CIDR for the peering.
"""
return pulumi.get(self, "subnet_cidr")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class OpenShiftRouterProfileResponse(dict):
"""
Represents an OpenShift router
"""
def __init__(__self__, *,
fqdn: str,
public_subdomain: str,
name: Optional[str] = None):
"""
Represents an OpenShift router
:param str fqdn: Auto-allocated FQDN for the OpenShift router.
:param str public_subdomain: DNS subdomain for OpenShift router.
:param str name: Name of the router profile.
"""
pulumi.set(__self__, "fqdn", fqdn)
pulumi.set(__self__, "public_subdomain", public_subdomain)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter
def fqdn(self) -> str:
"""
Auto-allocated FQDN for the OpenShift router.
"""
return pulumi.get(self, "fqdn")
@property
@pulumi.getter(name="publicSubdomain")
def public_subdomain(self) -> str:
"""
DNS subdomain for OpenShift router.
"""
return pulumi.get(self, "public_subdomain")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name of the router profile.
"""
return pulumi.get(self, "name")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PowerStateResponse(dict):
"""
Describes the Power State of the cluster
"""
def __init__(__self__, *,
code: Optional[str] = None):
"""
Describes the Power State of the cluster
:param str code: Tells whether the cluster is Running or Stopped
"""
if code is not None:
pulumi.set(__self__, "code", code)
@property
@pulumi.getter
def code(self) -> Optional[str]:
"""
Tells whether the cluster is Running or Stopped
"""
return pulumi.get(self, "code")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PrivateEndpointResponse(dict):
"""
Private endpoint which a connection belongs to.
"""
def __init__(__self__, *,
id: Optional[str] = None):
"""
Private endpoint which a connection belongs to.
:param str id: The resource Id for private endpoint
"""
if id is not None:
pulumi.set(__self__, "id", id)
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
The resource Id for private endpoint
"""
return pulumi.get(self, "id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PrivateLinkServiceConnectionStateResponse(dict):
"""
The state of a private link service connection.
"""
def __init__(__self__, *,
description: Optional[str] = None,
status: Optional[str] = None):
"""
The state of a private link service connection.
:param str description: The private link service connection description.
:param str status: The private link service connection status.
"""
if description is not None:
pulumi.set(__self__, "description", description)
if status is not None:
pulumi.set(__self__, "status", status)
@property
@pulumi.getter
def description(self) -> Optional[str]:
"""
The private link service connection description.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def status(self) -> Optional[str]:
"""
The private link service connection status.
"""
return pulumi.get(self, "status")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class PurchasePlanResponse(dict):
"""
Used for establishing the purchase context of any 3rd Party artifact through MarketPlace.
"""
def __init__(__self__, *,
name: Optional[str] = None,
product: Optional[str] = None,
promotion_code: Optional[str] = None,
publisher: Optional[str] = None):
"""
Used for establishing the purchase context of any 3rd Party artifact through MarketPlace.
:param str name: The plan ID.
:param str product: Specifies the product of the image from the marketplace. This is the same value as Offer under the imageReference element.
:param str promotion_code: The promotion code.
        :param str publisher: The publisher ID.
"""
if name is not None:
pulumi.set(__self__, "name", name)
if product is not None:
pulumi.set(__self__, "product", product)
if promotion_code is not None:
pulumi.set(__self__, "promotion_code", promotion_code)
if publisher is not None:
pulumi.set(__self__, "publisher", publisher)
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The plan ID.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def product(self) -> Optional[str]:
"""
Specifies the product of the image from the marketplace. This is the same value as Offer under the imageReference element.
"""
return pulumi.get(self, "product")
@property
@pulumi.getter(name="promotionCode")
def promotion_code(self) -> Optional[str]:
"""
The promotion code.
"""
return pulumi.get(self, "promotion_code")
@property
@pulumi.getter
def publisher(self) -> Optional[str]:
"""
        The publisher ID.
"""
return pulumi.get(self, "publisher")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
@pulumi.output_type
class ResourceReferenceResponse(dict):
"""
A reference to an Azure resource.
"""
def __init__(__self__, *,
id: Optional[str] = None):
"""
A reference to an Azure resource.
:param str id: The fully qualified Azure resource id.
"""
if id is not None:
pulumi.set(__self__, "id", id)
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
The fully qualified Azure resource id.
"""
return pulumi.get(self, "id")
def _translate_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
| [
"[email protected]"
] | |
b7868249902bfe1fb69ee6e3267b9e1aab3b8417 | 6b247e365d97951ae7137bb8140447fe72100ff6 | /app/urls.py | c942d1ca3e5b888307b6d9ccafa4f13c869944b5 | [] | no_license | tharcissie/Discussion_Board | 27f251875218174b3285a48b5d1de58653930e5a | 42b3c14b9993a906dc6bfa142dab0d3ddfac66b8 | refs/heads/master | 2023-02-27T18:03:36.251799 | 2021-02-10T15:57:33 | 2021-02-10T15:57:33 | 336,992,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 714 | py | from django.urls import path
from .views import home, topics, new_topic, signup, topic_detail,reply_topic, profile, delete_topic,update_topic
urlpatterns = [
path('', home, name='home'),
path('topics/<id>', topics, name='topics'),
path('topics/<id>/create_topic', new_topic, name='create_topic'),
path('signup/', signup, name='signup'),
path('topic_detail/<id>', topic_detail, name='topic_detail'),
path('topic_detail/<id>/reply_topic', reply_topic , name='reply_topic'),
path('profile/<username>', profile, name='profile'),
path('topic_detail/<id>/delete', delete_topic , name='delete_topic'),
path('topic_detail/<id>/update_topic', update_topic , name='update_topic'),
] | [
"[email protected]"
] | |
d23690b10700d834432702a5b133c61e359439af | 50948d4cb10dcb1cc9bc0355918478fb2841322a | /azure-mgmt-sql/azure/mgmt/sql/models/restorable_dropped_managed_database_paged.py | d6e432c2c35188bdfa6c829f58d7c60fe70a9ab3 | [
"MIT"
] | permissive | xiafu-msft/azure-sdk-for-python | de9cd680b39962702b629a8e94726bb4ab261594 | 4d9560cfd519ee60667f3cc2f5295a58c18625db | refs/heads/master | 2023-08-12T20:36:24.284497 | 2019-05-22T00:55:16 | 2019-05-22T00:55:16 | 187,986,993 | 1 | 0 | MIT | 2020-10-02T01:17:02 | 2019-05-22T07:33:46 | Python | UTF-8 | Python | false | false | 1,036 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.paging import Paged
class RestorableDroppedManagedDatabasePaged(Paged):
"""
A paging container for iterating over a list of :class:`RestorableDroppedManagedDatabase <azure.mgmt.sql.models.RestorableDroppedManagedDatabase>` object
"""
_attribute_map = {
'next_link': {'key': 'nextLink', 'type': 'str'},
'current_page': {'key': 'value', 'type': '[RestorableDroppedManagedDatabase]'}
}
def __init__(self, *args, **kwargs):
super(RestorableDroppedManagedDatabasePaged, self).__init__(*args, **kwargs)
| [
"[email protected]"
] | |
a42b874d58734949d3b054019a599a8224df6ec5 | 216e6e4957e02780129ab4e917e94cfb975dbfcb | /chapter_6/es148.py | 22764021b34b3dcfda7b4d40ffc5d138be098555 | [] | no_license | DamManc/workbook | d7e72fd1ed098bd7bccb23fa5fd9a102cfff10db | 2103dbdc8a6635ffd6a1b16b581c98800c9f21a2 | refs/heads/master | 2023-04-19T21:26:23.940011 | 2021-05-23T22:37:27 | 2021-05-23T22:37:27 | 335,064,547 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,080 | py | # Exercise 148: Play Bingo
from es147 import *
import copy
def main():
print('Welcome to the Bingo Game!!')
print('------ Your card ------')
card = bingo_card()
print_bingo_card(card)
n_calls = []
    for _ in range(1000):  # simulate 1000 independent games on the same card
        copy_card = copy.deepcopy(card)  # fresh, unmarked copy for this game
        gamble = False
        count = 0  # losing calls made before the winning one
        while not gamble:
            # each call draws 5 distinct numbers from the 1-75 bingo range
            # (random and the card helpers come from es147's wildcard import)
            numbers = []
            while len(numbers) < 5:
                r = random.randint(1, 75)
                if r not in numbers:
                    numbers.append(r)
            gamble = check_card(copy_card, numbers)  # marks matches; True once the card wins
            if gamble:
                print('Your call:', end='\t')
                print(f'{numbers} *****************---> WIN {gamble}')
                print(f'tot calls: {count}')
                n_calls.append(count)
            else:
                count += 1
print(f'The minimum number of calls is {min(n_calls)}')
print(f'The maximum number of calls is {max(n_calls)}')
print(f'The average number of calls is {sum(n_calls) / 1000}')
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
fb5fd8b8750934092164597d06bd43e67d19e4c4 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_97/768.py | 789102ddeb40b07f44f85d54dd152798feab35b8 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,576 | py | import sys
def nbValidPermutation(i, maxi):
strF = str(i)
nb = 0
finded = []
for j in range(len(strF))[:-1]:
other = int(strF[j+1:]+strF[:j+1])
if other > i and other <= maxi and not other in finded:
finded.append(other)
nb += 1
return nb
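# Example (illustrative): for i = 12345 the rotations tried are 23451, 34512,
# 45123 and 51234; only rotations strictly greater than i and <= maxi count,
# and the `finded` list makes repeated rotations (e.g. of 1212) count once.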
def buildKelem(i, maxi):
strF = str(i)
nb = 0
finded = []
for j in range(len(strF))[:-1]:
other = int(strF[j+1:]+strF[:j+1])
if other > i and other <= maxi and not other in finded:
finded.append(other)
nb += 1
return sorted(finded,reverse=True)
def buildK():
vals = []
for i in range(2000000):
vals.append(buildKelem(i,2000000))
return vals
def computeSolKno(mini,maxi,kno):
sol = 0
    for i in range(mini, maxi):  # i runs over [mini, maxi-1]; each may pair with a larger rotation <= maxi
sol += len(kno[i])
counter = 0
while counter < len(kno[i]):
if kno[i][counter] <= maxi:
counter = len(kno[i])
else:
counter += 1
sol -= 1
return sol
def computeSol(mini,maxi):
sol = 0
    for i in range(mini, maxi):  # i runs over [mini, maxi-1]; each may pair with a larger rotation <= maxi
sol += nbValidPermutation(i,maxi)
return sol
def solve(pathI,pathOut):
kno = buildK()
print 'ok, kno'
counter = 1
fI = file(pathI,'rU')
fO = file(pathOut,'w')
lines = fI.readlines()
for line in lines[1:]:
print line
elem = line.split()
mini = int(elem[0])
maxi = int(elem[1])
sol = computeSolKno(mini,maxi,kno)
fO.write('Case #')
fO.write(str(counter))
fO.write(': ')
fO.write(str(sol))
fO.write('\n')
counter+=1
fI.close()
fO.close()
def main():
args = sys.argv[1:]
solve(args[0],args[1])
main()
| [
"[email protected]"
] | |
7937ed53104fe047714bf6e587ccd85bf22f019c | 0437ec3526cc39af1d8d87c2e3c0928b9740e7b9 | /Node.py | 19436774365bebf7a66124db81ab1caa08a93e7e | [] | no_license | wkcn/Flow | b5b2c15a72e2407fcce3e8d2535705acf9f11bb1 | 461b8c181b8bca68c41bb69d20e2a0083596cef9 | refs/heads/master | 2021-06-07T07:37:49.633042 | 2016-09-11T14:55:19 | 2016-09-11T14:55:19 | 67,905,517 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 216 | py | #coding=utf-8
class Node:
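    """Graph node: an (x, y) position, an (ox, oy) offset, incident edges,
    a neighbour list, and a red/green state flag (roles inferred from the
    attribute names)."""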
def __init__(self, x, y):
self.x = x
self.y = y
self.ox = 0
self.oy = 0
self.edges = []
self.neighbors = []
self.redgreen = False
| [
"[email protected]"
] | |
deec09e0baf2531114f192fdb1aba714d03af881 | 2a266dda00578ea177b231e8f0dfd14a1824d2e6 | /pw_ls/pw_ls_AB/test_decompress.py | 1ad25097f68c7ea5778186d55d9da1735b9235dd | [] | no_license | sanskrit-lexicon/PWK | fbb51c19d9169e4c28d5c9056484c4a53def78eb | 57d07725b828a95b22b859422287474bfd858ffe | refs/heads/master | 2023-08-17T04:32:37.387691 | 2023-08-15T18:34:46 | 2023-08-15T18:34:46 | 15,903,957 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 324 | py | #-*- coding:utf-8 -*-
"""make_numberchange2b.py
"""
from __future__ import print_function
import sys, re, codecs
from make_numberchange2b import lsnumstr_to_intseq, decompress
if __name__=="__main__":
x = sys.argv[1]
seq,flag = lsnumstr_to_intseq(x)
print(flag,seq)
if flag:
d,flag1 = decompress(seq)
print(flag1,d)
| [
"[email protected]"
] | |
3c6efaa9740b328d1508fc75df89820d4fa4ed29 | 7c01cd1df700a68965a22a041fcf0425fb5b8d2e | /api/tacticalrmm/apiv3/urls.py | 934da836b1079809c5346d405b12aac2207b14af | [
"MIT"
] | permissive | socmap/tacticalrmm | 61de15244c61edfb343314bd9e7d832b473df38e | 72d55a010b8a55583a955daf5546b21273e5a5f0 | refs/heads/master | 2023-03-17T23:50:37.565735 | 2021-03-05T23:05:17 | 2021-03-05T23:05:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 983 | py | from django.urls import path
from . import views
urlpatterns = [
path("checkrunner/", views.CheckRunner.as_view()),
path("<str:agentid>/checkrunner/", views.CheckRunner.as_view()),
path("<str:agentid>/checkinterval/", views.CheckRunnerInterval.as_view()),
path("<int:pk>/<str:agentid>/taskrunner/", views.TaskRunner.as_view()),
path("meshexe/", views.MeshExe.as_view()),
path("sysinfo/", views.SysInfo.as_view()),
path("newagent/", views.NewAgent.as_view()),
path("software/", views.Software.as_view()),
path("installer/", views.Installer.as_view()),
path("checkin/", views.CheckIn.as_view()),
path("syncmesh/", views.SyncMeshNodeID.as_view()),
path("choco/", views.Choco.as_view()),
path("winupdates/", views.WinUpdates.as_view()),
path("superseded/", views.SupersededWinUpdate.as_view()),
path("<int:pk>/chocoresult/", views.ChocoResult.as_view()),
path("<str:agentid>/recovery/", views.AgentRecovery.as_view()),
]
| [
"[email protected]"
] | |
32370305956bdaa9a3226650e42697ee227b1f90 | 9ac405635f3ac9332e02d0c7803df757417b7fee | /cotizaciones/migrations/0042_auto_20191019_0954.py | 236d57369a78000a98643a41cf309646161b8d74 | [] | no_license | odecsarrollo/07_intranet_proyectos | 80af5de8da5faeb40807dd7df3a4f55f432ff4c0 | 524aeebb140bda9b1bf7a09b60e54a02f56fec9f | refs/heads/master | 2023-01-08T04:59:57.617626 | 2020-09-25T18:01:09 | 2020-09-25T18:01:09 | 187,250,667 | 0 | 0 | null | 2022-12-30T09:36:37 | 2019-05-17T16:41:35 | JavaScript | UTF-8 | Python | false | false | 838 | py | # Generated by Django 2.2.6 on 2019-10-19 14:54
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cotizaciones', '0041_cotizacion_revisada'),
]
operations = [
migrations.AlterField(
model_name='cotizacion',
name='estado',
field=models.CharField(choices=[('Cita/Generación Interés', 'Cita/Generación Interés'), ('Configurando Propuesta', 'Configurando Propuesta'), ('Cotización Enviada', 'Cotización Enviada'), ('Evaluación Técnica y Económica', 'Evaluación Técnica y Económica'), ('Aceptación de Terminos y Condiciones', 'Aceptación de Terminos y Condiciones'), ('Cierre (Aprobado)', 'Cierre (Aprobado)'), ('Aplazado', 'Aplazado'), ('Cancelado', 'Cancelado')], max_length=200, null=True),
),
]
| [
"[email protected]"
] | |
017d19b97fd8f6aab8a08babe66bec2918da227a | 233928d206e13e068cf8cb5ff7888c9a2d84ad61 | /swea/D5/swea_1242_암호코드스캔.py | 36d01ac84b27da5410b011fd26f7544b5e741c33 | [] | no_license | Jinwoongma/Algorithm | 7f6daa2d3c2c361059c09fb4fe287b1cce4863e2 | 78803f4572f1416451a9f4f31f53b7d653f74d4a | refs/heads/master | 2022-10-07T22:53:20.333329 | 2020-06-07T13:27:47 | 2020-06-07T13:27:47 | 237,114,107 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,455 | py | hcode = {'0':'0000', '1':'0001', '2':'0010', '3':'0011',
'4':'0100', '5':'0101', '6':'0110', '7':'0111',
'8':'1000', '9':'1001', 'A':'1010', 'B':'1011',
'C':'1100', 'D':'1101', 'E':'1110', 'F':'1111'}
scode = {211:0, 221:1, 122:2, 411:3, 132:4, 231:5, 114:6, 312:7, 213:8, 112:9}
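# scode keys encode the run-length pattern of one code character. Counting
# from the right end of a row, each character is a run of 1s (c3), a run of
# 0s (c2), then a run of 1s (c1); dividing each run by the smallest normalizes
# the widths, and 100*c1 + 10*c2 + c3 selects the digit. A scanned word is
# 8 digits d0..d7 and is accepted only when
# 3*(d0+d2+d4+d6) + (d1+d3+d5+d7) is divisible by 10.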
TC = int(input())
for tc in range(TC):
R, C = map(int, input().split())
data = [input() for _ in range(R)]
answer = 0
mat = [''] * R
for i in range(R):
for j in range(C):
mat[i] += hcode[data[i][j]]
for i in range(1, len(mat) - 6):
j = C * 4 - 1
while j > 56:
if mat[i][j] == '1' and mat[i - 1][j] == '0':
c = [0] * 8
for k in range(7, -1, -1):
c1, c2, c3 = 0, 0, 0
while mat[i][j] == '1': c3 += 1; j -= 1
while mat[i][j] == '0': c2 += 1; j -= 1
while mat[i][j] == '1': c1 += 1; j -= 1
while mat[i][j] == '0' and k: j -= 1
MIN = min(c1, c2, c3)
c1, c2, c3 = c1 // MIN, c2 // MIN, c3 // MIN
c[k] = scode[100 * c1 + 10 * c2 + c3]
t = 3 * (c[0] + c[2] + c[4] + c[6]) + c[1] + c[3] + c[5] + c[7]
if t % 10 == 0:
answer += sum(c)
j -= 1
print('#{} {}'.format(tc + 1, answer))
| [
"[email protected]"
] | |
7f18b56489ef36f4e2391878671a569f4252027d | 1ac9f756c5bab3ae8ae2df8daa596b6fc55b63d1 | /backend/accounts/views.py | c3104129fe20c9ad274477dc8f541600ce56fc03 | [] | no_license | woorud/facebook_clone | 6520adbf5e5aaeb3f517abe7920a0b90096e4f89 | a5b96f215c74e2960465cd2a96568e57db92043c | refs/heads/master | 2022-12-11T18:26:07.648768 | 2020-08-29T14:45:43 | 2020-08-29T14:45:43 | 277,793,569 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,907 | py | from django.shortcuts import render, redirect
from .models import *
from django.contrib.auth import authenticate, login
from django.contrib.auth import logout as django_logout
from .forms import SignupForm, LoginForm
from django.shortcuts import get_object_or_404
from django.contrib.auth import get_user_model
from django.http import HttpResponse
import json
def signup(request):
if request.method == 'POST':
form = SignupForm(request.POST, request.FILES)
if form.is_valid():
user = form.save()
return redirect('accounts:login')
else:
form = SignupForm()
return render(request, 'accounts/signup.html', {
'form':form,
})
def login_check(request):
if request.method == 'POST':
form = LoginForm(request.POST)
name = request.POST.get('username')
pwd = request.POST.get('password')
user = authenticate(username = name, password = pwd)
if user is not None:
login(request, user)
return redirect('/')
else:
form = LoginForm()
return render(request, 'accounts/login.html', {
'form':form
})
def logout(request):
django_logout(request)
return redirect('/')
def create_friend_request(request):
user_id = request.POST.get('pk', None)
user = request.user
target_user = get_object_or_404(get_user_model(), pk=user_id)
try:
user.friend_requests.create(from_user=user, to_user=target_user)
        context = {'result': 'success'}
except Exception as ex:
        print('에러가 발생했습니다', ex)  # "An error occurred"; ex is the exception that was raised
context = {
'result': 'error',
}
return HttpResponse(json.dumps(context), content_type="application/json")
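# Note: both AJAX endpoints in this module respond with JSON of the form
# {"result": "success"} on success or {"result": "error"} on failure.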
def accept_friend_request(request):
friend_request_id = request.POST.get('pk', None)
    # the friend request being accepted
    friend_request = FriendRequest.objects.get(pk=friend_request_id)
    # user who sent the request
    from_user = friend_request.from_user
    # user who received the request
    to_user = friend_request.to_user
    try:
        # create the friendship; the chat room must be created first,
        # because the Friend rows below reference it
        room_name = "{},{}".format(from_user.username, to_user.username)
        room = Room.objects.create(room_name=room_name)
        Friend.objects.create(user=from_user, current_user=to_user, room=room)
        Friend.objects.create(user=to_user, current_user=from_user, room=room)
        # delete the friend request now that it has been accepted
        friend_request.delete()
context = {
'result': 'success',
}
except Exception as ex:
        print('에러가 발생했습니다', ex)  # "An error occurred"
context = {
'result': 'error',
}
return HttpResponse(json.dumps(context), content_type="application/json")
| [
"[email protected]"
] | |
f06a21f022b3d3742cee8df6c8048fcc34022202 | a51854991671a4389902945578288da34845f8d9 | /libs/UserInterface/TestPages/LampHolderTest.py | e9567659c28b0e4822d07ddbb3702556f7e9276b | [] | no_license | wuyou1102/DFM_B2 | 9210b4b8d47977c50d92ea77791f477fa77e5f83 | 69ace461b9b1b18a2269568110cb324c04ad4266 | refs/heads/master | 2020-04-13T18:54:20.045734 | 2019-06-17T12:46:23 | 2019-06-17T12:46:23 | 163,387,873 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,838 | py | # -*- encoding:UTF-8 -*-
import wx
import logging
import Base
from libs import Utility
from libs.Config import Font
from libs.Config import Color
from libs.Config import String
logger = logging.getLogger(__name__)
class LampHolder(Base.TestPage):
def __init__(self, parent, type):
Base.TestPage.__init__(self, parent=parent, type=type)
self.count = 0
def init_test_sizer(self):
sizer = wx.BoxSizer(wx.VERTICAL)
turn_on_button = wx.Button(self, wx.ID_ANY, u"开启LED", wx.DefaultPosition, (-1, 60), 0)
turn_on_button.SetFont(Font.NORMAL_20_BOLD)
turn_on_button.Bind(wx.EVT_BUTTON, self.on_button_click)
output = wx.TextCtrl(self, -1, "", style=wx.TE_MULTILINE | wx.TE_READONLY)
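        # The prompts below (in Chinese) ask the tester to check whether all
        # indicator lights on the fixture are lit: all lit -> PASS, anything
        # else -> FAIL.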
output.AppendText(u"请检查治具上的指示灯是否全亮\n")
output.AppendText(u"\n")
output.AppendText(u"判断条件:\n")
output.AppendText(u" 指示灯全亮 PASS\n")
output.AppendText(u" 其他情况 FAIL\n")
output.SetInsertionPointEnd()
output.SetBackgroundColour(Color.LightSkyBlue1)
output.SetFont(Font.DESC)
sizer.Add(turn_on_button, 0, wx.EXPAND | wx.ALL, 1)
sizer.Add(output, 1, wx.EXPAND | wx.ALL, 1)
return sizer
def before_test(self):
pass
def on_button_click(self, event):
comm = self.get_communicate()
if comm.unload_protocol_stack():
dlg = Utility.Alert.CountdownDialog(u"正在开启LED灯")
dlg.Countdown(3)
def start_test(self):
self.FormatPrint(info="Started")
def stop_test(self):
self.FormatPrint(info="Stop")
@staticmethod
def GetName():
return u"灯座测试"
@staticmethod
def GetFlag(t):
if t == "PCBA":
return String.LAMP_HOLDER_PCBA
| [
"[email protected]"
] | |
a22ffc16dfff771c3f037f2cf3410d17066bbd79 | 1f080333f1714ba88d4f41d6ce2676f0b299e05e | /.venv/bin/maf_extract_ranges_indexed.py | 011751629233c72c0d998a7fdd8de77cfa72ed42 | [] | no_license | venice-juanillas/EIB-hackathon | b66bf128144dcef893c91af84dc28ff48be08e1b | 6b73babff2b88dccbd5ec2e74bd5737ff0a4270b | refs/heads/master | 2022-11-17T23:52:24.365210 | 2018-04-05T01:56:17 | 2018-04-05T01:56:17 | 120,545,413 | 0 | 1 | null | 2022-10-25T18:54:52 | 2018-02-07T01:19:48 | Python | UTF-8 | Python | false | false | 4,702 | py | #!/home/galaxy/data/galaxy_17.09/.venv/bin/python2.7
"""
Reads a list of intervals and a maf. Produces a new maf containing the
blocks or parts of blocks in the original that overlapped the intervals.
It is assumed that each file `maf_fname` has a corresponding `maf_fname`.index
file.
NOTE: If two intervals overlap the same block it will be written twice. With
non-overlapping intervals and --chop this is never a problem.
NOTE: Intervals are origin-zero, half-open. For example, the interval 100,150
is 50 bases long, and there are 100 bases to its left in the sequence.
NOTE: Intervals are relative to the + strand, regardless of the strands in
the alignments.
WARNING: bz2/bz2t support and file cache support are new and not as well
tested.
usage: %prog maf_fname1 maf_fname2 ... [options] < interval_file
-m, --mincols=0: Minimum length (columns) required for alignment to be output
-c, --chop: Should blocks be chopped to only portion overlapping (no by default)
-s, --src=s: Use this src for all intervals
-p, --prefix=p: Prepend this to each src before lookup
-d, --dir=d: Write each interval as a separate file in this directory
-S, --strand: Strand is included as an additional column, and the blocks are reverse complemented (if necessary) so that they are always on that strand w/r/t the src species.
-C, --usecache: Use a cache that keeps blocks of the MAF files in memory (requires ~20MB per MAF)
"""
import psyco_full
from bx.cookbook import doc_optparse
import bx.align.maf
from bx import misc
import os
import sys
def main():
# Parse Command Line
options, args = doc_optparse.parse( __doc__ )
try:
maf_files = args
if options.mincols: mincols = int( options.mincols )
else: mincols = 0
if options.src: fixed_src = options.src
else: fixed_src = None
if options.prefix: prefix = options.prefix
else: prefix = None
if options.dir: dir = options.dir
else: dir = None
chop = bool( options.chop )
do_strand = bool( options.strand )
use_cache = bool( options.usecache )
except:
doc_optparse.exit()
# Open indexed access to mafs
index = bx.align.maf.MultiIndexed( maf_files, keep_open=True,
parse_e_rows=True,
use_cache=use_cache )
# Start MAF on stdout
if dir is None:
out = bx.align.maf.Writer( sys.stdout )
# Iterate over input ranges
for line in sys.stdin:
strand = None
fields = line.split()
if fixed_src:
src, start, end = fixed_src, int( fields[0] ), int( fields[1] )
if do_strand: strand = fields[2]
else:
src, start, end = fields[0], int( fields[1] ), int( fields[2] )
if do_strand: strand = fields[3]
if prefix: src = prefix + src
# Find overlap with reference component
blocks = index.get( src, start, end )
# Open file if needed
if dir:
out = bx.align.maf.Writer( open( os.path.join( dir, "%s:%09d-%09d.maf" % ( src, start, end ) ), 'w' ) )
# Write each intersecting block
if chop:
for block in blocks:
for ref in block.get_components_by_src( src ):
slice_start = max( start, ref.get_forward_strand_start() )
slice_end = min( end, ref.get_forward_strand_end() )
if (slice_end <= slice_start): continue
sliced = block.slice_by_component( ref, slice_start, slice_end )
# If the block is shorter than the minimum allowed size, stop
if mincols and ( sliced.text_size < mincols ):
continue
# If the reference component is empty, don't write the block
if sliced.get_component_by_src( src ).size < 1:
continue
# Keep only components that are not empty
sliced.components = [ c for c in sliced.components if c.size > 0 ]
# Reverse complement if needed
if ( strand != None ) and ( ref.strand != strand ):
sliced = sliced.reverse_complement()
# Write the block
out.write( sliced )
else:
for block in blocks:
out.write( block )
if dir:
out.close()
# Close output MAF
out.close()
index.close()
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
dcda4ae98e5ceea8422c2a9d5b281462addc5b6e | 4047b91585245c3ee5ea6c50a620dadf74636bc3 | /phylobot/phylobot/admin.py | e38df56d65328e8a83b088332fe4a4404c4facb6 | [] | no_license | httang12/phylobot-django | fd371cc870f444cf94179d6a3cc6d23e9895186c | b535edfd1ee09dab02421ba22d96d48b3f611dad | refs/heads/master | 2020-04-15T12:53:13.349661 | 2018-02-15T08:46:08 | 2018-02-15T08:46:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 345 | py | from django.contrib import admin
admin.autodiscover()
from phylobot.models import *
from phylobot.models_aws import *
print "\n\n\n phylobot admin\n\n\n"
admin.site.register(UserProfile)
admin.site.register(AncestralLibrary)
admin.site.register(AWSConfiguration)
admin.site.register(ViewingPrefs)
admin.site.register(AncestralLibrarySourceJob)
| [
"[email protected]"
] | |
5dd9789f49b6bf5e26968ad8d2ac344ebc993ed3 | fcca7ebb332ae400b82f7d75d424ace30e35963c | /apps/elasticity/stegoton/plot_comparison.py | 6f3eaab6264e7dee56852f1672d4f2d87a7f8564 | [] | no_license | clawpack/sharpclaw | 5d2812149b28a09bfb626daf057fd27e4ab2f6a5 | 7c9782d932a449b92c875ff341a16bf00f0cc630 | refs/heads/master | 2021-01-04T14:06:42.001372 | 2013-11-28T15:19:26 | 2013-11-28T15:19:26 | 1,613,567 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,147 | py | from pyclaw.data import ClawPlotData
from pyclaw.plotting import plotframe
plotdata = ClawPlotData()
plotdata.outdir = '.'
# Figure:
plotfigure = plotdata.new_plotfigure(name='Solution', figno=1)
plotfigure.kwargs = {'figsize':[5,3]}
# Axes:
plotaxes = plotfigure.new_plotaxes(name='Strain')
#plotaxes.xlim = [73,79]
plotitem = plotaxes.new_plotitem(name='SharpClaw 3600', plot_type='1d')
plotitem.plot_var = 0 # q[2] is the stress
plotitem.plotstyle = 's'
plotitem.color = 'b' # could use 'r' or 'red' or '[1,0,0]'
plotitem.kwargs = {'linewidth':3,'markersize':10}
plotitem = plotaxes.new_plotitem(name='ClawPack 3600', plot_type='1d')
plotitem.outdir = '/users/ketch/research/claw42/fwave2/3600'
plotitem.plot_var = 0 # q[2] is the stress
plotitem.plotstyle = 'o'
plotitem.color = 'r'
plotitem.kwargs = {'linewidth':3,'markersize':10}
#plotitem = plotaxes.new_plotitem(name='ClawPack 28800', plot_type='1d')
#plotitem.outdir = '/users/ketch/research/claw42/fwave2/'
#plotitem.plot_var = 0 # q[2] is the stress
#plotitem.plotstyle = '-'
#plotitem.color = 'k'
#plotitem.kwargs = {'linewidth':3}
plotdata.plotframe(100)
| [
"[email protected]"
] | |
fead5b51476cb0ee7d01cbd4d92adfe47ece5082 | 32a6ac6cbec63296ba68838ad4699b995810c6cd | /compiled/construct/debug_enum_name.py | f557f7c82a5e810c80400f8ac4c1aa17e88d975e | [
"MIT"
] | permissive | smarek/ci_targets | a33696ddaa97daa77c0aecbdfb20c67546c729bc | c5edee7b0901fd8e7f75f85245ea4209b38e0cb3 | refs/heads/master | 2022-12-01T22:54:38.478115 | 2020-08-10T13:36:36 | 2020-08-19T07:12:14 | 286,483,420 | 0 | 0 | MIT | 2020-08-10T13:30:22 | 2020-08-10T13:30:21 | null | UTF-8 | Python | false | false | 914 | py | from construct import *
from construct.lib import *
def debug_enum_name__test_subtype__inner_enum1(subcon):
return Enum(subcon,
enum_value_67=67,
)
def debug_enum_name__test_subtype__inner_enum2(subcon):
return Enum(subcon,
enum_value_11=11,
)
debug_enum_name__test_subtype = Struct(
'field1' / debug_enum_name__test_subtype__inner_enum1(Int8ub),
'field2' / Int8ub,
'instance_field' / Computed(lambda this: KaitaiStream.resolve_enum(DebugEnumName.TestSubtype.InnerEnum2, (this.field2 & 15))),
)
def debug_enum_name__test_enum1(subcon):
return Enum(subcon,
enum_value_80=80,
)
def debug_enum_name__test_enum2(subcon):
return Enum(subcon,
enum_value_65=65,
)
debug_enum_name = Struct(
'one' / debug_enum_name__test_enum1(Int8ub),
'array_of_ints' / Array(1, debug_enum_name__test_enum2(Int8ub)),
'test_type' / LazyBound(lambda: debug_enum_name__test_subtype),
)
_schema = debug_enum_name
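# Note: the Computed 'instance_field' above references KaitaiStream and
# DebugEnumName, which this module neither defines nor imports, so parsing
# raises NameError unless the Kaitai runtime supplies them. Ignoring that
# field, a worked example: _schema.parse(b"\x50\x41\x43\x0b") reads
# one == 'enum_value_80', array_of_ints == ['enum_value_65'],
# test_type.field1 == 'enum_value_67' and test_type.field2 == 11.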
| [
"[email protected]"
] | |
39f1ab98e67afeba433bba71016769cf604ee099 | b7e6cdf094baaee9d6e5034c2355641fbf9138d7 | /824. 山羊拉丁文.py | fcca7ebb332ae400b82f7d75d424ace30e35963c | [] | no_license | heshibo1994/leetcode-python-2 | 04296c66cd6d1fe58880062aeafdbe9d474b7d2e | 3ea32f03bd453743b9b81de9871fad7ac67ced90 | refs/heads/master | 2020-05-23T21:49:01.367969 | 2019-09-30T03:31:27 | 2019-09-30T03:31:27 | 186,961,972 | 1 | 0 | null | null | null | null | GB18030 | Python | false | false | 1,333 | py | # Given a sentence S of words separated by spaces, where each word consists only of uppercase or lowercase letters.
#
# We want to convert the sentence to "Goat Latin" (a made-up language similar to Pig Latin).
#
# The rules of Goat Latin are as follows:
#
# If a word begins with a vowel (a, e, i, o, u), append "ma" to the word.
# For example, the word "apple" becomes "applema".
#
# If a word begins with a consonant (i.e. a non-vowel letter), remove the first character, move it to the end, then add "ma".
# For example, the word "goat" becomes "oatgma".
#
# Add one letter 'a' to the end of each word per its index in the sentence, starting from 1.
# For example, the first word gets "a" added to the end, the second word gets "aa", and so on.
#
# Return the sentence after converting S to Goat Latin.
# Input: "I speak Goat Latin"
# Output: "Imaa peaksmaaa oatGmaaaa atinLmaaaaa"
class Solution:
def toGoatLatin(self, S):
s = S.split(" ")
print(s)
ans = ""
for i in range(len(s)):
if s[i][0] in "aeiouAEIOU":
temp = s[i]+"ma"+"a"*(i+1)
else:
temp = s[i][1:]+s[i][0]+"ma"+"a"*(i+1)
ans = ans+temp+" "
temp = ""
return ans
s=Solution()
print(s.toGoatLatin("I speak Goat Latin"))
| [
"[email protected]"
] | |
c74363ec7f3ffb330ff7eb6cc99754b2dfbc69e4 | 0e7be557833f38fef17b5eaa57c331a96148ad5e | /Assets/Python/StrategyOnly/Heroes.py | 895ee42ac8234f910d9e6bebc4e54df85577387d | [] | no_license | Thunderbrd/Caveman2Cosmos | 9f38961c638b82099b0601c22f8e90a1c98daa1e | b99aca8e56fb2a1fae48abd424dc0060a1d1fc1a | refs/heads/master | 2022-01-12T19:40:32.586456 | 2019-07-21T22:00:09 | 2019-07-21T22:00:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,886 | py | ## By StrategyOnly converted to BUG by Dancing Hoskuld
from CvPythonExtensions import *
import CvEventInterface
import CvUtil
import BugUtil
import PyHelpers
import Popup as PyPopup
import SdToolKit as SDTK
import CvAdvisorUtils
gc = CyGlobalContext()
localText = CyTranslator()
PyPlayer = PyHelpers.PyPlayer
PyInfo = PyHelpers.PyInfo
giSparticus = -1
giGladiator = -1
def init():
global giSparticus, giGladiator
giSparticus = gc.getInfoTypeForString('UNITCLASS_SPARTACUS')
giGladiator = CvUtil.findInfoTypeNum(gc.getUnitInfo,gc.getNumUnitInfos(),'UNIT_GLADIATOR')
def onUnitBuilt(self, argsList):
'Unit Completed'
city = argsList[0]
unit = argsList[1]
player = PyPlayer(city.getOwner())
CvAdvisorUtils.unitBuiltFeats(city, unit)
## Hero Movies ##
if not CyGame().isNetworkMultiPlayer() and city.getOwner() == CyGame().getActivePlayer() and isWorldUnitClass(unit.getUnitClassType()):
popupInfo = CyPopupInfo()
popupInfo.setButtonPopupType(ButtonPopupTypes.BUTTONPOPUP_PYTHON_SCREEN)
popupInfo.setData1(unit.getUnitType())
popupInfo.setData2(city.getID())
popupInfo.setData3(4)
popupInfo.setText(u"showWonderMovie")
popupInfo.addPopup(city.getOwner())
## Hero Movies ##
def onCombatResult(argsList):
'Combat Result'
pWinner,pLoser = argsList
playerX = PyPlayer(pWinner.getOwner())
unitX = PyInfo.UnitInfo(pWinner.getUnitType())
playerY = PyPlayer(pLoser.getOwner())
unitY = PyInfo.UnitInfo(pLoser.getUnitType())
pPlayer = gc.getPlayer(pWinner.getOwner())
## BTS HEROS - Spartacus Capture Event Start ##
if pWinner.getUnitClassType() == giSparticus:
## Capture % Random # 0 to 3 or 25% ##
iNewGladiatorNumber = getRandomNumber( 3 )
if iNewGladiatorNumber == 0:
pClearPlot = findClearPlot(pLoser)
if (pLoser.plot().getNumUnits() == 1 and pClearPlot != -1):
pPlot = pLoser.plot()
pLoser.setXY(pClearPlot.getX(), pClearPlot.getY(), False, True, True)
else:
pPlot = pWinner.plot()
pPID = pPlayer.getID()
newUnit = pPlayer.initUnit(giGladiator, pPlot.getX(), pPlot.getY(), UnitAITypes.NO_UNITAI, DirectionTypes.DIRECTION_NORTH)
pLoser.setDamage(100000, False)
## newUnit.convert(pLoser)
## pLoser.setDamage(100, False)
newUnit.finishMoves()
iXa = pLoser.getX()
iYa = pLoser.getY()
CyInterface().addMessage(pPID,False,15,CyTranslator().getText("TXT_KEY_SPARTACUS_CAPTURE_SUCCESS",()),'',0,',Art/Interface/Buttons/Units/ICBM.dds,Art/Interface/Buttons/Warlords_Atlas_1.dds,3,11',ColorTypes(44), iXa, iYa, True,True)
## BTS HEROS - Spartacus Capture End ##
## Field Medic Start ##
if pWinner.isHasPromotion(gc.getInfoTypeForString('PROMOTION_RETINUE_MESSENGER')):
iHealChance = getRandomNumber( 9 )
if iHealChance == 0:
if ( not SDTK.sdObjectExists('Heroes', pWinner) ) :
iHealTurn = -1
else :
iHealTurn = SDTK.sdObjectGetVal( 'Heroes', pWinner, 'HealTurn' )
if( iHealTurn == None or gc.getGame().getGameTurn() > iHealTurn ) :
pWinner.setDamage(0, False)
if ( not SDTK.sdObjectExists('Heroes', pWinner) ) :
SDTK.sdObjectInit('Heroes', pWinner, {})
SDTK.sdObjectSetVal( 'Heroes', pWinner, 'HealTurn', gc.getGame().getGameTurn() )
## Field Medic End ##
def findClearPlot(pUnit):
BestPlot = -1
iBestPlot = 0
pOldPlot = pUnit.plot()
iX = pOldPlot.getX()
iY = pOldPlot.getY()
for iiX in range(iX-1, iX+2, 1):
for iiY in range(iY-1, iY+2, 1):
iCurrentPlot = 0
pPlot = CyMap().plot(iiX,iiY)
if pPlot.getNumUnits() == 0:
iCurrentPlot = iCurrentPlot + 5
if iCurrentPlot >= 1:
iCurrentPlot = iCurrentPlot + CyGame().getSorenRandNum(5, "findClearPlot")
if iCurrentPlot >= iBestPlot:
BestPlot = pPlot
iBestPlot = iCurrentPlot
return BestPlot
def getRandomNumber(iMax):
    return CyGame().getSorenRandNum(iMax, "Gods")
| [
"raxo2222@8bbd16b5-4c62-4656-ae41-5efa6c748c97"
] | raxo2222@8bbd16b5-4c62-4656-ae41-5efa6c748c97 |
f7e3e4d0eb43e1b66081962b0ee6cdd9d6a3694b | 39c80306080defbde999f1af05ae5993f22d7fd7 | /oxford_astrazeneca/tests/q_calc_efficiency.py | 07076b1e2e93744fc876d8399910625d47330256 | [] | no_license | uob-cfd/spe | 47931d724792fbe812de49ac489a7e88cca52e1d | f7c76b766bffec71b80febd0dbc79e12aec3a11c | refs/heads/master | 2023-02-04T20:45:44.411481 | 2020-12-27T19:03:52 | 2020-12-27T19:03:52 | 321,508,088 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,259 | py | test = {
'name': 'Question calc_efficiency',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> # You need to define the function 'calc_efficiency'
>>> 'calc_efficiency' in vars()
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> # calc_efficiency should be a function.
>>> callable(calc_efficiency)
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> # Oops, have you deleted 'ox_vax'?
>>> 'ox_vax' in vars()
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> # Oops, have you deleted 'vax_eff'?
>>> 'vax_eff' in vars()
True
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> calc_efficiency(ox_vax) == vax_eff
True
""",
'hidden': False,
'locked': False
},
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'
}
]
}
| [
"[email protected]"
] | |
fbbbc52eab2329bda34461328893fba1754e20a0 | 70934fe6d0feed93994a98650a543832897e69ae | /sinaweibo.bak/weibo_factory.py | 95cfe4e1548a4d14a120107301c0a8c022bd7623 | [] | no_license | zy-sunshine/sunblackshineblog | d1d3f0f69d6b8c006e70a0601bc0d520ec5002bb | ea7d1e2f8477a238501cecf8e63fd3d7a2a945c9 | refs/heads/master | 2021-01-01T16:19:52.954701 | 2011-10-29T17:12:59 | 2011-10-29T17:12:59 | 2,570,586 | 1 | 0 | null | null | null | null | GB18030 | Python | false | false | 1,878 | py | #encoding=utf8
#file:weibo_factory.py
#@author:carlos
#@date:2011-2-13
#@link:tieniuzai.com
from weibopy.auth import BasicAuthHandler
from weibopy.api import API
class SinaWeibo:
def __init__(self,username,password):
self.username = username
self.password = password
        self.source = "app key"  # obtained after registering a Sina Weibo developer account and creating an app
def getAtt(self, key):
try:
return self.obj.__getattribute__(key)
except Exception, e:
print e
return ''
def getAttValue(self, obj, key):
try:
return obj.__getattribute__(key)
except Exception, e:
print e
return ''
def basicAuth(self):
source = self.source
self.auth = BasicAuthHandler(self.username, self.password)
self.api = API(self.auth,source=source)
def parse_timeline(self,timeline):
result = []
for line in timeline:
self.obj = line
item ={}
user = self.getAtt("user")
item['mid'] = self.getAtt("id")
item['text'] = self.getAtt("text")
item['pic'] = self.getAtt("thumbnail_pic")
item['author_name']= user.screen_name
item['author_id'] = user.id
item['author_domain'] = user.domain
item['author_profile_image']= user.profile_image_url
item['created_at'] = self.getAtt('created_at')
item['source'] = self.getAtt('source')
item['retweeted_status'] = self.getAtt('retweeted_status')
result.append(item)
return result
def get_myself(self):
myself = self.api.get_user(id=1649938837)
#myself = self.api.get_user(user_id=self.auth.get_username)
self.obj = myself
user={}
user['profile_image_url'] = self.getAtt('profile_image_url')
user['name']=self.getAtt("screen_name")
user['description']=self.getAtt("description")
use = self.auth.get_username()
return user
def user_timeline(self):
timeline = self.api.user_timeline(count=10, page=1)
result = self.parse_timeline(timeline)
return result
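# Example usage (sketch; the credentials and app key are placeholders):
#   wb = SinaWeibo('username', 'password')
#   wb.basicAuth()
#   for status in wb.user_timeline():
#       print status['author_name'], status['text']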
| [
"[email protected]"
] | |
71cf86c96f1ba847636eea61053d0a0c4d035bb1 | 5e6d8b9989247801718dd1f10009f0f7f54c1eb4 | /sdk/python/pulumi_azure_native/desktopvirtualization/v20201102preview/host_pool.py | c86dab27732d65d459e1b272f33ec2f1fec05f50 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | vivimouret29/pulumi-azure-native | d238a8f91688c9bf09d745a7280b9bf2dd6d44e0 | 1cbd988bcb2aa75a83e220cb5abeb805d6484fce | refs/heads/master | 2023-08-26T05:50:40.560691 | 2021-10-21T09:25:07 | 2021-10-21T09:25:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 36,255 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['HostPoolArgs', 'HostPool']
@pulumi.input_type
class HostPoolArgs:
def __init__(__self__, *,
host_pool_type: pulumi.Input[Union[str, 'HostPoolType']],
load_balancer_type: pulumi.Input[Union[str, 'LoadBalancerType']],
preferred_app_group_type: pulumi.Input[Union[str, 'PreferredAppGroupType']],
resource_group_name: pulumi.Input[str],
custom_rdp_property: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
friendly_name: Optional[pulumi.Input[str]] = None,
host_pool_name: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
max_session_limit: Optional[pulumi.Input[int]] = None,
personal_desktop_assignment_type: Optional[pulumi.Input[Union[str, 'PersonalDesktopAssignmentType']]] = None,
registration_info: Optional[pulumi.Input['RegistrationInfoArgs']] = None,
ring: Optional[pulumi.Input[int]] = None,
sso_client_id: Optional[pulumi.Input[str]] = None,
sso_client_secret_key_vault_path: Optional[pulumi.Input[str]] = None,
sso_context: Optional[pulumi.Input[str]] = None,
sso_secret_type: Optional[pulumi.Input[Union[str, 'SSOSecretType']]] = None,
ssoadfs_authority: Optional[pulumi.Input[str]] = None,
start_vm_on_connect: Optional[pulumi.Input[bool]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
validation_environment: Optional[pulumi.Input[bool]] = None,
vm_template: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a HostPool resource.
:param pulumi.Input[Union[str, 'HostPoolType']] host_pool_type: HostPool type for desktop.
:param pulumi.Input[Union[str, 'LoadBalancerType']] load_balancer_type: The type of the load balancer.
:param pulumi.Input[Union[str, 'PreferredAppGroupType']] preferred_app_group_type: The type of preferred application group type, default to Desktop Application Group
:param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
:param pulumi.Input[str] custom_rdp_property: Custom rdp property of HostPool.
:param pulumi.Input[str] description: Description of HostPool.
:param pulumi.Input[str] friendly_name: Friendly name of HostPool.
:param pulumi.Input[str] host_pool_name: The name of the host pool within the specified resource group
:param pulumi.Input[str] location: The geo-location where the resource lives
:param pulumi.Input[int] max_session_limit: The max session limit of HostPool.
:param pulumi.Input[Union[str, 'PersonalDesktopAssignmentType']] personal_desktop_assignment_type: PersonalDesktopAssignment type for HostPool.
:param pulumi.Input['RegistrationInfoArgs'] registration_info: The registration info of HostPool.
:param pulumi.Input[int] ring: The ring number of HostPool.
:param pulumi.Input[str] sso_client_id: ClientId for the registered Relying Party used to issue WVD SSO certificates.
:param pulumi.Input[str] sso_client_secret_key_vault_path: Path to Azure KeyVault storing the secret used for communication to ADFS.
:param pulumi.Input[str] sso_context: Path to keyvault containing ssoContext secret.
:param pulumi.Input[Union[str, 'SSOSecretType']] sso_secret_type: The type of single sign on Secret Type.
:param pulumi.Input[str] ssoadfs_authority: URL to customer ADFS server for signing WVD SSO certificates.
:param pulumi.Input[bool] start_vm_on_connect: The flag to turn on/off StartVMOnConnect feature.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input[bool] validation_environment: Is validation environment.
:param pulumi.Input[str] vm_template: VM template for sessionhosts configuration within hostpool.
"""
pulumi.set(__self__, "host_pool_type", host_pool_type)
pulumi.set(__self__, "load_balancer_type", load_balancer_type)
pulumi.set(__self__, "preferred_app_group_type", preferred_app_group_type)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if custom_rdp_property is not None:
pulumi.set(__self__, "custom_rdp_property", custom_rdp_property)
if description is not None:
pulumi.set(__self__, "description", description)
if friendly_name is not None:
pulumi.set(__self__, "friendly_name", friendly_name)
if host_pool_name is not None:
pulumi.set(__self__, "host_pool_name", host_pool_name)
if location is not None:
pulumi.set(__self__, "location", location)
if max_session_limit is not None:
pulumi.set(__self__, "max_session_limit", max_session_limit)
if personal_desktop_assignment_type is not None:
pulumi.set(__self__, "personal_desktop_assignment_type", personal_desktop_assignment_type)
if registration_info is not None:
pulumi.set(__self__, "registration_info", registration_info)
if ring is not None:
pulumi.set(__self__, "ring", ring)
if sso_client_id is not None:
pulumi.set(__self__, "sso_client_id", sso_client_id)
if sso_client_secret_key_vault_path is not None:
pulumi.set(__self__, "sso_client_secret_key_vault_path", sso_client_secret_key_vault_path)
if sso_context is not None:
pulumi.set(__self__, "sso_context", sso_context)
if sso_secret_type is not None:
pulumi.set(__self__, "sso_secret_type", sso_secret_type)
if ssoadfs_authority is not None:
pulumi.set(__self__, "ssoadfs_authority", ssoadfs_authority)
if start_vm_on_connect is not None:
pulumi.set(__self__, "start_vm_on_connect", start_vm_on_connect)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if validation_environment is not None:
pulumi.set(__self__, "validation_environment", validation_environment)
if vm_template is not None:
pulumi.set(__self__, "vm_template", vm_template)
@property
@pulumi.getter(name="hostPoolType")
def host_pool_type(self) -> pulumi.Input[Union[str, 'HostPoolType']]:
"""
HostPool type for desktop.
"""
return pulumi.get(self, "host_pool_type")
@host_pool_type.setter
def host_pool_type(self, value: pulumi.Input[Union[str, 'HostPoolType']]):
pulumi.set(self, "host_pool_type", value)
@property
@pulumi.getter(name="loadBalancerType")
def load_balancer_type(self) -> pulumi.Input[Union[str, 'LoadBalancerType']]:
"""
The type of the load balancer.
"""
return pulumi.get(self, "load_balancer_type")
@load_balancer_type.setter
def load_balancer_type(self, value: pulumi.Input[Union[str, 'LoadBalancerType']]):
pulumi.set(self, "load_balancer_type", value)
@property
@pulumi.getter(name="preferredAppGroupType")
def preferred_app_group_type(self) -> pulumi.Input[Union[str, 'PreferredAppGroupType']]:
"""
The type of preferred application group type, default to Desktop Application Group
"""
return pulumi.get(self, "preferred_app_group_type")
@preferred_app_group_type.setter
def preferred_app_group_type(self, value: pulumi.Input[Union[str, 'PreferredAppGroupType']]):
pulumi.set(self, "preferred_app_group_type", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group. The name is case insensitive.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="customRdpProperty")
def custom_rdp_property(self) -> Optional[pulumi.Input[str]]:
"""
Custom rdp property of HostPool.
"""
return pulumi.get(self, "custom_rdp_property")
@custom_rdp_property.setter
def custom_rdp_property(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "custom_rdp_property", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Description of HostPool.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter(name="friendlyName")
def friendly_name(self) -> Optional[pulumi.Input[str]]:
"""
Friendly name of HostPool.
"""
return pulumi.get(self, "friendly_name")
@friendly_name.setter
def friendly_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "friendly_name", value)
@property
@pulumi.getter(name="hostPoolName")
def host_pool_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the host pool within the specified resource group
"""
return pulumi.get(self, "host_pool_name")
@host_pool_name.setter
def host_pool_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "host_pool_name", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The geo-location where the resource lives
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="maxSessionLimit")
def max_session_limit(self) -> Optional[pulumi.Input[int]]:
"""
The max session limit of HostPool.
"""
return pulumi.get(self, "max_session_limit")
@max_session_limit.setter
def max_session_limit(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "max_session_limit", value)
@property
@pulumi.getter(name="personalDesktopAssignmentType")
def personal_desktop_assignment_type(self) -> Optional[pulumi.Input[Union[str, 'PersonalDesktopAssignmentType']]]:
"""
PersonalDesktopAssignment type for HostPool.
"""
return pulumi.get(self, "personal_desktop_assignment_type")
@personal_desktop_assignment_type.setter
def personal_desktop_assignment_type(self, value: Optional[pulumi.Input[Union[str, 'PersonalDesktopAssignmentType']]]):
pulumi.set(self, "personal_desktop_assignment_type", value)
@property
@pulumi.getter(name="registrationInfo")
def registration_info(self) -> Optional[pulumi.Input['RegistrationInfoArgs']]:
"""
The registration info of HostPool.
"""
return pulumi.get(self, "registration_info")
@registration_info.setter
def registration_info(self, value: Optional[pulumi.Input['RegistrationInfoArgs']]):
pulumi.set(self, "registration_info", value)
@property
@pulumi.getter
def ring(self) -> Optional[pulumi.Input[int]]:
"""
The ring number of HostPool.
"""
return pulumi.get(self, "ring")
@ring.setter
def ring(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "ring", value)
@property
@pulumi.getter(name="ssoClientId")
def sso_client_id(self) -> Optional[pulumi.Input[str]]:
"""
ClientId for the registered Relying Party used to issue WVD SSO certificates.
"""
return pulumi.get(self, "sso_client_id")
@sso_client_id.setter
def sso_client_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sso_client_id", value)
@property
@pulumi.getter(name="ssoClientSecretKeyVaultPath")
def sso_client_secret_key_vault_path(self) -> Optional[pulumi.Input[str]]:
"""
Path to Azure KeyVault storing the secret used for communication to ADFS.
"""
return pulumi.get(self, "sso_client_secret_key_vault_path")
@sso_client_secret_key_vault_path.setter
def sso_client_secret_key_vault_path(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sso_client_secret_key_vault_path", value)
@property
@pulumi.getter(name="ssoContext")
def sso_context(self) -> Optional[pulumi.Input[str]]:
"""
Path to keyvault containing ssoContext secret.
"""
return pulumi.get(self, "sso_context")
@sso_context.setter
def sso_context(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "sso_context", value)
@property
@pulumi.getter(name="ssoSecretType")
def sso_secret_type(self) -> Optional[pulumi.Input[Union[str, 'SSOSecretType']]]:
"""
The type of single sign on Secret Type.
"""
return pulumi.get(self, "sso_secret_type")
@sso_secret_type.setter
def sso_secret_type(self, value: Optional[pulumi.Input[Union[str, 'SSOSecretType']]]):
pulumi.set(self, "sso_secret_type", value)
@property
@pulumi.getter(name="ssoadfsAuthority")
def ssoadfs_authority(self) -> Optional[pulumi.Input[str]]:
"""
URL to customer ADFS server for signing WVD SSO certificates.
"""
return pulumi.get(self, "ssoadfs_authority")
@ssoadfs_authority.setter
def ssoadfs_authority(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ssoadfs_authority", value)
@property
@pulumi.getter(name="startVMOnConnect")
def start_vm_on_connect(self) -> Optional[pulumi.Input[bool]]:
"""
The flag to turn on/off StartVMOnConnect feature.
"""
return pulumi.get(self, "start_vm_on_connect")
@start_vm_on_connect.setter
def start_vm_on_connect(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "start_vm_on_connect", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="validationEnvironment")
def validation_environment(self) -> Optional[pulumi.Input[bool]]:
"""
Is validation environment.
"""
return pulumi.get(self, "validation_environment")
@validation_environment.setter
def validation_environment(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "validation_environment", value)
@property
@pulumi.getter(name="vmTemplate")
def vm_template(self) -> Optional[pulumi.Input[str]]:
"""
VM template for sessionhosts configuration within hostpool.
"""
return pulumi.get(self, "vm_template")
@vm_template.setter
def vm_template(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "vm_template", value)
class HostPool(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
custom_rdp_property: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
friendly_name: Optional[pulumi.Input[str]] = None,
host_pool_name: Optional[pulumi.Input[str]] = None,
host_pool_type: Optional[pulumi.Input[Union[str, 'HostPoolType']]] = None,
load_balancer_type: Optional[pulumi.Input[Union[str, 'LoadBalancerType']]] = None,
location: Optional[pulumi.Input[str]] = None,
max_session_limit: Optional[pulumi.Input[int]] = None,
personal_desktop_assignment_type: Optional[pulumi.Input[Union[str, 'PersonalDesktopAssignmentType']]] = None,
preferred_app_group_type: Optional[pulumi.Input[Union[str, 'PreferredAppGroupType']]] = None,
registration_info: Optional[pulumi.Input[pulumi.InputType['RegistrationInfoArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
ring: Optional[pulumi.Input[int]] = None,
sso_client_id: Optional[pulumi.Input[str]] = None,
sso_client_secret_key_vault_path: Optional[pulumi.Input[str]] = None,
sso_context: Optional[pulumi.Input[str]] = None,
sso_secret_type: Optional[pulumi.Input[Union[str, 'SSOSecretType']]] = None,
ssoadfs_authority: Optional[pulumi.Input[str]] = None,
start_vm_on_connect: Optional[pulumi.Input[bool]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
validation_environment: Optional[pulumi.Input[bool]] = None,
vm_template: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Represents a HostPool definition.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] custom_rdp_property: Custom rdp property of HostPool.
:param pulumi.Input[str] description: Description of HostPool.
:param pulumi.Input[str] friendly_name: Friendly name of HostPool.
:param pulumi.Input[str] host_pool_name: The name of the host pool within the specified resource group
:param pulumi.Input[Union[str, 'HostPoolType']] host_pool_type: HostPool type for desktop.
:param pulumi.Input[Union[str, 'LoadBalancerType']] load_balancer_type: The type of the load balancer.
:param pulumi.Input[str] location: The geo-location where the resource lives
:param pulumi.Input[int] max_session_limit: The max session limit of HostPool.
:param pulumi.Input[Union[str, 'PersonalDesktopAssignmentType']] personal_desktop_assignment_type: PersonalDesktopAssignment type for HostPool.
:param pulumi.Input[Union[str, 'PreferredAppGroupType']] preferred_app_group_type: The type of preferred application group type, default to Desktop Application Group
:param pulumi.Input[pulumi.InputType['RegistrationInfoArgs']] registration_info: The registration info of HostPool.
:param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
:param pulumi.Input[int] ring: The ring number of HostPool.
:param pulumi.Input[str] sso_client_id: ClientId for the registered Relying Party used to issue WVD SSO certificates.
:param pulumi.Input[str] sso_client_secret_key_vault_path: Path to Azure KeyVault storing the secret used for communication to ADFS.
:param pulumi.Input[str] sso_context: Path to keyvault containing ssoContext secret.
:param pulumi.Input[Union[str, 'SSOSecretType']] sso_secret_type: The type of single sign on Secret Type.
:param pulumi.Input[str] ssoadfs_authority: URL to customer ADFS server for signing WVD SSO certificates.
:param pulumi.Input[bool] start_vm_on_connect: The flag to turn on/off StartVMOnConnect feature.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
:param pulumi.Input[bool] validation_environment: Is validation environment.
:param pulumi.Input[str] vm_template: VM template for sessionhosts configuration within hostpool.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: HostPoolArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Represents a HostPool definition.
:param str resource_name: The name of the resource.
:param HostPoolArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(HostPoolArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
custom_rdp_property: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
friendly_name: Optional[pulumi.Input[str]] = None,
host_pool_name: Optional[pulumi.Input[str]] = None,
host_pool_type: Optional[pulumi.Input[Union[str, 'HostPoolType']]] = None,
load_balancer_type: Optional[pulumi.Input[Union[str, 'LoadBalancerType']]] = None,
location: Optional[pulumi.Input[str]] = None,
max_session_limit: Optional[pulumi.Input[int]] = None,
personal_desktop_assignment_type: Optional[pulumi.Input[Union[str, 'PersonalDesktopAssignmentType']]] = None,
preferred_app_group_type: Optional[pulumi.Input[Union[str, 'PreferredAppGroupType']]] = None,
registration_info: Optional[pulumi.Input[pulumi.InputType['RegistrationInfoArgs']]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
ring: Optional[pulumi.Input[int]] = None,
sso_client_id: Optional[pulumi.Input[str]] = None,
sso_client_secret_key_vault_path: Optional[pulumi.Input[str]] = None,
sso_context: Optional[pulumi.Input[str]] = None,
sso_secret_type: Optional[pulumi.Input[Union[str, 'SSOSecretType']]] = None,
ssoadfs_authority: Optional[pulumi.Input[str]] = None,
start_vm_on_connect: Optional[pulumi.Input[bool]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
validation_environment: Optional[pulumi.Input[bool]] = None,
vm_template: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = HostPoolArgs.__new__(HostPoolArgs)
__props__.__dict__["custom_rdp_property"] = custom_rdp_property
__props__.__dict__["description"] = description
__props__.__dict__["friendly_name"] = friendly_name
__props__.__dict__["host_pool_name"] = host_pool_name
if host_pool_type is None and not opts.urn:
raise TypeError("Missing required property 'host_pool_type'")
__props__.__dict__["host_pool_type"] = host_pool_type
if load_balancer_type is None and not opts.urn:
raise TypeError("Missing required property 'load_balancer_type'")
__props__.__dict__["load_balancer_type"] = load_balancer_type
__props__.__dict__["location"] = location
__props__.__dict__["max_session_limit"] = max_session_limit
__props__.__dict__["personal_desktop_assignment_type"] = personal_desktop_assignment_type
if preferred_app_group_type is None and not opts.urn:
raise TypeError("Missing required property 'preferred_app_group_type'")
__props__.__dict__["preferred_app_group_type"] = preferred_app_group_type
__props__.__dict__["registration_info"] = registration_info
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["ring"] = ring
__props__.__dict__["sso_client_id"] = sso_client_id
__props__.__dict__["sso_client_secret_key_vault_path"] = sso_client_secret_key_vault_path
__props__.__dict__["sso_context"] = sso_context
__props__.__dict__["sso_secret_type"] = sso_secret_type
__props__.__dict__["ssoadfs_authority"] = ssoadfs_authority
__props__.__dict__["start_vm_on_connect"] = start_vm_on_connect
__props__.__dict__["tags"] = tags
__props__.__dict__["validation_environment"] = validation_environment
__props__.__dict__["vm_template"] = vm_template
__props__.__dict__["application_group_references"] = None
__props__.__dict__["name"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20201102preview:HostPool"), pulumi.Alias(type_="azure-native:desktopvirtualization:HostPool"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization:HostPool"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20190123preview:HostPool"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20190123preview:HostPool"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20190924preview:HostPool"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20190924preview:HostPool"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20191210preview:HostPool"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20191210preview:HostPool"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20200921preview:HostPool"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20200921preview:HostPool"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20201019preview:HostPool"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20201019preview:HostPool"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20201110preview:HostPool"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20201110preview:HostPool"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20210114preview:HostPool"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20210114preview:HostPool"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20210201preview:HostPool"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20210201preview:HostPool"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20210309preview:HostPool"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20210309preview:HostPool"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20210401preview:HostPool"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20210401preview:HostPool"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20210712:HostPool"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20210712:HostPool"), pulumi.Alias(type_="azure-native:desktopvirtualization/v20210903preview:HostPool"), pulumi.Alias(type_="azure-nextgen:desktopvirtualization/v20210903preview:HostPool")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(HostPool, __self__).__init__(
'azure-native:desktopvirtualization/v20201102preview:HostPool',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'HostPool':
"""
Get an existing HostPool resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = HostPoolArgs.__new__(HostPoolArgs)
__props__.__dict__["application_group_references"] = None
__props__.__dict__["custom_rdp_property"] = None
__props__.__dict__["description"] = None
__props__.__dict__["friendly_name"] = None
__props__.__dict__["host_pool_type"] = None
__props__.__dict__["load_balancer_type"] = None
__props__.__dict__["location"] = None
__props__.__dict__["max_session_limit"] = None
__props__.__dict__["name"] = None
__props__.__dict__["personal_desktop_assignment_type"] = None
__props__.__dict__["preferred_app_group_type"] = None
__props__.__dict__["registration_info"] = None
__props__.__dict__["ring"] = None
__props__.__dict__["sso_client_id"] = None
__props__.__dict__["sso_client_secret_key_vault_path"] = None
__props__.__dict__["sso_context"] = None
__props__.__dict__["sso_secret_type"] = None
__props__.__dict__["ssoadfs_authority"] = None
__props__.__dict__["start_vm_on_connect"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["type"] = None
__props__.__dict__["validation_environment"] = None
__props__.__dict__["vm_template"] = None
return HostPool(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="applicationGroupReferences")
def application_group_references(self) -> pulumi.Output[Sequence[str]]:
"""
List of applicationGroup links.
"""
return pulumi.get(self, "application_group_references")
@property
@pulumi.getter(name="customRdpProperty")
def custom_rdp_property(self) -> pulumi.Output[Optional[str]]:
"""
Custom rdp property of HostPool.
"""
return pulumi.get(self, "custom_rdp_property")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
Description of HostPool.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="friendlyName")
def friendly_name(self) -> pulumi.Output[Optional[str]]:
"""
Friendly name of HostPool.
"""
return pulumi.get(self, "friendly_name")
@property
@pulumi.getter(name="hostPoolType")
def host_pool_type(self) -> pulumi.Output[str]:
"""
HostPool type for desktop.
"""
return pulumi.get(self, "host_pool_type")
@property
@pulumi.getter(name="loadBalancerType")
def load_balancer_type(self) -> pulumi.Output[str]:
"""
The type of the load balancer.
"""
return pulumi.get(self, "load_balancer_type")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
The geo-location where the resource lives
"""
return pulumi.get(self, "location")
@property
@pulumi.getter(name="maxSessionLimit")
def max_session_limit(self) -> pulumi.Output[Optional[int]]:
"""
The max session limit of HostPool.
"""
return pulumi.get(self, "max_session_limit")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="personalDesktopAssignmentType")
def personal_desktop_assignment_type(self) -> pulumi.Output[Optional[str]]:
"""
PersonalDesktopAssignment type for HostPool.
"""
return pulumi.get(self, "personal_desktop_assignment_type")
@property
@pulumi.getter(name="preferredAppGroupType")
def preferred_app_group_type(self) -> pulumi.Output[str]:
"""
The type of preferred application group type, default to Desktop Application Group
"""
return pulumi.get(self, "preferred_app_group_type")
@property
@pulumi.getter(name="registrationInfo")
def registration_info(self) -> pulumi.Output[Optional['outputs.RegistrationInfoResponse']]:
"""
The registration info of HostPool.
"""
return pulumi.get(self, "registration_info")
@property
@pulumi.getter
def ring(self) -> pulumi.Output[Optional[int]]:
"""
The ring number of HostPool.
"""
return pulumi.get(self, "ring")
@property
@pulumi.getter(name="ssoClientId")
def sso_client_id(self) -> pulumi.Output[Optional[str]]:
"""
ClientId for the registered Relying Party used to issue WVD SSO certificates.
"""
return pulumi.get(self, "sso_client_id")
@property
@pulumi.getter(name="ssoClientSecretKeyVaultPath")
def sso_client_secret_key_vault_path(self) -> pulumi.Output[Optional[str]]:
"""
Path to Azure KeyVault storing the secret used for communication to ADFS.
"""
return pulumi.get(self, "sso_client_secret_key_vault_path")
@property
@pulumi.getter(name="ssoContext")
def sso_context(self) -> pulumi.Output[Optional[str]]:
"""
Path to keyvault containing ssoContext secret.
"""
return pulumi.get(self, "sso_context")
@property
@pulumi.getter(name="ssoSecretType")
def sso_secret_type(self) -> pulumi.Output[Optional[str]]:
"""
The type of single sign on Secret Type.
"""
return pulumi.get(self, "sso_secret_type")
@property
@pulumi.getter(name="ssoadfsAuthority")
def ssoadfs_authority(self) -> pulumi.Output[Optional[str]]:
"""
URL to customer ADFS server for signing WVD SSO certificates.
"""
return pulumi.get(self, "ssoadfs_authority")
@property
@pulumi.getter(name="startVMOnConnect")
def start_vm_on_connect(self) -> pulumi.Output[Optional[bool]]:
"""
The flag to turn on/off StartVMOnConnect feature.
"""
return pulumi.get(self, "start_vm_on_connect")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="validationEnvironment")
def validation_environment(self) -> pulumi.Output[Optional[bool]]:
"""
Is validation environment.
"""
return pulumi.get(self, "validation_environment")
@property
@pulumi.getter(name="vmTemplate")
def vm_template(self) -> pulumi.Output[Optional[str]]:
"""
VM template for sessionhosts configuration within hostpool.
"""
return pulumi.get(self, "vm_template")
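# Minimal provisioning sketch (commented out; resource and argument values
# below are illustrative placeholders, not taken from this module):
#
#   pool = HostPool("examplehostpool",
#       resource_group_name="example-rg",
#       host_pool_type="Pooled",
#       load_balancer_type="BreadthFirst",
#       preferred_app_group_type="Desktop")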
| [
"[email protected]"
] | |
1b167d908b1dbbae705befe87ecfb250c84f4b2a | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_14606.py | 992a0ac036c6de7ae67a625b61486cbe6bb0fc17 | [] | no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 73 | py | # keep a Python script running even when the user tries to exit out
# .pyw  (save the script with the .pyw extension; pythonw.exe then runs it
# without a console window on Windows)
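# A console script can also be kept open explicitly until the user dismisses
# it - a common pattern (sketch, not part of the original answer):
if __name__ == "__main__":
    input("Press Enter to exit...")  # raw_input() on Python 2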
| [
"[email protected]"
] | |
918bf5948ed9490633028cdeed9ea000c19a2374 | 560af8e32aa77bfb6c5d837e93d0dc2dd7c9142c | /client_project/wsgi.py | fec9bea087df286ce54366f90bbc4c61b4f9094f | [] | no_license | anirudhasj441/Fake-Api | c7a4aef6bf9eadc16709fe10f4cd3b526664cd4e | 86b6e496cbecf314ef6e6366a84b8f93ce7c775b | refs/heads/master | 2023-06-26T00:00:00.914657 | 2021-07-29T06:35:39 | 2021-07-29T06:35:39 | 390,625,402 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | """
WSGI config for client_project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'client_project.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
e4a88b54323db57cc2d1b09b8b6560d91291a6cd | 0bfb55b41282803db96b90e7bba73d86be7e8553 | /administration/migrations/0011_auto_20161109_1845.py | fad5aaa70cdd3596ffe76dba25b75bc51d8583f1 | [
"MIT"
] | permissive | OpenFurry/honeycomb | eebf2272f8ae95eb686ad129555dbebcf1adcd63 | c34eeaf22048948fedcae860db7c25d41b51ff48 | refs/heads/master | 2021-01-11T01:52:40.978564 | 2016-12-29T18:08:38 | 2016-12-29T18:08:38 | 70,649,821 | 2 | 2 | null | 2016-12-29T18:08:39 | 2016-10-12T01:22:38 | Python | UTF-8 | Python | false | false | 465 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-09 18:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('administration', '0010_auto_20161109_0552'),
]
operations = [
migrations.AlterField(
model_name='ban',
name='end_date',
field=models.DateField(blank=True, null=True),
),
]
| [
"[email protected]"
] | |
b0173056cf09e20ef265ae0bf916c84bfd972b1a | 86cd22354f2431087c9b3ff06188f071afb3eb72 | /383. Ransom Note.py | cf3b994fb40efd594dc1c7d269120b8b5583c390 | [] | no_license | tlxxzj/leetcode | 0c072a74d7e61ef4700388122f2270e46c4ac22e | 06dbf4f5b505a6a41e0d93367eedd231b611a84b | refs/heads/master | 2023-08-31T11:04:34.585532 | 2023-08-31T08:25:51 | 2023-08-31T08:25:51 | 94,386,828 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 408 | py | class Solution:
def canConstruct(self, ransomNote: str, magazine: str) -> bool:
letters = {}
for c in magazine:
if c in letters:
letters[c] += 1
else:
letters[c] = 1
for c in ransomNote:
if letters.get(c, 0) == 0:
return False
else:
letters[c] -= 1
return True | [
"[email protected]"
] | |
b307447339363ba5a9bc02068f4df4126d5e6527 | bb824786f095fbf6e6cf627ef9c04afdb5152477 | /apps/pages/migrations/0013_partnerspage.py | e96243788314b36b0fda1ef2712514fcbd92c772 | [] | no_license | Emilnurg/anas.ru | 19000063c7db98c15261056bb9590382db362d42 | 20cee1aee02da192c9c79a51bd0898c1dba0c98f | refs/heads/master | 2021-05-20T12:09:08.155749 | 2017-12-26T13:49:12 | 2017-12-26T13:49:12 | 252,287,670 | 0 | 0 | null | 2021-03-31T19:34:29 | 2020-04-01T21:10:48 | JavaScript | UTF-8 | Python | false | false | 6,865 | py | # -*- coding: utf-8 -*-
# flake8: noqa
# Generated by Django 1.10.7 on 2017-06-08 15:27
from __future__ import unicode_literals
import ckeditor_uploader.fields
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('pages', '0012_auto_20170531_1612'),
]
operations = [
migrations.CreateModel(
name='PartnersPage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255, verbose_name='Заголовок страницы')),
('title_ru', models.CharField(max_length=255, null=True, verbose_name='Заголовок страницы')),
('title_en', models.CharField(max_length=255, null=True, verbose_name='Заголовок страницы')),
('title_fr', models.CharField(max_length=255, null=True, verbose_name='Заголовок страницы')),
('subtitle', ckeditor_uploader.fields.RichTextUploadingField(blank=True, max_length=4096, null=True, verbose_name='Подзаголовок страницы')),
('subtitle_ru', ckeditor_uploader.fields.RichTextUploadingField(blank=True, max_length=4096, null=True, verbose_name='Подзаголовок страницы')),
('subtitle_en', ckeditor_uploader.fields.RichTextUploadingField(blank=True, max_length=4096, null=True, verbose_name='Подзаголовок страницы')),
('subtitle_fr', ckeditor_uploader.fields.RichTextUploadingField(blank=True, max_length=4096, null=True, verbose_name='Подзаголовок страницы')),
('howto_title', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Как стать дилером?"')),
('howto_title_ru', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Как стать дилером?"')),
('howto_title_en', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Как стать дилером?"')),
('howto_title_fr', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Как стать дилером?"')),
('howto_subtitle', models.CharField(blank=True, max_length=255, null=True, verbose_name='Подзаголовок блока "Как стать дилером?"')),
('howto_subtitle_ru', models.CharField(blank=True, max_length=255, null=True, verbose_name='Подзаголовок блока "Как стать дилером?"')),
('howto_subtitle_en', models.CharField(blank=True, max_length=255, null=True, verbose_name='Подзаголовок блока "Как стать дилером?"')),
('howto_subtitle_fr', models.CharField(blank=True, max_length=255, null=True, verbose_name='Подзаголовок блока "Как стать дилером?"')),
('howto_body', ckeditor_uploader.fields.RichTextUploadingField(blank=True, verbose_name='Контент блока "Как стать дилером?"')),
('howto_body_ru', ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name='Контент блока "Как стать дилером?"')),
('howto_body_en', ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name='Контент блока "Как стать дилером?"')),
('howto_body_fr', ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True, verbose_name='Контент блока "Как стать дилером?"')),
('howto_button_caption', models.CharField(blank=True, max_length=50, null=True, verbose_name='Текст кнопки блока "Как стать дилером?"')),
('howto_button_caption_ru', models.CharField(blank=True, max_length=50, null=True, verbose_name='Текст кнопки блока "Как стать дилером?"')),
('howto_button_caption_en', models.CharField(blank=True, max_length=50, null=True, verbose_name='Текст кнопки блока "Как стать дилером?"')),
('howto_button_caption_fr', models.CharField(blank=True, max_length=50, null=True, verbose_name='Текст кнопки блока "Как стать дилером?"')),
('questions_title_left', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы? (слева)"')),
('questions_title_left_ru', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы? (слева)"')),
('questions_title_left_en', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы? (слева)"')),
('questions_title_left_fr', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы? (слева)"')),
('questions_title', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы?"')),
('questions_title_ru', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы?"')),
('questions_title_en', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы?"')),
('questions_title_fr', models.CharField(blank=True, max_length=255, null=True, verbose_name='Заголовок блока "Есть вопросы?"')),
('questions_subtitle', models.TextField(blank=True, null=True, verbose_name='Подзаголовок блока "Есть вопросы?"')),
('questions_subtitle_ru', models.TextField(blank=True, null=True, verbose_name='Подзаголовок блока "Есть вопросы?"')),
('questions_subtitle_en', models.TextField(blank=True, null=True, verbose_name='Подзаголовок блока "Есть вопросы?"')),
('questions_subtitle_fr', models.TextField(blank=True, null=True, verbose_name='Подзаголовок блока "Есть вопросы?"')),
],
options={
'verbose_name': 'Страница "Дилеры"',
},
),
]
| [
"[email protected]"
] | |
71c44b270f1029386b8c8079cc4f51467a806a60 | 8690ca0028c54b62d68badf1753fc6151ae03525 | /Part3 Levels of Aggregation/esem_data/Act/tpot_mnist_pipeline_triangulateAggregationLevelParticipantSplitaggr_5_groups7.py | d83ecdf48b217bda826b409cdf96307576b5488a | [] | no_license | brains-on-code/conducting-and-analyzing-human-studies | fd74ee77fdc56cc61bdc1e0cf9bf423780f5dddc | 548e7443f4d2bdb2db1f2858289b7d3518593c59 | refs/heads/master | 2021-06-26T21:30:56.386121 | 2020-12-22T13:49:16 | 2020-12-22T13:49:16 | 195,975,817 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 867 | py | import numpy as np
import pandas as pd
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
# NOTE: Make sure that the class is labeled 'target' in the data file
tpot_data = pd.read_csv('PATH/TO/DATA/FILE', sep='COLUMN_SEPARATOR', dtype=np.float64)
features = tpot_data.drop('target', axis=1).values
training_features, testing_features, training_target, testing_target = \
train_test_split(features, tpot_data['target'].values, random_state=42)
# Score on the training set was:1.0
exported_pipeline = make_pipeline(
StandardScaler(),
LogisticRegression(C=0.1, dual=False, penalty="l2")
)
exported_pipeline.fit(training_features, training_target)
results = exported_pipeline.predict(testing_features)
| [
"[email protected]"
] | |
9b8b6f9bda493cd1e8800f462021606cf91863d6 | 641f76328bfeb7e54f0793a18c5b7c00595b98fd | /apps/sms/serializers.py | 4d653e2794abcb0dbb3ce297e9d2919f02b8d8f4 | [
"Apache-2.0"
] | permissive | lianxiaopang/camel-store-api | 1d16060af92eb01607757c0423377a8c94c3a726 | b8021250bf3d8cf7adc566deebdba55225148316 | refs/heads/master | 2020-12-29T13:23:18.118617 | 2020-02-09T08:38:53 | 2020-02-09T08:38:53 | 238,621,246 | 0 | 0 | Apache-2.0 | 2020-02-07T14:28:35 | 2020-02-06T06:17:47 | Python | UTF-8 | Python | false | false | 207 | py | from rest_framework import serializers
from .models import SmsRecord
class SmsRecordSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = SmsRecord
fields = '__all__' | [
"[email protected]"
] | |
b7750a59ddd97731a3db15d7ff43b162bbc4a22b | b9ca99a0244e5d5a07e0b27be8192ad01c4eda6c | /EIP/EIP系统所有流程.py | fad26b441598bfd879662694afe6d03b21d1d15c | [] | no_license | Boomshakal/spider | c3fdbf18f874ec9953509e4ce984b5476d25839f | e6779a3961f48325dd4992d88f88b8b3938225d7 | refs/heads/master | 2021-06-17T06:22:19.679444 | 2021-03-05T06:33:36 | 2021-03-05T06:33:36 | 154,489,684 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,124 | py | import requests
import json
from lxml import etree
url="http://eip.megmeet.com:8008/km/review/km_review_index/kmReviewIndex.do?"
maxpage=5
headers={
"Cookie": "j_lang=zh-CN; JSESSIONID=40ABBC9A619C5860068184B1E339BC4D",
"User-Agent":"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
}
def get_onepage(page):
data = {
"method": "list",
"q.mydoc": "all",
"q.j_path": "/listAll",
"q.s_raq": "0.2347883935715236",
"pageno": page,
"rowsize": "30",
"orderby": "docCreateTime",
"ordertype": "down",
"s_ajax": "true"
}
text=requests.get(url,headers=headers,params=data).text
#print(type(text),text)
jsons=json.loads(text)
results=jsons.get('datas')
for result in results:
html=result[1]['value']
html=etree.HTML(html)
title=html.xpath('//span/text()')
print(title)
#print(html)
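# Assumed response shape, inferred from the parsing above (not verified):
#   {"datas": [[..., {"value": "<html fragment whose <span> texts are titles>"}, ...], ...]}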
if __name__ == '__main__':
for page in range(1,maxpage+1):
get_onepage(page)
print("第{0}页加载完成!".format(page))
| [
"[email protected]"
] | |
5692e1461442776306fc415db227d24cb23bc350 | e89693a2906534fa4a9d180b404cb96751302e8c | /Timing_Panel.py | 40755d697cc5406b4f8fed9f34ce2b8208e85f42 | [] | permissive | friedrich-schotte/Lauecollect | e278e00692d109e98450c27502986673bf59db6a | acfc5afe34b4df5891a0f8186b8df76625afb51d | refs/heads/master | 2022-09-12T02:59:04.363963 | 2022-08-18T16:39:01 | 2022-08-18T16:39:01 | 186,062,944 | 0 | 2 | MIT | 2020-07-20T02:04:07 | 2019-05-10T22:42:26 | Python | UTF-8 | Python | false | false | 11,389 | py | #!/usr/bin/env python
"""
Graphical User Interface for FPGA Timing System.
Author: Friedrich Schotte
Date created: 2015-05-27
Date last modified: 2022-07-31
Revision comment: Cleanup: self.timing_system.sequencer, self.timing_system.composer
"""
__version__ = "7.3.3"
from logging import warning
from traceback import format_exc
from Panel_3 import BasePanel
from reference import reference
class Timing_Panel(BasePanel):
"""Control Panel for FPGA Timing System"""
from monitored_property import monitored_property
timing_system_name = "BioCARS"
def __init__(self, timing_system_name=None):
if timing_system_name is not None:
self.timing_system_name = timing_system_name
BasePanel.__init__(self)
icon = "Timing System"
@monitored_property
def title(self):
return "Timing System [%s]" % self.timing_system_name
@property
def name(self):
return "Timing_Panel.%s" % self.timing_system_name
label_width = 190
@property
def parameters(self):
return [
[("Delay", self.timing_system.composer, "delay", "time"), {"choices": self.delay_choices}],
[("Nom. Delay", self.timing_system.composer, "nom_delay", "time"), {"choices": self.delay_choices}],
[("Mode", self.timing_system.composer, "mode", "str"), {"choices_reference": reference(self.timing_system.composer, "modes")}],
[("Period [1-kHz cycles]", self.timing_system.composer, "trigger_period_in_1kHz_cycles", "int"), {}],
[("Detector", self.timing_system.composer, "xdet_on", "Off/On"), {}],
[("Pump (laser)", self.timing_system.composer, "laser_on", "Off/On"), {}],
[("Probe (X-Ray)", self.timing_system.composer, "ms_on", "Off/On"), {}],
[("Trans", self.timing_system.composer, "trans_on", "Off/On"), {}],
[("Circulate", self.timing_system.composer, "pump_on", "Off/On"), {}],
[("Trigger code", self.timing_system.composer, "transc", "binary"), {}],
[("Image number", self.timing_system.registers.image_number, "count", "int"), {}],
[("X-ray detector trigger count", self.timing_system.channels.xdet.trig_count, "count", "int"), {}],
[("X-ray detector acquisition count", self.timing_system.channels.xdet.acq_count, "count", "int"), {}],
[("X-ray scope trigger count", self.timing_system.channels.xosct.trig_count, "count", "int"), {}],
[("X-ray scope acquisition count", self.timing_system.channels.xosct.acq_count, "count", "int"), {}],
[("Laser scope trigger count", self.timing_system.channels.losct.trig_count, "count", "int"), {}],
[("Laser scope acquisition count", self.timing_system.channels.losct.acq_count, "count", "int"), {}],
[("Pass number", self.timing_system.registers.pass_number, "count", "int"), {}],
[("Pulses", self.timing_system.registers.pulses, "count", "int"), {}],
[("Image number increment", self.timing_system.composer, "image_number_inc", "Off/On"), {}],
[("Pass number increment", self.timing_system.composer, "pass_number_inc", "Off/On"), {}],
[("Queue active", self.timing_system.sequencer, "queue_active", "Not Active/Active"), {}],
[("Acquiring", self.timing_system.sequencer, "acquiring", "Idle/Acquiring"), {}],
[("Current queue length [seq]", self.timing_system.sequencer, "current_queue_length", "int"), {}],
[("Current queue sequence cnt", self.timing_system.sequencer, "current_queue_sequence_count", "int"), {}],
[("Current queue repeat count", self.timing_system.sequencer, "current_queue_repeat_count", "int"), {}],
[("Current queue max repeat", self.timing_system.sequencer, "current_queue_max_repeat_count", "int"), {}],
[("Default queue name", self.timing_system.sequencer, "default_queue_name", "str"), {"choices": self.queue_choices}],
[("Current queue name", self.timing_system.sequencer, "current_queue_name", "str"), {"choices": self.queue_choices}],
[("Next queue name", self.timing_system.sequencer, "next_queue_name", "str"), {"choices": self.queue_choices}],
[("Next queue sequence cnt", self.timing_system.sequencer, "next_queue_sequence_count", "int"), {}],
[("Queue length [sequences]", self.timing_system.sequencer, "queue_length", "int"), {}],
[("Queue sequence count", self.timing_system.sequencer, "queue_sequence_count", "int"), {}],
[("Queue repeat count", self.timing_system.sequencer, "queue_repeat_count", "int"), {}],
[("Queue max repeat count", self.timing_system.sequencer, "queue_max_repeat_count", "int"), {}],
[("Cache", self.timing_system.sequencer, "cache_enabled", "Disabled/Caching"), {}],
[("Generating Packets", self.timing_system.acquisition, "generating_packets", "Idle/Generating"), {}],
[("Updating Queues", self.timing_system.sequencer, "update_queues", "Idle/Updating"), {}],
[("Packets generated", self.timing_system.sequencer, "cache_size", "int"), {}],
[("Packets loaded", self.timing_system.sequencer, "remote_cache_size", "int"), {}],
[("Sequencer Configured", self.timing_system.sequencer, "configured", "Not Configured/Configured"), {}],
[("Sequencer Running", self.timing_system.sequencer, "running", "Stopped/Running"), {}],
[("Sequence generator", self.timing_system.composer, "generator", "str"), {"read_only": True}],
[("Sequence generator version", self.timing_system.composer, "generator_version", "str"), {"read_only": True}],
[("Timing sequence version", self.timing_system.composer, "timing_sequence_version", "str"), {"read_only": True}],
[("Heatload chopper phase", self.timing_system.registers.hlcnd, "value", "time.6"),
{"choices": self.hlc_choices}],
[("Heatload chop. act. phase", self.timing_system.registers.hlcad, "value", "time.6"),
{"choices": self.hlc_choices}],
[("High-speed chopper phase", self.timing_system.channels.hsc.delay, "value", "time.4"),
{"choices": self.hsc_choices}],
[("P0 shift", self.timing_system.p0_shift, "value", "time.4"), {}],
[("X-ray delay", self.timing_system.composer, "xd", "time.6"), {}],
]
standard_view = [
"Delay",
"Mode",
"Pump (laser)",
"Acquiring",
"Sequencer Running",
]
@property
def application_buttons(self):
from Panel_3 import Application_Button
from application import application
return [
Application_Button(
"Channels...",
application(f"{self.domain_name}.Timing_Channel_Configuration_Panel.Timing_Channel_Configuration_Panel('{self.domain_name}')")
),
Application_Button(
"Clock...",
application(f"{self.domain_name}.Timing_Clock_Configuration_Panel.Timing_Clock_Configuration_Panel('{self.domain_name}')")
),
Application_Button(
"Sequence...",
application(f"{self.domain_name}.Configuration_Table_Panel.Configuration_Table_Panel('{self.domain_name}.sequence_modes')")
),
Application_Button(
"PP Modes...",
application(f"{self.domain_name}.Configuration_Table_Panel.Configuration_Table_Panel('{self.domain_name}.timing_modes')")
),
]
@property
def application_menu_items(self):
from Panel_3 import Application_Menu_Item
from application import application
return [
Application_Menu_Item(
"Setup...",
application(f"{self.domain_name}.Timing_Setup_Panel.Timing_Setup_Panel('{self.domain_name}')")
),
Application_Menu_Item(
"Channel Configuration...",
application(f"{self.domain_name}.Timing_Channel_Configuration_Panel.Timing_Channel_Configuration_Panel('{self.domain_name}')")
),
Application_Menu_Item(
"Calibration...",
application(f"{self.domain_name}.Timing_Calibration_Panel.Timing_Calibration_Panel('{self.domain_name}')")
),
Application_Menu_Item(
"Clock Configuration...",
application(f"{self.domain_name}.Timing_Clock_Configuration_Panel.Timing_Clock_Configuration_Panel('{self.domain_name}')")
),
Application_Menu_Item(
"PP Modes...",
application(f"{self.domain_name}.Configuration_Table_Panel.Configuration_Table_Panel('{self.domain_name}.timing_modes')")
),
Application_Menu_Item(
"Sequence Modes...",
application(f"{self.domain_name}.Configuration_Table_Panel.Configuration_Table_Panel('{self.domain_name}.sequence_modes')")
),
Application_Menu_Item(
"Configuration...",
application(f"{self.domain_name}.Timing_Configuration_Panel.Timing_Configuration_Panel('{self.domain_name}')")
),
Application_Menu_Item(
"Delay Scan...",
application(f"{self.domain_name}.Timing_System_Delay_Scan_Panel.Timing_System_Delay_Scan_Panel('{self.domain_name}')")
),
Application_Menu_Item(
"Laser On Scan...",
application(f"{self.domain_name}.Timing_System_Laser_On_Scan_Panel.Timing_System_Laser_On_Scan_Panel('{self.domain_name}')")
),
]
@property
def timing_system(self):
from timing_system_client import timing_system_client
return timing_system_client(self.timing_system_name)
@property
def domain_name(self):
return self.timing_system_name
@property
def delay_choices(self):
from numpy import concatenate, arange
choices = concatenate(([-100e-12, 0], 10 ** (arange(-10, 1, 1.0))))
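        # i.e. [-100 ps, 0 s, 1e-10 s, 1e-9 s, ..., 1 s]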
return choices
queue_choices = ["queue1", "queue2", "queue", ""]
@property
def hlc_choices(self):
choices = []
from numpy import arange, finfo
eps = finfo(float).eps
hsct = self.timing_system.hsct
try:
choices = arange(-12 * hsct, +12 * hsct + eps, hsct)
except ValueError:
warning(format_exc())
return choices
@property
def hsc_choices(self):
choices = []
from numpy import arange, finfo
eps = finfo(float).eps
P0t = self.timing_system.P0t
try:
choices = arange(-12 * P0t / 24, 12 * P0t / 24 + eps, P0t / 24)
except ValueError:
warning(format_exc())
return choices
if __name__ == '__main__':
timing_system_name = "BioCARS"
# timing_system_name = "LaserLab"
# timing_system_name = "TestBench"
msg_format = "%(asctime)s %(levelname)s %(module)s.%(funcName)s: %(message)s"
from redirect import redirect
redirect("%s.Timing_Panel" % timing_system_name, format=msg_format)
import wx
app = wx.GetApp() if wx.GetApp() else wx.App()
self = Timing_Panel(timing_system_name)
app.MainLoop()
| [
"[email protected]"
] | |
fd337b75f0eb10484074e08ba64c0b939849c29f | ed756885498f009618c4be880f255e5c2402d537 | /web/feeds.py | d65fd10180cb506fc5739ed0d781aa1940d95fda | [] | no_license | GunioRobot/BlancoWeb | c6d1d242960918a6170ed0b1432ac36ea686546f | cff8540c8f5bc0a372cc3500b035f1fdbbc7eab8 | refs/heads/master | 2021-01-20T11:31:50.434756 | 2011-02-21T22:43:42 | 2011-02-21T22:43:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 615 | py | # -*- coding: utf-8 -*-
from django.contrib.syndication.feeds import Feed
from web.models import Evento
class EventoFeed(Feed):
    title = "Blanco Irish Tavern"
    link = "web/feeds/eventos/"
    description = "Suscríbete para conocer todas nuestras fiestas"
    def items(self):
        return Evento.objects.all().order_by("-fecha")
    def item_title(self, item):
        return item.nombre
    def item_description(self, item):
        descripcion = "%s" % item.fecha
        descripcion += " %s" % item.hora_inicio
        descripcion += " %s" % item.info
        return descripcion | [
"[email protected]"
] | |
943eb7ef872e91eecdf173c1d2bcf133d8a02938 | 484f9502f2d1fa35df77df8d3a08bd2bfd2d253d | /src/testers/unittests/test_ast_utils.py | 3debbde14f5a9683c67c251be3268860790f2dd6 | [
"Apache-2.0"
] | permissive | pmeerw/Triton | 5d1c58e93ed257f06f1586a1aa542d1ba307dcbb | 82f11d6b15302e7900ed7f9eb3d686b6313d5b37 | refs/heads/master | 2020-08-04T17:01:47.442181 | 2019-09-30T08:16:44 | 2019-09-30T08:16:44 | 212,212,395 | 0 | 0 | Apache-2.0 | 2019-10-01T22:38:58 | 2019-10-01T22:38:58 | null | UTF-8 | Python | false | false | 1,494 | py | #!/usr/bin/env python2
## -*- coding: utf-8 -*-
"""Test AST utils."""
import unittest
from triton import *
class TestAstUtils(unittest.TestCase):
"""Testing the AST utilities."""
def setUp(self):
self.ctx = TritonContext()
self.ctx.setArchitecture(ARCH.X86_64)
self.astCtxt = self.ctx.getAstContext()
self.sv1 = self.ctx.newSymbolicVariable(8)
self.sv2 = self.ctx.newSymbolicVariable(8)
self.v1 = self.astCtxt.variable(self.sv1)
self.v2 = self.astCtxt.variable(self.sv2)
def test_lookingForNodes(self):
n = (((self.v1 + self.v2 * 3) + self.v2) - 1)
# Looking for variables
l = self.astCtxt.lookingForNodes(n, AST_NODE.VARIABLE)
self.assertEqual(len(l), 2)
self.assertEqual(l[0], self.v1)
self.assertEqual(l[1], self.v2)
self.assertEqual(l[0].getSymbolicVariable().getName(), self.sv1.getName())
self.assertEqual(l[1].getSymbolicVariable().getName(), self.sv2.getName())
l = self.astCtxt.lookingForNodes(n, AST_NODE.ANY)
self.assertEqual(len(l), 12)
l = self.astCtxt.lookingForNodes(n, AST_NODE.BVADD)
self.assertEqual(len(l), 2)
l = self.astCtxt.lookingForNodes(n, AST_NODE.BVSUB)
self.assertEqual(len(l), 1)
l = self.astCtxt.lookingForNodes(n, AST_NODE.BVMUL)
self.assertEqual(len(l), 1)
l = self.astCtxt.lookingForNodes(n, AST_NODE.BV)
self.assertEqual(len(l), 2)
| [
"[email protected]"
] | |
c4a35b1184ddc9951b0bf9e8a1ceeaccd2c708a0 | b951ee6d2de741e84f7bfe2dc5a66853c1d5cd4e | /Array/LinkedInstead.py | 5960eaa4dc231e2a7ddbf5349c752a8df806be84 | [] | no_license | Chongkai-Ma/Fundamentals-of-Python-Data-Structures | e78569f79dfad16ebc18121c250c25d91bb94754 | 170e58d23d9ee73c53b2ab596d7fcfc3e63eccc9 | refs/heads/master | 2020-09-21T03:46:04.980838 | 2019-12-04T14:18:27 | 2019-12-04T14:18:27 | 224,669,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 345 | py | #!/usr/bin/python3
from node import Node
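# node.py is not included in this snapshot; the interface assumed below is
# roughly (sketch):
#
# class Node(object):
#     def __init__(self, data, next=None):
#         self.data = data
#         self.next = next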
head = None
for count in range(1, 10):
head = Node(count, head)
probe = head
targetItem = 5
while probe != None and targetItem != probe.data:
probe = probe.next
if probe != None:
probe.data = 88888
print ("The item has been changed")
else:
print ("The item has not been found")
| [
"[email protected]"
] | |
da2c797441188d198de8c57c9a59473cbd5ed769 | c36fdb4d07aeaf0b1e568c45e8020b34c6fa5aca | /usps_integration/models/__init__.py | 136e6ed2d4acabb36fa173d1a7051621eeeb8395 | [] | no_license | planetodooofficial/usps_integration_v13 | c6cf33c49d753c44831d3f6e1da10271d37f0e82 | ad69aa442b0ee65d1b7589b7f7ca409313f908aa | refs/heads/master | 2022-12-24T10:05:39.397215 | 2020-09-30T19:14:55 | 2020-09-30T19:14:55 | 258,160,591 | 0 | 3 | null | 2020-09-30T19:14:56 | 2020-04-23T09:52:32 | Python | UTF-8 | Python | false | false | 1,098 | py | # -*- encoding: utf-8 -*-
##############################################################################
# Copyright (c) 2015 - Present Planet Odoo. All Rights Reserved
# Author: [Planet Odoo]
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of the GNU General Public License is available at:
# <http://www.gnu.org/licenses/gpl.html>.
#
##############################################################################
from . import shipping_conf
from . import delivery
from . import product
from . import sale
from . import shipping_endicia
from . import endicia
from . import shipping
from . import stock
| [
"https://[email protected]"
] | https://[email protected] |
7e57dcbf4f48f8fcfe88cb68a3ebfbe549f6d2ab | e944d912def98d7546d17c4303169f52517348ca | /interview_questions/basic/sum_of_rows_1.py | 046b5461ff003408ecb4ae700d527c671bb16873 | [] | no_license | balajich/python-crash-course | 0710854de3cd04695f969cbfe774ef336f707f48 | e62b578f7dc93f6a47fbae00dac2d496b985fe8d | refs/heads/master | 2021-07-30T16:00:45.392119 | 2021-07-29T11:41:49 | 2021-07-29T11:41:49 | 192,034,679 | 9 | 0 | null | null | null | null | UTF-8 | Python | false | false | 425 | py | '''
Takes a matrix as input and returns the sum of each row.
'''
import numpy as np
def rowsum(matrix):
"""
:param matrix (list): A list of lists where each inner list represents a row.
:returns: (list) A list containing the sum of each row.
"""
result=[]
for sub_list in matrix:
result.append(sum(sub_list))
return result
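# The numpy import above is otherwise unused; a vectorized equivalent (sketch):
def rowsum_np(matrix):
    """Same result as rowsum, via a numpy reduction over axis 1."""
    return np.asarray(matrix).sum(axis=1).tolist()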
print(rowsum([[1, 2, 3], [4, 5, 6], [7, 8, 9]])) # Should print [6, 15, 24] | [
"[email protected]"
] | |
e7a6da8b913047441c8ecbd61af44920ea566c1b | 95d20fe737d711cf92d68130eb59b6aef4435ec2 | /pyecharts数据可视化/中国国内生产总值.py | 20106c170698be17ce82f33a10baa3719363738d | [] | no_license | CCH21/Python | d11b93851579d85f972828c760a96c5be1f79531 | 33e218810856971f3f1f97a2b8a4c8dce761362e | refs/heads/master | 2022-04-29T11:48:01.816283 | 2022-03-17T11:53:01 | 2022-03-17T11:53:01 | 226,452,057 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,686 | py | #!/usr/bin/env python3
import csv
from pyecharts import Line
Quarter = []
GDP = []
Primary_industry = []
Secondary_industry = []
Tertiary_industry = []
with open('中国国内生产总值.csv', 'r', newline='') as csv_in_file:
filereader = csv.reader(csv_in_file)
head = next(filereader)
for row_list in filereader:
Quarter.append(row_list[0])
gdp = round(eval(row_list[2][:-1]) / 100, 3)
GDP.append(gdp)
pri = round(eval(row_list[4][:-1]) / 100, 3)
Primary_industry.append(pri)
sec = round(eval(row_list[6][:-1]) / 100, 3)
Secondary_industry.append(sec)
ter = round(eval(row_list[8][:-1]) / 100, 3)
Tertiary_industry.append(ter)
Quarter = Quarter[::-1]
GDP = GDP[::-1]
Primary_industry = Primary_industry[::-1]
Secondary_industry = Secondary_industry[::-1]
Tertiary_industry = Tertiary_industry[::-1]
line = Line('中国国内生产总值同比增长率', '时间:2006年第1季度-2020年第1季度 数据来源:东方财富网', width=1280, height=720)
line.add('国内生产总值', Quarter, GDP, is_smooth=False, mark_point=['max'], mark_line=['average'], legend_pos='right')
line.add('第一产业', Quarter, Primary_industry, is_smooth=False, mark_point=['max'], mark_line=['average'],
legend_pos='right')
line.add('第二产业', Quarter, Secondary_industry, is_smooth=False, mark_point=['max'], mark_line=['average'],
legend_pos='right')
line.add('第三产业', Quarter, Tertiary_industry, is_smooth=False, mark_point=['max'], mark_line=['average'],
legend_pos='right')
line.render('中国国内生产总值.html')
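# Note: this uses the pyecharts 0.5.x API (Line(title, subtitle, ...));
# pyecharts 1.x switched to an options-based API, so the code above will not
# run unchanged on 1.x.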
| [
"[email protected]"
] | |
7b10e1771bc7133cd12e42ff4ced75617ba3270c | 826cdefb3554e6bbc7b5e5fa9bc6f55268cd58dd | /src/main/python/basics/itertools.py | 4c731ae4d27d12208d8fbb8b22bcd656bceb3a3f | [] | no_license | lj015625/CodeSnippet | 67d1f556497948b3db51c67af07f16a21751427e | 73e9375c5d7edcc50170569c0bd99fd415557d85 | refs/heads/master | 2023-09-01T14:59:57.162553 | 2023-08-24T11:07:37 | 2023-08-24T11:07:37 | 61,499,418 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,043 | py | import itertools as it
def combinational_dice_rolls(n, m):
return list(it.product(range(1, m+1), repeat=n))
combinational_dice_rolls(2,2)
def cartesian_product(arr1, arr2):
    print(*it.product(arr1, arr2))
A = [1,2,3]
B = [1,2,3]
cartesian_product(A,B)
s, n = 2, 3
s = sorted(str(s))
n = int(n)
for i in it.permutations(s,n):
print(''.join(i), sep='\n')
s, n = 'ABC', 2
for i in range(1, int(n)+1):
for j in it.combinations(sorted(s), i):
print(''.join(j))
# This tool returns length subsequences of elements from the input iterable allowing individual elements to be repeated more than once.
s, n = 'ABC', 2
for c in it.combinations_with_replacement(sorted(s), int(n)):
print("".join(c))
# run-length encode a string: (run_length, digit) tuples via groupby
print(*[(len(list(values)), int(key)) for key, values in it.groupby('12345')])
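# every digit in '12345' occurs once, so this prints: (1, 1) (1, 2) (1, 3) (1, 4) (1, 5)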
# count number of a in combinations
n = 4
arr = ['a', 'a', 'c', 'd']
k = 2
count = 0
total = 0
for t in it.combinations(arr, k):
total += 1
count += 'a' in t
print(count/total)
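# 5 of the 6 combinations contain 'a', so this prints 0.8333...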
| [
"[email protected]"
] | |
684e78d298475edf5350934fbb380bb497a3bb7e | 0cc9ba497efeae7de808b3063f932cee9449bc20 | /akshare/fx/currency_investing.py | 41abf6485117ce7ccd18ebed6240baa7a5dd72a6 | [
"MIT"
] | permissive | louis100/akshare | 08dc7d71c194e973092174dabc307e28a2aaf7d6 | 0b2ad15982dc1e4081929ed634e96c559bf3ef7e | refs/heads/master | 2022-12-12T16:26:38.294899 | 2020-09-16T04:25:46 | 2020-09-16T04:25:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,907 | py | # -*- coding:utf-8 -*-
# /usr/bin/env python
"""
Date: 2020/9/9 11:56
Desc: Investing.com (英为财情) - FX - historical data for currency pairs
https://cn.investing.com/currencies/
https://cn.investing.com/currencies/eur-usd-historical-data
"""
import re
import pandas as pd
import requests
from bs4 import BeautifulSoup
from akshare.index.cons import short_headers, long_headers
def _currency_name_url() -> dict:
"""
    Mapping of currency pair names to URL codes
    :return: mapping of currency pair names to URL codes
:rtype: dict
"""
url = "https://cn.investing.com/currencies/"
res = requests.post(url, headers=short_headers)
    data_table = pd.read_html(res.text)[0].iloc[:, 1:]  # real-time currency quotes
data_table.columns = ['中文名称', '英文名称', '最新', '最高', '最低', '涨跌额', '涨跌幅', '时间']
name_code_dict = dict(
zip(data_table["中文名称"].tolist(), [item.lower().replace("/", "-") for item in data_table["英文名称"].tolist()]))
return name_code_dict
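# e.g. _currency_name_url() -> {'欧元/美元': 'eur-usd', ...} (illustrative entry)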
def currency_hist(symbol: str = "usd-vnd", start_date: str = "20050101", end_date: str = "20200717") -> pd.DataFrame:
"""
    Historical FX data. Mind the length of the requested date range; any currency pair can be passed in, and whether it is actually available can be checked via currency_name_code_dict
    :param symbol: currency pair
    :type symbol: str
    :param start_date: start date
    :type start_date: str
    :param end_date: end date
    :type end_date: str
    :return: historical data for the currency pair
:rtype: pandas.DataFrame
"""
start_date = "/".join([start_date[:4], start_date[4:6], start_date[6:]])
end_date = "/".join([end_date[:4], end_date[4:6], end_date[6:]])
temp_url = f"https://cn.investing.com/currencies/{symbol.lower().replace('/', '-')}-historical-data"
res = requests.post(temp_url, headers=short_headers)
soup = BeautifulSoup(res.text, "lxml")
title = soup.find("h2", attrs={"class": "float_lang_base_1"}).get_text()
res = requests.post(temp_url, headers=short_headers)
soup = BeautifulSoup(res.text, "lxml")
data = soup.find_all(text=re.compile("window.histDataExcessInfo"))[0].strip()
para_data = re.findall(r"\d+", data)
payload = {
"curr_id": para_data[0],
"smlID": para_data[1],
"header": title,
"st_date": start_date,
"end_date": end_date,
"interval_sec": "Daily",
"sort_col": "date",
"sort_ord": "DESC",
"action": "historical_data",
}
url = "https://cn.investing.com/instruments/HistoricalDataAjax"
res = requests.post(url, data=payload, headers=long_headers)
soup = BeautifulSoup(res.text, "lxml")
vest_list = [item.get_text().strip().split("\n") for item in soup.find_all("tr")]
raw_df = pd.DataFrame(vest_list)
df_data = pd.DataFrame(vest_list, columns=raw_df.iloc[0, :].tolist()).iloc[1:-1, :]
df_data.index = pd.to_datetime(df_data["日期"], format="%Y年%m月%d日")
df_data["涨跌幅"] = pd.DataFrame(round(df_data['涨跌幅'].str.replace('%', '').astype(float) / 100, 6))
del df_data["日期"]
df_data.iloc[:, :-1] = df_data.iloc[:, :-1].applymap(lambda x: x.replace(',', ''))
df_data = df_data.astype(float)
return df_data
def _currency_single() -> pd.DataFrame:
"""
    Investing.com - FX - single currency crosses - list of single currencies
    :return: list of single currencies
:rtype: pandas.DataFrame
"""
url = "https://cn.investing.com/currencies/single-currency-crosses"
res = requests.post(url, headers=short_headers)
soup = BeautifulSoup(res.text, "lxml")
name_url_option_list = soup.find("select", attrs={"class": "newInput selectBox"}).find_all("option")
temp_df = pd.DataFrame([item.get_text().split('-', 1) for item in name_url_option_list])
temp_df.columns = ["short_name", "name"]
temp_df["short_name"] = temp_df["short_name"].str.strip()
temp_df["name"] = temp_df["name"].str.strip()
temp_df["code"] = [item["value"] for item in name_url_option_list]
return temp_df
def currency_name_code(symbol: str = "usd/jpy") -> pd.DataFrame:
"""
    All crosses available for the given currency pair
    :param symbol: "usd/jpy"
    :type symbol: str
    :return: currency pairs (Chinese and English names)
:rtype: pandas.DataFrame
name code
0 欧元/美元 eur-usd
1 英镑/美元 gbp-usd
2 美元/日元 usd-jpy
3 美元/瑞士法郎 usd-chf
4 澳大利亚元/美元 aud-usd
.. ... ...
268 日元/新加坡元 jpy-sgd
269 科威特第纳尔/日元 kwd-jpy
270 日元/白俄罗斯卢布 jpy-byn
271 日元/乌克兰赫里纳 jpy-uah
272 日元/土耳其里拉 jpy-try
"""
symbol = symbol.upper()
currency_df = _currency_single()
url = "https://cn.investing.com/currencies/Service/ChangeCurrency"
params = {
"session_uniq_id": "53bee677662a2336ec07b40738753fc1",
"currencies": currency_df[currency_df["short_name"] == symbol.split("/")[0]]["code"].values[0],
}
headers = {"Accept": "application/json, text/javascript, */*; q=0.01",
"Accept-Encoding": "gzip, deflate, br",
"Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"Host": "cn.investing.com",
"Pragma": "no-cache",
"Referer": "https://cn.investing.com/currencies/single-currency-crosses",
"Sec-Fetch-Mode": "cors",
"Sec-Fetch-Site": "same-origin",
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36",
"X-Requested-With": "XMLHttpRequest",
}
res = requests.get(url, params=params, headers=headers)
temp_df = pd.read_html(res.json()["HTML"])[0].iloc[:, 1:]
temp_df.rename(columns={"名称.1": "简称"}, inplace=True)
temp_df["pids"] = [item[:-1] for item in res.json()["pids"]]
name_code_dict_one = dict(zip(temp_df["名称"].tolist(), [item.lower().replace("/", "-") for item in temp_df["简称"].tolist()]))
params = {
"session_uniq_id": "53bee677662a2336ec07b40738753fc1",
"currencies": currency_df[currency_df["short_name"] == symbol.split("/")[1]]["code"].values[0],
}
headers = {"Accept": "application/json, text/javascript, */*; q=0.01",
"Accept-Encoding": "gzip, deflate, br",
"Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"Host": "cn.investing.com",
"Pragma": "no-cache",
"Referer": "https://cn.investing.com/currencies/single-currency-crosses",
"Sec-Fetch-Mode": "cors",
"Sec-Fetch-Site": "same-origin",
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.130 Safari/537.36",
"X-Requested-With": "XMLHttpRequest",
}
res = requests.get(url, params=params, headers=headers)
temp_df = pd.read_html(res.json()["HTML"])[0].iloc[:, 1:]
temp_df.rename(columns={"名称.1": "简称"}, inplace=True)
temp_df["pids"] = [item[:-1] for item in res.json()["pids"]]
name_code_dict_two = dict(zip(temp_df["名称"].tolist(), [item.lower().replace("/", "-") for item in temp_df["简称"].tolist()]))
name_code_dict_one.update(name_code_dict_two)
temp_df = pd.DataFrame.from_dict(name_code_dict_one, orient="index").reset_index()
temp_df.columns = ["name", "code"]
return temp_df
if __name__ == '__main__':
currency_name_code_df = currency_name_code(symbol="usd/jpy")
print(currency_name_code_df)
currency_hist_df = currency_hist(symbol="usd-mmk", start_date="20131018", end_date="20200915")
print(currency_hist_df)
| [
"[email protected]"
] | |
6b2e29155e7989d4f19247ee43a3ae011cd71080 | a3c662a5eda4e269a8c81c99e229879b946a76f6 | /.venv/lib/python3.7/site-packages/pylint/test/functional/line_endings.py | 0b6e795e3f2c9b2e9ec6f93c33469324e1ea67ba | [
"MIT"
] | permissive | ahmadreza-smdi/ms-shop | 0c29da82c58b243507575672bbc94fb6e8068aeb | 65ba3f3061e2ac5c63115b08dadfe7d67f645fb6 | refs/heads/master | 2023-04-27T19:51:34.858182 | 2019-11-24T20:57:59 | 2019-11-24T20:57:59 | 223,616,552 | 6 | 2 | MIT | 2023-04-21T20:51:21 | 2019-11-23T16:09:03 | Python | UTF-8 | Python | false | false | 107 | py | "mixing line endings are not welcome"
# +1: [unexpected-line-ending-format, mixed-line-endings]
CONST = 1
| [
"[email protected]"
] | |
8c66385405873707fcd3fa458d8f11637899adb4 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/323/usersdata/278/89121/submittedfiles/mdc.py | 3185a3f53219a38653c6a227ae7e28e217f85f66 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 604 | py | # -*- coding: utf-8 -*-
import math
n1 = int(input("Digite o primeiro número inteiro: "))
n2 = int(input("Digite o segundo número inteiro: "))
# the GCD is the largest i that divides both numbers exactly; the original
# multiplied common divisors together (and inverted the test in one branch),
# which does not compute the GCD
mdc = 1
for i in range(1, min(n1, n2) + 1):
    resto1 = n1 % i
    resto2 = n2 % i
    if resto1 == 0 and resto2 == 0:
        mdc = i
print(mdc)
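# For reference, the standard library computes this directly (Python 3.5+):
# print(math.gcd(n1, n2))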
| [
"[email protected]"
] | |
b2b57465fe49db3db5b58fef26c370a7f74985ee | ba88cd6db28f160fec810d69e27fdd42c84b753a | /prep_model.py | f87cdab4ae43ca39ff5f905f99fd5e81bf0681b1 | [
"MIT"
] | permissive | erhanbas/imgclsmob | 02057ceabf863ce571507abbee89d7d4fd3431b3 | 5eacf51b96c8e715f73a77261395f0fac35dfffc | refs/heads/master | 2023-03-11T05:43:02.358759 | 2021-02-11T17:56:22 | 2021-02-11T17:56:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,378 | py | """
Script for preparing the model for publication.
"""
import os
import argparse
import subprocess
import shutil
import re
import hashlib
import zipfile
import pandas as pd
def parse_args():
"""
Parse python script parameters.
Returns:
-------
ArgumentParser
Resulted args.
"""
parser = argparse.ArgumentParser(description="Prepare model",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
"--model",
type=str,
required=True,
help="model name")
parser.add_argument(
"--resume",
type=str,
default="",
help="model weights (Gluon) file path")
args = parser.parse_args()
return args
def calc_sha1(file_name):
"""
Calculate sha1 hash of the file content.
Parameters:
----------
file_name : str
Path to the file.
sha1_hash : str
Expected sha1 hash in hexadecimal digits.
Returns:
-------
str
sha1 hex digest.
"""
sha1 = hashlib.sha1()
with open(file_name, "rb") as f:
while True:
data = f.read(1048576)
if not data:
break
sha1.update(data)
return sha1.hexdigest()
def post_process(dst_dir_path,
model_name,
model_file_path,
log_file_path,
dst_model_file_ext,
log_line_num):
"""
Post-process weight/log files.
Parameters:
----------
dst_dir_path : str
Destination dir path.
model_name : str
Model name.
model_file_path : str
Model file path.
log_file_path : str
Log file path.
dst_model_file_ext : str
Destination model file extension.
log_line_num : int
Log file last line number for analysis.
Returns:
-------
top5_err : str
top5 error value.
sha1_value : str
sha1 hex digest.
"""
with open(log_file_path, "r") as f:
log_file_tail = f.read().splitlines()[log_line_num]
top5_err = re.findall(r"\d+\.\d+", re.findall(r", err-top5=\d+\.\d+", log_file_tail)[0])[0].split(".")[1]
sha1_value = calc_sha1(model_file_path)
dst_model_file_name = "{}-{}-{}.{}".format(model_name, top5_err, sha1_value[:8], dst_model_file_ext)
dst_model_file_path = os.path.join(dst_dir_path, dst_model_file_name)
os.rename(model_file_path, dst_model_file_path)
os.rename(log_file_path, dst_model_file_path + ".log")
with zipfile.ZipFile(dst_model_file_path + ".zip", "w", zipfile.ZIP_DEFLATED) as zf:
zf.write(filename=dst_model_file_path, arcname=dst_model_file_name)
os.remove(dst_model_file_path)
return top5_err, sha1_value
def process_fwk(prep_info_dict,
dst_framework,
dst_dir_path,
model_name,
model_file_path,
log_file_path):
"""
Process weights on specific framework.
Parameters:
----------
prep_info_dict : dict
Dictionary with preparation meta-info.
dst_dir_path : str
Destination dir path.
model_name : str
Model name.
model_file_path : str
Model file path.
log_file_path : str
Log file path.
dst_framework : str
Destination framework.
"""
if dst_framework == "gluon":
dst_model_file_ext = "params"
eval_script = "eval_gl"
num_gpus = 1
calc_flops = "--calc-flops"
log_line_num = -3
elif dst_framework == "pytorch":
dst_model_file_ext = "pth"
eval_script = "eval_pt"
num_gpus = 1
calc_flops = "--calc-flops"
log_line_num = -3
elif dst_framework == "chainer":
dst_model_file_ext = "npz"
eval_script = "eval_ch"
num_gpus = 0
calc_flops = ""
log_line_num = -2
elif dst_framework == "tf2":
dst_model_file_ext = "tf2.h5"
eval_script = "eval_tf2"
num_gpus = 1
calc_flops = ""
log_line_num = -2
else:
raise ValueError("Unknown framework: {}".format(dst_framework))
post_proc_log_files = [f for f in os.listdir(dst_dir_path) if f.endswith(".{}.log".format(dst_model_file_ext))]
assert (len(post_proc_log_files) in [0, 1])
if len(post_proc_log_files) == 0:
dst_raw_log_file_path = os.path.join(dst_dir_path, "train.log")
shutil.copy2(log_file_path, dst_raw_log_file_path)
dst_raw_model_file_path = os.path.join(dst_dir_path, "{}.{}".format(model_name, dst_model_file_ext))
if dst_framework == "gluon":
shutil.copy2(model_file_path, dst_raw_model_file_path)
else:
command = "python3 convert_models.py --src-fwk=gluon --dst-fwk={dst_framework} --src-model={model_name}" \
" --dst-model={model_name} --src-params={model_file_path}" \
" --dst-params={dst_raw_model_file_path} --save-dir={dst_dir_path}"
subprocess.call([command.format(
dst_framework=dst_framework,
model_name=model_name,
model_file_path=model_file_path,
dst_raw_model_file_path=dst_raw_model_file_path,
dst_dir_path=dst_dir_path)], shell=True)
command = "python3 {eval_script}.py --model={model_name} --resume={dst_raw_model_file_path}" \
" --save-dir={dst_dir_path} --num-gpus={num_gpus} --batch-size=100 -j=4 {calc_flops}"
subprocess.call([command.format(
eval_script=eval_script,
model_name=model_name,
dst_raw_model_file_path=dst_raw_model_file_path,
dst_dir_path=dst_dir_path,
num_gpus=num_gpus,
calc_flops=calc_flops)], shell=True)
if dst_framework == "gluon":
shutil.copy2(dst_raw_log_file_path, log_file_path)
top5_err, sha1_value = post_process(
dst_dir_path=dst_dir_path,
model_name=model_name,
model_file_path=dst_raw_model_file_path,
log_file_path=dst_raw_log_file_path,
dst_model_file_ext=dst_model_file_ext,
log_line_num=log_line_num)
else:
model_name1, top5_err, sha1_short = post_proc_log_files[0].split(".")[0].split("-")
assert (model_name1 == model_name)
dst_model_file_name = "{}-{}-{}.{}".format(model_name, top5_err, sha1_short, dst_model_file_ext)
dst_model_file_path = os.path.join(dst_dir_path, dst_model_file_name)
dst_zip_model_file_path = dst_model_file_path + ".zip"
assert os.path.exists(dst_zip_model_file_path)
with zipfile.ZipFile(dst_zip_model_file_path, "r") as zf:
zf.extract(dst_model_file_name, dst_dir_path)
sha1_value = calc_sha1(dst_model_file_path)
os.remove(dst_model_file_path)
prep_info_dict["Type"].append(dst_framework)
prep_info_dict["Top5"].append(top5_err)
prep_info_dict["Sha1"].append(sha1_value)
def main():
args = parse_args()
model_name = args.model
model_file_path = os.path.expanduser(args.resume)
if not os.path.exists(model_file_path):
raise Exception("Model file doesn't exist: {}".format(model_file_path))
root_dir_path = os.path.dirname(model_file_path)
log_file_path = os.path.join(root_dir_path, "train.log")
if not os.path.exists(log_file_path):
raise Exception("Log file doesn't exist: {}".format(log_file_path))
dst_dir_path = os.path.join(root_dir_path, "_result")
if not os.path.exists(dst_dir_path):
os.mkdir(dst_dir_path)
prep_info_dict = {
"Type": [],
"Top5": [],
"Sha1": [],
}
dst_frameworks = ["gluon", "pytorch", "chainer", "tf2"]
# dst_frameworks = ["tf2"]
for dst_framework in dst_frameworks:
process_fwk(
prep_info_dict=prep_info_dict,
dst_framework=dst_framework,
dst_dir_path=dst_dir_path,
model_name=model_name,
model_file_path=model_file_path,
log_file_path=log_file_path)
prep_info_df = pd.DataFrame(prep_info_dict)
prep_info_df.to_csv(
os.path.join(root_dir_path, "prep_info.csv"),
sep="\t",
index=False)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
e48a135645e3ef4e54e636050eed7af1fa14972e | 9d1c260ff8e58335e0f373cfdd530e637ea803a8 | /EVENT.py | 963c1b66313d4f3a5b72fbc09ebc1ccffb81b482 | [
"MIT"
] | permissive | rambasnet/EVENT | e52931e3224b712e8b044e58382e4d170a835dc4 | dd3a6507112e4adc054481608d8968706f80d23f | refs/heads/master | 2020-06-01T23:39:28.843906 | 2019-06-09T20:07:02 | 2019-06-09T20:07:02 | 190,967,667 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 963 | py | #-----------------------------------------------------------------------------
# Name: event.py
# Purpose:
#
# Author: Ram Basnet
#
# Created: 2009/10/10
# RCS-ID: $Id: event.py $
# Copyright: (c) 2009
# Licence: All Rights Reserved.
#-----------------------------------------------------------------------------
import sys
import Parsers
import Consolidation
import Reports
import Charts
import Navigation
from Config import *
if __name__ == "__main__":
#fout = open('EVENT.log', 'w')
#temp = sys.stdout
#sys.stdout = fout
ReadConfigFile()
#Run parsers
Parsers.main()
#Run consolidation
Consolidation.main()
#run reports
Reports.main()
#run charts
Charts.main()
#run navigation
Navigation.GenerateNavigation()
raw_input('All done! Please hit Enter key to continue...')
#sys.stdout.close()
#sys.stdout = temp
| [
"[email protected]"
] | |
fb329b172f65df3eda2304e0d4b90c8211e3863f | 30816710f64515d9af98b19da522ecdd2a745258 | /origin/faster_rcnn/core/loader.py | 4281edcfaa68aef9a31cdc9bc02c5122ecfb40b4 | [] | no_license | unsky/Feature-Pyramid-Networks | 457a441a500b1b552b5a89c11384e96f8cf60dd5 | 890e9c74a8fcea20bd33b90bac6c58e42294298d | refs/heads/master | 2021-06-26T00:59:50.874246 | 2017-09-06T02:57:19 | 2017-09-06T02:57:19 | 101,043,340 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,702 | py | # --------------------------------------------------------
# Deformable Convolutional Networks
# Copyright (c) 2016 by Contributors
# Copyright (c) 2017 Microsoft
# Licensed under The Apache-2.0 License [see LICENSE for details]
# Modified by Yuwen Xiong
# --------------------------------------------------------
import numpy as np
import mxnet as mx
from mxnet.executor_manager import _split_input_slice
from config.config import config
from utils.image import tensor_vstack
from rpn.rpn import get_rpn_testbatch, get_rpn_batch, assign_anchor
from rcnn import get_rcnn_testbatch, get_rcnn_batch
class TestLoader(mx.io.DataIter):
def __init__(self, roidb, config, batch_size=1, shuffle=False,
has_rpn=False):
super(TestLoader, self).__init__()
# save parameters as properties
self.cfg = config
self.roidb = roidb
self.batch_size = batch_size
self.shuffle = shuffle
self.has_rpn = has_rpn
# infer properties from roidb
self.size = len(self.roidb)
self.index = np.arange(self.size)
# decide data and label names (only for training)
if has_rpn:
self.data_name = ['data', 'im_info']
else:
self.data_name = ['data', 'rois']
self.label_name = None
# status variable for synchronization between get_data and get_label
self.cur = 0
self.data = None
self.label = []
self.im_info = None
# get first batch to fill in provide_data and provide_label
self.reset()
self.get_batch()
@property
def provide_data(self):
return [[(k, v.shape) for k, v in zip(self.data_name, idata)] for idata in self.data]
@property
def provide_label(self):
return [None for _ in range(len(self.data))]
@property
def provide_data_single(self):
return [(k, v.shape) for k, v in zip(self.data_name, self.data[0])]
@property
def provide_label_single(self):
return None
def reset(self):
self.cur = 0
if self.shuffle:
np.random.shuffle(self.index)
def iter_next(self):
return self.cur < self.size
def next(self):
if self.iter_next():
self.get_batch()
self.cur += self.batch_size
return self.im_info, mx.io.DataBatch(data=self.data, label=self.label,
pad=self.getpad(), index=self.getindex(),
provide_data=self.provide_data, provide_label=self.provide_label)
else:
raise StopIteration
def getindex(self):
return self.cur / self.batch_size
def getpad(self):
if self.cur + self.batch_size > self.size:
return self.cur + self.batch_size - self.size
else:
return 0
def get_batch(self):
cur_from = self.cur
cur_to = min(cur_from + self.batch_size, self.size)
roidb = [self.roidb[self.index[i]] for i in range(cur_from, cur_to)]
if self.has_rpn:
data, label, im_info = get_rpn_testbatch(roidb, self.cfg)
else:
data, label, im_info = get_rcnn_testbatch(roidb, self.cfg)
self.data = [[mx.nd.array(idata[name]) for name in self.data_name] for idata in data]
self.im_info = im_info
def get_batch_individual(self):
cur_from = self.cur
cur_to = min(cur_from + self.batch_size, self.size)
roidb = [self.roidb[self.index[i]] for i in range(cur_from, cur_to)]
if self.has_rpn:
data, label, im_info = get_rpn_testbatch(roidb, self.cfg)
else:
data, label, im_info = get_rcnn_testbatch(roidb, self.cfg)
self.data = [mx.nd.array(data[name]) for name in self.data_name]
self.im_info = im_info
class AnchorLoader(mx.io.DataIter):
def __init__(self, feat_sym_p3,feat_sym_p4,feat_sym_p5,feat_sym_p6,
roidb, cfg, batch_size=1, shuffle=False, ctx=None, work_load_list=None,
feat_stride_p3=4,
anchor_scales_p3=(8, 16, 32),
anchor_ratios_p3=(0.5, 1, 2),
feat_stride_p4=8,
anchor_scales_p4=(8, 16, 32),
anchor_ratios_p4=(0.5, 1, 2),
feat_stride_p5=16,
anchor_scales_p5=(8, 16, 32),
anchor_ratios_p5=(0.5, 1, 2),
feat_stride_p6=32,
anchor_scales_p6=(8, 16, 32),
anchor_ratios_p6=(0.5, 1, 2),
allowed_border=0,
aspect_grouping=False):
"""
This Iter will provide roi data to Fast R-CNN network
:param feat_sym: to infer shape of assign_output
:param roidb: must be preprocessed
:param batch_size: must divide BATCH_SIZE(128)
:param shuffle: bool
:param ctx: list of contexts
:param work_load_list: list of work load
:param aspect_grouping: group images with similar aspects
:return: AnchorLoader
"""
super(AnchorLoader, self).__init__()
# save parameters as properties
self.feat_sym_p3 = feat_sym_p3
self.feat_sym_p4 = feat_sym_p4
self.feat_sym_p5 = feat_sym_p5
self.feat_sym_p6 = feat_sym_p6
self.roidb = roidb
self.cfg = cfg
self.batch_size = batch_size
self.shuffle = shuffle
self.ctx = ctx
if self.ctx is None:
self.ctx = [mx.cpu()]
self.work_load_list = work_load_list
self.feat_stride_p3 = feat_stride_p3
self.anchor_scales_p3 = anchor_scales_p3
self.anchor_ratios_p3 = anchor_ratios_p3
self.feat_stride_p4 = feat_stride_p4
self.anchor_scales_p4 = anchor_scales_p4
self.anchor_ratios_p4 = anchor_ratios_p4
self.feat_stride_p5 = feat_stride_p5
self.anchor_scales_p5 = anchor_scales_p5
self.anchor_ratios_p5 = anchor_ratios_p5
self.feat_stride_p6 = feat_stride_p6
self.anchor_scales_p6 = anchor_scales_p6
self.anchor_ratios_p6 = anchor_ratios_p6
self.allowed_border = allowed_border
self.aspect_grouping = aspect_grouping
# infer properties from roidb
self.size = len(roidb)
self.index = np.arange(self.size)
# decide data and label names
if config.TRAIN.END2END:
self.data_name = ['data', 'im_info', 'gt_boxes']
else:
self.data_name = ['data']
self.label_name = ['label/p3','label/p4','label/p5', 'label/p6','bbox_target/p3','bbox_target/p4','bbox_target/p5', 'bbox_target/p6','bbox_weight/p3','bbox_weight/p4','bbox_weight/p5','bbox_weight/p6']
# status variable for synchronization between get_data and get_label
self.cur = 0
self.batch = None
self.data = None
self.label = None
# get first batch to fill in provide_data and provide_label
self.reset()
self.get_batch_individual()
@property
def provide_data(self):
return [[(k, v.shape) for k, v in zip(self.data_name, self.data[i])] for i in xrange(len(self.data))]
@property
def provide_label(self):
return [[(k, v.shape) for k, v in zip(self.label_name, self.label[i])] for i in xrange(len(self.data))]
@property
def provide_data_single(self):
return [(k, v.shape) for k, v in zip(self.data_name, self.data[0])]
@property
def provide_label_single(self):
return [(k, v.shape) for k, v in zip(self.label_name, self.label[0])]
def reset(self):
self.cur = 0
if self.shuffle:
if self.aspect_grouping:
widths = np.array([r['width'] for r in self.roidb])
heights = np.array([r['height'] for r in self.roidb])
horz = (widths >= heights)
vert = np.logical_not(horz)
horz_inds = np.where(horz)[0]
vert_inds = np.where(vert)[0]
inds = np.hstack((np.random.permutation(horz_inds), np.random.permutation(vert_inds)))
extra = inds.shape[0] % self.batch_size
inds_ = np.reshape(inds[:-extra], (-1, self.batch_size))
row_perm = np.random.permutation(np.arange(inds_.shape[0]))
inds[:-extra] = np.reshape(inds_[row_perm, :], (-1,))
self.index = inds
else:
np.random.shuffle(self.index)
def iter_next(self):
return self.cur + self.batch_size <= self.size
def next(self):
if self.iter_next():
self.get_batch_individual()
self.cur += self.batch_size
return mx.io.DataBatch(data=self.data, label=self.label,
pad=self.getpad(), index=self.getindex(),
provide_data=self.provide_data, provide_label=self.provide_label)
else:
raise StopIteration
def getindex(self):
return self.cur / self.batch_size
def getpad(self):
if self.cur + self.batch_size > self.size:
return self.cur + self.batch_size - self.size
else:
return 0
def infer_shape(self, max_data_shape=None, max_label_shape=None):
""" Return maximum data and label shape for single gpu """
if max_data_shape is None:
max_data_shape = []
if max_label_shape is None:
max_label_shape = []
max_shapes = dict(max_data_shape + max_label_shape)
input_batch_size = max_shapes['data'][0]
im_info = [[max_shapes['data'][2], max_shapes['data'][3], 1.0]]
_, feat_shape_p3, _ = self.feat_sym_p3.infer_shape(**max_shapes)
_, feat_shape_p4, _ = self.feat_sym_p4.infer_shape(**max_shapes)
_, feat_shape_p5, _ = self.feat_sym_p5.infer_shape(**max_shapes)
_, feat_shape_p6, _ = self.feat_sym_p6.infer_shape(**max_shapes)
label = assign_anchor(feat_shape_p3[0],feat_shape_p4[0],feat_shape_p5[0] ,feat_shape_p6[0],np.zeros((0, 5)), im_info, self.cfg,
self.feat_stride_p3, self.anchor_scales_p3, self.anchor_ratios_p3,
self.feat_stride_p4, self.anchor_scales_p4, self.anchor_ratios_p4,
self.feat_stride_p5, self.anchor_scales_p5, self.anchor_ratios_p5,
self.feat_stride_p6, self.anchor_scales_p6, self.anchor_ratios_p6,
self.allowed_border)
label = [label[k] for k in self.label_name]
label_shape = [(k, tuple([input_batch_size] + list(v.shape[1:]))) for k, v in zip(self.label_name, label)]
return max_data_shape, label_shape
def get_batch(self):
# slice roidb
cur_from = self.cur
cur_to = min(cur_from + self.batch_size, self.size)
roidb = [self.roidb[self.index[i]] for i in range(cur_from, cur_to)]
# decide multi device slice
work_load_list = self.work_load_list
ctx = self.ctx
if work_load_list is None:
work_load_list = [1] * len(ctx)
assert isinstance(work_load_list, list) and len(work_load_list) == len(ctx), \
"Invalid settings for work load. "
slices = _split_input_slice(self.batch_size, work_load_list)
# get testing data for multigpu
data_list = []
label_list = []
for islice in slices:
iroidb = [roidb[i] for i in range(islice.start, islice.stop)]
data, label = get_rpn_batch(iroidb, self.cfg)
data_list.append(data)
label_list.append(label)
# pad data first and then assign anchor (read label)
data_tensor = tensor_vstack([batch['data'] for batch in data_list])
for data, data_pad in zip(data_list, data_tensor):
data['data'] = data_pad[np.newaxis, :]
new_label_list = []
for data, label in zip(data_list, label_list):
# infer label shape
data_shape = {k: v.shape for k, v in data.items()}
del data_shape['im_info']
_, feat_shape_p3, _ = self.feat_sym_p3.infer_shape(**data_shape)
feat_shape_p3 = [int(i) for i in feat_shape_p3[0]]
_, feat_shape_p4, _ = self.feat_sym_p4.infer_shape(**data_shape)
feat_shape_p4 = [int(i) for i in feat_shape_p4[0]]
_, feat_shape_p5, _ = self.feat_sym_p5.infer_shape(**data_shape)
feat_shape_p5 = [int(i) for i in feat_shape_p5[0]]
_, feat_shape_p6, _ = self.feat_sym_p6.infer_shape(**data_shape)
feat_shape_p6 = [int(i) for i in feat_shape_p6[0]]
# add gt_boxes to data for e2e
data['gt_boxes'] = label['gt_boxes'][np.newaxis, :, :]
# assign anchor for label
label = assign_anchor(feat_shape_p3,feat_shape_p4,feat_shape_p5,feat_shape_p6,label['gt_boxes'], data['im_info'], self.cfg,
self.feat_stride_p3, self.anchor_scales_p3,self.anchor_ratios_p3,
self.feat_stride_p4, self.anchor_scales_p4,self.anchor_ratios_p4,
self.feat_stride_p5, self.anchor_scales_p5,self.anchor_ratios_p5,
self.feat_stride_p6, self.anchor_scales_p6,self.anchor_ratios_p6,
self.allowed_border)
new_label_list.append(label)
all_data = dict()
for key in self.data_name:
all_data[key] = tensor_vstack([batch[key] for batch in data_list])
all_label = dict()
for key in self.label_name:
pad = -1 if key == 'label' else 0
all_label[key] = tensor_vstack([batch[key] for batch in new_label_list], pad=pad)
self.data = [mx.nd.array(all_data[key]) for key in self.data_name]
self.label = [mx.nd.array(all_label[key]) for key in self.label_name]
def get_batch_individual(self):
cur_from = self.cur
cur_to = min(cur_from + self.batch_size, self.size)
roidb = [self.roidb[self.index[i]] for i in range(cur_from, cur_to)]
# decide multi device slice
work_load_list = self.work_load_list
ctx = self.ctx
if work_load_list is None:
work_load_list = [1] * len(ctx)
assert isinstance(work_load_list, list) and len(work_load_list) == len(ctx), \
"Invalid settings for work load. "
slices = _split_input_slice(self.batch_size, work_load_list)
rst = []
for idx, islice in enumerate(slices):
iroidb = [roidb[i] for i in range(islice.start, islice.stop)]
rst.append(self.parfetch(iroidb))
all_data = [_['data'] for _ in rst]
all_label = [_['label'] for _ in rst]
self.data = [[mx.nd.array(data[key]) for key in self.data_name] for data in all_data]
self.label = [[mx.nd.array(label[key]) for key in self.label_name] for label in all_label]
def parfetch(self, iroidb):
# get testing data for multigpu
data, label = get_rpn_batch(iroidb, self.cfg)
data_shape = {k: v.shape for k, v in data.items()}
del data_shape['im_info']
_, feat_shape_p3, _ = self.feat_sym_p3.infer_shape(**data_shape)
feat_shape_p3 = [int(i) for i in feat_shape_p3[0]]
_, feat_shape_p4, _ = self.feat_sym_p4.infer_shape(**data_shape)
feat_shape_p4 = [int(i) for i in feat_shape_p4[0]]
_, feat_shape_p5, _ = self.feat_sym_p5.infer_shape(**data_shape)
feat_shape_p5 = [int(i) for i in feat_shape_p5[0]]
_, feat_shape_p6, _ = self.feat_sym_p6.infer_shape(**data_shape)
feat_shape_p6 = [int(i) for i in feat_shape_p6[0]]
# add gt_boxes to data for e2e
data['gt_boxes'] = label['gt_boxes'][np.newaxis, :, :]
# assign anchor for label
label = assign_anchor(feat_shape_p3,feat_shape_p4,feat_shape_p5,feat_shape_p6, label['gt_boxes'], data['im_info'], self.cfg,
self.feat_stride_p3, self.anchor_scales_p3,self.anchor_ratios_p3,
self.feat_stride_p4, self.anchor_scales_p4,self.anchor_ratios_p4,
self.feat_stride_p5, self.anchor_scales_p5,self.anchor_ratios_p5,
self.feat_stride_p6, self.anchor_scales_p6,self.anchor_ratios_p6,
self.allowed_border)
return {'data': data, 'label': label}
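# Usage sketch (not part of the original file): how these iterators are
# typically driven by the surrounding train/test scripts. The feature-symbol
# lookups and module calls below are illustrative assumptions, not this
# repo's exact API.
#
#   feat_syms = [sym.get_internals()['rpn_cls_score_p%d_output' % s]
#                for s in (3, 4, 5, 6)]                # hypothetical symbol names
#   train_data = AnchorLoader(feat_syms[0], feat_syms[1], feat_syms[2], feat_syms[3],
#                             roidb, config, batch_size=len(ctx),
#                             shuffle=True, ctx=ctx)
#   for batch in train_data:                           # yields mx.io.DataBatch
#       module.forward_backward(batch)                 # hypothetical training step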
| [
"[email protected]"
] | |
e8a823a890546c56c66c3bb0dbf0a510a17cf622 | 13f7adf576114c51f9f806a6fc5797b276d93f97 | /devel/lib/python2.7/dist-packages/autoware_msgs/msg/_traffic_light.py | 90177962beab0e196bf6f3c7b6ff861fedd20be4 | [] | no_license | yunjeongkim/keti_ws | a72a5ebc367b208654bdffb5bb9e8372cd959d33 | aaac717c15a7be7431b22fb4ec7a96a734f2e03c | refs/heads/master | 2020-04-05T06:18:52.334522 | 2018-11-21T01:47:34 | 2018-11-21T01:47:34 | 156,633,425 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,252 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from autoware_msgs/traffic_light.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import std_msgs.msg
class traffic_light(genpy.Message):
_md5sum = "a4931ba214a0e37e220dd00b2acca20a"
_type = "autoware_msgs/traffic_light"
_has_header = True #flag to mark the presence of a Header object
_full_text = """Header header
int32 traffic_light
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
"""
__slots__ = ['header','traffic_light']
_slot_types = ['std_msgs/Header','int32']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
header,traffic_light
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(traffic_light, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.header is None:
self.header = std_msgs.msg.Header()
if self.traffic_light is None:
self.traffic_light = 0
else:
self.header = std_msgs.msg.Header()
self.traffic_light = 0
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_get_struct_i().pack(self.traffic_light))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
start = end
end += 4
(self.traffic_light,) = _get_struct_i().unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
buff.write(_get_struct_i().pack(self.traffic_light))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
start = end
end += 4
(self.traffic_light,) = _get_struct_i().unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_i = None
def _get_struct_i():
global _struct_i
if _struct_i is None:
_struct_i = struct.Struct("<i")
return _struct_i
_struct_3I = None
def _get_struct_3I():
global _struct_3I
if _struct_3I is None:
_struct_3I = struct.Struct("<3I")
return _struct_3I
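# Example (not part of the generated file): constructing and publishing the
# message, assuming a standard rospy environment; the topic name is an
# illustrative assumption.
#
#   import rospy
#   msg = traffic_light()
#   msg.header.stamp = rospy.Time.now()
#   msg.header.frame_id = 'camera'
#   msg.traffic_light = 1                      # application-defined signal state
#   pub = rospy.Publisher('light_color', traffic_light, queue_size=1)
#   pub.publish(msg)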
| [
"[email protected]"
] | |
66a9052b381170d325564e1f868643a4dbafd533 | ad5d38fce4785037c108186f17eb1c64380355ef | /sddsd/google-cloud-sdk/lib/googlecloudsdk/third_party/apis/billingbudgets/v1beta1/billingbudgets_v1beta1_messages.py | 703342bc274d39c84cc7f65280b83732457e9420 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | saranraju90/multik8s | 75864b605a139ddb7947ed4de4ae8466bdd49acb | 428576dedef7bb9cd6516e2c1ab2714581e1137c | refs/heads/master | 2023-03-03T21:56:14.383571 | 2021-02-20T14:56:42 | 2021-02-20T14:56:42 | 339,665,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,295 | py | """Generated message classes for billingbudgets version v1beta1.
The Cloud Billing Budget API stores Cloud Billing budgets, which define a
budget plan and the rules to execute as spend is tracked against that plan.
"""
# NOTE: This file is autogenerated and should not be edited by hand.
from __future__ import absolute_import
from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
from apitools.base.py import extra_types
package = 'billingbudgets'
class BillingbudgetsBillingAccountsBudgetsCreateRequest(_messages.Message):
r"""A BillingbudgetsBillingAccountsBudgetsCreateRequest object.
Fields:
googleCloudBillingBudgetsV1beta1CreateBudgetRequest: A
GoogleCloudBillingBudgetsV1beta1CreateBudgetRequest resource to be
passed as the request body.
parent: Required. The name of the billing account to create the budget in.
Values are of the form `billingAccounts/{billingAccountId}`.
"""
googleCloudBillingBudgetsV1beta1CreateBudgetRequest = _messages.MessageField('GoogleCloudBillingBudgetsV1beta1CreateBudgetRequest', 1)
parent = _messages.StringField(2, required=True)
class BillingbudgetsBillingAccountsBudgetsDeleteRequest(_messages.Message):
r"""A BillingbudgetsBillingAccountsBudgetsDeleteRequest object.
Fields:
name: Required. Name of the budget to delete. Values are of the form
`billingAccounts/{billingAccountId}/budgets/{budgetId}`.
"""
name = _messages.StringField(1, required=True)
class BillingbudgetsBillingAccountsBudgetsGetRequest(_messages.Message):
r"""A BillingbudgetsBillingAccountsBudgetsGetRequest object.
Fields:
name: Required. Name of budget to get. Values are of the form
`billingAccounts/{billingAccountId}/budgets/{budgetId}`.
"""
name = _messages.StringField(1, required=True)
class BillingbudgetsBillingAccountsBudgetsListRequest(_messages.Message):
r"""A BillingbudgetsBillingAccountsBudgetsListRequest object.
Fields:
pageSize: Optional. The maximum number of budgets to return per page. The
default and maximum value are 100.
pageToken: Optional. The value returned by the last `ListBudgetsResponse`
which indicates that this is a continuation of a prior `ListBudgets`
call, and that the system should return the next page of data.
parent: Required. Name of billing account to list budgets under. Values
are of the form `billingAccounts/{billingAccountId}`.
"""
pageSize = _messages.IntegerField(1, variant=_messages.Variant.INT32)
pageToken = _messages.StringField(2)
parent = _messages.StringField(3, required=True)
class BillingbudgetsBillingAccountsBudgetsPatchRequest(_messages.Message):
r"""A BillingbudgetsBillingAccountsBudgetsPatchRequest object.
Fields:
googleCloudBillingBudgetsV1beta1UpdateBudgetRequest: A
GoogleCloudBillingBudgetsV1beta1UpdateBudgetRequest resource to be
passed as the request body.
name: Output only. Resource name of the budget. The resource name implies
the scope of a budget. Values are of the form
`billingAccounts/{billingAccountId}/budgets/{budgetId}`.
"""
googleCloudBillingBudgetsV1beta1UpdateBudgetRequest = _messages.MessageField('GoogleCloudBillingBudgetsV1beta1UpdateBudgetRequest', 1)
name = _messages.StringField(2, required=True)
class GoogleCloudBillingBudgetsV1beta1AllUpdatesRule(_messages.Message):
r"""AllUpdatesRule defines notifications that are sent based on budget spend
and thresholds.
Fields:
disableDefaultIamRecipients: Optional. When set to true, disables default
notifications sent when a threshold is exceeded. Default notifications
are sent to those with Billing Account Administrator and Billing Account
User IAM roles for the target account.
monitoringNotificationChannels: Optional. Targets to send notifications to
when a threshold is exceeded. This is in addition to default recipients
who have billing account IAM roles. The value is the full REST resource
name of a monitoring notification channel with the form
`projects/{project_id}/notificationChannels/{channel_id}`. A maximum of
5 channels are allowed. See https://cloud.google.com/billing/docs/how-
to/budgets-notification-recipients for more details.
pubsubTopic: Optional. The name of the Pub/Sub topic where budget related
messages will be published, in the form
`projects/{project_id}/topics/{topic_id}`. Updates are sent at regular
intervals to the topic. The topic needs to be created before the budget
is created; see https://cloud.google.com/billing/docs/how-
to/budgets#manage-notifications for more details. Caller is expected to
have `pubsub.topics.setIamPolicy` permission on the topic when it's set
for a budget, otherwise, the API call will fail with PERMISSION_DENIED.
See https://cloud.google.com/billing/docs/how-to/budgets-programmatic-
notifications for more details on Pub/Sub roles and permissions.
schemaVersion: Optional. Required when AllUpdatesRule.pubsub_topic is set.
The schema version of the notification sent to
AllUpdatesRule.pubsub_topic. Only "1.0" is accepted. It represents the
JSON schema as defined in https://cloud.google.com/billing/docs/how-
to/budgets-programmatic-notifications#notification_format.
"""
disableDefaultIamRecipients = _messages.BooleanField(1)
monitoringNotificationChannels = _messages.StringField(2, repeated=True)
pubsubTopic = _messages.StringField(3)
schemaVersion = _messages.StringField(4)
class GoogleCloudBillingBudgetsV1beta1Budget(_messages.Message):
r"""A budget is a plan that describes what you expect to spend on Cloud
projects, plus the rules to execute as spend is tracked against that plan,
(for example, send an alert when 90% of the target spend is met). Currently
all plans are monthly budgets so the usage period(s) tracked are implied
(calendar months of usage back-to-back).
Fields:
allUpdatesRule: Optional. Rules to apply to notifications sent based on
budget spend and thresholds.
amount: Required. Budgeted amount.
budgetFilter: Optional. Filters that define which resources are used to
compute the actual spend against the budget.
displayName: User data for display name in UI. Validation: <= 60 chars.
etag: Optional. Etag to validate that the object is unchanged for a read-
modify-write operation. An empty etag will cause an update to overwrite
other changes.
name: Output only. Resource name of the budget. The resource name implies
the scope of a budget. Values are of the form
`billingAccounts/{billingAccountId}/budgets/{budgetId}`.
thresholdRules: Optional. Rules that trigger alerts (notifications of
thresholds being crossed) when spend exceeds the specified percentages
of the budget.
"""
allUpdatesRule = _messages.MessageField('GoogleCloudBillingBudgetsV1beta1AllUpdatesRule', 1)
amount = _messages.MessageField('GoogleCloudBillingBudgetsV1beta1BudgetAmount', 2)
budgetFilter = _messages.MessageField('GoogleCloudBillingBudgetsV1beta1Filter', 3)
displayName = _messages.StringField(4)
etag = _messages.StringField(5)
name = _messages.StringField(6)
thresholdRules = _messages.MessageField('GoogleCloudBillingBudgetsV1beta1ThresholdRule', 7, repeated=True)
class GoogleCloudBillingBudgetsV1beta1BudgetAmount(_messages.Message):
r"""The budgeted amount for each usage period.
Fields:
lastPeriodAmount: Use the last period's actual spend as the budget for the
present period.
specifiedAmount: A specified amount to use as the budget. `currency_code`
is optional. If specified when creating a budget, it must match the
currency of the billing account. If specified when updating a budget, it
must match the existing budget currency_code. The `currency_code` is
provided on output.
"""
lastPeriodAmount = _messages.MessageField('GoogleCloudBillingBudgetsV1beta1LastPeriodAmount', 1)
specifiedAmount = _messages.MessageField('GoogleTypeMoney', 2)
class GoogleCloudBillingBudgetsV1beta1CreateBudgetRequest(_messages.Message):
r"""Request for CreateBudget
Fields:
budget: Required. Budget to create.
"""
budget = _messages.MessageField('GoogleCloudBillingBudgetsV1beta1Budget', 1)
class GoogleCloudBillingBudgetsV1beta1Filter(_messages.Message):
r"""A filter for a budget, limiting the scope of the cost to calculate.
Enums:
CreditTypesTreatmentValueValuesEnum: Optional. If not set, default
behavior is `INCLUDE_ALL_CREDITS`.
Messages:
LabelsValue: Optional. A single label and value pair specifying that usage
from only this set of labeled resources should be included in the
budget. Currently, multiple entries or multiple values per entry are not
allowed. If omitted, the report will include all labeled and unlabeled
usage.
Fields:
creditTypes: Optional. If Filter.credit_types_treatment is
INCLUDE_SPECIFIED_CREDITS, this is a list of credit types to be
subtracted from gross cost to determine the spend for threshold
calculations. If Filter.credit_types_treatment is **not**
INCLUDE_SPECIFIED_CREDITS, this field must be empty. See [a list of
acceptable credit type
values](https://cloud.google.com/billing/docs/how-to/export-data-
bigquery-tables#credits-type).
creditTypesTreatment: Optional. If not set, default behavior is
`INCLUDE_ALL_CREDITS`.
labels: Optional. A single label and value pair specifying that usage from
only this set of labeled resources should be included in the budget.
Currently, multiple entries or multiple values per entry are not
allowed. If omitted, the report will include all labeled and unlabeled
usage.
projects: Optional. A set of projects of the form `projects/{project}`,
specifying that usage from only this set of projects should be included
in the budget. If omitted, the report will include all usage for the
billing account, regardless of which project the usage occurred on. Only
zero or one project can be specified currently.
services: Optional. A set of services of the form `services/{service_id}`,
specifying that usage from only this set of services should be included
in the budget. If omitted, the report will include usage for all the
services. The service names are available through the Catalog API:
https://cloud.google.com/billing/v1/how-tos/catalog-api.
subaccounts: Optional. A set of subaccounts of the form
`billingAccounts/{account_id}`, specifying that usage from only this set
of subaccounts should be included in the budget. If a subaccount is set
to the name of the parent account, usage from the parent account will be
included. If omitted, the report will include usage from the parent
account and all subaccounts, if they exist.
"""
class CreditTypesTreatmentValueValuesEnum(_messages.Enum):
r"""Optional. If not set, default behavior is `INCLUDE_ALL_CREDITS`.
Values:
CREDIT_TYPES_TREATMENT_UNSPECIFIED: <no description>
INCLUDE_ALL_CREDITS: All types of credit are subtracted from the gross
cost to determine the spend for threshold calculations.
EXCLUDE_ALL_CREDITS: All types of credit are added to the net cost to
determine the spend for threshold calculations.
INCLUDE_SPECIFIED_CREDITS: Credit types specified in the credit_types
field are subtracted from the gross cost to determine the spend for
threshold calculations.
"""
CREDIT_TYPES_TREATMENT_UNSPECIFIED = 0
INCLUDE_ALL_CREDITS = 1
EXCLUDE_ALL_CREDITS = 2
INCLUDE_SPECIFIED_CREDITS = 3
@encoding.MapUnrecognizedFields('additionalProperties')
class LabelsValue(_messages.Message):
r"""Optional. A single label and value pair specifying that usage from
only this set of labeled resources should be included in the budget.
Currently, multiple entries or multiple values per entry are not allowed.
If omitted, the report will include all labeled and unlabeled usage.
Messages:
AdditionalProperty: An additional property for a LabelsValue object.
Fields:
additionalProperties: Additional properties of type LabelsValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a LabelsValue object.
Fields:
key: Name of the additional property.
value: A extra_types.JsonValue attribute.
"""
key = _messages.StringField(1)
value = _messages.MessageField('extra_types.JsonValue', 2, repeated=True)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
creditTypes = _messages.StringField(1, repeated=True)
creditTypesTreatment = _messages.EnumField('CreditTypesTreatmentValueValuesEnum', 2)
labels = _messages.MessageField('LabelsValue', 3)
projects = _messages.StringField(4, repeated=True)
services = _messages.StringField(5, repeated=True)
subaccounts = _messages.StringField(6, repeated=True)
class GoogleCloudBillingBudgetsV1beta1LastPeriodAmount(_messages.Message):
r"""Describes a budget amount targeted to last period's spend. At this time,
the amount is automatically 100% of last period's spend; that is, there are
no other options yet. Future configuration will be described here (for
example, configuring a percentage of last period's spend).
"""
class GoogleCloudBillingBudgetsV1beta1ListBudgetsResponse(_messages.Message):
r"""Response for ListBudgets
Fields:
budgets: List of the budgets owned by the requested billing account.
nextPageToken: If not empty, indicates that there may be more budgets that
match the request; this value should be passed in a new
`ListBudgetsRequest`.
"""
budgets = _messages.MessageField('GoogleCloudBillingBudgetsV1beta1Budget', 1, repeated=True)
nextPageToken = _messages.StringField(2)
class GoogleCloudBillingBudgetsV1beta1ThresholdRule(_messages.Message):
r"""ThresholdRule contains a definition of a threshold which triggers an
alert (a notification of a threshold being crossed) to be sent when spend
goes above the specified amount. Alerts are automatically e-mailed to users
with the Billing Account Administrator role or the Billing Account User
role. The thresholds here have no effect on notifications sent to anything
configured under `Budget.all_updates_rule`.
Enums:
SpendBasisValueValuesEnum: Optional. The type of basis used to determine
if spend has passed the threshold. Behavior defaults to CURRENT_SPEND if
not set.
Fields:
spendBasis: Optional. The type of basis used to determine if spend has
passed the threshold. Behavior defaults to CURRENT_SPEND if not set.
thresholdPercent: Required. Send an alert when this threshold is exceeded.
This is a 1.0-based percentage, so 0.5 = 50%. Validation: non-negative
number.
"""
class SpendBasisValueValuesEnum(_messages.Enum):
r"""Optional. The type of basis used to determine if spend has passed the
threshold. Behavior defaults to CURRENT_SPEND if not set.
Values:
BASIS_UNSPECIFIED: Unspecified threshold basis.
CURRENT_SPEND: Use current spend as the basis for comparison against the
threshold.
FORECASTED_SPEND: Use forecasted spend for the period as the basis for
comparison against the threshold.
"""
BASIS_UNSPECIFIED = 0
CURRENT_SPEND = 1
FORECASTED_SPEND = 2
spendBasis = _messages.EnumField('SpendBasisValueValuesEnum', 1)
thresholdPercent = _messages.FloatField(2)
class GoogleCloudBillingBudgetsV1beta1UpdateBudgetRequest(_messages.Message):
r"""Request for UpdateBudget
Fields:
budget: Required. The updated budget object. The budget to update is
specified by the budget name in the budget.
updateMask: Optional. Indicates which fields in the provided budget to
update. Read-only fields (such as `name`) cannot be changed. If this is
not provided, then only fields with non-default values from the request
are updated. See https://developers.google.com/protocol-
buffers/docs/proto3#default for more details about default values.
"""
budget = _messages.MessageField('GoogleCloudBillingBudgetsV1beta1Budget', 1)
updateMask = _messages.StringField(2)
class GoogleProtobufEmpty(_messages.Message):
r"""A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to use it as the request
or the response type of an API method. For instance: service Foo { rpc
Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The JSON
representation for `Empty` is empty JSON object `{}`.
"""
class GoogleTypeMoney(_messages.Message):
r"""Represents an amount of money with its currency type.
Fields:
currencyCode: The three-letter currency code defined in ISO 4217.
nanos: Number of nano (10^-9) units of the amount. The value must be
between -999,999,999 and +999,999,999 inclusive. If `units` is positive,
`nanos` must be positive or zero. If `units` is zero, `nanos` can be
positive, zero, or negative. If `units` is negative, `nanos` must be
negative or zero. For example $-1.75 is represented as `units`=-1 and
`nanos`=-750,000,000.
units: The whole units of the amount. For example if `currencyCode` is
`"USD"`, then 1 unit is one US dollar.
"""
currencyCode = _messages.StringField(1)
nanos = _messages.IntegerField(2, variant=_messages.Variant.INT32)
units = _messages.IntegerField(3)
class StandardQueryParameters(_messages.Message):
r"""Query parameters accepted by all methods.
Enums:
FXgafvValueValuesEnum: V1 error format.
AltValueValuesEnum: Data format for response.
Fields:
f__xgafv: V1 error format.
access_token: OAuth access token.
alt: Data format for response.
callback: JSONP
fields: Selector specifying which fields to include in a partial response.
key: API key. Your API key identifies your project and provides you with
API access, quota, and reports. Required unless you provide an OAuth 2.0
token.
oauth_token: OAuth 2.0 token for the current user.
prettyPrint: Returns response with indentations and line breaks.
quotaUser: Available to use for quota purposes for server-side
applications. Can be any arbitrary string assigned to a user, but should
not exceed 40 characters.
trace: A tracing token of the form "token:<tokenid>" to include in api
requests.
uploadType: Legacy upload protocol for media (e.g. "media", "multipart").
upload_protocol: Upload protocol for media (e.g. "raw", "multipart").
"""
class AltValueValuesEnum(_messages.Enum):
r"""Data format for response.
Values:
json: Responses with Content-Type of application/json
media: Media download with context-dependent Content-Type
proto: Responses with Content-Type of application/x-protobuf
"""
json = 0
media = 1
proto = 2
class FXgafvValueValuesEnum(_messages.Enum):
r"""V1 error format.
Values:
_1: v1 error format
_2: v2 error format
"""
_1 = 0
_2 = 1
f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
access_token = _messages.StringField(2)
alt = _messages.EnumField('AltValueValuesEnum', 3, default='json')
callback = _messages.StringField(4)
fields = _messages.StringField(5)
key = _messages.StringField(6)
oauth_token = _messages.StringField(7)
prettyPrint = _messages.BooleanField(8, default=True)
quotaUser = _messages.StringField(9)
trace = _messages.StringField(10)
uploadType = _messages.StringField(11)
upload_protocol = _messages.StringField(12)
encoding.AddCustomJsonFieldMapping(
StandardQueryParameters, 'f__xgafv', '$.xgafv')
encoding.AddCustomJsonEnumMapping(
StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1')
encoding.AddCustomJsonEnumMapping(
StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2')
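# Usage sketch: building a CreateBudget request from the message classes
# defined above (the billing account id is a placeholder):
#
#   budget = GoogleCloudBillingBudgetsV1beta1Budget(
#       displayName='monthly-budget',
#       amount=GoogleCloudBillingBudgetsV1beta1BudgetAmount(
#           specifiedAmount=GoogleTypeMoney(currencyCode='USD', units=100)))
#   request = BillingbudgetsBillingAccountsBudgetsCreateRequest(
#       parent='billingAccounts/000000-000000-000000',
#       googleCloudBillingBudgetsV1beta1CreateBudgetRequest=
#           GoogleCloudBillingBudgetsV1beta1CreateBudgetRequest(budget=budget))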
| [
"[email protected]"
] | |
4e3448bfeb4bf56e2ff41fc71a1727b619f401e6 | 526b6454565583700866463e46f66ede67165e2b | /expenses/pagination.py | d231586502537a64f68fbb878914834860e78b17 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | thangadurairajapandiyan/django-expenses | a0f04ac41d1b02be82642a084545a2b356fd5a59 | 4a463052a67ac080427857d3fec16cf78eb70c3b | refs/heads/master | 2023-03-30T04:24:01.096399 | 2021-03-31T20:30:17 | 2021-03-31T20:30:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,126 | py | # Pretty Pagination
# Copyright © 2018-2021, Chris Warrick.
# All rights reserved.
# License: 3-clause BSD
from itertools import zip_longest
def pagination(num, maxpage):
"""Generate a pretty pagination."""
if maxpage <= 5:
return list(range(1, maxpage + 1))
page_range = []
if num == 1:
around = {1, 2, 3}
elif num == maxpage:
around = {num - 2, num - 1, num}
else:
around = {num - 1, num, num + 1}
around |= {1, maxpage}
page_range_base = [i for i in sorted(around) if 0 < i <= maxpage]
for current_page, next_page in zip_longest(page_range_base, page_range_base[1:]):
page_range.append(current_page)
if next_page is None:
continue
diff = next_page - current_page
if diff == 2:
page_range.append(current_page + 1) # ellipsis should not be one page
elif diff > 2:
page_range.append("...")
return page_range
if __name__ == "__main__":
maxpage = 15
print("Pages:", maxpage)
for i in range(1, maxpage + 1):
print(i, pagination(i, maxpage), sep="\t")
| [
"[email protected]"
] | |
a332729be8de4ce9a7e33437066ae82c80110be0 | bf7ad5c52e5be4fbf34816b95932d520e0f579d4 | /repeat.py | 0419ac8f22b5134ed7e2a5bb1e9e31d10d076841 | [] | no_license | veronicarose27/vero | 4722381a6598e3fc6f87596d52f6ca860219ad19 | c943344596dc4398accdd81bd9936ff114b8d738 | refs/heads/master | 2020-06-11T21:13:32.613495 | 2019-07-19T17:20:46 | 2019-07-19T17:20:46 | 194,087,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 246 | py | y,z=map(int,input().split())
p=list(map(int,input().split()))
# print the first value in p that occurs exactly z times
for i in range(0, len(p)):
    count = 0
    for j in range(0, len(p)):
        if p[i] == p[j]:
            count = count + 1
    if count == z:
        print(p[i])
        break
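# Example: with input "5 2" and "1 2 3 2 5" this prints 2, the first value
# occurring exactly twice (the first input, y, is not used).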
| [
"[email protected]"
] | |
6110d7d86503b01878af17b0f37d98e5097fece2 | f4b7d327581e500dc79079c834cc23af9939737e | /moonlight/models/base/glyph_patches.py | 263b33c17cd0a8a9c7f22e54295ce5b1953d0b75 | [
"LicenseRef-scancode-generic-cla",
"Apache-2.0"
] | permissive | abc3436645/moonlight | 095eea2b892528b9a3fe5d05af39e4a023c55628 | 7f8a3ab4e55570dd120e3965f8049dd866d12a6b | refs/heads/master | 2020-03-25T19:37:01.849371 | 2018-08-07T17:42:56 | 2018-08-07T17:42:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,031 | py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base patch-based glyph model.
For example, this accepts the staff patch k-means centroids emitted by
staffline_patches_kmeans_pipeline and labeled by kmeans_labeler.
This defines the input and signature of the model, and allows any type of
multi-class classifier using the normalized patches as input.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
from absl import flags
from moonlight.models.base import batches
from moonlight.models.base import label_weights
from moonlight.util import memoize
import tensorflow as tf
from tensorflow.python.lib.io import file_io
from tensorflow.python.lib.io import tf_record
WEIGHT_COLUMN_NAME = 'weight'
FLAGS = flags.FLAGS
flags.DEFINE_string(
'train_input_patches', None, 'Glob of labeled patch TFRecords for training')
flags.DEFINE_string(
'eval_input_patches', None, 'Glob of labeled patch TFRecords for eval')
flags.DEFINE_string('model_dir', None, 'Output trained model directory')
flags.DEFINE_boolean(
'use_included_label_weight', False,
'Whether to multiply a "label_weight" feature included in the example by'
' the weight determined by the "label" value.')
flags.DEFINE_float(
'augmentation_x_shift_probability', 0.5,
'Probability of shifting the patch left or right by one pixel. The edge is'
' filled using the adjacent column. It is equally likely that the patch is'
' shifted left or right.')
flags.DEFINE_float(
'augmentation_max_rotation_degrees', 2.,
'Max rotation of the patch, in degrees. The rotation is selected uniformly'
' randomly from the range +- this value. A value of 0 implies no rotation.')
flags.DEFINE_integer(
'eval_throttle_secs', 60, 'Evaluate at at most this interval, in seconds.')
flags.DEFINE_integer(
'train_max_steps', 100000,
'Max steps for training. If 0, will train until the process is'
' interrupted.')
flags.DEFINE_integer('eval_steps', 500, 'Num steps to evaluate the model.')
flags.DEFINE_integer(
'exports_to_keep', 10,
'Keep the last N saved models (exported on each eval) before deleting'
' previous exports.')
@memoize.MemoizedFunction
def read_patch_dimensions():
"""Reads the dimensions of the input patches from disk.
Parses the first example in the training set, which must have "height" and
"width" features.
Returns:
Tuple of (height, width) read from disk, using the glob passed to
--train_input_patches.
"""
for filename in file_io.get_matching_files(FLAGS.train_input_patches):
# If one matching file is empty, go on to the next file.
for record in tf_record.tf_record_iterator(filename):
example = tf.train.Example.FromString(record)
# Convert long (int64) to int, necessary for use in feature columns in
# Python 2.
patch_height = int(example.features.feature['height'].int64_list.value[0])
patch_width = int(example.features.feature['width'].int64_list.value[0])
return patch_height, patch_width
def input_fn(input_patches):
"""Defines the estimator input function.
Args:
input_patches: The input patches TFRecords pattern.
Returns:
A callable. Each invocation returns a tuple containing:
* A dict with a single key 'patch', and the patch tensor as a value.
* A scalar tensor with the patch label, as an integer.
"""
patch_height, patch_width = read_patch_dimensions()
dataset = tf.data.TFRecordDataset(file_io.get_matching_files(input_patches))
def parser(record):
"""Dataset parser function.
Args:
record: A single serialized Example proto tensor.
Returns:
A tuple of:
* A dict of features ('patch' and 'weight')
* A label tensor (int64 scalar).
"""
feature_types = {
'patch':
tf.FixedLenFeature((patch_height, patch_width), tf.float32),
'label':
tf.FixedLenFeature((), tf.int64),
}
if FLAGS.use_included_label_weight:
feature_types['label_weight'] = tf.FixedLenFeature((), tf.float32)
features = tf.parse_single_example(record, feature_types)
label = features['label']
weight = label_weights.weights_from_labels(label)
if FLAGS.use_included_label_weight:
# Both operands must be the same type (float32).
weight = tf.to_float(weight) * tf.to_float(features['label_weight'])
patch = _augment(features['patch'])
return {'patch': patch, WEIGHT_COLUMN_NAME: weight}, label
return batches.get_batched_tensor(dataset.map(parser))
def _augment(patch):
"""Performs multiple augmentations on the patch, helping to generalize."""
return _augment_rotation(_augment_shift(patch))
def _augment_shift(patch):
"""Augments the patch by possibly shifting it 1 pixel horizontally."""
with tf.name_scope('augment_shift'):
rand = tf.random_uniform(())
def shift_left():
return _shift_left(patch)
def shift_right():
return _shift_right(patch)
def identity():
return patch
shift_prob = min(1., FLAGS.augmentation_x_shift_probability)
return tf.cond(rand < shift_prob / 2,
shift_left,
lambda: tf.cond(rand < shift_prob, shift_right, identity))
def _shift_left(patch):
patch = tf.convert_to_tensor(patch)
return tf.concat([patch[:, 1:], patch[:, -1:]], axis=1)
def _shift_right(patch):
patch = tf.convert_to_tensor(patch)
return tf.concat([patch[:, :1], patch[:, :-1]], axis=1)
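# For a single-row patch [[a, b, c]], _shift_left yields [[b, c, c]] and
# _shift_right yields [[a, a, b]]: the vacated edge column is filled by
# duplicating the adjacent column.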
def _augment_rotation(patch):
"""Augments the patch by rotating it by a small amount."""
max_rotation_radians = math.radians(FLAGS.augmentation_max_rotation_degrees)
rotation = tf.random_uniform(
(), minval=-max_rotation_radians, maxval=max_rotation_radians)
# Background is white (1.0) but tf.contrib.image.rotate currently always fills
# the edges with black (0). Invert the patch before rotating.
return 1. - tf.contrib.image.rotate(
1. - patch, rotation, interpolation='BILINEAR')
def serving_fn():
"""Returns the ServingInputReceiver for the exported model.
Returns:
A ServingInputReceiver object which may be passed to
`Estimator.export_savedmodel`. A model saved using this receiver may be used
for running OMR.
"""
examples = tf.placeholder(tf.string, shape=[None])
patch_height, patch_width = read_patch_dimensions()
parsed = tf.parse_example(examples, {
'patch': tf.FixedLenFeature((patch_height, patch_width), tf.float32),
})
return tf.estimator.export.ServingInputReceiver(
features={'patch': parsed['patch']},
receiver_tensors=parsed['patch'],
receiver_tensors_alternatives={
'example': examples,
'patch': parsed['patch']
})
def create_patch_feature_column():
return tf.feature_column.numeric_column(
'patch', shape=read_patch_dimensions())
def train_and_evaluate(estimator):
tf.estimator.train_and_evaluate(
estimator,
tf.estimator.TrainSpec(
input_fn=lambda: input_fn(FLAGS.train_input_patches),
max_steps=FLAGS.train_max_steps),
tf.estimator.EvalSpec(
input_fn=lambda: input_fn(FLAGS.eval_input_patches),
start_delay_secs=0,
throttle_secs=FLAGS.eval_throttle_secs,
steps=FLAGS.eval_steps,
exporters=[
tf.estimator.LatestExporter(
'exporter', serving_fn,
exports_to_keep=FLAGS.exports_to_keep),
]))
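# Usage sketch (assumption: a concrete model module supplies `model_fn`, e.g.
# built on create_patch_feature_column()):
#
#   estimator = tf.estimator.Estimator(model_fn=model_fn,
#                                      model_dir=FLAGS.model_dir)
#   train_and_evaluate(estimator)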
| [
"[email protected]"
] | |
04b61e88739ffadc8d675c0b4f576b5f7122eb69 | 576cc83449e10fd3f98281970c46016ea7a5aea2 | /OpenCV相机标定/CalibrationTest.py | 658370ca975416804ff63dff37187d3bdaa30be3 | [] | no_license | HotView/PycharmProjects | 215ab9edd341e3293daebcf86d97537f8cd28d75 | 61393fe5ba781a8c1216a5cbe7e0d06149a10190 | refs/heads/master | 2020-06-02T07:41:53.608742 | 2019-11-13T08:31:57 | 2019-11-13T08:31:57 | 191,085,178 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,778 | py | import cv2
import numpy as np
import glob
# termination criteria
criteria = (cv2.TERM_CRITERIA_EPS+cv2.TERM_CRITERIA_MAX_ITER,30,0.001)
# prepare object points, like (0,0,0), (1,0,0), (2,0,0), ..., (6,5,0)
objp = np.zeros((6*7,3),np.float32)
objp[:,:2] = np.mgrid[0:7,0:6].T.reshape(-1,2)
# Arrays to store object points and points from all the images.
objpoints = []
imgpoints = []
images = glob.glob('image/*.jpg')
for fname in images:
img = cv2.imread(fname)
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
    # Find the chessboard corners; points are joined line by line (by row or column): (7,6) means 7 points per line
ret,corners = cv2.findChessboardCorners(gray,(7,6),None)
    # If corners were found, add the object points and image points
if ret:
objpoints.append(objp)
corners2 = cv2.cornerSubPix(gray,corners,(11,11),(-1,-1),criteria)
print("corners2",corners2)
imgpoints.append(corners2)
        # Draw and display the corners; the (7,6) pattern is split into 6 colors,
        # one per line: the orange-red point is point 0, the blue ones are the last set
img = cv2.drawChessboardCorners(img,(7,6),corners2,ret)
for i,p in enumerate(corners2):
x = int(p[0][0])
y = int(p[0][1])
cv2.putText(img,str(i),(x,y),cv2.FONT_HERSHEY_SIMPLEX,0.5,(255,255,255),2)
cv2.imshow(fname,img)
rmse, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, gray.shape[::-1], None, None)
h,w = gray.shape[:2]
imgsize = (w,h)
mtx2, roi=cv2.getOptimalNewCameraMatrix(mtx,dist,imgsize,alpha=0,
centerPrincipalPoint=True)
print("#######")
print(dist)
#np.savez("pose",mtx = mtx,dist = dist,newmtx = mtx2)
print(mtx,dist,mtx2)
with open('pose.py', 'wb') as fout:
fout.write(b'import numpy as np\n')
fout.write(b'\n')
fout.write(b'rmse = %r\n' % rmse)
fout.write(b'distortion_coefficients = np.array(%r, dtype=np.float32)\n'
% dist.tolist())
fout.write(b'raw_camera_matrix = np.array(%r, dtype=np.float32)\n'
% mtx.tolist())
fout.write(b'undistort_camera_matrix = np.array(%r, dtype=np.float32)\n'
% mtx2.tolist())
fout.write(b'roi = %d, %d, %d, %d\n'% roi)
fout.write(b'image_size = %d, %d\n' % imgsize)
print(roi)
print("----------------")
print(rmse)
print("-----------")
print(mtx)
print("-----------")
matinv = np.linalg.inv(mtx)
print(matinv)
print("################################")
print(np.dot(mtx,matinv))
mean_error = 0
for i in range(len(objpoints)):
imgpoints2, _ = cv2.projectPoints(objpoints[i], rvecs[i], tvecs[i], mtx, dist)
error = cv2.norm(imgpoints[i],imgpoints2, cv2.NORM_L2)/len(imgpoints2)
mean_error += error
print ("total error: ", mean_error/len(objpoints))
cv2.waitKey(0)
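# Undistortion sketch (uses the matrices computed above; not executed here):
#   und = cv2.undistort(img, mtx, dist, None, mtx2)
#   x, y, w, h = roi
#   und = und[y:y + h, x:x + w]    # crop to the valid region of interest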
| [
"[email protected]"
] | |
d7dd88ecf67ac7b20922fbc9779a463ad3cd8297 | a6610e191090e216b0e0f23018cecc5181400a7a | /robotframework-ls/src/robotframework_ls/constants.py | c6291b1baf57cfc01ddad6b1554e8002b7fe2a95 | [
"Apache-2.0"
] | permissive | JohanMabille/robotframework-lsp | d7c4c00157dd7c12ab15b7125691f7052f77427c | 610f0257fdcd79b8c38107a0ecf600f60160bc1f | refs/heads/master | 2023-01-19T10:29:48.982578 | 2020-11-25T13:46:22 | 2020-11-25T13:46:22 | 296,245,093 | 0 | 0 | NOASSERTION | 2020-09-17T06:58:54 | 2020-09-17T06:58:53 | null | UTF-8 | Python | false | false | 74 | py | from robocorp_ls_core.constants import *
DEFAULT_COMPLETIONS_TIMEOUT = 4
| [
"[email protected]"
] | |
b30f1b39fb3a2a1a6d9299203f6c492cd0e9aa87 | a7ca0a372a44bc9cee59a7e1e59734a4814a59b9 | /이것이코딩테스트다/병사배치하기.py | 8b38d29927c9fad4814ed5bac88c39daec9c4d28 | [] | no_license | schw240/Preparing-coding-test | 435d6dbdcf90fc8c0c408dfa032ad7f09fdc5a90 | 758a41270c409312a998152c5298369ec385bfdb | refs/heads/master | 2021-11-29T07:54:05.140178 | 2021-10-03T11:40:36 | 2021-10-03T11:40:36 | 245,345,693 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 510 | py | N = int(input())
scores = list(map(int, input().split(' ')))
scores.reverse()
# When arranging soldiers, place them so that higher combat power comes first,
# i.e. the remaining sequence must be in descending order; instead of sorting,
# this is achieved by excluding (removing) soldiers from the line
dp = [1] * N
# number of soldiers to exclude so that the number remaining is maximized
for i in range(1, N):
for j in range(i):
if scores[j] < scores[i]:
dp[i] = max(dp[i], dp[j] + 1)
print(N - max(dp))
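# Worked example: N=7 with powers 15 11 4 8 5 2 4 -> the reversed list
# [4, 2, 5, 8, 4, 11, 15] has a longest increasing subsequence of length 5
# (2, 5, 8, 11, 15), so 7 - 5 = 2 soldiers must be excluded.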
| [
"[email protected]"
] | |
a52dfaac37bcd5fa128652b84d5b4b9904f40414 | 1974b3e9c5f2f677833e1608a41281f377fd331c | /dltesthttp_xuyalin2/www/testcase/dlmall/ts_couponx/getUseFulCoupons.py | e7f7c5c587deb28ea48f702a15f9161b6113a024 | [] | no_license | xyl00755/pythonLearning | ed0f540b61247c3560f347853da5886b2e2ba25d | c6aecff86ff34dcd7358d98201627ff84e9bf2cf | refs/heads/master | 2021-01-13T08:19:25.171016 | 2016-12-16T05:43:10 | 2016-12-16T05:43:10 | 71,764,553 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,696 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from www.api.dlmall import *
from www.common.excel import *
from www.common.database import *
"""
/couponx/getUseFulCoupons.html
request:
get
http://123.57.244.205:9003/couponx/getUseFulCoupons.html?goodsId=f4bccbbd84e44f9ba839e082970dccca%2C9c02bcf9737d499a8a72a3514730b425%2C24df3ab5b628496aa0fdded6c4230fec%2C2e78061ea75b26638ef38d1b0c848cbb%2Cbc54482ad9a44f3d95b63d9876bc3100%2C&t=1474440394741
response:json string
{
"status": 0,
"data": {
"unUseFulCoupons": [
{
"id": null,
"couponEntityId": "114512879800028",
"couponName": "6元优惠券验证",
"couponAmt": 600,
"couponEntityStatus": "01",
"effectiveTime": "2016-09-21",
"uneffectiveTime": "2016-10-21",
"category": "白酒",
"expireTime": null,
"useTime": null,
"records": null
}
],
"useFulCoupons": [
{
"id": null,
"couponEntityId": "114512879800028",
"couponName": "6元优惠券验证",
"couponAmt": 600,
"couponEntityStatus": "01",
"effectiveTime": "2016-09-21",
"uneffectiveTime": "2016-10-21",
"category": "白酒",
"expireTime": null,
"useTime": null,
"records": null
}
]
},
"msg": ""
}
"""
class getUseFulCoupons(unittest.TestCase):
UserShop = eData('WebManager')
danluCouponsInfo=eData('DanluCoupons')
dlservice = dlmall()
s = dlservice.login(UserShop.buyer_username,UserShop.buyer_password)
    # Four coupons: 1) the longest expiry time, 2) two with the same expiry time
    # but one larger and one smaller amount, 3) the shortest expiry time;
    # check the sort order of the returned coupons
    def test_Coupons_sort(self):
        data = [self.danluCouponsInfo.goodsId1, self.danluCouponsInfo.goodsId2,
                self.danluCouponsInfo.goodsId3, self.danluCouponsInfo.goodsId4]
        data = ','.join(data)
        couponlist = self.dlservice.getUseFulCoupons(self.s, data)
        self.assertEqual(couponlist['data']['useFulCoupons'][0]['couponEntityId'], self.danluCouponsInfo.couponEntityId4)
        self.assertEqual(couponlist['data']['useFulCoupons'][1]['couponEntityId'], self.danluCouponsInfo.couponEntityId3)
        self.assertEqual(couponlist['data']['useFulCoupons'][2]['couponEntityId'], self.danluCouponsInfo.couponEntityId2)
        self.assertEqual(couponlist['data']['useFulCoupons'][3]['couponEntityId'], self.danluCouponsInfo.couponEntityId1)
| [
"[email protected]"
] | |
b9bcf064e318743a5c5030ddf2e243fa9c742794 | 8537ecfe2a23cfee7c9f86e2318501f745078d67 | /Practise_stuff/matplotlib/click_on_point_to_see_timeseries.py | d68c313bc04fa4ac8a3c2008391627668a605bd3 | [] | no_license | oolsson/oo_eclipse | 91d33501d9ed6c6b3c51bb22b635eb75da88e4e1 | 1828866bc4e1f67b279c5a037e4a6a4439ddb090 | refs/heads/master | 2021-01-01T20:17:12.644890 | 2015-11-30T09:49:41 | 2015-11-30T09:49:41 | 23,485,434 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,316 | py | import numpy as np
class PointBrowser:
"""
Click on a point to select and highlight it -- the data that
generated the point will be shown in the lower axes. Use the 'n'
and 'p' keys to browse through the next and previous points
"""
def __init__(self):
self.lastind = 0
self.text = ax.text(0.05, 0.95, 'selected: none',
transform=ax.transAxes, va='top')
self.selected, = ax.plot([xs[0]], [ys[0]], 'o', ms=12, alpha=0.4,
color='yellow', visible=False)
def onpress(self, event):
if self.lastind is None: return
if event.key not in ('n', 'p'): return
if event.key=='n': inc = 1
else: inc = -1
self.lastind += inc
self.lastind = np.clip(self.lastind, 0, len(xs)-1)
self.update()
def onpick(self, event):
if event.artist!=line: return True
N = len(event.ind)
if not N: return True
# the click locations
x = event.mouseevent.xdata
y = event.mouseevent.ydata
distances = np.hypot(x-xs[event.ind], y-ys[event.ind])
indmin = distances.argmin()
dataind = event.ind[indmin]
self.lastind = dataind
self.update()
def update(self):
if self.lastind is None: return
dataind = self.lastind
ax2.cla()
ax2.plot(X[dataind])
ax2.text(0.05, 0.9, 'mu=%1.3f\nsigma=%1.3f'%(xs[dataind], ys[dataind]),
transform=ax2.transAxes, va='top')
ax2.set_ylim(-0.5, 1.5)
self.selected.set_visible(True)
self.selected.set_data(xs[dataind], ys[dataind])
self.text.set_text('selected: %d'%dataind)
fig.canvas.draw()
if __name__ == '__main__':
import matplotlib.pyplot as plt
X = np.random.rand(100, 200)
xs = np.mean(X, axis=1)
ys = np.std(X, axis=1)
fig, (ax, ax2) = plt.subplots(2, 1)
ax.set_title('click on point to plot time series')
line, = ax.plot(xs, ys, 'o', picker=5) # 5 points tolerance
browser = PointBrowser()
fig.canvas.mpl_connect('pick_event', browser.onpick)
fig.canvas.mpl_connect('key_press_event', browser.onpress)
plt.show()
| [
"[email protected]"
] | |
7851381b34746f3487ce259477ca9681dcb2349a | 32cfd6a8df9b24059ed7bee0b7bf99b6c0268f6e | /framework/seocortex/utils/soupselect_old.py | 2498bc8d37f106ed475746dca6bdd246a3d6be44 | [] | no_license | blorenz/seocortex | 5cd7acb647fbc4908e6045d2a89bdd2ade922434 | 3f1f7e8ac4a12e24e7f2cb58407ce52babfe5cf8 | refs/heads/master | 2016-09-05T21:36:01.039128 | 2012-04-23T13:33:46 | 2012-04-23T13:33:46 | 3,951,299 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 4,621 | py | # -*-coding:Utf-8 -*
# Copyright 2012 - Peoplze.com <[email protected]>
# Python imports
import re
def attrInNode(node,atr):
for k,val in node.attrs:
if k == atr:
return True
return False
def htmlFind(node,selector,n,defaut=""):
l = list(node.findSelect(selector))
if len(l) > n:
return l[n].text
else:
return defaut
tag_re = re.compile('^[a-z0-9]+$')
attribselect_re = re.compile(
r'^(?P<tag>\w+)?\[(?P<attribute>\w+)(?P<operator>[=~\|\^\$\*]?)' +
r'=?"?(?P<value>[^\]"]*)"?\]$'
)
# /^(\w+)\[(\w+)([=~\|\^\$\*]?)=?"?([^\]"]*)"?\]$/
# \---/ \---/\-------------/ \-------/
# | | | |
# | | | The value
# | | ~,|,^,$,* or =
# | Attribute
# Tag
def attribute_checker(operator, attribute, value=''):
"""
Takes an operator, attribute and optional value; returns a function that
will return True for elements that match that combination.
"""
return {
'=': lambda el: el.get(attribute) == value,
# attribute includes value as one of a set of space separated tokens
'~': lambda el: value in el.get(attribute, '').split(),
# attribute starts with value
'^': lambda el: el.get(attribute, '').startswith(value),
# attribute ends with value
'$': lambda el: el.get(attribute, '').endswith(value),
# attribute contains value
'*': lambda el: value in el.get(attribute, ''),
# attribute is either exactly value or starts with value-
'|': lambda el: el.get(attribute, '') == value \
or el.get(attribute, '').startswith('%s-' % value),
}.get(operator, lambda el: el.has_key(attribute))
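# e.g. attribute_checker('^', 'href', 'http')(el) is True when el['href']
# starts with 'http'; an unrecognized operator falls back to a bare
# attribute-existence check.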
def has_attr(soup, attr_name):
return attr_name in soup._getAttrMap()
def select(soup, selector):
"""
soup should be a BeautifulSoup instance; selector is a CSS selector
specifying the elements you want to retrieve.
"""
tokens = selector.split()
current_context = [soup]
for token in tokens:
m = attribselect_re.match(token)
if m:
# Attribute selector
tag, attribute, operator, value = m.groups()
if not tag:
tag = True
checker = attribute_checker(operator, attribute, value)
found = []
for context in current_context:
found.extend([el for el in context.findAll(tag) if checker(el)])
current_context = found
continue
if '#' in token:
# ID selector
tag, id = token.split('#', 1)
if not tag:
tag = True
el = current_context[0].find(tag, {'id': id})
if not el:
return [] # No match
current_context = [el]
continue
if '.' in token:
# Class selector
tag, klass = token.split('.', 1)
if not tag:
tag = True
classes = set(klass.split('.'))
found = []
for context in current_context:
found.extend(
context.findAll(tag,
{'class': lambda attr:
attr and classes.issubset(attr.split())}
)
)
current_context = found
continue
if token == '*':
# Star selector
found = []
for context in current_context:
found.extend(context.findAll(True))
current_context = found
continue
# Here we should just have a regular tag
if not tag_re.match(token):
return []
found = []
for context in current_context:
found.extend(context.findAll(token))
current_context = found
return current_context
def monkeypatch(BeautifulSoupClass=None):
"""
If you don't explicitly state the class to patch, defaults to the most
common import location for BeautifulSoup.
"""
if not BeautifulSoupClass:
        # Patch Tag instead, because it's more convenient in my opinion
from BeautifulSoup import Tag as BeautifulSoupClass
BeautifulSoupClass.findSelect = select
BeautifulSoupClass.has_attr = has_attr
def unmonkeypatch(BeautifulSoupClass=None):
if not BeautifulSoupClass:
from BeautifulSoup import Tag as BeautifulSoupClass
delattr(BeautifulSoupClass, 'findSelect')
delattr(BeautifulSoupClass, 'has_attr')
# Monkeypatch on import
monkeypatch()
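# Minimal usage sketch (added for illustration; the sample markup is an
# assumption, not taken from this repo). Run the module directly to try the
# monkeypatched selectors:
if __name__ == '__main__':
    from BeautifulSoup import BeautifulSoup
    demo = BeautifulSoup('<div id="nav"><a class="ext" href="http://x.com">x</a></div>')
    print demo.findSelect('div#nav a.ext')    # tag, id and class selectors combined
    print demo.findSelect('a[href^=http]')    # attribute "starts with" selector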
| [
"[email protected]"
] | |
e68e5d91b7780d3fad236dfa0ad58ca34d4e4f9e | 8b3ca44ee3d990233e74655b7131d616094f70c2 | /experiments/cross_validation/movielens_100K/poisson_gamma_gamma.py | cc0781ad2618e0f9db05184959fc4e28140035a0 | [] | no_license | zshwuhan/BMF_Priors | 8b8c54271285a72d2085a56a9475c0756f375e67 | 6a600da1c41f1ccde2f2ba99298b40e68fb9910a | refs/heads/master | 2021-05-13T19:10:07.203215 | 2017-12-01T13:30:21 | 2017-12-01T13:30:21 | 116,883,181 | 1 | 0 | null | 2018-01-09T23:36:13 | 2018-01-09T23:36:13 | null | UTF-8 | Python | false | false | 1,609 | py | '''
Run nested cross-validation experiment on the MovieLens 100K dataset, with
Poisson likelihood, Gamma priors, and Gamma hierarchical priors.
'''
project_location = "/Users/thomasbrouwer/Documents/Projects/libraries/"
import sys
sys.path.append(project_location)
from BMF_Priors.code.models.bmf_poisson_gamma_gamma import BMF_Poisson_Gamma_Gamma
from BMF_Priors.code.cross_validation.nested_matrix_cross_validation import MatrixNestedCrossValidation
from BMF_Priors.data.movielens.load_data import load_movielens_100K
''' Settings BMF model. '''
method = BMF_Poisson_Gamma_Gamma
R, M = load_movielens_100K()
hyperparameters = { 'a':1., 'ap':1., 'bp':1. }
train_config = {
'iterations' : 200,
'init' : 'random',
}
predict_config = {
'burn_in' : 180,
'thinning' : 1,
}
''' Settings nested cross-validation. '''
K_range = [1,2,3]
no_folds = 5
no_threads = 5
parallel = False
folder_results = './results/poisson_gamma_gamma/'
output_file = folder_results+'results.txt'
files_nested_performances = [folder_results+'fold_%s.txt'%(fold+1) for fold in range(no_folds)]
''' Construct the parameter search. '''
parameter_search = [{'K':K, 'hyperparameters':hyperparameters} for K in K_range]
''' Run the cross-validation framework. '''
nested_crossval = MatrixNestedCrossValidation(
method=method,
R=R,
M=M,
K=no_folds,
P=no_threads,
parameter_search=parameter_search,
train_config=train_config,
predict_config=predict_config,
file_performance=output_file,
files_nested_performances=files_nested_performances,
)
nested_crossval.run(parallel=parallel) | [
"[email protected]"
] | |
f8dcb79496b226693eb440a4735a89a8bb445860 | 684b61f3405ed01b4184b222da342bd1533e4b90 | /shop/migrations/0002_auto_20200406_1505.py | 17c49a7cd1d4a26822a3030c54e37da8409a58fd | [] | no_license | Mubashar-javed/myshop | 6379d2568e969db9f8dc30354966d4054463959f | 0248c2cb6e26500b5fd1404dad45b14ebf1092ac | refs/heads/master | 2023-05-26T08:01:09.067626 | 2022-12-08T09:36:10 | 2022-12-08T09:36:10 | 254,266,547 | 0 | 0 | null | 2023-05-23T01:08:14 | 2020-04-09T03:57:41 | Python | UTF-8 | Python | false | false | 353 | py | # Generated by Django 2.2.5 on 2020-04-06 10:05
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('shop', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='product',
old_name='cereated',
new_name='created',
),
]
| [
"[email protected]"
] | |
aed11a2c4d13c0d479bf8465197e8185bcd75c06 | 14f0c423c109a75a8cbd10ca8c526f1482e3e385 | /Python/Django/multuple_apps/apps/blogs/views.py | 56e65b2b27072046cd5e9fc24cc6c724cda4ae09 | [] | no_license | thisolivier/dojo-master | 21fd5d13e25321cce0558cab3b0c0335774e173c | 9486f6b2af2148a296e2a238eee2b814fe0831fe | refs/heads/master | 2021-01-01T06:47:07.162851 | 2017-09-16T00:54:05 | 2017-09-16T00:54:05 | 97,511,225 | 0 | 0 | null | 2017-09-16T00:42:26 | 2017-07-17T18:51:47 | Python | UTF-8 | Python | false | false | 734 | py | from django.shortcuts import render
# Create your views here.
def blog_root(request):
print "---> Generating root template"
return render(request, 'blogs/index.html')
def blog_new(request):
print "---> Generating new blog template"
return render(request, 'blogs/index.html')
def blog_create(request):
print "---> Generating create blog template"
return render(request, 'blogs/index.html')
def blog_num(request, number):
print "---> Generating create blog number {}".format(number)
return render(request, 'blogs/index.html')
def blog_modify(request, number, action):
print "---> Generating {}ing template for blog number {}".format(action, number)
return render(request, 'blogs/index.html') | [
"[email protected]"
] | |
d0479fa248e992aff2046f147e18df724d37ad7f | 6fcfb638fa725b6d21083ec54e3609fc1b287d9e | /python/AirSage_Petrel/Petrel-master/petrel/petrel/storm.py | bd46ee3d47e13255528269e2dc4ef4455261a969 | [] | no_license | LiuFang816/SALSTM_py_data | 6db258e51858aeff14af38898fef715b46980ac1 | d494b3041069d377d6a7a9c296a14334f2fa5acc | refs/heads/master | 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 | Python | UTF-8 | Python | false | false | 16,718 | py | from __future__ import print_function
import sys
import os
import time
import socket
import logging
from collections import deque
import json
import six
storm_log = logging.getLogger('storm')
TUPLE_PROFILING = False
json_encode = lambda x: json.dumps(x)
json_decode = lambda x: json.loads(x)
BLANK_LINE_CHECK = True
# Save old stdout so we can still write to it after redirecting.
old_stdout = sys.stdout
# TODO: Get this value from a topology configuration setting.
MAX_MESSAGE_SIZE = 16777216
class StormIPCException(Exception):
pass
#reads lines and reconstructs newlines appropriately
def readMsg():
msg = ""
while True:
line = sys.stdin.readline()
if not line:
raise StormIPCException('Read EOF from stdin')
if line[0:-1] == "end":
break
msg = msg + line
return json_decode(msg[0:-1])
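# Example of the framing readMsg() expects: each message is one JSON document
# (possibly spanning several lines) terminated by a line containing "end":
#     {"command": "next"}
#     end
# for which readMsg() returns the decoded dict {"command": "next"}.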
MODE = None
ANCHOR_TUPLE = None
#queue up commands we read while trying to read taskids
pending_commands = deque()
def readTaskIds():
if pending_taskids:
return pending_taskids.popleft()
else:
msg = readMsg()
while type(msg) is not list:
pending_commands.append(msg)
msg = readMsg()
return msg
#queue up taskids we read while trying to read commands/tuples
pending_taskids = deque()
def readCommand():
if pending_commands:
return pending_commands.popleft()
else:
msg = readMsg()
while type(msg) is list:
pending_taskids.append(msg)
msg = readMsg()
return msg
def readTuple():
cmd = readCommand()
return Tuple(cmd["id"], cmd["comp"], cmd["stream"], cmd["task"], cmd["tuple"])
def sendMsgToParent(msg):
print(json_encode(msg), file=old_stdout)
print('end', file=old_stdout)
try:
old_stdout.flush()
except (IOError, OSError) as e:
storm_log.exception(str(e))
raise StormIPCException('%s error [Errno %d] in sendMsgToParent: %s' % (
type(e).__name__,
e.errno,
str(e)))
# This function is probably obsolete with the addition of the new
# reportError() function.
# TODO: Consider getting rid of this function and call reportError() instead.
# However, need to consider the case where we are running on an older version
# of Storm where the Storm back end does not support reportError()? Can we
# detect that case and use this function instead?
def sendFailureMsgToParent(msg):
"""This function is kind of a hack, but useful when a Python task
encounters a fatal exception. "msg" should be a simple string like
"E_SPOUTFAILED". This function sends "msg" as-is to the Storm worker,
which tries to parse it as JSON. The hacky aspect is that we
*deliberately* make it fail by sending it non-JSON data. This causes
the Storm worker to throw an error and restart the Python task. This
is cleaner than simply letting the task die without notifying Storm,
because this way Storm restarts the task more quickly."""
assert isinstance(msg, six.string_types)
print(msg, file=old_stdout)
print('end', file=old_stdout)
storm_log.error('Sent failure message ("%s") to Storm', msg)
def sync():
sendMsgToParent({'command':'sync'})
def sendpid(heartbeatdir):
pid = os.getpid()
sendMsgToParent({'pid':pid})
open(heartbeatdir + "/" + str(pid), "w").close()
def emit(*args, **kwargs):
result = __emit(*args, **kwargs)
if result:
return readTaskIds()
def emitMany(*args, **kwargs):
"""A more efficient way to emit a number of tuples at once."""
global MODE
if MODE == Bolt:
emitManyBolt(*args, **kwargs)
elif MODE == Spout:
emitManySpout(*args, **kwargs)
def emitDirect(task, *args, **kwargs):
kwargs["directTask"] = task
__emit(*args, **kwargs)
def __emit(*args, **kwargs):
global MODE
if MODE == Bolt:
return emitBolt(*args, **kwargs)
elif MODE == Spout:
return emitSpout(*args, **kwargs)
def emitManyBolt(tuples, stream=None, anchors = [], directTask=None):
global ANCHOR_TUPLE
if ANCHOR_TUPLE is not None:
anchors = [ANCHOR_TUPLE]
m = {
"command": "emit",
"anchors": [a.id for a in anchors],
"tuple": None,
"need_task_ids": False,
}
if stream is not None:
m["stream"] = stream
if directTask is not None:
m["task"] = directTask
lines = []
for tup in tuples:
m["tuple"] = tup
lines.append(json_encode(m))
lines.append('end')
    # Join the encoded messages into one batched write; printing the list
    # object itself would emit its repr and corrupt the line protocol.
    print('\n'.join(lines), file=old_stdout)
def emitBolt(tup, stream=None, anchors = [], directTask=None, need_task_ids=False):
global ANCHOR_TUPLE
if ANCHOR_TUPLE is not None:
anchors = [ANCHOR_TUPLE]
m = {
"command": "emit",
"anchors": [a.id for a in anchors],
"tuple": tup,
"need_task_ids": need_task_ids,
}
if stream is not None:
m["stream"] = stream
if directTask is not None:
m["task"] = directTask
sendMsgToParent(m)
return need_task_ids
def emitManySpout(tuples, stream=None, id=None, directTask=None, need_task_ids=False):
m = {
"command": "emit",
"tuple": None,
"need_task_ids": need_task_ids,
}
if id is not None:
m["id"] = id
if stream is not None:
m["stream"] = stream
if directTask is not None:
m["task"] = directTask
lines = []
for tup in tuples:
m["tuple"] = tup
lines.append(json_encode(m))
lines.append('end')
    print('\n'.join(lines), file=old_stdout)  # batched write; join keeps the line protocol valid
def emitSpout(tup, stream=None, id=None, directTask=None, need_task_ids=False):
m = {
"command": "emit",
"tuple": tup,
"need_task_ids": need_task_ids,
}
if id is not None:
m["id"] = id
if stream is not None:
m["stream"] = stream
if directTask is not None:
m["task"] = directTask
sendMsgToParent(m)
return need_task_ids
def ack(tup):
"""Acknowledge a tuple"""
sendMsgToParent({"command": "ack", "id": tup.id})
def ackId(tupid):
"""Acknowledge a tuple when you only have its ID"""
sendMsgToParent({"command": "ack", "id": tupid})
def fail(tup):
"""Fail a tuple"""
sendMsgToParent({"command": "fail", "id": tup.id})
def reportError(msg):
sendMsgToParent({"command": "error", "msg": msg})
def log(msg):
sendMsgToParent({"command": "log", "msg": msg})
def initComponent():
# Redirect stdout and stderr to logger instances. This is particularly
# important for stdout so 'print' statements won't crash the Storm Java
# worker.
sys.stdout = LogStream(logging.getLogger('storm.stdout'))
sys.stderr = LogStream(logging.getLogger('storm.stderr'))
setupInfo = readMsg()
storm_log.info('Task received setupInfo from Storm: %s', setupInfo)
sendpid(setupInfo['pidDir'])
storm_log.info('Task sent pid to Storm')
return [setupInfo['conf'], setupInfo['context']]
class Tuple(object):
__slots__ = ['id', 'component', 'stream', 'task', 'values']
def __init__(self, id, component, stream, task, values):
self.id = id
self.component = component
self.stream = stream
self.task = task
self.values = values
def __eq__(self, other):
if not isinstance(other, Tuple):
return False
for k in self.__slots__:
if getattr(self, k) != getattr(other, k):
return False
return True
def __ne__(self, other):
return not (self == other)
def __repr__(self):
return '<%s%s>' % (
self.__class__.__name__,
''.join(' %s=%r' % (k, getattr(self, k)) for k in sorted(self.__slots__)))
def is_heartbeat_tuple(self):
return self.task == -1 and self.stream == "__heartbeat"
def is_tick_tuple(self):
return self.task == -1 and self.stream == "__tick"
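# For reference, readTuple() above yields instances such as (values illustrative):
#     Tuple('-6955888794331380283', 'words', 'default', 3, ['hello'])
# Heartbeat and tick tuples are flagged by task == -1 plus a special stream name.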
class Task(object):
def shared_initialize(self):
conf, context = initComponent()
# These values are only available with a patched version of Storm.
self.task_index = context.get('taskIndex', -1)
self.worker_port = context.get('workerPort', -1)
self.initialize(conf, context)
def report_exception(self, base_message, exception):
parameters = (
base_message,
os.environ.get('SCRIPT', sys.argv[0]),
socket.gethostname(),
'pid', os.getpid(),
'port', self.worker_port,
'taskindex', self.task_index,
type(exception).__name__,
#str(exception),
)
#message = '%s: %s (pid %d) on %s failed with %s: %s' % parameters
message = '__'.join(str(p).replace('.', '_') for p in parameters)
sendFailureMsgToParent(message)
# Sleep for a few seconds to try and ensure Storm reads this message
# before we terminate. If it does, then our message above will appear in
# the Storm UI.
time.sleep(5)
class Bolt(Task):
def __init__(self):
if TUPLE_PROFILING:
self.profiler = BoltProfiler()
else:
self.profiler = None
def initialize(self, stormconf, context):
pass
def process(self, tuple):
pass
def run(self):
global MODE
MODE = Bolt
self.shared_initialize()
profiler = self.profiler
try:
while True:
if profiler is not None: profiler.pre_read()
tup = readTuple()
if tup.is_heartbeat_tuple():
sync()
else:
if profiler is not None: profiler.post_read()
self.process(tup)
if profiler is not None: profiler.post_process()
except Exception as e:
self.report_exception('E_BOLTFAILED', e)
storm_log.exception('Caught exception in Bolt.run')
if 'tup' in locals():
# Only print the first 2000 characters of the tuple, otherwise
# the message may be too long for certain handlers (e.g.
# SysLogHandler).
storm_log.error(
'The error occurred while processing this tuple: %s',
repr(tup.values)[:2000])
class BasicBolt(Task):
def __init__(self):
if TUPLE_PROFILING:
self.profiler = BasicBoltProfiler()
else:
self.profiler = None
def initialize(self, stormconf, context):
pass
def process(self, tuple):
pass
def run(self):
global MODE
MODE = Bolt
global ANCHOR_TUPLE
self.shared_initialize()
profiler = self.profiler
try:
while True:
if profiler is not None: profiler.pre_read()
tup = readTuple()
if tup.is_heartbeat_tuple():
sync()
else:
if profiler is not None: profiler.post_read()
ANCHOR_TUPLE = tup
self.process(tup)
if profiler is not None: profiler.post_process()
ack(tup)
if profiler is not None: profiler.post_ack()
except Exception as e:
storm_log.info('Caught exception')
self.report_exception('E_BOLTFAILED', e)
storm_log.exception('Caught exception in BasicBolt.run')
if 'tup' in locals():
# Only print the first 2000 characters of the tuple, otherwise
# I've seen errors because the message is too long for
# SysLogHandler.
storm_log.error(
'The error occurred while processing this tuple: %s',
repr(tup.values)[:2000])
class Spout(Task):
def initialize(self, conf, context):
pass
def ack(self, id):
pass
def fail(self, id):
pass
def nextTuple(self):
pass
def run(self):
global MODE
MODE = Spout
self.shared_initialize()
try:
while True:
msg = readCommand()
command = msg["command"]
if command == "next":
self.nextTuple()
elif command == "ack":
self.ack(msg["id"])
elif command == "fail":
self.fail(msg["id"])
sync()
except Exception as e:
self.report_exception('E_SPOUTFAILED', e)
storm_log.exception('Caught exception in Spout.run: %s', str(e))
class BoltProfiler(object):
"""Helper class for Bolt. Implements some simple log-based counters for
profiling performance."""
MAX_COUNT = 1000
def __init__(self):
self.read_time = self.process_time = 0.0
self.num_tuples = self.total_num_tuples = 0
self.start_interval = None
def pre_read(self):
self.t1 = time.time()
if self.start_interval is None:
self.start_interval = self.t1
def post_read(self):
self.t2 = time.time()
self.read_time += self.t2 - self.t1
def post_process(self):
self.t3 = time.time()
self.process_time += self.t3 - self.t2
self.num_tuples += 1
if self.num_tuples % self.MAX_COUNT == 0 or self.t3 - self.start_interval > 1.0:
self.total_num_tuples += self.num_tuples
self.total_time = self.read_time + self.process_time
storm_log.debug(
'Bolt profile: total_num_tuples=%d, num_tuples=%d, avg_read_time=%f (%.1f%%), avg_process_time=%f (%.1f%%)',
self.total_num_tuples,
self.num_tuples,
self.read_time / self.num_tuples, self.read_time / self.total_time * 100.0,
self.process_time / self.num_tuples, self.process_time / self.total_time * 100.0)
# Clear the timing data.
self.start_interval = None
self.num_tuples = 0
self.read_time = self.process_time = 0.0
class BasicBoltProfiler(object):
"""Helper class for BasicBolt. Implements some simple log-based counters for
profiling performance."""
MAX_COUNT = 1000
def __init__(self):
self.read_time = self.process_time = self.ack_time = 0.0
self.num_tuples = self.total_num_tuples = 0
self.start_interval = None
def pre_read(self):
self.t1 = time.time()
if self.start_interval is None:
self.start_interval = self.t1
def post_read(self):
self.t2 = time.time()
self.read_time += self.t2 - self.t1
def post_process(self):
self.t3 = time.time()
self.process_time += self.t3 - self.t2
def post_ack(self):
self.t4 = time.time()
self.ack_time += self.t4 - self.t3
self.num_tuples += 1
if self.num_tuples % self.MAX_COUNT == 0 or self.t4 - self.start_interval > 1.0:
self.total_num_tuples += self.num_tuples
self.total_time = self.read_time + self.process_time + self.ack_time
storm_log.debug(
'BasicBolt profile: total_num_tuples=%d, num_tuples=%d, avg_read_time=%f (%.1f%%), avg_process_time=%f (%.1f%%), avg_ack_time=%f (%.1f%%)',
self.total_num_tuples,
self.num_tuples,
self.read_time / self.num_tuples, self.read_time / self.total_time * 100.0,
self.process_time / self.num_tuples, self.process_time / self.total_time * 100.0,
self.ack_time / self.num_tuples, self.ack_time / self.total_time * 100.0)
# Clear the timing data.
self.start_interval = None
self.num_tuples = 0
self.read_time = self.process_time = self.ack_time = 0.0
def initialize_profiling():
global TUPLE_PROFILING
TUPLE_PROFILING = storm_log.isEnabledFor(logging.DEBUG)
if TUPLE_PROFILING:
storm_log.info('Tuple profiling enabled. Will log tuple processing times.')
else:
storm_log.info('Tuple profiling NOT enabled. Will not log tuple processing times.')
class LogStream(object):
"""Object that implements enough of the Python stream API to be used as
sys.stdout and sys.stderr. Messages are written to the Python logger.
"""
def __init__(self, logger):
self.logger = logger
def write(self, message):
for line in message.split('\n'):
self.logger.error(line)
| [
"[email protected]"
] | |
f1010cdff0106ff59ffa22b5b5d3ee835bf5829f | fcc88521f63a3c22c81a9242ae3b203f2ea888fd | /Python3/0689-Maximum-Sum-of-3-Non-Overlapping-Subarrays/soln-1.py | f62c0aa991d5e1a4c2f04b46e6f6e54a54e99d0f | [
"MIT"
] | permissive | wyaadarsh/LeetCode-Solutions | b5963e3427aa547d485d3a2cb24e6cedc72804fd | 3719f5cb059eefd66b83eb8ae990652f4b7fd124 | refs/heads/master | 2022-12-06T15:50:37.930987 | 2020-08-30T15:49:27 | 2020-08-30T15:49:27 | 291,811,790 | 0 | 1 | MIT | 2020-08-31T19:57:35 | 2020-08-31T19:57:34 | null | UTF-8 | Python | false | false | 996 | py | class Solution:
    from typing import List  # added so the annotations resolve when run outside LeetCode's harness
    def maxSumOfThreeSubarrays(self, nums: List[int], k: int) -> List[int]:
idx1, idx2, idx3 = 0, k, k * 2
s1, s2, s3 = sum(nums[idx1:idx1 + k]), sum(nums[idx2:idx2 + k]), sum(nums[idx3:idx3 + k])
bests1, bests12, bests123 = s1, s1 + s2, s1 + s2 + s3
besti1 = 0
besti12 = [idx1, idx2]
besti123 = [idx1, idx2, idx3]
n = len(nums)
while idx3 + k < n:
s1 += nums[idx1 + k] - nums[idx1]
s2 += nums[idx2 + k] - nums[idx2]
s3 += nums[idx3 + k] - nums[idx3]
if s1 > bests1:
bests1 = s1
besti1 = idx1 + 1
if bests1 + s2 > bests12:
bests12 = bests1 + s2
besti12 = [besti1, idx2 + 1]
if bests12 + s3 > bests123:
bests123 = bests12 + s3
besti123 = besti12 + [idx3 + 1]
idx1 += 1
idx2 += 1
idx3 += 1
return besti123
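if __name__ == '__main__':
    # Illustrative check using the example from the problem statement: the
    # optimal length-2 windows are [1,2], [2,6], [7,5], starting at 0, 3, 5.
    print(Solution().maxSumOfThreeSubarrays([1, 2, 1, 2, 6, 7, 5, 1], 2))  # -> [0, 3, 5]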
| [
"[email protected]"
] | |
94458562cd7bf5f91df8d4257fc676713c9cdb93 | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Games/Python wow/tests/models/spells/test_buff_schema.py | 86c88aee46e4d4ca161e2602ae7dee3c807b81df | [] | no_license | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:e63d9f081f022098267a17e7874b4d2fd1dcffedf91d27fcc4a3820700084f41
size 1940
| [
"[email protected]"
] | |
6c01e0c87f1231c1a696e1b6cbdbd868b04e2a06 | 011a750fae8ade67f726a9749e05cc4afb8e360d | /text_file_analyser/test/main.py | 141c4ce62d4e23028bccb531ba11cf934d5e6550 | [] | no_license | MartinCarufel/PycharmProjects | c7e50b66a24d4a216b7a217192fcd446f5324d9f | aaa6c95b3e2e6525586fb6a03d1c9d484065899b | refs/heads/master | 2023-07-05T22:40:38.650892 | 2023-06-26T13:55:13 | 2023-06-26T13:55:13 | 150,859,642 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 648 | py | from text_file_analyser import Text_File_analyser
import pandas as pd
def main():
tf = Text_File_analyser("usb_stress_testIO-04-000979.log")
    csv_data = tf.data_spliter(r"\| Date :", " +")  # raw string avoids the invalid "\|" escape warning
df = pd.DataFrame(csv_data)
df = df[[3, 4]]
print(df)
def main2():
# Creating the dataframe
df = pd.DataFrame({"A": [12, 4, 5, None, 1],
"B": [7, 2, 54, 3, None],
"C": [20, 16, 11, 3, 8],
"D": [14, 3, None, 2, 6]})
print(df)
    # skip the NaN values while finding the maximum
print(df.max(axis=1, skipna=True))
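    # For the frame above this prints (NaNs are skipped, so every row has a max):
    #   0    20.0
    #   1    16.0
    #   2    54.0
    #   3     3.0
    #   4     8.0
    #   dtype: float64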
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
f0438fb43a911298fba48e71620bc3f5ff15ba8b | c16ab2c9c675bdbca43a4603a14106790d9e7da2 | /lib/gae_mini_profiler/appstats_profiler.py | d1e30d4d9a70e975ccedc29a5068b4e0987559b9 | [
"MIT"
] | permissive | y2bishop2y/microengine | 7e7e8b5852188fcceb9559f9d8d339bf6257a0d7 | 2322fdce0718a06bdc0332682e8ef9e393f8e7af | refs/heads/master | 2021-01-01T18:38:17.250888 | 2013-03-20T08:21:53 | 2013-03-20T08:22:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,136 | py | """RPC profiler that uses appstats to track, time, and log all RPC events.
This is just a simple wrapper for appstats with result formatting. See
https://developers.google.com/appengine/docs/python/tools/appstats for more.
"""
import logging
from pprint import pformat
from google.appengine.ext.appstats import recording
from lib.gae_mini_profiler import util, cleanup, unformatter
class Profile(object):
"""Profiler that wraps appstats for programmatic access and reporting."""
def __init__(self):
# Configure AppStats output, keeping a high level of request
# content so we can detect dupe RPCs more accurately
recording.config.MAX_REPR = 750
# Each request has its own internal appstats recorder
self.recorder = None
def results(self):
"""Return appstats results in a dictionary for template context."""
if not self.recorder:
# If appstats fails to initialize for any reason, return an empty
# set of results.
logging.warn("Missing recorder for appstats profiler.")
return {
"calls": [],
"total_time": 0,
}
total_call_count = 0
total_time = 0
calls = []
service_totals_dict = {}
likely_dupes = False
end_offset_last = 0
requests_set = set()
appstats_key = long(self.recorder.start_timestamp * 1000)
for trace in self.recorder.traces:
total_call_count += 1
total_time += trace.duration_milliseconds()
# Don't accumulate total RPC time for traces that overlap asynchronously
if trace.start_offset_milliseconds() < end_offset_last:
total_time -= (end_offset_last - trace.start_offset_milliseconds())
end_offset_last = trace.start_offset_milliseconds() + trace.duration_milliseconds()
service_prefix = trace.service_call_name()
if "." in service_prefix:
service_prefix = service_prefix[:service_prefix.find(".")]
if service_prefix not in service_totals_dict:
service_totals_dict[service_prefix] = {
"total_call_count": 0,
"total_time": 0,
"total_misses": 0,
}
service_totals_dict[service_prefix]["total_call_count"] += 1
service_totals_dict[service_prefix]["total_time"] += trace.duration_milliseconds()
stack_frames_desc = []
for frame in trace.call_stack_list():
stack_frames_desc.append("%s:%s %s" %
(util.short_rpc_file_fmt(frame.class_or_file_name()),
frame.line_number(),
frame.function_name()))
request = trace.request_data_summary()
response = trace.response_data_summary()
likely_dupe = request in requests_set
likely_dupes = likely_dupes or likely_dupe
requests_set.add(request)
request_short = request_pretty = None
response_short = response_pretty = None
miss = 0
try:
request_object = unformatter.unformat(request)
response_object = unformatter.unformat(response)
request_short, response_short, miss = cleanup.cleanup(request_object, response_object)
request_pretty = pformat(request_object)
response_pretty = pformat(response_object)
except Exception, e:
logging.warning("Prettifying RPC calls failed.\n%s\nRequest: %s\nResponse: %s",
e, request, response, exc_info=True)
service_totals_dict[service_prefix]["total_misses"] += miss
calls.append({
"service": trace.service_call_name(),
"start_offset": util.milliseconds_fmt(trace.start_offset_milliseconds()),
"total_time": util.milliseconds_fmt(trace.duration_milliseconds()),
"request": request_pretty or request,
"response": response_pretty or response,
"request_short": request_short or cleanup.truncate(request),
"response_short": response_short or cleanup.truncate(response),
"stack_frames_desc": stack_frames_desc,
"likely_dupe": likely_dupe,
})
service_totals = []
for service_prefix in service_totals_dict:
service_totals.append({
"service_prefix": service_prefix,
"total_call_count": service_totals_dict[service_prefix]["total_call_count"],
"total_misses": service_totals_dict[service_prefix]["total_misses"],
"total_time": util.milliseconds_fmt(service_totals_dict[service_prefix]["total_time"]),
})
service_totals = sorted(service_totals, reverse=True, key=lambda service_total: float(service_total["total_time"]))
return {
"total_call_count": total_call_count,
"total_time": util.milliseconds_fmt(total_time),
"calls": calls,
"service_totals": service_totals,
"likely_dupes": likely_dupes,
"appstats_key": appstats_key,
}
def wrap(self, app):
"""Wrap and return a WSGI application with appstats recording enabled.
Args:
app: existing WSGI application to be wrapped
Returns:
new WSGI application that will run the original app with appstats
enabled.
"""
def wrapped_appstats_app(environ, start_response):
# Use this wrapper to grab the app stats recorder for RequestStats.save()
if recording.recorder_proxy.has_recorder_for_current_request():
self.recorder = recording.recorder_proxy.get_for_current_request()
return app(environ, start_response)
return recording.appstats_wsgi_middleware(wrapped_appstats_app)
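# Usage sketch (illustrative only; `my_wsgi_app` is an assumed placeholder):
#
#     profile = Profile()
#     app = profile.wrap(my_wsgi_app)  # serve this app so appstats records RPCs
#     ...
#     context = profile.results()     # dict with "calls", "service_totals",
#                                     # "total_time", "likely_dupes", etc.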
| [
"[email protected]"
] |