Dataset columns (type and observed range):

| column | dtype | observed range / cardinality |
|---|---|---|
| blob_id | string | length 40 |
| directory_id | string | length 40 |
| path | string | length 3 to 616 |
| content_id | string | length 40 |
| detected_licenses | list | length 0 to 112 |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 |
| revision_id | string | length 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k to 681M, nullable (⌀) |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable (⌀) |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable (⌀) |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | length 3 to 10.2M |
| authors | list | length 1 |
| author_id | string | length 1 to 132 |
b597e2e13e7f65f2a8bed1b72a651e20fdcb6a35
|
81407be1385564308db7193634a2bb050b4f822e
|
/testdemo/pytestdemo/ssh-copy-id.py
|
d8024561b2c7733acd24f0b2f173d39c8f5e8a1c
|
[
"MIT"
] |
permissive
|
gottaegbert/penter
|
6db4f7d82c143af1209b4259ba32145aba7d6bd3
|
8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d
|
refs/heads/master
| 2022-12-30T14:51:45.132819 | 2020-10-09T05:33:23 | 2020-10-09T05:33:23 | 305,266,398 | 0 | 0 |
MIT
| 2020-10-19T04:56:02 | 2020-10-19T04:53:05 | null |
UTF-8
|
Python
| false | false | 1,620 |
py
|
"""ssh-copy-id for Windows.
Example usage: python ssh-copy-id.py ceilfors@my-remote-machine
This script is dependent on msysgit by default as it requires scp and ssh.
For convenience you can also try cmder, which comes with msysgit: http://bliker.github.io/cmder/.
"""
# python ssh-copy-id.py [email protected]
import argparse, os
from subprocess import call
def winToPosix(win):
"""Converts the specified windows path as a POSIX path in msysgit.
Example:
win: C:\\home\\user
posix: /c/home/user
"""
posix = win.replace('\\', '/')
return "/" + posix.replace(':', '', 1)
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--identity_file", help="identity file, default to ~\\.ssh\\idrsa.pub", default=os.environ['HOME']+"\\.ssh\\id_rsa.pub")
parser.add_argument("-d", "--dry", help="run in the dry run mode and display the running commands.", action="store_true")
parser.add_argument("remote", metavar="user@machine")
args = parser.parse_args()
local_key = winToPosix(args.identity_file)
remote_key = "~/temp_id_rsa.pub"
# Copy the public key over to the remote temporarily
scp_command = "scp {} {}:{}".format(local_key, args.remote, remote_key)
print(scp_command)
if not args.dry:
call(scp_command)
# Append the temporary copied public key to authorized_key file and then remove the temporary public key
ssh_command = ("ssh {} "
"mkdir ~/.ssh;"
"touch ~/.ssh/authorized_keys;"
"cat {} >> ~/.ssh/authorized_keys;"
"rm {};").format(args.remote, remote_key, remote_key)
print(ssh_command)
if not args.dry:
call(ssh_command)
|
[
"[email protected]"
] | |
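The script above passes each command to subprocess.call as a single formatted string, which is fragile if the key path contains spaces. A minimal sketch of the same two-step flow with list-form arguments (the paths and remote host are placeholders, and the remote command simply mirrors the one built above):

import subprocess

local_key = "/c/Users/me/.ssh/id_rsa.pub"  # placeholder path
remote = "user@machine"                    # placeholder remote host
remote_key = "~/temp_id_rsa.pub"

# Copy the key over, then append it to authorized_keys and clean up.
subprocess.call(["scp", local_key, "{}:{}".format(remote, remote_key)])
subprocess.call(["ssh", remote,
                 "mkdir -p ~/.ssh && cat {0} >> ~/.ssh/authorized_keys && rm {0}".format(remote_key)])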
ad092d278eb7c28c208e8cd80fb0a6b2851e33a9
|
746e0181955176741385091fe795e360a5f8fa3f
|
/yushubook/app/web/auth.py
|
98a647abd4d89c6f9643a1d3edea07eb8d377e93
|
[] |
no_license
|
zhengpanone/flask_web
|
c26547483219011d9f1051d383f0d9a0a72d48df
|
87d324ffee503aaa794c415ba6e16785dbf84d99
|
refs/heads/master
| 2022-07-22T09:07:47.939938 | 2019-12-18T07:14:21 | 2019-12-18T07:14:21 | 206,232,120 | 0 | 0 | null | 2022-06-28T14:44:55 | 2019-09-04T04:35:50 |
JavaScript
|
UTF-8
|
Python
| false | false | 2,718 |
py
|
from flask import render_template, request, redirect, url_for, flash
from flask_login import login_user, logout_user
from app.forms.auth import RegisterForm, LoginForm, EmailForm, ResetPasswordForm
from app.models.base import db
from app.models.user import User
from . import web
__author__ = 'zhengpanone'
@web.route('/register', methods=['GET', 'POST'])
def register():
form = RegisterForm(request.form)
if request.method == 'POST' and form.validate():
with db.auto_commit():
user = User()
user.set_attrs(form.data)
db.session.add(user)
# db.session.commit()
return redirect(url_for('web.login'))
return render_template('auth/register.html', form=form)
@web.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm(request.form)
if request.method == 'POST' and form.validate():
user = User.query.filter_by(email=form.email.data).first()
if user and user.check_password(form.password.data):
login_user(user, remember=True)
next = request.args.get('next')
if not next or not next.startswith('/'):
next = url_for('web.index')
return redirect(next)
else:
flash('Account does not exist or password is incorrect')
return render_template('auth/login.html', form=form)
@web.route('/reset/password', methods=['GET', 'POST'])
def forget_password_request():
form = EmailForm(request.form)
if request.method == 'POST':
if form.validate():
account_email = form.email.data
user = User.query.filter_by(email=account_email).first_or_404()
from app.lib.email import send_mail
send_mail(form.email.data, 'Reset your password', 'email/reset_password.html', user=user, token=user.generate_token())
flash('An email has been sent to ' + account_email + ', please check your inbox')
# return redirect(url_for('web.login'))
return render_template('auth/forget_password_request.html', form=form)
@web.route('/reset/password/<token>', methods=['GET', 'POST'])
def forget_password(token):
form = ResetPasswordForm(request.form)
if request.method == 'POST' and form.validate():
success = User.reset_password(token, form.password1.data)
if success:
flash('Password reset succeeded')
return redirect(url_for('web.login'))
else:
flash('Password reset failed')
return render_template('auth/forget_password.html', form=form)
@web.route('/change/password', methods=['GET', 'POST'])
def change_password():
pass
@web.route('/logout')
def logout():
logout_user()
return redirect(url_for('web.index'))
|
[
"[email protected]"
] | |
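register() above wraps its writes in db.auto_commit(), a context manager defined elsewhere in app.models.base. A common implementation of that pattern for Flask-SQLAlchemy (an assumption about the project's base module, not its actual code):

from contextlib import contextmanager
from flask_sqlalchemy import SQLAlchemy

class AutoCommitSQLAlchemy(SQLAlchemy):
    """Hypothetical subclass standing in for app.models.base.db."""
    @contextmanager
    def auto_commit(self):
        try:
            yield
            self.session.commit()    # commit once the block succeeds
        except Exception:
            self.session.rollback()  # roll back partial work on error
            raise

db = AutoCommitSQLAlchemy()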
02646855dd3fe965ae1abbf7f8ac90f9fb74127b
|
adf2e802c7563e4b7b7cc279a54deceb6a803098
|
/openapi_client/models/pdf_save_as_png_response.py
|
d67e711a78498d5c15d094309115b2d000f36c8c
|
[] |
no_license
|
Orpalis/passportpdfsdk-python
|
2466f7568becf2bd386bd9e4e00b4e3c1e642727
|
257d305ca9e6508d44fe521a1e4721f1835e8d0e
|
refs/heads/master
| 2022-04-24T15:58:21.257112 | 2020-04-27T11:09:37 | 2020-04-27T11:09:37 | 254,665,250 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,129 |
py
|
# coding: utf-8
"""
PassportPDF API
Another brick in the cloud # noqa: E501
The version of the OpenAPI document: 1.0.1
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from openapi_client.configuration import Configuration
class PdfSaveAsPNGResponse(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'error': 'Error',
'remaining_tokens': 'int',
'page_images': 'list[PageImage]'
}
attribute_map = {
'error': 'Error',
'remaining_tokens': 'RemainingTokens',
'page_images': 'PageImages'
}
def __init__(self, error=None, remaining_tokens=None, page_images=None, local_vars_configuration=None): # noqa: E501
"""PdfSaveAsPNGResponse - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._error = None
self._remaining_tokens = None
self._page_images = None
self.discriminator = None
if error is not None:
self.error = error
if remaining_tokens is not None:
self.remaining_tokens = remaining_tokens
self.page_images = page_images
@property
def error(self):
"""Gets the error of this PdfSaveAsPNGResponse. # noqa: E501
:return: The error of this PdfSaveAsPNGResponse. # noqa: E501
:rtype: Error
"""
return self._error
@error.setter
def error(self, error):
"""Sets the error of this PdfSaveAsPNGResponse.
:param error: The error of this PdfSaveAsPNGResponse. # noqa: E501
:type: Error
"""
self._error = error
@property
def remaining_tokens(self):
"""Gets the remaining_tokens of this PdfSaveAsPNGResponse. # noqa: E501
Specifies the number of remaining tokens. # noqa: E501
:return: The remaining_tokens of this PdfSaveAsPNGResponse. # noqa: E501
:rtype: int
"""
return self._remaining_tokens
@remaining_tokens.setter
def remaining_tokens(self, remaining_tokens):
"""Sets the remaining_tokens of this PdfSaveAsPNGResponse.
Specifies the number of remaining tokens. # noqa: E501
:param remaining_tokens: The remaining_tokens of this PdfSaveAsPNGResponse. # noqa: E501
:type: int
"""
self._remaining_tokens = remaining_tokens
@property
def page_images(self):
"""Gets the page_images of this PdfSaveAsPNGResponse. # noqa: E501
The page(s) of the PDF saved as PNG image(s). # noqa: E501
:return: The page_images of this PdfSaveAsPNGResponse. # noqa: E501
:rtype: list[PageImage]
"""
return self._page_images
@page_images.setter
def page_images(self, page_images):
"""Sets the page_images of this PdfSaveAsPNGResponse.
The page(s) of the PDF saved as PNG image(s). # noqa: E501
:param page_images: The page_images of this PdfSaveAsPNGResponse. # noqa: E501
:type: list[PageImage]
"""
self._page_images = page_images
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, PdfSaveAsPNGResponse):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, PdfSaveAsPNGResponse):
return True
return self.to_dict() != other.to_dict()
|
[
"[email protected]"
] | |
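A short usage sketch for the generated model above (values are placeholders; leaving error unset is allowed because all fields are optional, assuming the class above is importable):

resp = PdfSaveAsPNGResponse(remaining_tokens=42, page_images=[])
print(resp.remaining_tokens)  # 42
print(resp.to_dict())  # {'error': None, 'remaining_tokens': 42, 'page_images': []}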
b03483024f8dbee4a47691a1248905822c5c67f2
|
e3765def4a180f1d51eaef3884448b0bb9be2cd3
|
/example/13.3.4_reconstruct_create_fleet/game_functions.py
|
01b601c1630e2d7ae6580554ff2caa2de174d3e5
|
[] |
no_license
|
spearfish/python-crash-course
|
cbeb254efdf0c1ab37d8a7d2fa0409194f19fa2b
|
66bc42d41395cc365e066a597380a96d3282d30b
|
refs/heads/master
| 2023-07-14T11:04:49.276764 | 2021-08-20T10:02:27 | 2021-08-20T10:02:27 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,109 |
py
|
#!/usr/bin/env python3
import sys
import pygame
from bullet import Bullet
from alien import Alien
# check the events, and modify ship and bullets according to settings
def check_events(ship, bullets, settings) :
'''check for keyboard input and mouse move'''
# pygame's event class has a method get.
for event in pygame.event.get() :
if event.type == pygame.QUIT :
sys.exit()
elif event.type == pygame.KEYDOWN :
check_key_down_events(event, settings, ship, bullets)
elif event.type == pygame.KEYUP :
check_key_up_events(event, ship)
def check_key_down_events(event, settings, ship, bullets) :
''' handles key down events. '''
if event.key == pygame.K_RIGHT :
ship.moving_right = True
elif event.key == pygame.K_LEFT :
ship.moving_left = True
elif event.key == pygame.K_SPACE :
fire_bullet(settings, bullets, ship)
elif event.key == pygame.K_q :
sys.exit()
def fire_bullet(settings, bullets, ship) :
if len(bullets) < settings.bullets_allowed :
new_bullet = Bullet(settings, ship)
bullets.add(new_bullet)
def check_key_up_events(event, ship):
''' handles key up events. '''
if event.key == pygame.K_RIGHT :
ship.moving_right = False
elif event.key == pygame.K_LEFT :
ship.moving_left = False
def update_screen(settings, screen, ship, bullets, aliens) :
''' redraw the screen '''
# setting the color for the new screen.
screen.fill(settings.bg_color)
# draw the new screen.
ship.blitme()
# draw the bullets.
# Group.sprites() method gives us a list of bullet objects (sprites).
for bullet in bullets.sprites() :
# update the screen with the fleet of bullets.
bullet.draw_bullet(screen)
# draw the alien
aliens.draw(screen)
# show the screen
pygame.display.flip()
def update_bullets(bullets) :
''' remove bullets that have run off the top of the screen '''
# We must not remove elements from the group while iterating over it,
# as that would corrupt the iteration; iterate over a copy instead.
for bullet in bullets.copy() :
if bullet.rect.bottom < 0 :
# remove the element from the sprite group
bullets.remove(bullet)
def create_fleet(settings, screen, aliens) :
''' creates a fleet on the screen '''
# calculate number of aliens in a row
alien = Alien(settings, screen)
number_aliens_x = get_number_aliens_x(settings, alien.rect.width)
alien_height = alien.rect.height
for index in range(number_aliens_x) :
create_alien(settings, screen, aliens, index)
def get_number_aliens_x(settings, alien_width) :
space_x = settings.screen_width - alien_width * 2
number_aliens_x = space_x // ( 2 * alien_width )
return number_aliens_x
def create_alien(settings, screen, aliens, index) :
alien = Alien(settings, screen)
alien_width = alien.rect.width
alien.x = alien_width + 2 * alien_width * index
alien.rect.x = alien.x
aliens.add(alien)
|
[
"[email protected]"
] | |
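The spacing math in get_number_aliens_x and create_alien is easy to verify by hand; a worked example with assumed settings values:

# Worked example of the fleet spacing above (screen/alien sizes are assumptions).
screen_width, alien_width = 1200, 60
space_x = screen_width - alien_width * 2        # 1080: one alien-width margin per side
number_aliens_x = space_x // (2 * alien_width)  # 9: each alien needs its width plus a gap
xs = [alien_width + 2 * alien_width * i for i in range(number_aliens_x)]
print(number_aliens_x, xs)  # 9 [60, 180, 300, 420, 540, 660, 780, 900, 1020]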
8b282c325b4e4b9b70b53d8a2da4dc881e6a881d
|
3562fa51db47b1b1e97785191f0c04644d47c283
|
/python/plat3/1395.py
|
e2d0dff2789f4aa3d406947ea475b1da19862513
|
[] |
no_license
|
seono/algorithm
|
c74181d564525e3a0214824c4a619c51cd52a042
|
78a252b29290eaa1ea076d76cd83e5dbbb7d8d89
|
refs/heads/master
| 2021-07-13T07:13:41.523888 | 2021-04-24T14:05:00 | 2021-04-24T14:05:00 | 244,609,427 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,182 |
py
|
import sys
input = sys.stdin.readline
output = sys.stdout.write
N, M = map(int,input().split())
tree = [0]*(4*N)
lazy = [0]*(4*N)
def propagate(node,now_s,now_e):
if lazy[node]:
tree[node]=(now_e-now_s+1)-tree[node]
if now_s!=now_e:
lazy[node<<1]^=1
lazy[node<<1|1]^=1
lazy[node]=0
def update(now_s,now_e,s,e,node):
propagate(node,now_s,now_e)
if now_s>e or now_e<s:return
if s<=now_s and now_e<=e:
tree[node]=(now_e-now_s+1)-tree[node]
if now_s!=now_e:
lazy[node<<1]^=1
lazy[node<<1|1]^=1
return
mid = (now_e+now_s)>>1
update(now_s,mid,s,e,node<<1)
update(mid+1,now_e,s,e,node<<1|1)
tree[node]=tree[node<<1] + tree[node<<1|1]
def query(now_s,now_e,s,e,node):
propagate(node,now_s,now_e)
if e<now_s or s>now_e:return 0
if s<=now_s and now_e<=e:return tree[node]
mid = (now_e+now_s)>>1
return query(now_s,mid,s,e,node<<1)+query(mid+1,now_e,s,e,node<<1|1)
for _ in range(M):
cmd, i, j = map(int,input().split())
if cmd==0:
update(1,N,i,j,1)
else:
output("%d\n"%query(1,N,i,j,1))
#print(tree[:2*N])
|
[
"[email protected]"
] | |
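The code above is a lazy-propagation segment tree over N switches: command 0 i j flips the range, command 1 i j counts the switches that are on. A brute-force cross-check of those semantics (the command format is assumed from the main loop):

def brute(n, ops):
    """Naive reference for the flip/count semantics above."""
    state = [0] * (n + 1)  # 1-indexed switches, all off
    out = []
    for cmd, i, j in ops:
        if cmd == 0:  # flip switches i..j
            for k in range(i, j + 1):
                state[k] ^= 1
        else:         # count switches that are on in i..j
            out.append(sum(state[i:j + 1]))
    return out

print(brute(4, [(0, 1, 2), (1, 1, 4), (0, 2, 4), (1, 1, 4)]))  # [2, 3]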
7a3e5d086048d2b3fd47910b56f0d85fcb4d8453
|
6ee533cb075c80663e6ce15e049046aeaf54f880
|
/mp/migrations/0001_initial.py
|
e28828b5ad2531f412626acc293e3544519a282e
|
[] |
no_license
|
ARDaVinci/arttnet
|
21afd6c00a9ec27aae3bd12fb2cf18126226d112
|
fbc762852839960f2382ef9a5662d47419d943e0
|
refs/heads/master
| 2023-04-01T17:09:11.628441 | 2021-04-23T19:06:13 | 2021-04-23T19:06:13 | 360,658,769 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,117 |
py
|
# Generated by Django 3.2 on 2021-04-22 14:57
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Messageprive',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('creation_date', models.DateTimeField(auto_now=True)),
('title', models.CharField(max_length=500)),
('text', models.TextField(max_length=5000)),
('all_answers', models.IntegerField(default=0)),
('receiver', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='receiver', to=settings.AUTH_USER_MODEL)),
('sender', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sender', to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"[email protected]"
] | |
e0889544104e6c7c8d0da19b000e6e4f617b4ccf
|
5f8e7e119524319c14366d7ef56a6a1ff9fd971b
|
/django/django_intro/firstDjangoProject/firstDjangoProject/urls.py
|
7f2c10295a53f3a890c89b349ad8adbd863b58c9
|
[] |
no_license
|
everhartC/Python
|
8df322212eb72a7ac243609020f4bdfd2de93615
|
4f385acbf734bdfa957d659685b10d0506a8d94a
|
refs/heads/main
| 2023-07-02T22:03:10.851211 | 2021-08-02T05:50:31 | 2021-08-02T05:50:31 | 329,521,228 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 774 |
py
|
"""firstDjangoProject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('', include('single_app.urls')),
]
|
[
"[email protected]"
] | |
273d2a79ff8672a69151a5d581e2a4c1059ac710
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/benchmark/startCirq668.py
|
d6a190b3715fe3cd9362da7477ea50eaef3b7ef7
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,871 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=4
# total number=12
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
c = cirq.Circuit() # circuit begin
c.append(cirq.H.on(input_qubit[0])) # number=1
c.append(cirq.H.on(input_qubit[1])) # number=9
c.append(cirq.H.on(input_qubit[1])) # number=2
c.append(cirq.H.on(input_qubit[2])) # number=8
c.append(cirq.H.on(input_qubit[2])) # number=3
c.append(cirq.X.on(input_qubit[2])) # number=7
c.append(cirq.H.on(input_qubit[3])) # number=4
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=6
c.append(cirq.CNOT.on(input_qubit[2],input_qubit[0])) # number=10
c.append(cirq.CNOT.on(input_qubit[2],input_qubit[0])) # number=11
# circuit end
c.append(cirq.measure(*input_qubit, key='result'))
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
if __name__ == '__main__':
qubit_count = 4
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
circuit_sample_count =2000
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq668.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close()
|
[
"[email protected]"
] | |
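cirq.google was split out of the core package (it ships separately as cirq-google in cirq 1.0+), so the import above pins this script to an old cirq. A minimal sketch of the same build/simulate/histogram flow on a current cirq, skipping the Sycamore optimization pass:

import cirq

qubits = [cirq.GridQubit(i, 0) for i in range(4)]
circuit = cirq.Circuit([cirq.H(qubits[0]),
                        cirq.measure(*qubits, key='result')])
result = cirq.Simulator().run(circuit, repetitions=100)
print(result.histogram(key='result'))  # Counter of measured bitstrings as ints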
adc6788eb8ddf274faef776b14af7bd7bc8f8565
|
fdbcc456e953b29c96b5c66b96a3f00e7605107c
|
/stats/generic.py
|
5b17449424b8e522e2c56e7ca3f7cdbeac42e940
|
[] |
no_license
|
paulsavala/student-monitoring-backend
|
a0a86f59eda063fffa538974e9cda2636b899da4
|
df729e647adc9ad3d31d7ece30e0488dbe0f035a
|
refs/heads/master
| 2022-12-03T22:42:28.354093 | 2020-08-31T01:22:09 | 2020-08-31T01:22:09 | 257,088,344 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 478 |
py
|
import numpy as np
from scipy.stats import beta
class GenericDistribution:
def __init__(self, **kwargs):
pass
def fit(self, assignment_collection):
raise NotImplementedError
def conf_int(self, assignment_collection, conf_level=0.05):
raise NotImplementedError
def pdf(self, assignment_collection):
raise NotImplementedError
def p_value(self, x, assignment_collection, one_tailed=False):
raise NotImplementedError
|
[
"[email protected]"
] | |
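The module imports scipy.stats.beta but defines only the interface; a concrete subclass could look like the sketch below. This rests on an assumption about how the backend uses it, namely that assignment_collection is a plain sequence of grades scaled to (0, 1), and that GenericDistribution above is importable.

import numpy as np
from scipy.stats import beta

class BetaDistribution(GenericDistribution):
    """Hypothetical concrete distribution, not part of the original backend."""
    def fit(self, assignment_collection):
        scores = np.asarray(assignment_collection, dtype=float)
        # Fit a Beta with its support fixed to (0, 1).
        self.a, self.b, _, _ = beta.fit(scores, floc=0, fscale=1)
        return self

    def conf_int(self, assignment_collection, conf_level=0.05):
        self.fit(assignment_collection)
        # Central interval covering (1 - conf_level) of the mass.
        return beta.interval(1 - conf_level, self.a, self.b)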
ae30419213eabe7a8d336d33dce31950f52c7c41
|
424c73412ccebe09198bf91c3601a61ad2242932
|
/azure-mgmt-containerservice/azure/mgmt/containerservice/container_service_client.py
|
5f3e1d60bcaf9f0ce8a77a7ff3916612bd372bc2
|
[
"MIT"
] |
permissive
|
arifulmondal/azure-sdk-for-python
|
25c7ad16ba572c2e2bee60e117258c556ea5bdc3
|
38b3ce0fe3fdd6dd1e607627c611b8a9c97c2372
|
refs/heads/master
| 2021-03-24T13:50:32.389409 | 2017-10-23T18:38:49 | 2017-10-23T18:38:49 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,483 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import ServiceClient
from msrest import Serializer, Deserializer
from msrestazure import AzureConfiguration
from .version import VERSION
from .operations.container_services_operations import ContainerServicesOperations
from . import models
class ContainerServiceClientConfiguration(AzureConfiguration):
"""Configuration for ContainerServiceClient
Note that all parameters used to create this instance are saved as instance
attributes.
:param credentials: Credentials needed for the client to connect to Azure.
:type credentials: :mod:`A msrestazure Credentials
object<msrestazure.azure_active_directory>`
:param subscription_id: Subscription credentials which uniquely identify
Microsoft Azure subscription. The subscription ID forms part of the URI
for every service call.
:type subscription_id: str
:param str base_url: Service URL
"""
def __init__(
self, credentials, subscription_id, base_url=None):
if credentials is None:
raise ValueError("Parameter 'credentials' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
if not base_url:
base_url = 'https://management.azure.com'
super(ContainerServiceClientConfiguration, self).__init__(base_url)
self.add_user_agent('containerserviceclient/{}'.format(VERSION))
self.add_user_agent('Azure-SDK-For-Python')
self.credentials = credentials
self.subscription_id = subscription_id
class ContainerServiceClient(object):
"""The Container Service Client.
:ivar config: Configuration for client.
:vartype config: ContainerServiceClientConfiguration
:ivar container_services: ContainerServices operations
:vartype container_services: azure.mgmt.containerservice.operations.ContainerServicesOperations
:param credentials: Credentials needed for the client to connect to Azure.
:type credentials: :mod:`A msrestazure Credentials
object<msrestazure.azure_active_directory>`
:param subscription_id: Subscription credentials which uniquely identify
Microsoft Azure subscription. The subscription ID forms part of the URI
for every service call.
:type subscription_id: str
:param str base_url: Service URL
"""
def __init__(
self, credentials, subscription_id, base_url=None):
self.config = ContainerServiceClientConfiguration(credentials, subscription_id, base_url)
self._client = ServiceClient(self.config.credentials, self.config)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self.api_version = '2017-01-31'
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self.container_services = ContainerServicesOperations(
self._client, self.config, self._serialize, self._deserialize)
|
[
"[email protected]"
] | |
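A usage sketch for the client above; the credential class comes from the msrestazure-era azure.common package this SDK generation targets, every ID is a placeholder, and the list() operation is an assumption about ContainerServicesOperations:

from azure.common.credentials import ServicePrincipalCredentials

credentials = ServicePrincipalCredentials(
    client_id='xxxx', secret='xxxx', tenant='xxxx')  # placeholders
client = ContainerServiceClient(credentials, subscription_id='xxxx')
for cs in client.container_services.list():  # assumed operation name
    print(cs.name)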
50e6cb70ed23e62e0f2206d5f819a6455f9db8a0
|
b19c9fe62eaa309851dc11f6fd7a05bda463fb58
|
/bigfish/apps/areas/__init__.py
|
3dc4ee516b7d709d71001cd9eaf7f76c7fcbacf8
|
[] |
no_license
|
hyu9999/bigfish
|
3ff3b025982e71bd6dd80f60ad6c70e735e98936
|
4189fdcacc20795a4778b53c9d47d6fdd3e71811
|
refs/heads/master
| 2022-07-08T13:55:12.908583 | 2019-03-22T09:36:12 | 2019-03-22T09:36:12 | 177,055,829 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 59 |
py
|
default_app_config = 'bigfish.apps.areas.apps.AreasConfig'
|
[
"[email protected]"
] | |
ef66115bb76808efd291616eb713cbc9d8cfdd66
|
41d9b92ef2a74a4ba05d27ffbe3beb87884c4ce7
|
/supervised_learning/0x01-classification/4-neuron.py
|
2e62079af446cbe189d8a81e2f081f500022e808
|
[] |
no_license
|
JosephK89/holbertonschool-machine_learning
|
3f96d886c61d8de99a23e4348fb045b9c930740e
|
aa5c500f7d8ebeec951f9ab5ec017cae64007c25
|
refs/heads/main
| 2023-08-14T18:42:53.481354 | 2021-10-10T19:53:40 | 2021-10-10T19:53:40 | 386,248,140 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,314 |
py
|
#!/usr/bin/env python3
"""neuron class module"""
import numpy as np
class Neuron:
"""Neuron class"""
def __init__(self, nx):
"""Class Initialization"""
if type(nx) != int:
raise TypeError("nx must be an integer")
if nx < 1:
raise ValueError("nx must be a positive integer")
self.__W = np.random.randn(1, nx)
self.__b = 0
self.__A = 0
@property
def W(self):
"""getter function for weights"""
return self.__W
@property
def b(self):
"""getter function for biases"""
return self.__b
@property
def A(self):
"""getter function for A"""
return self.__A
def forward_prop(self, X):
"""forward propagation function"""
Z = np.matmul(self.__W, X) + self.__b
self.__A = 1 / (1 + np.exp(-Z))
return self.__A
def cost(self, Y, A):
"""model cost function"""
cost_array = np.multiply(np.log(A), Y) + np.multiply((
1 - Y), np.log(1.0000001 - A))
cost = -np.sum(cost_array) / len(A[0])
return cost
def evaluate(self, X, Y):
"""evaluate neurons function"""
self.forward_prop(X)
cost = self.cost(Y, self.__A)
return (np.where(self.__A > 0.5, 1, 0), cost)
|
[
"[email protected]"
] | |
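A quick usage sketch for the class above, using the Holberton-style dynamic import for a module whose filename starts with a digit (shapes follow the code: W is (1, nx), X is (nx, m), Y is (1, m)):

import numpy as np

Neuron = __import__('4-neuron').Neuron  # filename taken from the path above

np.random.seed(0)
nx, m = 784, 5  # features per example, number of examples
X = np.random.randn(nx, m)
Y = np.random.randint(0, 2, (1, m))
n = Neuron(nx)
preds, cost = n.evaluate(X, Y)  # 0/1 predictions and logistic cost
print(preds.shape, cost)        # (1, 5) and a scalar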
76eb0dc5718888863c17c3c6aa25053e1371d456
|
1a6c2be5ff1a8364c97a1ede23c824b2579ecf79
|
/tfx/dsl/io/filesystem_registry_test.py
|
236551f774277bfe81963eaddb2ea0bfc465df50
|
[
"Apache-2.0"
] |
permissive
|
418sec/tfx
|
fa1a4690df2178e9c6bd24f97df0bbde7436df95
|
df1529c91e52d442443eca5968ff33cf0a38dffa
|
refs/heads/master
| 2023-04-18T12:25:38.098958 | 2021-04-28T16:11:00 | 2021-04-28T16:11:00 | 333,769,030 | 2 | 1 |
Apache-2.0
| 2021-04-28T16:11:01 | 2021-01-28T13:35:14 | null |
UTF-8
|
Python
| false | false | 5,808 |
py
|
# Lint as: python2, python3
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tfx.dsl.components.base.base_driver."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tfx.dsl.io import filesystem
from tfx.dsl.io import filesystem_registry
from tfx.dsl.io.plugins import local
from tfx.dsl.io.plugins import tensorflow_gfile
class FakeFilesystemA(filesystem.Filesystem):
pass
class FakeFilesystemB(filesystem.Filesystem):
pass
class FakeFilesystemC(filesystem.Filesystem):
pass
class FilesystemRegistryTest(tf.test.TestCase):
def testRegistry(self):
registry = filesystem_registry.FilesystemRegistry()
# Test exceptions properly raised when schemes not registered.
with self.assertRaisesRegexp(Exception, 'is not available for use'):
registry.get_filesystem_for_scheme('')
with self.assertRaisesRegexp(Exception, 'is not available for use'):
registry.get_filesystem_for_path('/tmp/my/file')
with self.assertRaisesRegexp(Exception, 'is not available for use'):
registry.get_filesystem_for_scheme('gs://')
with self.assertRaisesRegexp(Exception, 'is not available for use'):
registry.get_filesystem_for_path('gs://bucket/tmp/my/file')
with self.assertRaisesRegexp(Exception, 'is not available for use'):
registry.get_filesystem_for_scheme('s3://')
with self.assertRaisesRegexp(Exception, 'is not available for use'):
registry.get_filesystem_for_path('s3://bucket/tmp/my/file')
with self.assertRaisesRegexp(Exception, 'is not available for use'):
registry.get_filesystem_for_path('unknown://bucket/tmp/my/file')
# Test after local filesystem is registered.
registry.register(local.LocalFilesystem, 20)
self.assertIs(local.LocalFilesystem, registry.get_filesystem_for_scheme(''))
self.assertIs(local.LocalFilesystem,
registry.get_filesystem_for_path('/tmp/my/file'))
with self.assertRaisesRegexp(Exception, 'is not available for use'):
registry.get_filesystem_for_scheme('gs://')
with self.assertRaisesRegexp(Exception, 'is not available for use'):
registry.get_filesystem_for_path('gs://bucket/tmp/my/file')
with self.assertRaisesRegexp(Exception, 'is not available for use'):
registry.get_filesystem_for_path('unknown://bucket/tmp/my/file')
# Test after Tensorflow filesystems are registered with higher priority.
registry.register(
tensorflow_gfile.TensorflowFilesystem, 10, use_as_fallback=True)
self.assertIs(tensorflow_gfile.TensorflowFilesystem,
registry.get_filesystem_for_scheme(''))
self.assertIs(tensorflow_gfile.TensorflowFilesystem,
registry.get_filesystem_for_path('/tmp/my/file'))
self.assertIs(tensorflow_gfile.TensorflowFilesystem,
registry.get_filesystem_for_scheme('gs://'))
self.assertIs(tensorflow_gfile.TensorflowFilesystem,
registry.get_filesystem_for_path('gs://bucket/tmp/my/file'))
self.assertIs(tensorflow_gfile.TensorflowFilesystem,
registry.get_filesystem_for_scheme('s3://'))
self.assertIs(tensorflow_gfile.TensorflowFilesystem,
registry.get_filesystem_for_path('s3://bucket/tmp/my/file'))
self.assertIs(tensorflow_gfile.TensorflowFilesystem,
registry.get_filesystem_for_scheme('hdfs://'))
self.assertIs(tensorflow_gfile.TensorflowFilesystem,
registry.get_filesystem_for_path('hdfs://bucket/tmp/my/file'))
self.assertIs(tensorflow_gfile.TensorflowFilesystem,
registry.get_filesystem_for_path(
'unknown://bucket/tmp/my/file'))
# Test that fallback filesystems are correctly prioritized. `FilesystemA`
# should not be used as the fallback since it has lower priority than
# `TensorflowFilesystem`.
registry.register(FakeFilesystemA, 15, use_as_fallback=True)
self.assertIs(tensorflow_gfile.TensorflowFilesystem,
registry.get_filesystem_for_path(
'unknown://bucket/tmp/my/file'))
# A filesystem registered without `use_as_fallback=True` should not be used
# as a fallback.
registry.register(FakeFilesystemB, 5)
self.assertIs(tensorflow_gfile.TensorflowFilesystem,
registry.get_filesystem_for_path(
'unknown://bucket/tmp/my/file'))
# `FakeFilesystemC` is a fallback with higher priority than
# `TensorflowFilesystem` and so should be used as the fallback.
registry.register(FakeFilesystemC, 5, use_as_fallback=True)
self.assertIs(FakeFilesystemC,
registry.get_filesystem_for_path(
'unknown://bucket/tmp/my/file'))
# Test usage of byte paths.
self.assertIs(tensorflow_gfile.TensorflowFilesystem,
registry.get_filesystem_for_scheme(b'hdfs://'))
self.assertIs(
tensorflow_gfile.TensorflowFilesystem,
registry.get_filesystem_for_path(b'hdfs://bucket/tmp/my/file'))
with self.assertRaisesRegexp(ValueError, 'Invalid path type'):
registry.get_filesystem_for_path(123)
if __name__ == '__main__':
tf.test.main()
|
[
"[email protected]"
] | |
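The assertions above pin down the lookup rules: lower registration numbers win, and a later fallback replaces the current one only if it has higher priority. A toy sketch of those semantics as inferred from the test (not tfx's actual implementation; real plugins declare their schemes on the Filesystem class):

class ToyRegistry:
    """Scheme -> filesystem lookup with priorities and an optional fallback."""
    def __init__(self):
        self._by_scheme = {}   # scheme -> (priority, filesystem)
        self._fallback = None  # (priority, filesystem) or None

    def register(self, fs, priority, schemes, use_as_fallback=False):
        for scheme in schemes:
            current = self._by_scheme.get(scheme)
            if current is None or priority < current[0]:  # lower number wins
                self._by_scheme[scheme] = (priority, fs)
        if use_as_fallback and (self._fallback is None
                                or priority < self._fallback[0]):
            self._fallback = (priority, fs)

    def get_filesystem_for_scheme(self, scheme):
        hit = self._by_scheme.get(scheme, self._fallback)
        if hit is None:
            raise Exception('scheme %r is not available for use' % scheme)
        return hit[1]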
a1ec748cd25073683f73de54d482230285066f6e
|
3d2939ae9ce30b15c1c3cd18bb7bc1db655863fe
|
/openturns/1.8/user_manual/_generated/openturns-Multinomial-1.py
|
ed23bba47e9242992b3d6844b0dc0765204c63fc
|
[] |
no_license
|
ThibaultDelage/openturns.github.io
|
07c9d6c98118a7695c35192a59814c23a71cb861
|
726a8f9ae97dc27d78a822f4d46976af56691802
|
refs/heads/master
| 2020-05-07T14:06:08.368744 | 2019-04-08T14:05:56 | 2019-04-08T14:05:56 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,484 |
py
|
import openturns as ot
from matplotlib import pyplot as plt
from openturns.viewer import View
if (ot.Multinomial().__class__.__name__=='ComposedDistribution'):
correlation = ot.CorrelationMatrix(2)
correlation[1, 0] = 0.25
aCopula = ot.NormalCopula(correlation)
marginals = [ot.Normal(1.0, 2.0), ot.Normal(2.0, 3.0)]
distribution = ot.ComposedDistribution(marginals, aCopula)
elif (ot.Multinomial().__class__.__name__=='CumulativeDistributionNetwork'):
distribution = ot.CumulativeDistributionNetwork([ot.Normal(2),ot.Dirichlet([0.5, 1.0, 1.5])], ot.BipartiteGraph([[0,1], [0,1]]))
else:
distribution = ot.Multinomial()
dimension = distribution.getDimension()
if dimension <= 2:
if distribution.getDimension() == 1:
distribution.setDescription(['$x$'])
pdf_graph = distribution.drawPDF()
cdf_graph = distribution.drawCDF()
fig = plt.figure(figsize=(10, 4))
plt.suptitle(str(distribution))
pdf_axis = fig.add_subplot(121)
cdf_axis = fig.add_subplot(122)
View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
View(cdf_graph, figure=fig, axes=[cdf_axis], add_legend=False)
else:
distribution.setDescription(['$x_1$', '$x_2$'])
pdf_graph = distribution.drawPDF()
fig = plt.figure(figsize=(10, 5))
plt.suptitle(str(distribution))
pdf_axis = fig.add_subplot(111)
View(pdf_graph, figure=fig, axes=[pdf_axis], add_legend=False)
|
[
"[email protected]"
] | |
1f846a0df6dbb11c408ad08429e4a3048165f03c
|
0819c03aae157f0a73488911380d26d796026839
|
/ProjectEuler/ProjectEuler/Euler015.py
|
e9afa56b00daba148bd6a8cc8c81f9d2b7272c00
|
[] |
no_license
|
mohi-othman/mohi-euler-python
|
5c53f02a94a9541623758595e9dd602122f4bb5f
|
71aec394684362733a502eb3b4c3f88a18565387
|
refs/heads/master
| 2016-09-05T16:05:37.161813 | 2012-11-07T19:40:21 | 2012-11-07T19:40:21 | 32,092,676 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,310 |
py
|
import time
class node:
def __init__(self):
self.children = []
class network:
def __init__(self, firstNode, targetNodes):
self.firstNode = firstNode
self.targetNodes = targetNodes
def getFreshResultDict(self):
result = dict()
for target in self.targetNodes:
result[target] = 0
return result
def move(destination, targets, resultDict):
if destination in targets:
resultDict[destination]+=1
else:
for newDestination in destination.children:
move(newDestination, targets, resultDict)
def buildSquare(dimension):
grid = []
for i in range(0, dimension+1):
row = []
for j in range(0, dimension+1):
newNode = node()
row.append(newNode)
grid.append(row)
for i in range(0, dimension+1):
for j in range(0, dimension+1):
if j<dimension:
grid[i][j].children.append(grid[i][j+1])
if i<dimension:
grid[i][j].children.append(grid[i+1][j])
result = network(grid[0][0], [grid[dimension][dimension]])
return result
def buildTriangle(dimension):
tree = []
for i in range(0, dimension+1):
level = []
for j in range(0, i+2):
newNode = node()
level.append(newNode)
tree.append(level)
for i in range(0, dimension):
for j in range(0, i+2):
tree[i][j].children.append(tree[i+1][j])
tree[i][j].children.append(tree[i+1][j+1])
result = network(tree[0][0], tree[dimension])
return result
def buildPartialTriangle(dimension):
if (dimension)%2==0:
halfPoint = (dimension + 2) // 2
else:
halfPoint = (dimension + 1) // 2
tree = []
for i in range(0, dimension+1):
level = []
for j in range(0, (i+2,halfPoint)[i+2 > halfPoint]):
newNode = node()
level.append(newNode)
tree.append(level)
for i in range(0, dimension):
for j in range(0, (i+2,halfPoint)[i+2 > halfPoint]):
tree[i][j].children.append(tree[i+1][j])
if j < halfPoint - 1:
tree[i][j].children.append(tree[i+1][j+1])
result = network(tree[0][0], tree[dimension])
return result
def solve():
dimension = 20
squareNetwork = buildSquare(dimension)
gridResultDict = squareNetwork.getFreshResultDict()
triangleNetwork = buildTriangle(dimension)
treeResultDict = triangleNetwork.getFreshResultDict()
partialTriangleNetwork = buildPartialTriangle(dimension)
smallTreeResultDict = partialTriangleNetwork.getFreshResultDict()
t = time.clock()
move(triangleNetwork.firstNode, triangleNetwork.targetNodes, treeResultDict)
print("Triangle network result is:", sum([x**2 for x in treeResultDict.values()]))
print("Triangle network done in:",time.clock() - t)
t = time.clock()
move(partialTriangleNetwork.firstNode, partialTriangleNetwork.targetNodes, smallTreeResultDict)
if dimension%2==0:
result = 2 * sum([x**2 for x in smallTreeResultDict.values() if x < max(smallTreeResultDict.values())]) + max(smallTreeResultDict.values())**2
else:
result = 2 * sum([x**2 for x in smallTreeResultDict.values()])
print("Partial triangle result is:", result)
print("Partial triangle network done in:",time.clock() - t)
t = time.clock()
move(squareNetwork.firstNode, squareNetwork.targetNodes, gridResultDict)
print("Square network result is:", sum(gridResultDict.values()))
print("Square network done in:",time.clock() - t)
####################################################################################
def solvePascalTriangle(dimension):
t = time.clock()
level = [1,1]
for i in range(2, dimension+1):
newLevel = [1]
for j in range(0, len(level)-1):
newLevel.append(level[j]+level[j+1])
newLevel.append(1)
level = newLevel
print("Result is:", sum([x**2 for x in level]))
print("Pascal triangle done in:",time.clock() - t)
|
[
"[email protected]"
] | |
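All three networks count the 137846528820 lattice paths through a 20x20 grid; the Pascal-triangle variant exploits the identity that the sum over k of C(n, k)^2 equals C(2n, n). A one-line check on Python 3.8+:

import math

print(math.comb(40, 20))                              # 137846528820
print(sum(math.comb(20, k) ** 2 for k in range(21)))  # same value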
bf2ffece548fa7bd8cc89eabcda5c3a1929f6e4a
|
bb6ebff7a7f6140903d37905c350954ff6599091
|
/tools/perf/page_sets/__init__.py
|
36a23b43274917573f2ba939d7e0eeffdddd5422
|
[
"BSD-3-Clause",
"GPL-2.0-only",
"Apache-2.0",
"LicenseRef-scancode-unknown",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
PDi-Communication-Systems-Inc/lollipop_external_chromium_org
|
faa6602bd6bfd9b9b6277ce3cd16df0bd26e7f2f
|
ccadf4e63dd34be157281f53fe213d09a8c66d2c
|
refs/heads/master
| 2022-12-23T18:07:04.568931 | 2016-04-11T16:03:36 | 2016-04-11T16:03:36 | 53,677,925 | 0 | 1 |
BSD-3-Clause
| 2022-12-09T23:46:46 | 2016-03-11T15:49:07 |
C++
|
UTF-8
|
Python
| false | false | 585 |
py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import inspect
import os
import sys
from telemetry.core import discover
from telemetry.page import page_set
# Import all submodules' PageSet classes.
start_dir = os.path.dirname(os.path.abspath(__file__))
top_level_dir = os.path.dirname(start_dir)
base_class = page_set.PageSet
for cls in discover.DiscoverClasses(
start_dir, top_level_dir, base_class).values():
setattr(sys.modules[__name__], cls.__name__, cls)
|
[
"[email protected]"
] | |
a374fbc7800f7c329eafa018a794f24d885140dd
|
bb767bfc9db2b0ab7f24d3561b168a829c4eb0bc
|
/1st_Year/1st_Semestre/Fpro/Python/saved files/practicing.py
|
c4a51e39f0cb59ec533d6e851b3242027a5511a9
|
[] |
no_license
|
Hugomguima/FEUP
|
7e6e0faf5408d698a34c3b5aed977b20aa76c067
|
f26887e2b8e92e41ae5050515cd0b3cdf94d6476
|
refs/heads/master
| 2023-06-09T05:21:38.897094 | 2021-06-29T17:00:01 | 2021-06-29T17:00:01 | 272,567,282 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 72 |
py
|
print(type(10.0))
print(type(10))
print(type('hello'))
print(type(True))
|
[
"[email protected]"
] | |
50cd9fbadc769245e3339206c113b527065fe981
|
cf070f44eb2e4a218af93432608a04e85e1bbfac
|
/web/tests/eSearch/test_4504.py
|
332c2c5aced33e8c407502b881508f5e40c0a27e
|
[] |
no_license
|
NadyaDi/kms-automation
|
fbb680e95394b0c3286653ac5ae187f6bc02845e
|
3309a6f516386e824c23e03c6f6cb47661ea5ddd
|
refs/heads/master
| 2022-01-15T14:50:22.616235 | 2019-05-16T10:29:20 | 2019-05-16T10:29:20 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 13,376 |
py
|
import time, pytest
import sys,os
from _ast import Num
sys.path.insert(1,os.path.abspath(os.path.join(os.path.dirname( __file__ ),'..','..','lib')))
from clsCommon import Common
import clsTestService
from localSettings import *
import localSettings
from utilityTestFunc import *
import enums
class Test:
#================================================================================================================================
# @Author: Horia Cus
# Test Name : Filter by Publish Statuses - with search - My Media
# Test description:
# Verify that My Media entries are properly displayed while using a search term and filtering them by publish status
# Quiz entries still need to be added, but we have a blocker issue
#================================================================================================================================
testNum = "4504"
supported_platforms = clsTestService.updatePlatforms(testNum)
status = "Pass"
timeout_accured = "False"
driver = None
common = None
entryName = "Filter by Status"
@pytest.fixture(scope='module',params=supported_platforms)
def driverFix(self,request):
return request.param
def test_01(self,driverFix,env):
#write to log we started the test
logStartTest(self,driverFix)
try:
########################### TEST SETUP ###########################
#capture test start time
self.startTime = time.time()
#initialize all the basic vars and start playing
self,self.driver = clsTestService.initializeAndLoginAsUser(self, driverFix)
self.common = Common(self.driver)
# Entries and dictionaries
self.entryName1 = "Filter by Status - Video Private"
self.entryName2 = "Filter by Status - Video Unlisted"
self.entryName3 = "Filter by Status - Video Multiple"
self.entryName4 = "Filter by Status - Audio Private"
self.entryName5 = "Filter by Status - Audio Unlisted"
self.entryName6 = "Filter by Status - Audio Multiple"
self.entryName7 = "Filter by Status - Image Private"
self.entryName8 = "Filter by Status - Image Unlisted"
self.entryName9 = "Filter by Status - Image Multiple"
self.entryName10 = "Filter by Status - Quiz Private - Quiz"
self.entryName11 = "Filter by Status - Quiz Unlisted - Quiz"
self.entryName12 = "Filter by Status - Quiz Multiple - Quiz"
self.allStatuses = {self.entryName1: True, self.entryName2: True, self.entryName3: True, self.entryName4: True, self.entryName5: True, self.entryName6: True, self.entryName7: True, self.entryName8: True, self.entryName9: True, self.entryName10: True, self.entryName11: True, self.entryName12: True}
self.privateStatus = {self.entryName1: True, self.entryName2: False, self.entryName3: False, self.entryName4: True, self.entryName5: False, self.entryName6: False, self.entryName7: True, self.entryName8: False, self.entryName9: False, self.entryName10: True, self.entryName11: False, self.entryName12: False}
self.publishedStatus = {self.entryName1: False, self.entryName2: False, self.entryName3: True, self.entryName4: False, self.entryName5: False, self.entryName6: True, self.entryName7: False, self.entryName8: False, self.entryName9: True, self.entryName10: False, self.entryName11: False, self.entryName12: True}
self.pendingStatus = {self.entryName1: False, self.entryName2: False, self.entryName3: True, self.entryName4: False, self.entryName5: False, self.entryName6: True, self.entryName7: False, self.entryName8: False, self.entryName9: True, self.entryName10: False, self.entryName11: False, self.entryName12: True}
self.rejectedStatus = {self.entryName1: False, self.entryName2: False, self.entryName3: True, self.entryName4: False, self.entryName5: False, self.entryName6: True, self.entryName7: False, self.entryName8: False, self.entryName9: True, self.entryName10: False, self.entryName11: False, self.entryName12: True}
self.unlistedStatus = {self.entryName1: False, self.entryName2: True, self.entryName3: False, self.entryName4: False, self.entryName5: True, self.entryName6: False, self.entryName7: False, self.entryName8: True, self.entryName9: False, self.entryName10: False, self.entryName11: True, self.entryName12: False}
##################### TEST STEPS - MAIN FLOW #####################
writeToLog("INFO","Step 1: Going to navigate to my media page")
if self.common.myMedia.navigateToMyMedia(forceNavigate=True) == False:
self.status = "Fail"
writeToLog("INFO","Step 1: FAILED to navigate to my media page")
return
writeToLog("INFO","Step 2: Going to make a search in my media page")
if self.common.myMedia.searchEntryMyMedia(self.entryName, forceNavigate=False, exactSearch=True) == False:
self.status = "Fail"
writeToLog("INFO","Step 2: FAILED to make a search in my media page")
return
writeToLog("INFO", "STEP 3 Going to filter My Media page entries by: " + enums.EntryPrivacyType.ALL_STATUSSES.value + "'")
if self.common.myMedia.SortAndFilter(enums.SortAndFilter.PUBLISH_STATUS, enums.EntryPrivacyType.ALL_STATUSSES) == False:
self.status = "Fail"
writeToLog("INFO", "STEP 3: FAILED to filter My Media page entries by '" + enums.EntryPrivacyType.ALL_STATUSSES.value + "'")
return
writeToLog("INFO", "STEP 4 Going to verify filter My Media page entries by: " + enums.EntryPrivacyType.ALL_STATUSSES.value + "'")
if self.common.myMedia.verifyFiltersInMyMedia(self.allStatuses) == False:
self.status = "Fail"
writeToLog("INFO", "STEP 4: FAILED to verify filter My Media page entries by '" + enums.EntryPrivacyType.ALL_STATUSSES.value + "'")
return
writeToLog("INFO", "STEP 5: Going to clear the filter search menu")
if self.common.myMedia.filterClearAllWhenOpened() == False:
self.status = "Fail"
writeToLog("INFO", "STEP 5: Failed to clear the search menu")
return
writeToLog("INFO", "STEP 6 Going to filter My Media page entries by: " + enums.EntryPrivacyType.PRIVATE.value + "'")
if self.common.myMedia.SortAndFilter(enums.SortAndFilter.PUBLISH_STATUS, enums.EntryPrivacyType.PRIVATE) == False:
self.status = "Fail"
writeToLog("INFO", "STEP 6: FAILED to filter My Media page entries by '" + enums.EntryPrivacyType.PRIVATE.value + "'")
return
writeToLog("INFO", "STEP 7 Going to verify filter My Media page entries by: " + enums.EntryPrivacyType.PRIVATE.value + "'")
if self.common.myMedia.verifyFiltersInMyMedia(self.privateStatus) == False:
self.status = "Fail"
writeToLog("INFO", "STEP 7: FAILED to verify filter My Media page entries by '" + enums.EntryPrivacyType.PRIVATE.value + "'")
return
writeToLog("INFO", "STEP 8: Going to clear the filter search menu")
if self.common.myMedia.filterClearAllWhenOpened() == False:
self.status = "Fail"
writeToLog("INFO", "STEP 8: Failed to clear the search menu")
return
writeToLog("INFO", "STEP 9 Going to filter My Media page entries by: " + enums.EntryPrivacyType.PUBLISHED.value + "'")
if self.common.myMedia.SortAndFilter(enums.SortAndFilter.PUBLISH_STATUS, enums.EntryPrivacyType.PUBLISHED) == False:
self.status = "Fail"
writeToLog("INFO", "STEP 9: FAILED to filter My Media page entries by '" + enums.EntryPrivacyType.PUBLISHED.value + "'")
return
writeToLog("INFO", "STEP 10: Going to verify filter My Media page entries by: " + enums.EntryPrivacyType.PUBLISHED.value + "'")
if self.common.myMedia.verifyFiltersInMyMedia(self.publishedStatus) == False:
self.status = "Fail"
writeToLog("INFO", "STEP 10: FAILED to verify filter My Media page entries by '" + enums.EntryPrivacyType.PUBLISHED.value + "'")
return
writeToLog("INFO", "STEP 11: Going to clear the filter search menu")
if self.common.myMedia.filterClearAllWhenOpened() == False:
self.status = "Fail"
writeToLog("INFO", "STEP 11: Failed to clear the search menu")
return
writeToLog("INFO", "STEP 12: Going to filter My Media page entries by: " + enums.EntryPrivacyType.PENDING.value + "'")
if self.common.myMedia.SortAndFilter(enums.SortAndFilter.PUBLISH_STATUS, enums.EntryPrivacyType.PENDING) == False:
self.status = "Fail"
writeToLog("INFO", "STEP 12: FAILED to filter My Media page entries by '" + enums.EntryPrivacyType.PENDING.value + "'")
return
writeToLog("INFO", "STEP 13: Going to verify filter My Media page entries by: " + enums.EntryPrivacyType.PENDING.value + "'")
if self.common.myMedia.verifyFiltersInMyMedia(self.pendingStatus) == False:
self.status = "Fail"
writeToLog("INFO", "STEP 13: FAILED to verify filter My Media page entries by '" + enums.EntryPrivacyType.PENDING.value + "'")
return
writeToLog("INFO", "STEP 14: Going to clear the filter search menu")
if self.common.myMedia.filterClearAllWhenOpened() == False:
self.status = "Fail"
writeToLog("INFO", "STEP 14: Failed to clear the search menu")
return
writeToLog("INFO", "STEP 15: Going to filter My Media page entries by: " + enums.EntryPrivacyType.REJECTED.value + "'")
if self.common.myMedia.SortAndFilter(enums.SortAndFilter.PUBLISH_STATUS, enums.EntryPrivacyType.REJECTED) == False:
self.status = "Fail"
writeToLog("INFO", "STEP 15: FAILED to filter My Media page entries by '" + enums.EntryPrivacyType.REJECTED.value + "'")
return
writeToLog("INFO", "STEP 16: Going to verify filter My Media page entries by: " + enums.EntryPrivacyType.REJECTED.value + "'")
if self.common.myMedia.verifyFiltersInMyMedia(self.rejectedStatus) == False:
self.status = "Fail"
writeToLog("INFO", "STEP 16: FAILED to verify filter My Media page entries by '" + enums.EntryPrivacyType.REJECTED.value + "'")
return
writeToLog("INFO", "STEP 17: Going to clear the filter search menu")
if self.common.myMedia.filterClearAllWhenOpened() == False:
self.status = "Fail"
writeToLog("INFO", "STEP 17: Failed to clear the search menu")
return
writeToLog("INFO", "STEP 18: Going to filter My Media page entries by: " + enums.EntryPrivacyType.UNLISTED.value + "'")
if self.common.myMedia.SortAndFilter(enums.SortAndFilter.PUBLISH_STATUS, enums.EntryPrivacyType.UNLISTED) == False:
self.status = "Fail"
writeToLog("INFO", "STEP 18: FAILED to filter My Media page entries by '" + enums.EntryPrivacyType.UNLISTED.value + "'")
return
writeToLog("INFO", "STEP 19: Going to verify filter My Media page entries by: " + enums.EntryPrivacyType.UNLISTED.value + "'")
if self.common.myMedia.verifyFiltersInMyMedia(self.unlistedStatus) == False:
self.status = "Fail"
writeToLog("INFO", "STEP 19: FAILED to verify filter My Media page entries by '" + enums.EntryPrivacyType.UNLISTED.value + "'")
return
writeToLog("INFO", "STEP 20: Going to clear the filter search menu")
if self.common.myMedia.filterClearAllWhenOpened() == False:
self.status = "Fail"
writeToLog("INFO", "STEP 20: Failed to clear the search menu")
return
##################################################################
writeToLog("INFO","TEST PASSED: All the entries are properly displayed in My Media page while using filter by publish status")
# if an exception happened we need to handle it and fail the test
except Exception as inst:
self.status = clsTestService.handleException(self,inst,self.startTime)
########################### TEST TEARDOWN ###########################
def teardown_method(self,method):
try:
self.common.handleTestFail(self.status)
writeToLog("INFO","**************** Starting: teardown_method ****************")
writeToLog("INFO","**************** Ended: teardown_method *******************")
except:
pass
clsTestService.basicTearDown(self)
#write to log we finished the test
logFinishedTest(self,self.startTime)
assert (self.status == "Pass")
pytest.main('test_' + testNum + '.py --tb=line')
|
[
"[email protected]"
] | |
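Steps 3 through 20 above repeat one filter/verify/clear cycle per publish status; a hypothetical data-driven condensation of that cycle (media stands in for self.common.myMedia, logging elided):

def run_filter_cycles(media, sort_key, cycles):
    """cycles: list of (privacy_type, expected_entry_dict) pairs."""
    for privacy_type, expected in cycles:
        if media.SortAndFilter(sort_key, privacy_type) == False:
            return "Fail"
        if media.verifyFiltersInMyMedia(expected) == False:
            return "Fail"
        if media.filterClearAllWhenOpened() == False:
            return "Fail"
    return "Pass"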
fc8906480f952922d83cf3d060c29ff2fd9f1753
|
ee96ec6e09b0cc1af28ec7b77808eb4fa6611ca8
|
/components/collector/tests/source_collectors/cobertura_jenkins_plugin/test_uncovered_lines.py
|
ba7a4b2fc18da91ca3b88bffa86002a81af0639d
|
[
"Apache-2.0"
] |
permissive
|
Erik-Stel/quality-time
|
eb1b8db2022a91f06fc0edfc966dbec7a972b88c
|
602b6970e5d9088cb89cc6d488337349e54e1c9a
|
refs/heads/master
| 2023-03-28T13:22:11.043108 | 2021-03-18T14:27:18 | 2021-03-18T14:27:18 | 269,277,099 | 0 | 0 |
Apache-2.0
| 2021-03-18T14:20:21 | 2020-06-04T06:20:28 |
Python
|
UTF-8
|
Python
| false | false | 753 |
py
|
"""Unit tests for the Cobertura Jenkins plugin uncovered lines collector."""
from .base import CoberturaJenkinsPluginTestCase
class CoberturaJenkinsPluginUncoveredLinesTest(CoberturaJenkinsPluginTestCase):
"""Unit tests for the Cobertura Jenkins plugin uncovered lines collector."""
METRIC_TYPE = "uncovered_lines"
COBERTURA_JENKINS_PLUGIN_JSON = dict(results=dict(elements=[dict(denominator=15, numerator=13, name="Lines")]))
async def test_uncovered_lines(self):
"""Test that the number of uncovered lines and the total number of lines are returned."""
response = await self.collect(get_request_json_return_value=self.COBERTURA_JENKINS_PLUGIN_JSON)
self.assert_measurement(response, value="2", total="15")
|
[
"[email protected]"
] | |
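The expected values follow from the fixture arithmetic: the Cobertura "Lines" element reports numerator covered lines out of denominator total, so the collector should report denominator - numerator uncovered (assumed semantics, matching the assertion):

denominator, numerator = 15, 13  # total lines, covered lines
print(denominator - numerator)   # 2 uncovered lines out of a total of 15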
a651609a35436a53f6a614b63ab7428a107075b2
|
82b946da326148a3c1c1f687f96c0da165bb2c15
|
/sdk/python/pulumi_azure_native/notificationhubs/v20160301/list_notification_hub_keys.py
|
2962a60d0ff4849a1f70790baa8413ed3a56e586
|
[
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
morrell/pulumi-azure-native
|
3916e978382366607f3df0a669f24cb16293ff5e
|
cd3ba4b9cb08c5e1df7674c1c71695b80e443f08
|
refs/heads/master
| 2023-06-20T19:37:05.414924 | 2021-07-19T20:57:53 | 2021-07-19T20:57:53 | 387,815,163 | 0 | 0 |
Apache-2.0
| 2021-07-20T14:18:29 | 2021-07-20T14:18:28 | null |
UTF-8
|
Python
| false | false | 5,190 |
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'ListNotificationHubKeysResult',
'AwaitableListNotificationHubKeysResult',
'list_notification_hub_keys',
]
@pulumi.output_type
class ListNotificationHubKeysResult:
"""
Namespace/NotificationHub Connection String
"""
def __init__(__self__, key_name=None, primary_connection_string=None, primary_key=None, secondary_connection_string=None, secondary_key=None):
if key_name and not isinstance(key_name, str):
raise TypeError("Expected argument 'key_name' to be a str")
pulumi.set(__self__, "key_name", key_name)
if primary_connection_string and not isinstance(primary_connection_string, str):
raise TypeError("Expected argument 'primary_connection_string' to be a str")
pulumi.set(__self__, "primary_connection_string", primary_connection_string)
if primary_key and not isinstance(primary_key, str):
raise TypeError("Expected argument 'primary_key' to be a str")
pulumi.set(__self__, "primary_key", primary_key)
if secondary_connection_string and not isinstance(secondary_connection_string, str):
raise TypeError("Expected argument 'secondary_connection_string' to be a str")
pulumi.set(__self__, "secondary_connection_string", secondary_connection_string)
if secondary_key and not isinstance(secondary_key, str):
raise TypeError("Expected argument 'secondary_key' to be a str")
pulumi.set(__self__, "secondary_key", secondary_key)
@property
@pulumi.getter(name="keyName")
def key_name(self) -> Optional[str]:
"""
KeyName of the created AuthorizationRule
"""
return pulumi.get(self, "key_name")
@property
@pulumi.getter(name="primaryConnectionString")
def primary_connection_string(self) -> Optional[str]:
"""
PrimaryConnectionString of the AuthorizationRule.
"""
return pulumi.get(self, "primary_connection_string")
@property
@pulumi.getter(name="primaryKey")
def primary_key(self) -> Optional[str]:
"""
PrimaryKey of the created AuthorizationRule.
"""
return pulumi.get(self, "primary_key")
@property
@pulumi.getter(name="secondaryConnectionString")
def secondary_connection_string(self) -> Optional[str]:
"""
SecondaryConnectionString of the created AuthorizationRule
"""
return pulumi.get(self, "secondary_connection_string")
@property
@pulumi.getter(name="secondaryKey")
def secondary_key(self) -> Optional[str]:
"""
SecondaryKey of the created AuthorizationRule
"""
return pulumi.get(self, "secondary_key")
class AwaitableListNotificationHubKeysResult(ListNotificationHubKeysResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return ListNotificationHubKeysResult(
key_name=self.key_name,
primary_connection_string=self.primary_connection_string,
primary_key=self.primary_key,
secondary_connection_string=self.secondary_connection_string,
secondary_key=self.secondary_key)
def list_notification_hub_keys(authorization_rule_name: Optional[str] = None,
namespace_name: Optional[str] = None,
notification_hub_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableListNotificationHubKeysResult:
"""
Namespace/NotificationHub Connection String
:param str authorization_rule_name: The connection string of the NotificationHub for the specified authorizationRule.
:param str namespace_name: The namespace name.
:param str notification_hub_name: The notification hub name.
:param str resource_group_name: The name of the resource group.
"""
__args__ = dict()
__args__['authorizationRuleName'] = authorization_rule_name
__args__['namespaceName'] = namespace_name
__args__['notificationHubName'] = notification_hub_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:notificationhubs/v20160301:listNotificationHubKeys', __args__, opts=opts, typ=ListNotificationHubKeysResult).value
return AwaitableListNotificationHubKeysResult(
key_name=__ret__.key_name,
primary_connection_string=__ret__.primary_connection_string,
primary_key=__ret__.primary_key,
secondary_connection_string=__ret__.secondary_connection_string,
secondary_key=__ret__.secondary_key)
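# Hedged usage sketch (the resource names below are illustrative placeholders,
# not values defined anywhere in this module):
#
#     keys = list_notification_hub_keys(
#         authorization_rule_name="DefaultFullSharedAccessSignature",
#         namespace_name="my-namespace",
#         notification_hub_name="my-hub",
#         resource_group_name="my-resource-group")
#     pulumi.export("primaryConnectionString", keys.primary_connection_string)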
|
[
"[email protected]"
] | |
611bd7353b0fbdc7b5663cd846967b689d762292
|
8b606215d26314c046b24f779cad1d29679de73f
|
/GroundSegment/GroundSegment/models/__init__.py
|
67a0a2722a371aa69c721afc1836ade5d5fc417b
|
[] |
no_license
|
unlamgidsa/unlam_gs_backend
|
b5a76660458fd43557602840eb24f838aabc4ce2
|
6284a5d55b8fe3b5b7c8f3a8def505409f7ea735
|
refs/heads/master
| 2023-07-20T10:30:30.618320 | 2022-12-29T21:55:51 | 2022-12-29T21:55:51 | 244,700,010 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,486 |
py
|
from GroundSegment.models.Satellite import Satellite
from GroundSegment.models.Tle import Tle
from GroundSegment.models.Parameter import Parameter
from GroundSegment.models.SatelliteState import SatelliteState
from GroundSegment.models.Propagation import Propagation
from GroundSegment.models.PropagationDetail import PropagationDetail
from GroundSegment.models.Pasada import Pasada
#from GroundSegment.models.Site import Site
from GroundSegment.models.Notification.Notification import Notification
from GroundSegment.models.Notification.Contact import Contact
from GroundSegment.models.Notification.MessageTemplate import MessageTemplate
from GroundSegment.models.Notification.NotificationType import NotificationType
from GroundSegment.models.Notification.AlarmTypeNotificationType import AlarmTypeNotificationType
from GroundSegment.models.SubSystem import SubSystem
from GroundSegment.models.Log import Log
from GroundSegment.models.Watchdog import Watchdog
from GroundSegment.models.DCPData import DCPData
from GroundSegment.models.DCPPlatform import DCPPlatform
from GroundSegment.models.Alarm import *
from GroundSegment.models.DownlinkFrame import DownlinkFrame
from GroundSegment.models.Country import Country
from GroundSegment.models.State import State
from GroundSegment.models.PassGeneration import PassGeneration
from GroundSegment.models.Eclipse import Eclipse
from GroundSegment.models.TrackPoint import TrackPoint
from GroundSegment.models.UserItem import UserItem
|
[
"[email protected]"
] | |
9fe6dbd17637c2e9944165ad8b3a493d8a867030
|
a4681043cb56a9ab45be32a62fa9700b391f087f
|
/Exercícios/ex_txt01.py
|
2d55dd693973772b33747357a92538abd6f9af71
|
[] |
no_license
|
MarceloDL-A/Python
|
b16b221ae4355b6323092d069bf83d1d142b9975
|
c091446ae0089f03ffbdc47b3a6901f4fa2a25fb
|
refs/heads/main
| 2023-01-01T02:29:31.591861 | 2020-10-27T19:04:11 | 2020-10-27T19:04:11 | 301,565,957 | 0 | 0 | null | 2020-10-27T19:04:12 | 2020-10-05T23:41:30 |
Python
|
UTF-8
|
Python
| false | false | 277 |
py
|
valores_celulares = [850, 2230, 150, 3500, 5000]
with open('valores_celulares.txt', 'a') as arquivo:
for valor in valores_celulares:
arquivo.write(str(valor) + '\n')
with open('valores_celulares.txt', 'r') as arquivo:
for valor in arquivo:
print(valor)
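# Note: the file is opened with mode 'a', so re-running this script appends
# the same five values again. A hedged variant that avoids the doubled
# newlines when printing (print adds its own '\n' on top of the one read
# from the file):
#
#     with open('valores_celulares.txt', 'r') as arquivo:
#         for valor in arquivo:
#             print(valor.strip())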
|
[
"[email protected]"
] | |
e2e9ef0272dab73fa72d9f8bf0360e7edffc4e5b
|
b9e6499ab7431a2dd514fa97e7ee99dfe4c1ef3f
|
/corehq/apps/reports/filters/fixtures.py
|
b1d405cf5fe618a016ff744ca772b08d95b379fc
|
[] |
no_license
|
kennknowles/commcare-hq
|
9bcae8301b5888b4aaf374b684a7670c2f6fa0e7
|
b1b894f4cb4a266b2dff7598cf9a9ae295bfa671
|
refs/heads/master
| 2023-08-16T05:50:31.822089 | 2013-09-03T22:23:28 | 2013-09-03T22:24:56 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,884 |
py
|
import json
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_noop
from corehq.apps.fixtures.models import FixtureDataType, FixtureDataItem
from corehq.apps.locations.util import load_locs_json, location_hierarchy_config
from corehq.apps.reports.filters.base import BaseReportFilter
class AsyncDrillableFilter(BaseReportFilter):
# todo: add documentation
# todo: cleanup template
"""
example_hierarchy = [{"type": "state", "display": "name"},
{"type": "district", "parent_ref": "state_id", "references": "id", "display": "name"},
{"type": "block", "parent_ref": "district_id", "references": "id", "display": "name"},
{"type": "village", "parent_ref": "block_id", "references": "id", "display": "name"}]
"""
template = "reports/filters/drillable_async.html"
    hierarchy = []  # a list of fixture data type names representing different levels of the hierarchy, starting with the root
def fdi_to_json(self, fdi):
return {
'fixture_type': fdi.data_type_id,
'fields': fdi.fields,
'id': fdi.get_id,
'children': getattr(fdi, '_children', None),
}
fdts = {}
def data_types(self, index=None):
if not self.fdts:
self.fdts = [FixtureDataType.by_domain_tag(self.domain, h["type"]).one() for h in self.hierarchy]
return self.fdts if index is None else self.fdts[index]
@property
def api_root(self):
return reverse('api_dispatch_list', kwargs={'domain': self.domain,
'resource_name': 'fixture',
'api_name': 'v0.1'})
@property
def full_hierarchy(self):
ret = []
for i, h in enumerate(self.hierarchy):
new_h = dict(h)
new_h['id'] = self.data_types(i).get_id
ret.append(new_h)
return ret
def generate_lineage(self, leaf_type, leaf_item_id):
leaf_fdi = FixtureDataItem.get(leaf_item_id)
for i, h in enumerate(self.hierarchy[::-1]):
if h["type"] == leaf_type:
index = i
lineage = [leaf_fdi]
for i, h in enumerate(self.full_hierarchy[::-1]):
if i < index or i >= len(self.hierarchy)-1: continue
real_index = len(self.hierarchy) - (i+1)
lineage.insert(0, FixtureDataItem.by_field_value(self.domain, self.data_types(real_index - 1),
h["references"], lineage[0].fields[h["parent_ref"]]).one())
return lineage
@property
def filter_context(self):
root_fdis = [self.fdi_to_json(f) for f in FixtureDataItem.by_data_type(self.domain, self.data_types(0).get_id)]
f_id = self.request.GET.get('fixture_id', None)
selected_fdi_type = f_id.split(':')[0] if f_id else None
selected_fdi_id = f_id.split(':')[1] if f_id else None
if selected_fdi_id:
index = 0
lineage = self.generate_lineage(selected_fdi_type, selected_fdi_id)
parent = {'children': root_fdis}
for i, fdi in enumerate(lineage[:-1]):
this_fdi = [f for f in parent['children'] if f['id'] == fdi.get_id][0]
next_h = self.hierarchy[i+1]
this_fdi['children'] = [self.fdi_to_json(f) for f in FixtureDataItem.by_field_value(self.domain,
self.data_types(i+1), next_h["parent_ref"], fdi.fields[next_h["references"]])]
parent = this_fdi
return {
'api_root': self.api_root,
'control_name': self.label,
'control_slug': self.slug,
'selected_fdi_id': selected_fdi_id,
'fdis': json.dumps(root_fdis),
'hierarchy': self.full_hierarchy
}
class AsyncLocationFilter(BaseReportFilter):
# todo: cleanup template
label = ugettext_noop("Location")
slug = "location_async"
template = "reports/filters/location_async.html"
@property
def filter_context(self):
api_root = reverse('api_dispatch_list', kwargs={'domain': self.domain,
'resource_name': 'location',
'api_name': 'v0.3'})
selected_loc_id = self.request.GET.get('location_id')
return {
'api_root': api_root,
'control_name': self.label, # todo: cleanup, don't follow this structure
'control_slug': self.slug, # todo: cleanup, don't follow this structure
'loc_id': selected_loc_id,
'locations': json.dumps(load_locs_json(self.domain, selected_loc_id)),
'hierarchy': location_hierarchy_config(self.domain),
}
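# Hedged sketch of a concrete subclass of AsyncDrillableFilter, mirroring the
# example_hierarchy given in its docstring (the type names are hypothetical):
#
#     class StateDistrictFilter(AsyncDrillableFilter):
#         label = "State/District"
#         slug = "state_district"
#         hierarchy = [
#             {"type": "state", "display": "name"},
#             {"type": "district", "parent_ref": "state_id",
#              "references": "id", "display": "name"},
#         ]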
|
[
"[email protected]"
] | |
bc8c2fe0c88ac4e6708e0ca76ad3267aa79466b7
|
32cf9c3099c36a46804e393dd1491a8954f50263
|
/1학기 cloud9 총정리/django_intro/workspace/.c9/metadata/workspace/workshop_test/myapp/models.py
|
9e07b03671da2d9a7f87d8d5dbb7b579bef71f19
|
[] |
no_license
|
ash92kr/s_code
|
ce3bda6a403600892750e181dca5ed8c4caebcb1
|
92eace551d132b91ee91db6c0afd38b93f9b647b
|
refs/heads/master
| 2020-04-12T00:27:07.043091 | 2019-05-21T08:17:39 | 2019-05-21T08:17:39 | 162,200,447 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 9,238 |
py
|
{"filter":false,"title":"models.py","tooltip":"/workshop_test/myapp/models.py","undoManager":{"mark":27,"position":27,"stack":[[{"start":{"row":3,"column":0},"end":{"row":3,"column":1},"action":"insert","lines":["c"],"id":2},{"start":{"row":3,"column":1},"end":{"row":3,"column":2},"action":"insert","lines":["l"]},{"start":{"row":3,"column":2},"end":{"row":3,"column":3},"action":"insert","lines":["a"]},{"start":{"row":3,"column":3},"end":{"row":3,"column":4},"action":"insert","lines":["s"]},{"start":{"row":3,"column":4},"end":{"row":3,"column":5},"action":"insert","lines":["s"]}],[{"start":{"row":3,"column":5},"end":{"row":3,"column":6},"action":"insert","lines":[" "],"id":3},{"start":{"row":3,"column":6},"end":{"row":3,"column":7},"action":"insert","lines":["M"]},{"start":{"row":3,"column":7},"end":{"row":3,"column":8},"action":"insert","lines":["u"]},{"start":{"row":3,"column":8},"end":{"row":3,"column":9},"action":"insert","lines":["s"]},{"start":{"row":3,"column":9},"end":{"row":3,"column":10},"action":"insert","lines":["i"]},{"start":{"row":3,"column":10},"end":{"row":3,"column":11},"action":"insert","lines":["c"]},{"start":{"row":3,"column":11},"end":{"row":3,"column":12},"action":"insert","lines":["i"]}],[{"start":{"row":3,"column":12},"end":{"row":3,"column":13},"action":"insert","lines":["a"],"id":4},{"start":{"row":3,"column":13},"end":{"row":3,"column":14},"action":"insert","lines":["n"]}],[{"start":{"row":3,"column":14},"end":{"row":3,"column":16},"action":"insert","lines":["()"],"id":5}],[{"start":{"row":3,"column":15},"end":{"row":3,"column":16},"action":"insert","lines":["m"],"id":6},{"start":{"row":3,"column":16},"end":{"row":3,"column":17},"action":"insert","lines":["o"]},{"start":{"row":3,"column":17},"end":{"row":3,"column":18},"action":"insert","lines":["d"]},{"start":{"row":3,"column":18},"end":{"row":3,"column":19},"action":"insert","lines":["e"]},{"start":{"row":3,"column":19},"end":{"row":3,"column":20},"action":"insert","lines":["l"]},{"start":{"row":3,"column":20},"end":{"row":3,"column":21},"action":"insert","lines":["s"]},{"start":{"row":3,"column":21},"end":{"row":3,"column":22},"action":"insert","lines":["."]},{"start":{"row":3,"column":22},"end":{"row":3,"column":23},"action":"insert","lines":["M"]},{"start":{"row":3,"column":23},"end":{"row":3,"column":24},"action":"insert","lines":["o"]},{"start":{"row":3,"column":24},"end":{"row":3,"column":25},"action":"insert","lines":["d"]},{"start":{"row":3,"column":25},"end":{"row":3,"column":26},"action":"insert","lines":["e"]}],[{"start":{"row":3,"column":26},"end":{"row":3,"column":27},"action":"insert","lines":["l"],"id":7}],[{"start":{"row":3,"column":28},"end":{"row":3,"column":29},"action":"insert","lines":[":"],"id":8}],[{"start":{"row":3,"column":29},"end":{"row":4,"column":0},"action":"insert","lines":["",""],"id":9},{"start":{"row":4,"column":0},"end":{"row":4,"column":4},"action":"insert","lines":[" 
"]},{"start":{"row":4,"column":4},"end":{"row":4,"column":5},"action":"insert","lines":["f"]},{"start":{"row":4,"column":5},"end":{"row":4,"column":6},"action":"insert","lines":["i"]},{"start":{"row":4,"column":6},"end":{"row":4,"column":7},"action":"insert","lines":["r"]}],[{"start":{"row":4,"column":7},"end":{"row":4,"column":8},"action":"insert","lines":["s"],"id":10},{"start":{"row":4,"column":8},"end":{"row":4,"column":9},"action":"insert","lines":["t"]},{"start":{"row":4,"column":9},"end":{"row":4,"column":10},"action":"insert","lines":["_"]},{"start":{"row":4,"column":10},"end":{"row":4,"column":11},"action":"insert","lines":["n"]},{"start":{"row":4,"column":11},"end":{"row":4,"column":12},"action":"insert","lines":["a"]},{"start":{"row":4,"column":12},"end":{"row":4,"column":13},"action":"insert","lines":["m"]},{"start":{"row":4,"column":13},"end":{"row":4,"column":14},"action":"insert","lines":["e"]}],[{"start":{"row":4,"column":14},"end":{"row":4,"column":15},"action":"insert","lines":[" "],"id":11},{"start":{"row":4,"column":15},"end":{"row":4,"column":16},"action":"insert","lines":["="]}],[{"start":{"row":4,"column":16},"end":{"row":4,"column":17},"action":"insert","lines":[" "],"id":12},{"start":{"row":4,"column":17},"end":{"row":4,"column":18},"action":"insert","lines":["m"]},{"start":{"row":4,"column":18},"end":{"row":4,"column":19},"action":"insert","lines":["o"]},{"start":{"row":4,"column":19},"end":{"row":4,"column":20},"action":"insert","lines":["d"]},{"start":{"row":4,"column":20},"end":{"row":4,"column":21},"action":"insert","lines":["e"]},{"start":{"row":4,"column":21},"end":{"row":4,"column":22},"action":"insert","lines":["l"]},{"start":{"row":4,"column":22},"end":{"row":4,"column":23},"action":"insert","lines":["s"]},{"start":{"row":4,"column":23},"end":{"row":4,"column":24},"action":"insert","lines":["."]},{"start":{"row":4,"column":24},"end":{"row":4,"column":25},"action":"insert","lines":["T"]}],[{"start":{"row":4,"column":25},"end":{"row":4,"column":26},"action":"insert","lines":["e"],"id":13},{"start":{"row":4,"column":26},"end":{"row":4,"column":27},"action":"insert","lines":["s"]}],[{"start":{"row":4,"column":26},"end":{"row":4,"column":27},"action":"remove","lines":["s"],"id":14}],[{"start":{"row":4,"column":26},"end":{"row":4,"column":27},"action":"insert","lines":["x"],"id":15},{"start":{"row":4,"column":27},"end":{"row":4,"column":28},"action":"insert","lines":["t"]},{"start":{"row":4,"column":28},"end":{"row":4,"column":29},"action":"insert","lines":["F"]},{"start":{"row":4,"column":29},"end":{"row":4,"column":30},"action":"insert","lines":["i"]},{"start":{"row":4,"column":30},"end":{"row":4,"column":31},"action":"insert","lines":["e"]},{"start":{"row":4,"column":31},"end":{"row":4,"column":32},"action":"insert","lines":["l"]},{"start":{"row":4,"column":32},"end":{"row":4,"column":33},"action":"insert","lines":["d"]}],[{"start":{"row":4,"column":33},"end":{"row":4,"column":35},"action":"insert","lines":["()"],"id":16}],[{"start":{"row":4,"column":35},"end":{"row":5,"column":0},"action":"insert","lines":["",""],"id":17},{"start":{"row":5,"column":0},"end":{"row":5,"column":4},"action":"insert","lines":[" 
"]},{"start":{"row":5,"column":4},"end":{"row":5,"column":5},"action":"insert","lines":["l"]},{"start":{"row":5,"column":5},"end":{"row":5,"column":6},"action":"insert","lines":["a"]},{"start":{"row":5,"column":6},"end":{"row":5,"column":7},"action":"insert","lines":["s"]},{"start":{"row":5,"column":7},"end":{"row":5,"column":8},"action":"insert","lines":["t"]},{"start":{"row":5,"column":8},"end":{"row":5,"column":9},"action":"insert","lines":["_"]},{"start":{"row":5,"column":9},"end":{"row":5,"column":10},"action":"insert","lines":["n"]},{"start":{"row":5,"column":10},"end":{"row":5,"column":11},"action":"insert","lines":["a"]},{"start":{"row":5,"column":11},"end":{"row":5,"column":12},"action":"insert","lines":["m"]},{"start":{"row":5,"column":12},"end":{"row":5,"column":13},"action":"insert","lines":["e"]}],[{"start":{"row":5,"column":13},"end":{"row":5,"column":14},"action":"insert","lines":[" "],"id":18},{"start":{"row":5,"column":14},"end":{"row":5,"column":15},"action":"insert","lines":["="]}],[{"start":{"row":5,"column":15},"end":{"row":5,"column":16},"action":"insert","lines":[" "],"id":19},{"start":{"row":5,"column":16},"end":{"row":5,"column":17},"action":"insert","lines":["m"]},{"start":{"row":5,"column":17},"end":{"row":5,"column":18},"action":"insert","lines":["o"]}],[{"start":{"row":5,"column":16},"end":{"row":5,"column":18},"action":"remove","lines":["mo"],"id":20},{"start":{"row":5,"column":16},"end":{"row":5,"column":22},"action":"insert","lines":["models"]}],[{"start":{"row":5,"column":22},"end":{"row":5,"column":23},"action":"insert","lines":["."],"id":21},{"start":{"row":5,"column":23},"end":{"row":5,"column":24},"action":"insert","lines":["T"]},{"start":{"row":5,"column":24},"end":{"row":5,"column":25},"action":"insert","lines":["e"]},{"start":{"row":5,"column":25},"end":{"row":5,"column":26},"action":"insert","lines":["x"]}],[{"start":{"row":5,"column":23},"end":{"row":5,"column":26},"action":"remove","lines":["Tex"],"id":22},{"start":{"row":5,"column":23},"end":{"row":5,"column":32},"action":"insert","lines":["TextField"]}],[{"start":{"row":5,"column":32},"end":{"row":5,"column":34},"action":"insert","lines":["()"],"id":23}],[{"start":{"row":3,"column":0},"end":{"row":3,"column":2},"action":"insert","lines":["# "],"id":24}],[{"start":{"row":4,"column":4},"end":{"row":4,"column":6},"action":"insert","lines":["# "],"id":25}],[{"start":{"row":5,"column":4},"end":{"row":5,"column":6},"action":"insert","lines":["# "],"id":26}],[{"start":{"row":3,"column":0},"end":{"row":3,"column":2},"action":"remove","lines":["# "],"id":27}],[{"start":{"row":4,"column":4},"end":{"row":4,"column":6},"action":"remove","lines":["# "],"id":28}],[{"start":{"row":5,"column":4},"end":{"row":5,"column":6},"action":"remove","lines":["# "],"id":29}]]},"ace":{"folds":[],"scrolltop":0,"scrollleft":0,"selection":{"start":{"row":5,"column":29},"end":{"row":5,"column":29},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":0},"timestamp":1556169646343,"hash":"7e29388dfa23c1bca3ba6e399f9ffbd3928e60cd"}
|
[
"[email protected]"
] | |
e78104325761a22e41990587c6657fa0264a1470
|
117f066c80f3863ebef74463292bca6444f9758a
|
/finnhub_swagger_api/test/test_economic_event.py
|
0680dd1b78155cbf3a758cdb20d37aad4454646f
|
[] |
no_license
|
cottrell/notebooks
|
c6de3842cbaeb71457d270cbe6fabc8695a6ee1b
|
9eaf3d0500067fccb294d064ab78d7aaa03e8b4d
|
refs/heads/master
| 2023-08-09T22:41:01.996938 | 2023-08-04T22:41:51 | 2023-08-04T22:41:51 | 26,830,272 | 3 | 1 | null | 2023-03-04T03:58:03 | 2014-11-18T21:14:23 |
Python
|
UTF-8
|
Python
| false | false | 945 |
py
|
# coding: utf-8
"""
Finnhub API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import finnhub_swagger_api
from finnhub_swagger_api.models.economic_event import EconomicEvent # noqa: E501
from finnhub_swagger_api.rest import ApiException
class TestEconomicEvent(unittest.TestCase):
"""EconomicEvent unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testEconomicEvent(self):
"""Test EconomicEvent"""
# FIXME: construct object with mandatory attributes with example values
# model = finnhub_swagger_api.models.economic_event.EconomicEvent() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
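# Hedged sketch of what the FIXME in testEconomicEvent might become once the
# mandatory attributes are filled in (the field names used here are
# assumptions, not taken from the generated model):
#
#     model = finnhub_swagger_api.models.economic_event.EconomicEvent(
#         actual=1.0, estimate=0.9, prev=0.8)
#     self.assertIsNotNone(model)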
|
[
"[email protected]"
] | |
e2ffbcc78019f39efbb8a5987a61e52a6a20f3a7
|
5bc369d49b16bc46e23b76621144223dc4226997
|
/model/backboneelement.py
|
758f0a7a409f6cfe67910f0eba872570df4ea431
|
[
"MIT"
] |
permissive
|
beda-software/fhir-py-experements
|
90d8e802f92f9e691d47d6ea4b33fda47957383a
|
363cfb894fa6f971b9be19340cae1b0a3a4377d8
|
refs/heads/master
| 2022-12-17T05:19:59.294901 | 2020-02-26T03:54:13 | 2020-02-26T03:54:13 | 241,292,789 | 0 | 0 |
MIT
| 2022-12-08T03:38:55 | 2020-02-18T06:53:02 |
Python
|
UTF-8
|
Python
| false | false | 610 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 4.0.1-9346c8cc45 (http://hl7.org/fhir/StructureDefinition/BackboneElement) on 2020-02-03.
# 2020, SMART Health IT.
import sys
from dataclasses import dataclass, field
from typing import ClassVar, Optional, List
from .element import Element
@dataclass
class BackboneElement(Element):
""" Base for elements defined inside a resource.
Base definition for all elements that are defined inside a resource - but
not those in a data type.
"""
resource_type: ClassVar[str] = "BackboneElement"
modifierExtension = None
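    # Note: without a type annotation, modifierExtension is a plain class
    # attribute rather than a dataclass field. A hedged sketch of the fuller
    # declaration (the Extension type is assumed from the FHIR spec and is
    # not imported in this module):
    #
    #     modifierExtension: Optional[List["Extension"]] = None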
|
[
"[email protected]"
] | |
222d5f8b367a6c13cd3e6b528fbb411a5aabb6af
|
d768f07ed90c0274e2d9d935eaf5ecfe734a1f56
|
/export_analysis_data.py
|
a6837acdf7c614c00fc7a18f92724da0cf03ffb4
|
[] |
no_license
|
bvillasen/simulation_analysis
|
cfd0b5de865d2fb5992d828b2824079e6798774b
|
645f0c397172ed30a713368942eec9ca68a9761a
|
refs/heads/master
| 2023-06-02T19:06:39.851760 | 2021-06-25T18:40:58 | 2021-06-25T18:40:58 | 298,894,454 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,053 |
py
|
import os, sys
import numpy as np
import pickle
sys.path.append('tools')
from tools import *
#Append analysis directories to path
extend_path()
from parameters_UVB_rates import param_UVB_Rates
from simulation_grid import Simulation_Grid
from simulation_parameters import *
from plot_UVB_Rates import Plot_Grid_UVB_Rates
create_directory( root_dir )
create_directory( figures_dir )
SG = Simulation_Grid( parameters=param_UVB_Rates, sim_params=sim_params, job_params=job_params, dir=root_dir )
SG.Get_Grid_Status( check_queue=False )
SG.Load_Grid_Analysis_Data( )
sim_ids = SG.sim_ids
data_out = {}
for sim_id in sim_ids:
data_out[sim_id] = {}
data_out[sim_id]['z'] = SG.Grid[sim_id]['analysis']['z']
data_out[sim_id]['T0'] = SG.Grid[sim_id]['analysis']['T0']
data_out[sim_id]['F_mean'] = SG.Grid[sim_id]['analysis']['F_mean']
data_out[sim_id]['parameters'] = SG.Grid[sim_id]['parameters']
out_file_name = root_dir + 'scale_H_T0.pkl'
f = open( out_file_name, "wb")
pickle.dump( data_out, f)
f.close()
print ( f'Saved File: {out_file_name}' )
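# Hedged sketch of reading the exported pickle back (the key layout matches
# what was written above; sim_id 0 is only an example index):
#
#     with open(root_dir + 'scale_H_T0.pkl', 'rb') as f:
#         data = pickle.load(f)
#     z, T0 = data[0]['z'], data[0]['T0']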
|
[
"[email protected]"
] | |
bdb5ac7c0f0dce9ca493553586d860621320d1fa
|
793d8e06dd50b9e211833f962ac1d10fd425a1df
|
/tests/test_component.py
|
981987983cbf7af2fae75115e0c7f8fab43c5b0a
|
[
"Apache-2.0"
] |
permissive
|
iqduke/fruit
|
7e715dae9e9dd56e7cd9cce8bba7de3504659ed4
|
56c39c6eae1312a2e71052602c21079de08b57d8
|
refs/heads/master
| 2021-01-12T04:19:35.897109 | 2016-12-27T10:09:08 | 2016-12-27T10:09:08 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,434 |
py
|
#!/usr/bin/env python3
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nose2.tools import params
from fruit_test_common import *
COMMON_DEFINITIONS = '''
#include "test_common.h"
struct X;
struct Annotation1 {};
using XAnnot1 = fruit::Annotated<Annotation1, X>;
struct Annotation2 {};
using XAnnot2 = fruit::Annotated<Annotation2, X>;
'''
@params(
('X', 'X*'),
('fruit::Annotated<Annotation1, X>', 'fruit::Annotated<Annotation1, X*>'))
def test_component_conversion(XAnnot, XPtrAnnot):
source = '''
struct X {
using Inject = X();
};
fruit::Component<> getComponent() {
return fruit::createComponent();
}
fruit::Component<XAnnot> getXComponent() {
return getComponent();
}
int main() {
fruit::Component<XAnnot> component = getXComponent();
fruit::Injector<XAnnot> injector(component);
injector.get<XPtrAnnot>();
}
'''
expect_success(
COMMON_DEFINITIONS,
source,
locals())
@params('X', 'fruit::Annotated<Annotation1, X>')
def test_copy(XAnnot):
source = '''
struct X {
using Inject = X();
};
fruit::Component<XAnnot> getComponent() {
fruit::Component<XAnnot> c = fruit::createComponent();
fruit::Component<XAnnot> copy = c;
return copy;
}
int main() {
fruit::Component<XAnnot> component = getComponent();
fruit::Injector<XAnnot> injector(component);
injector.get<XAnnot>();
}
'''
expect_success(
COMMON_DEFINITIONS,
source,
locals())
@params('X*', 'fruit::Annotated<Annotation1, X*>')
def test_error_non_class_type(XPtrAnnot):
source = '''
struct X {};
InstantiateType(fruit::Component<XPtrAnnot>)
'''
expect_compile_error(
'NonClassTypeError<X\*,X>',
'A non-class type T was specified. Use C instead.',
COMMON_DEFINITIONS,
source,
locals())
@params('X', 'fruit::Annotated<Annotation1, X>')
def test_error_repeated_type(XAnnot):
source = '''
struct X {};
InstantiateType(fruit::Component<XAnnot, XAnnot>)
'''
expect_compile_error(
'RepeatedTypesError<XAnnot, XAnnot>',
'A type was specified more than once.',
COMMON_DEFINITIONS,
source,
locals())
def test_repeated_type_with_different_annotation_ok():
source = '''
struct X {};
InstantiateType(fruit::Component<XAnnot1, XAnnot2>)
'''
expect_success(
COMMON_DEFINITIONS,
source)
@params('X', 'fruit::Annotated<Annotation1, X>')
def test_error_type_required_and_provided(XAnnot):
source = '''
struct X {};
InstantiateType(fruit::Component<fruit::Required<XAnnot>, XAnnot>)
'''
expect_compile_error(
'RepeatedTypesError<XAnnot, XAnnot>',
'A type was specified more than once.',
COMMON_DEFINITIONS,
source,
locals())
def test_type_required_and_provided_with_different_annotations_ok():
source = '''
struct X {};
InstantiateType(fruit::Component<fruit::Required<XAnnot1>, XAnnot2>)
'''
expect_success(
COMMON_DEFINITIONS,
source)
@params('X', 'fruit::Annotated<Annotation1, X>')
def test_error_no_binding_found(XAnnot):
source = '''
struct X {};
fruit::Component<XAnnot> getComponent() {
return fruit::createComponent();
}
'''
expect_compile_error(
'NoBindingFoundError<XAnnot>',
'No explicit binding nor C::Inject definition was found for T.',
COMMON_DEFINITIONS,
source,
locals())
def test_error_no_factory_binding_found():
source = '''
struct X {};
fruit::Component<std::function<std::unique_ptr<X>()>> getComponent() {
return fruit::createComponent();
}
'''
expect_compile_error(
'NoBindingFoundError<std::function<std::unique_ptr<X(,std::default_delete<X>)?>\(\)>',
'No explicit binding nor C::Inject definition was found for T.',
COMMON_DEFINITIONS,
source)
def test_error_no_factory_binding_found_with_annotation():
source = '''
struct X {};
fruit::Component<fruit::Annotated<Annotation1, std::function<std::unique_ptr<X>()>>> getComponent() {
return fruit::createComponent();
}
'''
expect_compile_error(
'NoBindingFoundError<fruit::Annotated<Annotation1,std::function<std::unique_ptr<X(,std::default_delete<X>)?>\(\)>>',
'No explicit binding nor C::Inject definition was found for T.',
COMMON_DEFINITIONS,
source)
if __name__ == '__main__':
import nose2
nose2.main()
|
[
"[email protected]"
] | |
28f1e50f76dfb843b32fc6522e966b977acac0d5
|
7a402a4e06eb728f36923816748c210122fc383c
|
/C/0207.py
|
6f212e28bcec2de71e9e7a5ddfef745f0c4e059a
|
[] |
no_license
|
au-aist2120-19sp/york-lecture-files
|
8eb6a0bb7d9f7ff9984262efc95fb87b3a59c3ab
|
a38cf780f3deda84663355812d765ab40ee6ea0c
|
refs/heads/master
| 2020-04-19T18:59:46.387355 | 2019-04-25T21:17:45 | 2019-04-25T21:17:45 | 168,377,456 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,982 |
py
|
menu = '''
1) Burger
2) Fries
3) Checkout
'''
while True:
print(menu)
choice = input('Enter choice: ')
if choice == '3':
break
elif choice == '1':
print('Mmm. Wouldn\'t you like some fries?')
elif choice == '2':
print('Okay, but what about a juicy burger?')
else:
print('Enter a valid choice, dear customer')
print('enjoy your meal')
menu = '''
1) Burger ($3)
2) Fries ($1)
3) Checkout
'''
subtotal = 0
while True:
print(menu)
choice = input('Enter choice: ')
if choice == '3':
break
elif choice == '1':
print('Mmm. Wouldn\'t you like some fries?')
subtotal = subtotal + 3 # subtotal += 3
elif choice == '2':
print('Okay, but what about a juicy burger?')
subtotal += 1
else:
print('Enter a valid choice, dear customer')
print(f'You subtotal is {subtotal}')
print('enjoy your meal')
print(f'but first give me ${subtotal}')
# Summation
sun = 0
for n in range(1,6): #15
print(f'{sun} + {n} = {sun + n}')
sun += n
print(sun)
# Factorial
prod = 1
for n in range(1,6): #120
print(f'{prod} x {n} = {prod * n}')
prod *= n
print(prod)
tofind = 'o'
for c in "hello world":
if c == tofind:
print('yeah I found ' + tofind)
break
else:
print('boo I didn\'t find ' + tofind)
from math import sqrt as squirt
for n in range(26,37):
sq = squirt(n)
isq = int(sq)
if sq == isq:
print(f'yeah, {n} has an int root: {sq:.0f}')
break
else:
print('NOT FOUND')
str = "what's up?"
seq = []
for c in str:
if c != "'":
seq.append(c)
print(seq)
print(''.join(seq))
str = "what's up?"
seq = []
for c in str:
if c not in 'tsp':
seq.append(c)
print(seq)
print(''.join(seq))
# LIST COMPREHENSIONS
# [EXPR for x in SEQ (if EXPR2)]
seq = [c for c in str if c not in 'tsp']
print(seq)
[True for x in range(10)]
['default' for x in range(10)]
[x % 2 == True for x in range(10)]
|
[
"[email protected]"
] | |
1ba1dd25a4e97e2b5e0c797f2fd932dc29cb1d92
|
d24f81b52917a7b0629fe615149ef4ac8a0bd049
|
/backend/backend/urls.py
|
ff3520d0fb21d4109441eb4412346d7446d3cff9
|
[] |
no_license
|
ScrollPage/Test-Chat
|
f533c8d1112a4bc639d9659a126b9a9f886f68b2
|
3911b7555ca684b3eb31e9857d007fda3b6c7cd3
|
refs/heads/master
| 2023-01-03T13:37:44.600044 | 2020-10-30T08:43:27 | 2020-10-30T08:43:27 | 288,795,592 | 0 | 0 | null | 2020-08-24T13:35:51 | 2020-08-19T17:31:59 |
JavaScript
|
UTF-8
|
Python
| false | false | 994 |
py
|
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
from .yasg import urlpatterns as doc_urls
urlpatterns = [
path('admin/', admin.site.urls),
path('api/v1/', include('contact.api.urls')),
path('api/v1/', include('chat.api.urls')),
path('api/v1/', include('community.api.urls')),
path('api/v1/', include('feed.api.urls')),
path('api/v1/', include('score.api.urls')),
path('api/v1/', include('notifications.api.urls')),
path('api/v1/', include('parties.api.urls')),
path('api/v1/', include('like.api.urls')),
path('api/v1/', include('comments.api.urls')),
path('api/v1/', include('photos.api.urls')),
path('api-auth/', include('rest_framework.urls')),
path('auth/', include('djoser.urls')),
path('auth/', include('djoser.urls.jwt')),
]
urlpatterns += doc_urls
urlpatterns += static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT)
|
[
"[email protected]"
] | |
57f77d61bfe442fe9917a5c076d507d598a55945
|
ac83d1ddb84ecc904c73bdf779f458bd77efc98c
|
/test/programytest/dynamic/maps/test_successormap.py
|
0a3b5850cb2d148fe8b8e20bd8184f4a9557f69f
|
[
"MIT"
] |
permissive
|
secrecy27/chatbot
|
77829f32a15e17563f038663aebebdb71e52c5a7
|
e65a753cf665a4d6d97b57703431cba5331e4f0b
|
refs/heads/master
| 2022-07-24T08:39:57.788009 | 2020-07-16T03:55:21 | 2020-07-16T03:55:21 | 130,678,143 | 4 | 4 |
NOASSERTION
| 2022-07-06T19:49:14 | 2018-04-23T10:12:01 |
Python
|
UTF-8
|
Python
| false | false | 586 |
py
|
import unittest
from programy.dynamic.maps.successor import SuccessorMap
from programy.context import ClientContext
from programytest.aiml_tests.client import TestClient
class TestSingularMaps(unittest.TestCase):
def setUp(self):
self._client_context = ClientContext(TestClient(), "testid")
def test_successor(self):
map = SuccessorMap(None)
self.assertEqual("2", map.map_value(self._client_context, "1"))
def test_successor_text(self):
map = SuccessorMap(None)
self.assertEqual("", map.map_value(self._client_context, "one"))
|
[
"[email protected]"
] | |
f2d4383de12f63d69d6500687ee40ae98197b0fd
|
82770c7bc5e2f27a48b8c370b0bab2ee41f24d86
|
/microblog/flask/venv/lib/python2.7/site-packages/ws4py/client/__init__.py
|
8e7ca0e86501ef31a0e9f07fe88de7042e5bf238
|
[
"Apache-2.0"
] |
permissive
|
johankaito/fufuka
|
77ddb841f27f6ce8036d7b38cb51dc62e85b2679
|
32a96ecf98ce305c2206c38443e58fdec88c788d
|
refs/heads/master
| 2022-07-20T00:51:55.922063 | 2015-08-21T20:56:48 | 2015-08-21T20:56:48 | 39,845,849 | 2 | 0 |
Apache-2.0
| 2022-06-29T23:30:11 | 2015-07-28T16:39:54 |
Python
|
UTF-8
|
Python
| false | false | 10,761 |
py
|
# -*- coding: utf-8 -*-
from base64 import b64encode
from hashlib import sha1
import os
import socket
import ssl
from ws4py import WS_KEY, WS_VERSION
from ws4py.exc import HandshakeError
from ws4py.websocket import WebSocket
from ws4py.compat import urlsplit
__all__ = ['WebSocketBaseClient']
class WebSocketBaseClient(WebSocket):
def __init__(self, url, protocols=None, extensions=None,
heartbeat_freq=None, ssl_options=None, headers=None):
"""
A websocket client that implements :rfc:`6455` and provides a simple
interface to communicate with a websocket server.
This class works on its own but will block if not run in
its own thread.
When an instance of this class is created, a :py:mod:`socket`
is created. If the connection is a TCP socket,
        Nagle's algorithm is disabled.
The address of the server will be extracted from the given
websocket url.
        The websocket key is randomly generated; reset the
        `key` attribute if you want to provide your own.
For instance to create a TCP client:
.. code-block:: python
>>> from websocket.client import WebSocketBaseClient
>>> ws = WebSocketBaseClient('ws://localhost/ws')
Here is an example for a TCP client over SSL:
.. code-block:: python
>>> from websocket.client import WebSocketBaseClient
>>> ws = WebSocketBaseClient('wss://localhost/ws')
Finally an example of a Unix-domain connection:
.. code-block:: python
>>> from websocket.client import WebSocketBaseClient
>>> ws = WebSocketBaseClient('ws+unix:///tmp/my.sock')
Note that in this case, the initial Upgrade request
will be sent to ``/``. You may need to change this
        by setting the resource explicitly before connecting:
.. code-block:: python
>>> from websocket.client import WebSocketBaseClient
>>> ws = WebSocketBaseClient('ws+unix:///tmp/my.sock')
>>> ws.resource = '/ws'
>>> ws.connect()
You may provide extra headers by passing a list of tuples
which must be unicode objects.
"""
self.url = url
self.host = None
self.scheme = None
self.port = None
self.unix_socket_path = None
self.resource = None
self.ssl_options = ssl_options or {}
self.extra_headers = headers or []
self._parse_url()
if self.unix_socket_path:
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM, 0)
else:
# Let's handle IPv4 and IPv6 addresses
# Simplified from CherryPy's code
try:
family, socktype, proto, canonname, sa = socket.getaddrinfo(self.host, self.port,
socket.AF_UNSPEC,
socket.SOCK_STREAM,
0, socket.AI_PASSIVE)[0]
except socket.gaierror:
family = socket.AF_INET
if self.host.startswith('::'):
family = socket.AF_INET6
socktype = socket.SOCK_STREAM
proto = 0
canonname = ""
sa = (self.host, self.port, 0, 0)
sock = socket.socket(family, socktype, proto)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if hasattr(socket, 'AF_INET6') and family == socket.AF_INET6 and \
self.host.startswith('::'):
try:
sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
except (AttributeError, socket.error):
pass
WebSocket.__init__(self, sock, protocols=protocols,
extensions=extensions,
heartbeat_freq=heartbeat_freq)
self.stream.always_mask = True
self.stream.expect_masking = False
self.key = b64encode(os.urandom(16))
    # Adapted from: https://github.com/liris/websocket-client/blob/master/websocket.py#L105
def _parse_url(self):
"""
Parses a URL which must have one of the following forms:
- ws://host[:port][path]
- wss://host[:port][path]
- ws+unix:///path/to/my.socket
In the first two cases, the ``host`` and ``port``
attributes will be set to the parsed values. If no port
        is explicitly provided, it will be either 80 or 443
based on the scheme. Also, the ``resource`` attribute is
set to the path segment of the URL (alongside any querystring).
In addition, if the scheme is ``ws+unix``, the
``unix_socket_path`` attribute is set to the path to
the Unix socket while the ``resource`` attribute is
set to ``/``.
"""
# Python 2.6.1 and below don't parse ws or wss urls properly. netloc is empty.
# See: https://github.com/Lawouach/WebSocket-for-Python/issues/59
scheme, url = self.url.split(":", 1)
parsed = urlsplit(url, scheme="http")
if parsed.hostname:
self.host = parsed.hostname
elif '+unix' in scheme:
self.host = 'localhost'
else:
raise ValueError("Invalid hostname from: %s", self.url)
if parsed.port:
self.port = parsed.port
if scheme == "ws":
if not self.port:
self.port = 80
elif scheme == "wss":
if not self.port:
self.port = 443
elif scheme in ('ws+unix', 'wss+unix'):
pass
else:
raise ValueError("Invalid scheme: %s" % scheme)
if parsed.path:
resource = parsed.path
else:
resource = "/"
if '+unix' in scheme:
self.unix_socket_path = resource
resource = '/'
if parsed.query:
resource += "?" + parsed.query
self.scheme = scheme
self.resource = resource
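    # Hedged illustration of the parsing rules documented above (the expected
    # attribute values are inferred from the docstring, not from library tests):
    #
    #     ws://example.com/chat      -> host='example.com', port=80,   resource='/chat'
    #     wss://example.com:9443/ws  -> host='example.com', port=9443, resource='/ws'
    #     ws+unix:///tmp/my.sock     -> unix_socket_path='/tmp/my.sock', resource='/'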
@property
def bind_addr(self):
"""
        Returns the Unix socket path if set, or a tuple
``(host, port)`` depending on the initial
URL's scheme.
"""
return self.unix_socket_path or (self.host, self.port)
def close(self, code=1000, reason=''):
"""
Initiate the closing handshake with the server.
"""
if not self.client_terminated:
self.client_terminated = True
self._write(self.stream.close(code=code, reason=reason).single(mask=True))
def connect(self):
"""
Connects this websocket and starts the upgrade handshake
with the remote endpoint.
"""
if self.scheme == "wss":
# default port is now 443; upgrade self.sender to send ssl
self.sock = ssl.wrap_socket(self.sock, **self.ssl_options)
self.sock.connect(self.bind_addr)
self._write(self.handshake_request)
response = b''
doubleCLRF = b'\r\n\r\n'
while True:
bytes = self.sock.recv(128)
if not bytes:
break
response += bytes
if doubleCLRF in response:
break
if not response:
self.close_connection()
raise HandshakeError("Invalid response")
headers, _, body = response.partition(doubleCLRF)
response_line, _, headers = headers.partition(b'\r\n')
try:
self.process_response_line(response_line)
self.protocols, self.extensions = self.process_handshake_header(headers)
except HandshakeError:
self.close_connection()
raise
self.handshake_ok()
if body:
self.process(body)
@property
def handshake_headers(self):
"""
List of headers appropriate for the upgrade
handshake.
"""
headers = [
('Host', self.host),
('Connection', 'Upgrade'),
('Upgrade', 'websocket'),
('Sec-WebSocket-Key', self.key.decode('utf-8')),
('Origin', self.url),
('Sec-WebSocket-Version', str(max(WS_VERSION)))
]
if self.protocols:
headers.append(('Sec-WebSocket-Protocol', ','.join(self.protocols)))
if self.extra_headers:
headers.extend(self.extra_headers)
return headers
@property
def handshake_request(self):
"""
Prepare the request to be sent for the upgrade handshake.
"""
headers = self.handshake_headers
request = [("GET %s HTTP/1.1" % self.resource).encode('utf-8')]
for header, value in headers:
request.append(("%s: %s" % (header, value)).encode('utf-8'))
request.append(b'\r\n')
return b'\r\n'.join(request)
def process_response_line(self, response_line):
"""
Ensure that we received a HTTP `101` status code in
response to our request and if not raises :exc:`HandshakeError`.
"""
protocol, code, status = response_line.split(b' ', 2)
if code != b'101':
raise HandshakeError("Invalid response status: %s %s" % (code, status))
def process_handshake_header(self, headers):
"""
Read the upgrade handshake's response headers and
validate them against :rfc:`6455`.
"""
protocols = []
extensions = []
headers = headers.strip()
for header_line in headers.split(b'\r\n'):
header, value = header_line.split(b':', 1)
header = header.strip().lower()
value = value.strip().lower()
if header == 'upgrade' and value != 'websocket':
raise HandshakeError("Invalid Upgrade header: %s" % value)
elif header == 'connection' and value != 'upgrade':
raise HandshakeError("Invalid Connection header: %s" % value)
elif header == 'sec-websocket-accept':
match = b64encode(sha1(self.key.encode('utf-8') + WS_KEY).digest())
if value != match.lower():
raise HandshakeError("Invalid challenge response: %s" % value)
            elif header == 'sec-websocket-protocol':
                protocols = value.split(',')
            elif header == 'sec-websocket-extensions':
                extensions = value.split(',')
return protocols, extensions
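# Hedged end-to-end sketch assembled from the docstring examples above (the
# URL is a placeholder; receiving replies still requires running the client,
# e.g. in its own thread, as the class docstring notes):
#
#     ws = WebSocketBaseClient('ws://localhost:9000/ws')
#     ws.connect()
#     ws.send('hello')
#     ws.close()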
|
[
"[email protected]"
] | |
7e05009aba2e6b6f03f7c0b0439a1d17bbfc4249
|
bf6f19c55f3af3704161e176384bcaf1c2b37537
|
/pandas_demo/intro.py
|
2b6a18f702a5dee13d47465865a4aa05bef0ffd6
|
[] |
no_license
|
huqinwei/python_demo
|
b0ba32e832533625b5ab55d56d3f3b2f6801d645
|
92da5ea2e480a9f57f8fbd619340bd9c9779cfd4
|
refs/heads/master
| 2021-07-18T19:23:09.914949 | 2019-01-26T13:37:47 | 2019-01-26T13:37:47 | 148,871,279 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,861 |
py
|
# View more python tutorials on my Youtube and Youku channel!!!
# Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg
# Youku video tutorial: http://i.youku.com/pythontutorial
from __future__ import print_function
import pandas as pd
import numpy as np
s = pd.Series([1,3,6,np.nan,4,1])  # similar to a 1D numpy array
print(s)
dates = pd.date_range('20160101', periods=6)
dates2 = ['20160102','20160101','20160103','20140101','20160103','20160401',]
df = pd.DataFrame(np.random.randn(6,4), index=dates2, columns=['A','B','C','D'])
print('df:\n',df)
print('df[B]:\n',df['B'])
df2 = pd.DataFrame({'A' : 1.,
'B' : pd.Timestamp('20130102'),
'C' : pd.Series(1,index=list(range(4)),dtype='float32'),
'D' : np.array([3] * 4,dtype='int32'),
'E' : pd.Categorical(["test","train","test","train"]),
'F' : 'foo'})
print(df2)
print(df2.dtypes)
print(df.index)
print(df.columns)
print(df.values)
#print(df.describe())
#print(df.T)
print('##############################')
print('index axis=0:\n',df.sort_index(axis=0, ascending=False))
print('index axis=0 ascending:\n',df.sort_index(axis=0, ascending=True))
print('index axis=1:\n',df.sort_index(axis=1, ascending=False))
print('index axis=1 ascending:\n',df.sort_index(axis=1, ascending=True))
#print(df.sort_index(axis=2, ascending=False))#no axis
#print(df.sort_values(by='B'))
df3 = pd.DataFrame({'A' : 1.,
'B' : pd.Timestamp('20130102'),
'C' : pd.Series(1,index=list(range(4)),dtype='float32'),
'D' : np.array([3] * 4,dtype='int32'),
'E' : pd.Categorical(["test","train","test","train"]),
'F' : 'foo'},columns=dates)
print('df3:\n',df3)
print(df3.dtypes)
|
[
"[email protected]"
] | |
e85a1e5147376b3a1c8fd9465a3d03454be0361b
|
defae005fad18f91be83de5b8e2fd51e3cbfbb35
|
/base_app/utilities/menus/base_main_window.py
|
172ccfe8b969003eb58def704e46bc63c3021cea
|
[] |
no_license
|
MRedAero/BaseApp
|
018f9887cd89abbd6117bcc43f8f8dca281220b0
|
8e16ff2f2d0dbc322e4c35a61ce298abe31797b0
|
refs/heads/master
| 2021-03-19T10:13:01.245176 | 2015-04-27T10:08:15 | 2015-04-27T10:08:15 | 32,956,309 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 400 |
py
|
from PyQt4 import QtGui
from base_main_window_ui import Ui_MainWindow
class BaseMainWindow(QtGui.QMainWindow):
def __init__(self):
super(BaseMainWindow, self).__init__()
self.base_ui = Ui_MainWindow().setupUi(self)
if __name__ == "__main__":
import sys
app = QtGui.QApplication(sys.argv)
main_window = BaseMainWindow()
main_window.show()
app.exec_()
|
[
"[email protected]"
] | |
c044bc1d86e1de9fa6116743028140440a45ae37
|
6c547e3312e2d1bd3dab123b831053ed7aef7b6d
|
/tests/test_QTML-T362_Chrome.py
|
2dd0f705b88231fc31ba6ece6341109dce4889eb
|
[] |
no_license
|
kenito2050/BICL
|
8c4239f1e897e4dfc04aa35e827816242b41d5dd
|
82891aba56cc49c9cf96ce82472847c4cb10828f
|
refs/heads/master
| 2020-12-31T22:10:44.784193 | 2020-02-10T23:00:10 | 2020-02-10T23:00:10 | 239,039,817 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,585 |
py
|
import pytest
from selenium import webdriver
import time
from config_globals import *
from pages.BICL.login.LoginPage import LoginPage
from pages.generic_page.generic_page import generic_page
from pages.BICL.default_page.user_drop_down.user_drop_down import user_drop_down
from pages.BICL.default_page.default_page import default_page
from pages.BICL.housekeeping.housekeeping import housekeeping
from utilities.environments.environments_BICL import Environments_BICL
from utilities.date_time_generator.date_time_generator import date_time_generator
import pandas as pd
class Test_login_Chrome:
@pytest.mark.smoke
@pytest.mark.bicl
def test_login_chrome(self, browser, env):
driver = browser
# Create Time Stamp Variable (using Date Time Generator Class in utilities)
dg = date_time_generator()
time_stamp = dg.return_time_stamp()
# This section reads in values from csv file using Pandas Library
# Declare Test Case ID
test_case_ID = 'QTML-T362'
# Declare csv directory
df = pd.read_csv(csv_directory)
# print(df)
# Select Row where "Test_Case_ID" Column Matches the test_case_ID declared above (Line 31)
# This is the row that contains the data values for this test scenario
test_case_row = df.loc[df.Test_Case_ID == test_case_ID]
# print(test_case_row)
# Read in Values from "test_case_row" object
test_scenario = test_case_row['Test_Scenario'].values[0]
username = test_case_row['User'].values[0]
password = test_case_row['Password'].values[0]
browser = test_case_row['Browser'].values[0]
account_number = test_case_row['account_number'].values[0]
rep_code = test_case_row['rep_code'].values[0]
test_data1 = test_case_row['test_data1'].values[0]
test_data2 = test_case_row['test_data_2'].values[0]
control_point_1 = test_case_row['control_point_1'].values[0]
control_point_2 = test_case_row['control_point_2'].values[0]
control_point_3 = test_case_row['control_point_3'].values[0]
control_point_4 = test_case_row['control_point_4'].values[0]
# To DEBUG, Uncomment this NEXT line AND Comment lines 13, 15 and 18. Also, SHIFT + TAB lines 17 - 86 (This will remove indents)
# driver = webdriver.Chrome(str(CONFIG_PATH / 'chromedriver.exe'))
## Select Appropriate URL based on the Environment Value (env)
# env = "UAT"
baseURL = Environments_BICL.return_environments(env)
# baseURL = "https://beta.bi.dev.wedbus.com"
driver.get(baseURL)
driver.maximize_window()
time.sleep(5)
# Login to Site
lp = LoginPage(driver)
# Verify if page loads (username_field should be clickable), if not, throw exception and take screenshot
lp.verify_username_field_displays(test_case_ID, browser, env, time_stamp)
lp.login(username, password)
lp.click_login_button()
time.sleep(10)
gp = generic_page(driver)
# Take Screenshot 1
screenshot_number = "1"
time_stamp_1 = dg.return_time_stamp()
gp.take_screenshot(test_case_ID, browser, control_point_1, screenshot_number, env, time_stamp_1)
dp = default_page(driver)
# Timeout method for page to load, timeout set to 30 seconds
gp.verify_page_loads(test_case_ID, browser, env, time_stamp)
time.sleep(15)
# Click Housekeeping
dp.click_housekeeping_icon()
time.sleep(15)
h = housekeeping(driver)
time.sleep(5)
# Click ACAT Status
h.click_acat_status()
time.sleep(5)
# Validate that correct panel displays (correct title displays on screen)
gp.validate_correct_text_displays(test_data1, test_case_ID, browser, env, time_stamp)
time.sleep(5)
# Take Screenshot 2
screenshot_number = "2"
time_stamp_2 = dg.return_time_stamp()
gp.take_screenshot(test_case_ID, browser, control_point_2, screenshot_number, env, time_stamp_2)
time.sleep(5)
# Click User Drop Down (on BICL Default Page)
dp.click_user_drop_down()
udd = user_drop_down(driver)
udd.click_logout()
# Take Screenshot 3
screenshot_number = "3"
time_stamp_3 = dg.return_time_stamp()
gp.take_screenshot(test_case_ID, browser, control_point_3, screenshot_number, env, time_stamp_3)
time.sleep(5)
# Close Browser
driver.quit()
|
[
"[email protected]"
] | |
b1d51f180d8265f912b4de42692b767d6bb4182f
|
f85ce2baf753d65e8666bbda062acbdb0ccdb5ad
|
/leetcode/venv/lib/python2.7/site-packages/pyutil/ss/stream_core.py
|
9ca853266c8734b7be2a2e8d21ba9627aff2e949
|
[] |
no_license
|
KqSMea8/PycharmProjects
|
2a9d3fa59d08c77daf63be427da27695d4dea471
|
c592d879fd79da4e0816a4f909e5725e385b6160
|
refs/heads/master
| 2020-04-14T11:54:56.435247 | 2019-01-02T10:15:36 | 2019-01-02T10:15:36 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 212 |
py
|
# -*- coding: utf-8 -*-
from pyutil.etcd.etcd_util import get as etcd_get
# 服务降级策略:0 正常 1 降级
def get_stream_core_webdb_level():
return int(etcd_get("/web/stream/core/webdb/level", 0))
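# Hedged usage sketch (the degraded-path handlers are hypothetical and not
# part of this module):
#
#     if get_stream_core_webdb_level() == 1:
#         serve_cached_response()  # fall back instead of hitting webdb
#     else:
#         query_webdb()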
|
[
"[email protected]"
] | |
d1d3507dbf447a62f4869036ced20c2b6fc6cccc
|
fcc9c7d713179694d6e19c98693185789a1d90b9
|
/CMGTools/H2TauTau/python/proto/analyzers/MuEleAnalyzer.py
|
b68e41c2a4d2462d41412fa681438a8bc5c6d5a9
|
[] |
no_license
|
hengne/cmg-cmssw
|
aedada0ce15562380d42dcc5c241d67fb8c65965
|
03946e4ce821b6186c8f881e2bbe4b2a30917b2d
|
refs/heads/CMGTools-from-CMSSW_7_4_7
| 2020-04-05T23:12:59.317945 | 2015-07-31T08:55:33 | 2015-07-31T08:55:33 | 40,015,437 | 0 | 1 | null | 2015-07-31T16:55:04 | 2015-07-31T16:55:04 | null |
UTF-8
|
Python
| false | false | 6,679 |
py
|
import operator
from PhysicsTools.Heppy.analyzers.core.AutoHandle import AutoHandle
from PhysicsTools.Heppy.physicsobjects.PhysicsObjects import Muon, GenParticle
# RIC: 16/2/15 need to fix the Electron object first
# from PhysicsTools.Heppy.physicsobjects.HTauTauElectron import HTauTauElectron as Electron
from PhysicsTools.Heppy.physicsobjects.Electron import Electron
from CMGTools.H2TauTau.proto.analyzers.DiLeptonAnalyzer import DiLeptonAnalyzer
from CMGTools.H2TauTau.proto.physicsobjects.DiObject import MuonElectron
class MuEleAnalyzer( DiLeptonAnalyzer ):
DiObjectClass = MuonElectron
LeptonClass = Muon
OtherLeptonClass = Electron
def declareHandles(self):
super(MuEleAnalyzer, self).declareHandles()
self.handles ['diLeptons' ] = AutoHandle('cmgMuEleCorSVFitFullSel', 'std::vector<pat::CompositeCandidate>')
self.handles ['otherLeptons'] = AutoHandle('slimmedElectrons' , 'std::vector<pat::Electron>' )
self.handles ['leptons' ] = AutoHandle('slimmedMuons' , 'std::vector<pat::Muon>' )
self.mchandles['genParticles'] = AutoHandle('prunedGenParticles' , 'std::vector<reco::GenParticle>' )
def buildDiLeptons(self, cmgDiLeptons, event):
'''Build di-leptons, associate best vertex to both legs,
select di-leptons with a tight ID muon.
The tight ID selection is done so that dxy and dz can be computed
(the muon must not be standalone).
'''
diLeptons = []
for index, dil in enumerate(cmgDiLeptons):
pydil = self.__class__.DiObjectClass(dil)
# pydil = MuonElectron(dil)
pydil.leg1().associatedVertex = event.goodVertices[0]
pydil.leg2().associatedVertex = event.goodVertices[0]
pydil.leg2().rho = event.rho
if not self.testLeg2( pydil.leg2(), 999999 ):
continue
# pydil.mvaMetSig = pydil.met().getSignificanceMatrix()
diLeptons.append( pydil )
pydil.mvaMetSig = pydil.met().getSignificanceMatrix()
return diLeptons
def buildLeptons(self, cmgLeptons, event):
'''Build muons for veto, associate best vertex, select loose ID muons.
The loose ID selection is done to ensure that the muon has an inner track.'''
leptons = []
for index, lep in enumerate(cmgLeptons):
pyl = self.__class__.LeptonClass(lep)
#pyl = Muon(lep)
pyl.associatedVertex = event.goodVertices[0]
leptons.append( pyl )
return leptons
def buildOtherLeptons(self, cmgOtherLeptons, event):
'''Build electrons for third lepton veto, associate best vertex.
'''
otherLeptons = []
for index, lep in enumerate(cmgOtherLeptons):
pyl = self.__class__.OtherLeptonClass(lep)
#import pdb ; pdb.set_trace()
#pyl = Electron(lep)
pyl.associatedVertex = event.goodVertices[0]
pyl.rho = event.rho
otherLeptons.append( pyl )
return otherLeptons
def process(self, event):
result = super(MuEleAnalyzer, self).process(event)
if result is False:
# trying to get a dilepton from the control region.
# it must have well id'ed and trig matched legs,
# di-lepton and tri-lepton veto must pass
result = self.selectionSequence(event, fillCounter = False,
leg1IsoCut = self.cfg_ana.looseiso1,
leg2IsoCut = self.cfg_ana.looseiso2)
if result is False:
# really no way to find a suitable di-lepton,
# even in the control region
return False
event.isSignal = False
else:
event.isSignal = True
event.genMatched = None
if self.cfg_comp.isMC:
# print event.eventId
genParticles = self.mchandles['genParticles'].product()
event.genParticles = map( GenParticle, genParticles)
leg1DeltaR, leg2DeltaR = event.diLepton.match( event.genParticles )
if leg1DeltaR>-1 and leg1DeltaR < 0.1 and \
leg2DeltaR>-1 and leg2DeltaR < 0.1:
event.genMatched = True
else:
event.genMatched = False
return True
def testLeg1ID(self, muon):
'''Tight muon selection, no isolation requirement'''
# RIC: 9 March 2015
return muon.muonID('POG_ID_Medium')
def testLeg1Iso(self, muon, isocut):
'''Muon isolation to be implemented'''
# RIC: this leg is the muon,
# needs to be implemented here
# For now taken straight from mt channel
if isocut is None:
isocut = self.cfg_ana.iso1
return muon.relIso(dBetaFactor=0.5, allCharged=0)<isocut
def testVertex(self, lepton):
'''Tests vertex constraints, for mu and electron'''
return abs(lepton.dxy()) < 0.045 and abs(lepton.dz ()) < 0.2
def testLeg2ID(self, electron):
'''Electron ID. To be implemented'''
# RIC: this leg is the electron,
# needs to be implemented here
# For now taken straight from et channel
return electron.electronID('POG_MVA_ID_Run2_NonTrig_Tight') and \
self.testVertex(electron)
def testLeg2Iso(self, electron, isocut):
'''Electron Isolation. Relative isolation
dB corrected factor 0.5
        all charged particles
'''
# RIC: this leg is the electron,
# needs to be implemented here
# For now taken straight from et channel
if isocut is None:
isocut = self.cfg_ana.iso2
return electron.relIso(dBetaFactor=0.5, allCharged=0) < isocut
def thirdLeptonVeto(self, leptons, otherLeptons, ptcut = 10, isocut = 0.3) :
'''The tri-lepton veto. To be implemented'''
return True
def leptonAccept(self, leptons):
'''The di-lepton veto, returns false if > one lepton.
e.g. > 1 mu in the mu tau channel.
To be implemented.'''
return True
def bestDiLepton(self, diLeptons):
'''Returns the best diLepton (1st precedence opposite-sign,
2nd precedence highest pt1 + pt2).'''
osDiLeptons = [dl for dl in diLeptons if dl.leg1().charge() != dl.leg2().charge()]
if osDiLeptons : return max( osDiLeptons, key=operator.methodcaller( 'sumPt' ) )
else : return max( diLeptons, key=operator.methodcaller( 'sumPt' ) )
|
[
"[email protected]"
] | |
c43ed3e2de1b6a302d4e2050a585230e34d3f23d
|
6ec209c1f6f3ca8017a5373ba2e85da38dfda90c
|
/tree/114.py
|
42e297bdf6ee4dc3cb36dd8fa348e0e637032d34
|
[
"Apache-2.0"
] |
permissive
|
MingfeiPan/leetcode
|
a70192233f7112ce39cc7b09d782bdcc52d29d06
|
057d9f014cf207ab4e50e14e5a9e015724de1386
|
refs/heads/master
| 2022-05-09T01:40:39.599374 | 2022-04-10T15:03:07 | 2022-04-10T15:03:07 | 60,593,146 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 716 |
py
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def flatten(self, root: TreeNode) -> None:
"""
Do not return anything, modify root in-place instead.
"""
if not root:
return
# pre-order traversal
l = []
l.append(root)
while l:
cur = l.pop()
if cur.right:
l.append(cur.right)
if cur.left:
l.append(cur.left)
if l:
                cur.right = l[-1]  # next pre-order node: the left child if it existed, otherwise the right
cur.left = None
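# Hedged sketch of exercising flatten (the tree shape is assumed purely for
# illustration):
#
#     root = TreeNode(1)
#     root.left, root.right = TreeNode(2), TreeNode(3)
#     Solution().flatten(root)
#     # right-spine afterwards: 1 -> 2 -> 3, with every left pointer None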
|
[
"[email protected]"
] | |
1295ef0e561f0cce1242427cda2074929386f143
|
eb15c60ef607040fb9f7fbca789405b8b26a86b1
|
/batchpr/updater.py
|
32140c6de55b9ea30721e056b19016c6004637b4
|
[
"BSD-2-Clause"
] |
permissive
|
bsipocz/batchpr
|
5d2c32c3872e8b9b62d9e5ad51c3e041d661a330
|
5fbce1d357bf6e4f26ed3fcb0bda6f0a9447c614
|
refs/heads/master
| 2020-04-15T18:30:59.436646 | 2017-08-29T13:58:00 | 2017-08-29T13:58:00 | 164,914,948 | 0 | 0 |
BSD-2-Clause
| 2019-01-09T18:19:52 | 2019-01-09T18:19:46 |
Python
|
UTF-8
|
Python
| false | false | 6,396 |
py
|
import os
import abc
import six
import sys
import time
import shutil
import tempfile
import subprocess
from textwrap import indent
from github import Github
from termcolor import colored
import requests
GITHUB_RAW_FILENAME = "https://raw.githubusercontent.com/{repo}/master/{filename}"
class BranchExistsException(Exception):
pass
@six.add_metaclass(abc.ABCMeta)
class Updater(object):
def __init__(self, token, author_name=None, author_email=None):
self.github = Github(token)
self.token = token
self.user = self.github.get_user()
self.author_name = author_name
self.author_email = author_email
self.repo = None
self.fork = None
def info(self, message):
print(message)
def run(self, repositories, delay=0):
if isinstance(repositories, six.string_types):
repositories = [repositories]
start_dir = os.path.abspath('.')
for ir, repository in enumerate(repositories):
if ir > 0:
time.sleep(delay)
print(colored('Processing repository: {0}'.format(repository), 'cyan'))
self.repo_name = repository
try:
print(' > Ensuring repository exists')
self.ensure_repo_set_up()
except Exception:
self.error(" An error occurred when trying to get the repository")
continue
try:
print(' > Ensuring fork exists (and creating if not)')
self.ensure_fork_set_up()
except Exception:
self.error(" An error occurred when trying to set up a fork")
continue
# Go to temporary directory
directory = tempfile.mkdtemp()
try:
os.chdir(directory)
try:
self.clone_fork()
except BranchExistsException:
self.error(" Branch {0} already exists - skipping repository".format(self.branch_name))
continue
except Exception:
self.error(" An error occurred when cloning fork - skipping repository")
continue
                if not self.process_repo():
                    self.warn("  Skipping repository")
                    continue
self.commit_changes()
if '--dry' not in sys.argv:
try:
url = self.open_pull_request()
print(colored(' Pull request opened: {0}'.format(url), 'green'))
except Exception:
self.error(" An error occurred when opening pull request - skipping repository")
continue
finally:
os.chdir(start_dir)
def add(self, filename):
self.run_command('git add {0}'.format(filename))
def copy(self, filename1, filename2):
shutil.copy(filename1, filename2)
def warn(self, message):
print(colored(message, 'magenta'))
def error(self, message):
print(colored(message, 'red'))
def check_file_exists(self, filename):
r = requests.get(GITHUB_RAW_FILENAME.format(repo=self.repo_name, filename=filename))
return r.status_code == 200
def ensure_repo_set_up(self):
self.repo = self.github.get_repo(self.repo_name)
def ensure_fork_set_up(self):
if self.repo.owner.login != self.user.login:
self.fork = self.user.create_fork(self.repo)
else:
self.fork = self.repo
def clone_fork(self, dirname='.'):
# Go to working directory
os.chdir(dirname)
# Clone the repository
self.run_command('git clone --depth 1 {0}'.format(self.fork.ssh_url))
os.chdir(self.repo.name)
# Make sure the branch doesn't already exist
try:
self.run_command('git checkout origin/{0}'.format(self.branch_name))
except:
pass
else:
raise BranchExistsException()
# Update to the latest upstream master
self.run_command('git remote add upstream {0}'.format(self.repo.clone_url))
self.run_command('git fetch upstream')
self.run_command('git checkout upstream/master')
self.run_command('git checkout -b {0}'.format(self.branch_name))
# Initialize submodules
self.run_command('git submodule init')
self.run_command('git submodule update')
def commit_changes(self):
if self.author_name:
self.run_command('git -c "user.name={0}" '
' -c "user.email={1}" '
' commit -m "{2}"'.format(self.author_name,
self.author_email,
self.commit_message))
else:
self.run_command('git commit -m "{0}"'.format(self.commit_message))
def open_pull_request(self):
self.run_command('git push https://astrobot:{0}@github.com/{1} {2}'.format(self.token, self.fork.full_name, self.branch_name))
result = self.repo.create_pull(title=self.pull_request_title,
body=self.pull_request_body,
base='master',
head='{0}:{1}'.format(self.fork.owner.login, self.branch_name))
return result.html_url
def run_command(self, command):
print(" > {0}".format(command))
p = subprocess.Popen(command, shell=True,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
p.wait()
output = p.communicate()[0].decode('utf-8').strip()
if ('--verbose' in sys.argv or p.returncode != 0) and output:
print(indent(output, ' ' * 4))
if p.returncode == 0:
return output
else:
raise Exception("Command '{0}' failed".format(command))
@abc.abstractmethod
def process_repo(self):
pass
@abc.abstractproperty
def branch_name(self):
pass
@abc.abstractproperty
def commit_message(self):
pass
    @abc.abstractproperty
    def pull_request_title(self):
        pass

    @abc.abstractproperty
    def pull_request_body(self):
        pass
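

# Illustrative subclass (a sketch only; the branch name, messages, and the
# README-touching behaviour are examples, not part of the original module):
class AppendNewlineUpdater(Updater):

    branch_name = 'ensure-trailing-newline'
    commit_message = 'MNT: ensure README ends with a newline'
    pull_request_title = 'MNT: ensure README ends with a newline'
    pull_request_body = 'Automated update opened with batchpr.'

    def process_repo(self):
        # Return True when changes were made and staged for commit.
        if not os.path.exists('README.md'):
            return False
        with open('README.md', 'a') as f:
            f.write('\n')
        self.add('README.md')
        return True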
|
[
"[email protected]"
] | |
d5cc595979455d5e2cefd473cdc1d313daa2943f
|
ea872f0a2bcc4270b7089120e3eb2f8dd32a165e
|
/Baxter/devel/lib/python2.7/dist-packages/ric_board/msg/_Named_Data.py
|
ef3e76ac2b7e06e7ba4ae90f31c0d93c6b47d1fb
|
[] |
no_license
|
ZhenYaGuo/Warehouse-Robotic-System
|
2def137478911f499c45276aa3103a0b68ebb8d7
|
47b78d111b387102e29d2596bd5dc7c704f74f8f
|
refs/heads/master
| 2021-08-24T04:12:43.379580 | 2017-12-08T01:48:09 | 2017-12-08T01:48:09 | 113,405,332 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,452 |
py
|
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from ric_board/Named_Data.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class Named_Data(genpy.Message):
_md5sum = "14a4844ed23f715d29194ae2fc141a58"
_type = "ric_board/Named_Data"
_has_header = False #flag to mark the presence of a Header object
_full_text = """float32 data
string name
"""
__slots__ = ['data','name']
_slot_types = ['float32','string']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
    use is keyword arguments, as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
data,name
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(Named_Data, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.data is None:
self.data = 0.
if self.name is None:
self.name = ''
else:
self.data = 0.
self.name = ''
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
buff.write(_struct_f.pack(self.data))
_x = self.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
start = end
end += 4
(self.data,) = _struct_f.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.name = str[start:end].decode('utf-8')
else:
self.name = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
buff.write(_struct_f.pack(self.data))
_x = self.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
start = end
end += 4
(self.data,) = _struct_f.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.name = str[start:end].decode('utf-8')
else:
self.name = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_f = struct.Struct("<f")
|
[
"[email protected]"
] | |
f81673adb2493b5d2d8ac7090a4f0b33f6e6cdc1
|
e9e91f17b6ad129923ddb9e5cd086d0cef150a1f
|
/novel_site/apps/novel/migrations/0002_auto_20181012_1146.py
|
b8fca43b376cbfb2df558c1e3b976142b8ecd4d0
|
[] |
no_license
|
gzgdouru/novel_site
|
45148b56521e23399d3289bee9c73b1c46145cd3
|
8879217f6dcc5f657adefaeb9d18914ce1dd9d90
|
refs/heads/master
| 2020-04-06T12:50:46.436608 | 2018-11-14T01:46:12 | 2018-11-14T01:46:12 | 146,424,571 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 540 |
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-10-12 11:46
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('novel', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='novel',
name='image',
field=models.ImageField(blank=True, default='default_novel.jpg', max_length=200, null=True, upload_to='novel/', verbose_name='小说图片'),
),
]
|
[
"[email protected]"
] | |
f141b66fb751e15b9e5d356d965e6aa9723b10f0
|
bb6ebff7a7f6140903d37905c350954ff6599091
|
/chrome/common/extensions/docs/server2/whats_new_data_source.py
|
fd26764017dd1bd3f012009ae3dd3526d222e101
|
[
"BSD-3-Clause"
] |
permissive
|
PDi-Communication-Systems-Inc/lollipop_external_chromium_org
|
faa6602bd6bfd9b9b6277ce3cd16df0bd26e7f2f
|
ccadf4e63dd34be157281f53fe213d09a8c66d2c
|
refs/heads/master
| 2022-12-23T18:07:04.568931 | 2016-04-11T16:03:36 | 2016-04-11T16:03:36 | 53,677,925 | 0 | 1 |
BSD-3-Clause
| 2022-12-09T23:46:46 | 2016-03-11T15:49:07 |
C++
|
UTF-8
|
Python
| false | false | 3,498 |
py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from itertools import groupby
from operator import itemgetter
import posixpath
from data_source import DataSource
from extensions_paths import JSON_TEMPLATES, PUBLIC_TEMPLATES
from future import Future
class WhatsNewDataSource(DataSource):
  '''Creates a list of "what's new" entries, grouped by Chrome version.
  '''
def __init__(self, server_instance, _):
self._parse_cache = server_instance.compiled_fs_factory.ForJson(
server_instance.host_file_system_provider.GetTrunk())
self._object_store = server_instance.object_store_creator.Create(
WhatsNewDataSource)
self._api_models = server_instance.api_models
self._availability_finder = server_instance.availability_finder
self._api_categorizer = server_instance.api_categorizer
def _GenerateChangesListWithVersion(self, platform, whats_new_json):
return [{
'id': change_id,
'type': change['type'],
'description': change['description'],
'version': change['version']
} for change_id, change in whats_new_json.iteritems()]
def _GetAPIVersion(self, platform, api_name):
version = None
category = self._api_categorizer.GetCategory(platform, api_name)
if category == 'chrome':
channel_info = self._availability_finder.GetAPIAvailability(
api_name).channel_info
channel = channel_info.channel
if channel == 'stable':
version = channel_info.version
return version
def _GenerateAPIListWithVersion(self, platform):
data = []
for api_name, api_model in self._api_models.IterModels():
version = self._GetAPIVersion(platform, api_name)
if version:
api = {
'name': api_name,
'description': api_model.description,
'version' : version,
'type': 'apis',
}
data.append(api)
data.sort(key=itemgetter('version'))
return data
def _GenerateWhatsNewDict(self):
whats_new_json_future = self._parse_cache.GetFromFile(
posixpath.join(JSON_TEMPLATES, 'whats_new.json'))
def _MakeDictByPlatform(platform):
whats_new_json = whats_new_json_future.Get()
platform_list = []
apis = self._GenerateAPIListWithVersion(platform)
apis.extend(self._GenerateChangesListWithVersion(platform,
whats_new_json))
apis.sort(key=itemgetter('version'), reverse=True)
for version, group in groupby(apis, key=itemgetter('version')):
whats_new_by_version = {
'version': version,
}
for item in group:
item_type = item['type']
if item_type not in whats_new_by_version:
whats_new_by_version[item_type] = []
whats_new_by_version[item_type].append(item)
platform_list.append(whats_new_by_version)
return platform_list
def resolve():
return {
'apps': _MakeDictByPlatform('apps'),
'extensions': _MakeDictByPlatform('extensions')
}
return Future(callback=resolve)
def _GetCachedWhatsNewData(self):
data = self._object_store.Get('whats_new_data').Get()
if data is None:
data = self._GenerateWhatsNewDict().Get()
self._object_store.Set('whats_new_data', data)
return data
def get(self, key):
return self._GetCachedWhatsNewData().get(key)
def Cron(self):
return self._GenerateWhatsNewDict()
|
[
"[email protected]"
] | |
5b483961bb98aa1739b96e6ff1a6efe36fa4b5a5
|
77c4ca9b33e007daecfc4318537d7babea5dde84
|
/tensorflow/python/ops/ragged/string_ngrams_op_test.py
|
6b3b3777cb5ab9cc1a16d444b6af4b3cd0764392
|
[
"Apache-2.0"
] |
permissive
|
RJ722/tensorflow
|
308eede8e911e2b6a6930fef3e24a493ab9a2a61
|
6c935289da11da738f2eaed18644082f3a6938d6
|
refs/heads/master
| 2020-12-20T16:51:12.767583 | 2020-01-25T06:46:50 | 2020-01-25T06:51:20 | 236,138,137 | 2 | 3 |
Apache-2.0
| 2020-01-25T07:12:41 | 2020-01-25T07:12:40 | null |
UTF-8
|
Python
| false | false | 11,964 |
py
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the Tensorflow strings.ngrams op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_string_ops
from tensorflow.python.platform import test
class StringNgramsTest(test_util.TensorFlowTestCase):
def test_unpadded_ngrams(self):
data = [[b"aa", b"bb", b"cc", b"dd"], [b"ee", b"ff"]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor, ngram_width=3, separator=b"|")
result = self.evaluate(ngram_op)
expected_ngrams = [[b"aa|bb|cc", b"bb|cc|dd"], []]
self.assertAllEqual(expected_ngrams, result)
def test_tuple_multi_ngrams(self):
data = [[b"aa", b"bb", b"cc", b"dd"], [b"ee", b"ff"]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor, ngram_width=(2, 3), separator=b"|")
result = self.evaluate(ngram_op)
expected_ngrams = [[b"aa|bb", b"bb|cc", b"cc|dd", b"aa|bb|cc", b"bb|cc|dd"],
[b"ee|ff"]]
self.assertAllEqual(expected_ngrams, result)
def test_tuple_multi_ngrams_inverted_order(self):
data = [[b"aa", b"bb", b"cc", b"dd"], [b"ee", b"ff"]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor, ngram_width=(3, 2), separator=b"|")
result = self.evaluate(ngram_op)
expected_ngrams = [[b"aa|bb|cc", b"bb|cc|dd", b"aa|bb", b"bb|cc", b"cc|dd"],
[b"ee|ff"]]
self.assertAllEqual(expected_ngrams, result)
def test_list_multi_ngrams(self):
data = [[b"aa", b"bb", b"cc", b"dd"], [b"ee", b"ff"]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor, ngram_width=[2, 3], separator=b"|")
result = self.evaluate(ngram_op)
expected_ngrams = [[b"aa|bb", b"bb|cc", b"cc|dd", b"aa|bb|cc", b"bb|cc|dd"],
[b"ee|ff"]]
self.assertAllEqual(expected_ngrams, result)
def test_multi_ngram_ordering(self):
data = [[b"aa", b"bb", b"cc", b"dd"], [b"ee", b"ff"]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor, ngram_width=[3, 2], separator=b"|")
result = self.evaluate(ngram_op)
expected_ngrams = [[b"aa|bb|cc", b"bb|cc|dd", b"aa|bb", b"bb|cc", b"cc|dd"],
[b"ee|ff"]]
self.assertAllEqual(expected_ngrams, result)
def test_fully_padded_ngrams(self):
data = [[b"a"], [b"b", b"c", b"d"], [b"e", b"f"]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor, ngram_width=3, separator=b"|", pad_values=(b"LP", b"RP"))
result = self.evaluate(ngram_op)
expected_ngrams = [
[b"LP|LP|a", b"LP|a|RP", b"a|RP|RP"], # 0
[b"LP|LP|b", b"LP|b|c", b"b|c|d", b"c|d|RP", b"d|RP|RP"], # 1
[b"LP|LP|e", b"LP|e|f", b"e|f|RP", b"f|RP|RP"] # 2
]
self.assertAllEqual(expected_ngrams, result)
def test_ngram_padding_size_cap(self):
# Validate that the padding size is never greater than ngram_size - 1.
data = [[b"a"], [b"b", b"c", b"d"], [b"e", b"f"]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor,
ngram_width=3,
separator=b"|",
pad_values=(b"LP", b"RP"),
padding_width=10)
result = self.evaluate(ngram_op)
expected_ngrams = [
[b"LP|LP|a", b"LP|a|RP", b"a|RP|RP"], # 0
[b"LP|LP|b", b"LP|b|c", b"b|c|d", b"c|d|RP", b"d|RP|RP"], # 1
[b"LP|LP|e", b"LP|e|f", b"e|f|RP", b"f|RP|RP"] # 2
]
self.assertAllEqual(expected_ngrams, result)
def test_singly_padded_ngrams(self):
data = [[b"a"], [b"b", b"c", b"d"], [b"e", b"f"]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor,
ngram_width=5,
separator=b"|",
pad_values=(b"LP", b"RP"),
padding_width=1)
result = self.evaluate(ngram_op)
expected_ngrams = [[], [b"LP|b|c|d|RP"], []]
self.assertAllEqual(expected_ngrams, result)
def test_singly_padded_ngrams_with_preserve_short(self):
data = [[b"a"], [b"b", b"c", b"d"], [b"e", b"f"]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor,
ngram_width=5,
separator=b"|",
pad_values=(b"LP", b"RP"),
padding_width=1,
preserve_short_sequences=True)
result = self.evaluate(ngram_op)
expected_ngrams = [[b"LP|a|RP"], [b"LP|b|c|d|RP"], [b"LP|e|f|RP"]]
self.assertAllEqual(expected_ngrams, result)
def test_singly_padded_multiple_ngrams(self):
data = [[b"a"], [b"b", b"c", b"d"], [b"e", b"f"]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor,
ngram_width=(1, 5),
separator=b"|",
pad_values=(b"LP", b"RP"),
padding_width=1)
result = self.evaluate(ngram_op)
expected_ngrams = [[b"a"], [b"b", b"c", b"d", b"LP|b|c|d|RP"], [b"e", b"f"]]
self.assertAllEqual(expected_ngrams, result)
def test_single_padding_string(self):
data = [[b"a"], [b"b", b"c", b"d"], [b"e", b"f"]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor,
ngram_width=5,
separator=b"|",
pad_values=b"[PAD]",
padding_width=1)
result = self.evaluate(ngram_op)
expected_ngrams = [[], [b"[PAD]|b|c|d|[PAD]"], []]
self.assertAllEqual(expected_ngrams, result)
def test_explicit_multiply_padded_ngrams(self):
data = [[b"a"]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor,
ngram_width=5,
separator=b"|",
pad_values=(b"LP", b"RP"),
padding_width=2)
result = self.evaluate(ngram_op)
expected_ngrams = [[b"LP|LP|a|RP|RP"]]
self.assertAllEqual(expected_ngrams, result)
def test_ragged_inputs_with_multiple_ragged_dimensions(self):
data = [[[[b"aa", b"bb", b"cc", b"dd"]], [[b"ee", b"ff"]]]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor, ngram_width=3, separator=b"|")
result = self.evaluate(ngram_op)
expected_ngrams = [[[[b"aa|bb|cc", b"bb|cc|dd"]], [[]]]]
self.assertAllEqual(expected_ngrams, result)
def test_ragged_inputs_with_multiple_ragged_dimensions_and_preserve(self):
data = [[[[b"aa", b"bb", b"cc", b"dd"]], [[b"ee", b"ff"]]]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor,
ngram_width=3,
separator=b"|",
preserve_short_sequences=True)
result = self.evaluate(ngram_op)
expected_ngrams = [[[[b"aa|bb|cc", b"bb|cc|dd"]], [[b"ee|ff"]]]]
self.assertAllEqual(expected_ngrams, result)
def test_ragged_inputs_with_multiple_ragged_dimensions_bigrams(self):
data = [[[[b"aa", b"bb", b"cc", b"dd"]], [[b"ee", b"ff"]]]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor, ngram_width=2, separator=b"|")
result = self.evaluate(ngram_op)
expected_ngrams = [[[[b"aa|bb", b"bb|cc", b"cc|dd"]], [[b"ee|ff"]]]]
self.assertAllEqual(expected_ngrams, result)
def test_ragged_inputs_with_multiple_ragged_dimensions_and_multiple_ngrams(
self):
data = [[[[b"aa", b"bb", b"cc", b"dd"]], [[b"ee", b"ff"]]]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor, ngram_width=(3, 4), separator=b"|")
result = self.evaluate(ngram_op)
expected_ngrams = [[[[b"aa|bb|cc", b"bb|cc|dd", b"aa|bb|cc|dd"]], [[]]]]
self.assertAllEqual(expected_ngrams, result)
def test_dense_input_rank_3(self):
data = [[[b"a", b"z"], [b"b", b""]], [[b"b", b""], [b"e", b"f"]]]
data_tensor = constant_op.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor, ngram_width=3, separator=b"|", pad_values=(b"LP", b"RP"))
result = self.evaluate(ngram_op)
expected_ngrams = [[[b"LP|LP|a", b"LP|a|z", b"a|z|RP", b"z|RP|RP"],
[b"LP|LP|b", b"LP|b|", b"b||RP", b"|RP|RP"]],
[[b"LP|LP|b", b"LP|b|", b"b||RP", b"|RP|RP"],
[b"LP|LP|e", b"LP|e|f", b"e|f|RP", b"f|RP|RP"]]]
self.assertIsInstance(ngram_op, ops.Tensor)
self.assertAllEqual(expected_ngrams, result)
def test_dense_input(self):
data = [[b"a", b"z"], [b"b", b""], [b"e", b"f"]]
data_tensor = constant_op.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor, ngram_width=3, separator=b"|", pad_values=(b"LP", b"RP"))
result = self.evaluate(ngram_op)
expected_ngrams = [
[b"LP|LP|a", b"LP|a|z", b"a|z|RP", b"z|RP|RP"],
[b"LP|LP|b", b"LP|b|", b"b||RP", b"|RP|RP"],
[b"LP|LP|e", b"LP|e|f", b"e|f|RP", b"f|RP|RP"],
]
self.assertIsInstance(ngram_op, ops.Tensor)
self.assertAllEqual(expected_ngrams, result)
def test_input_list_input(self):
data = [[b"a", b"z"], [b"b", b""], [b"e", b"f"]]
ngram_op = ragged_string_ops.ngrams(
data, ngram_width=3, separator=b"|", pad_values=(b"LP", b"RP"))
result = self.evaluate(ngram_op)
expected_ngrams = [
[b"LP|LP|a", b"LP|a|z", b"a|z|RP", b"z|RP|RP"],
[b"LP|LP|b", b"LP|b|", b"b||RP", b"|RP|RP"],
[b"LP|LP|e", b"LP|e|f", b"e|f|RP", b"f|RP|RP"],
]
self.assertAllEqual(expected_ngrams, result)
def test_vector_input(self):
data = [b"a", b"z"]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor, ngram_width=3, separator=b"|", pad_values=(b"LP", b"RP"))
result = self.evaluate(ngram_op)
expected_ngrams = [b"LP|LP|a", b"LP|a|z", b"a|z|RP", b"z|RP|RP"]
self.assertAllEqual(expected_ngrams, result)
def test_dense_input_with_multiple_ngrams(self):
data = [[b"a", b"b", b"c", b"d"], [b"e", b"f", b"g", b"h"]]
data_tensor = ragged_factory_ops.constant(data)
ngram_op = ragged_string_ops.ngrams(
data_tensor, ngram_width=(1, 2, 3), separator=b"|")
result = self.evaluate(ngram_op)
expected_ngrams = [[
b"a", b"b", b"c", b"d", b"a|b", b"b|c", b"c|d", b"a|b|c", b"b|c|d"
], [b"e", b"f", b"g", b"h", b"e|f", b"f|g", b"g|h", b"e|f|g", b"f|g|h"]]
self.assertAllEqual(expected_ngrams, result)
def test_input_with_no_values(self):
data = ragged_factory_ops.constant([[], [], []], dtype=dtypes.string)
ngram_op = ragged_string_ops.ngrams(data, (1, 2))
result = self.evaluate(ngram_op)
self.assertAllEqual([0, 0, 0, 0], result.row_splits)
self.assertAllEqual(constant_op.constant([], dtype=dtypes.string),
result.values)
if __name__ == "__main__":
test.main()
|
[
"[email protected]"
] | |
087ad34833a0b4b6cd390d4910591bafd8977bfd
|
8636c0fba825b4b7c5ca30c202e2e09afaf05e95
|
/practicing_apps/tkinterTermoJuevesII/main.py
|
83fcef9d568ca215f8376ab16bbc5dd075c81e6c
|
[] |
no_license
|
manuetov/m02_boot_0
|
33fe5876ab0fbbcfcb08e2ccaee5144a57d71c57
|
04d3dceffda78b37a709cd08ef7a1dc1d31478b4
|
refs/heads/master
| 2021-10-12T02:45:55.161385 | 2019-01-31T20:27:14 | 2019-01-31T20:27:14 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,039 |
py
|
from tkinter import *
from tkinter import ttk
class NumberEntry(ttk.Entry):
    # Placeholder for a numeric-only Entry widget; declared but unused below.
    pass
class mainApp(Tk):
entrada = None
tipoUnidad = None
__temperaturaAnt = ""
def __init__(self):
Tk.__init__(self)
self.title("Termómetro")
self.geometry("210x150")
self.configure(bg="#ECECEC")
self.resizable(0,0)
self.temperatura = StringVar(value="")
self.temperatura.trace("w", self.validateTemperature)
self.tipoUnidad = StringVar(value="F")
self.createLayout()
def createLayout(self):
        self.entrada = ttk.Entry(self, textvariable=self.temperatura)
        self.entrada.place(x=10, y=10)
self.lblUnidad = ttk.Label(self, text="Grados:").place(x=10, y=50)
self.rb1 = ttk.Radiobutton(self, text="Fahrenheit", variable=self.tipoUnidad, value="F",command=self.selected).place(x=20, y=70)
self.rb2 = ttk.Radiobutton(self, text="Celsius", variable=self.tipoUnidad, value="C", command=self.selected).place(x=20, y=95)
def start(self):
self.mainloop()
def validateTemperature(self, *args):
nuevoValor = self.temperatura.get()
print("nuevoValor", nuevoValor,"vs valorAnterior", self.__temperaturaAnt)
try:
float(nuevoValor)
self.__temperaturaAnt = nuevoValor
print("fija valor anterior a", self.__temperaturaAnt)
except:
self.temperatura.set(self.__temperaturaAnt)
print("recupera valor anterior ", self.__temperaturaAnt)
def selected(self):
resultado = 0
toUnidad = self.tipoUnidad.get()
grados = float(self.temperatura.get())
if toUnidad == 'F':
resultado = grados * 9/5 + 32
elif toUnidad == 'C':
resultado = (grados - 32) * 5/9
else:
resultado = grados
self.temperatura.set(resultado)
if __name__ == '__main__':
app = mainApp()
app.start()
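
    # Worked conversion check (added for illustration; runs after the window
    # closes): 100 C -> 212 F and 212 F -> 100 C under the formulas above.
    assert 100 * 9 / 5 + 32 == 212.0
    assert (212 - 32) * 5 / 9 == 100.0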
|
[
"[email protected]"
] | |
9b13a527404e03d33c008e8e26f41f9477864f2a
|
b99195cf2d181dec5c31aa7e58d747f474153802
|
/Decision making and loop/Find Armstrong Number in an Interval.py
|
535db3c64285194403e3dc59dbe320ff35181beb
|
[] |
no_license
|
eldadpuzach/MyPythonProjects
|
b1b4d56a822fd781c7c4c7a9e4bb5408c180c187
|
3a961a7c265caf1369067d98e94564f01f1bde74
|
refs/heads/master
| 2020-03-20T18:07:43.319331 | 2019-02-13T22:07:10 | 2019-02-13T22:07:10 | 137,570,971 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 475 |
py
|
# Check for Armstrong numbers in a given interval
# To take input from the user
lower = int(input("Enter lower range: "))
upper = int(input("Enter upper range: "))
for num in range(lower, upper + 1):
# order of number
order = len(str(num))
    # initialize the running digit-power total (avoid shadowing built-in sum)
    digit_sum = 0
    # add each digit raised to the number of digits (the "order")
    temp = num
    while temp > 0:
        digit = temp % 10
        digit_sum += digit ** order
        temp //= 10
    if num == digit_sum:
        print(num)
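
# Worked example (added for illustration, not part of the original exercise):
# 153 and 370 satisfy the digit-power rule, e.g. 3**3 + 7**3 + 0**3 == 370.
assert sum(int(d) ** len(str(153)) for d in str(153)) == 153
assert sum(int(d) ** len(str(370)) for d in str(370)) == 370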
|
[
"[email protected]"
] | |
ef2f06d85a6c61ac5b042627f79667fcc7c3e334
|
82ffae21bc27e91643bcc344c89d2684b1105e56
|
/train.py
|
bd024a2c5a3e7d90fe3700f9cf7ac95daa909997
|
[] |
no_license
|
liuchongwei/IB-INN
|
4f4df0a0f9295fa4d9cc900170d1b7b59217dd7e
|
440b75d5fbafec75842b18c9e7b6f03a8d76d16d
|
refs/heads/master
| 2023-04-08T11:51:39.745227 | 2021-04-15T17:25:30 | 2021-04-15T17:25:30 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,366 |
py
|
from os.path import join
from time import time
import sys
import numpy as np
from tqdm import tqdm
import torch.optim
import data
from model import GenerativeClassifier
from VIB import WrapperVIB
import evaluation
def train(args):
N_epochs = eval(args['training']['n_epochs'])
beta = eval(args['training']['beta_IB'])
train_nll = bool(not eval(args['ablations']['no_NLL_term']))
train_class_nll = eval(args['ablations']['class_NLL'])
label_smoothing = eval(args['data']['label_smoothing'])
grad_clip = eval(args['training']['clip_grad_norm'])
train_vib = eval(args['ablations']['vib'])
interval_log = eval(args['checkpoints']['interval_log'])
interval_checkpoint = eval(args['checkpoints']['interval_checkpoint'])
interval_figure = eval(args['checkpoints']['interval_figure'])
save_on_crash = eval(args['checkpoints']['checkpoint_when_crash'])
output_dir = args['checkpoints']['output_dir']
resume = args['checkpoints']['resume_checkpoint']
ensemble_index = eval(args['checkpoints']['ensemble_index'])
if ensemble_index is None:
ensemble_str = ''
else:
ensemble_str = '.%.2i' % (ensemble_index)
logfile = open(join(output_dir, f'losses{ensemble_str}.dat'), 'w')
live_loss = eval(args['checkpoints']['live_updates'])
if train_vib:
inn = WrapperVIB(args)
else:
inn = GenerativeClassifier(args)
inn.cuda()
dataset = data.Dataset(args)
def log_write(line, endline='\n'):
print(line, flush=True)
logfile.write(line)
logfile.write(endline)
plot_columns = ['time', 'epoch', 'iteration',
'L_x_tr',
'L_x_val',
'L_y_tr',
'L_y_val',
'acc_tr',
'acc_val',
'delta_mu_val']
train_loss_names = [l for l in plot_columns if l[-3:] == '_tr']
val_loss_names = [l for l in plot_columns if l[-4:] == '_val']
header_fmt = '{:>15}' * len(plot_columns)
output_fmt = '{:15.1f} {:04d}/{:04d} {:04d}/{:04d}' + '{:15.5f}' * (len(plot_columns) - 3)
output_fmt_live = '{:15.1f} {:04d}/{:04d} {:04d}/{:04d}'
for l_name in plot_columns[3:]:
if l_name in train_loss_names:
output_fmt_live += '{:15.5f}'
else:
output_fmt_live += '{:>15}'.format('')
if eval(args['training']['exponential_scheduler']):
print('Using exponential scheduler')
sched = torch.optim.lr_scheduler.StepLR(inn.optimizer, gamma=0.002 ** (1/N_epochs), step_size=1)
else:
print('Using milestone scheduler')
sched = torch.optim.lr_scheduler.MultiStepLR(inn.optimizer, gamma=0.1,
milestones=eval(args['training']['scheduler_milestones']))
log_write(header_fmt.format(*plot_columns))
if resume:
inn.load(resume)
t_start = time()
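
    # When the NLL term is active, the two IB weights satisfy
    # beta_x + beta_y == 2 for any beta (e.g. beta = 1 gives
    # beta_x = beta_y = 1), keeping the loss scale comparable across betas.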
if train_nll:
beta_x = 2. / (1 + beta)
beta_y = 2. * beta / (1 + beta)
else:
beta_x, beta_y = 0., 1.
try:
for i_epoch in range(N_epochs):
running_avg = {l: [] for l in train_loss_names}
for i_batch, (x,l) in enumerate(dataset.train_loader):
x, y = x.cuda(), dataset.onehot(l.cuda(), label_smoothing)
losses = inn(x, y)
if train_class_nll:
loss = 2. * losses['L_cNLL_tr']
else:
loss = beta_x * losses['L_x_tr'] - beta_y * losses['L_y_tr']
loss.backward()
torch.nn.utils.clip_grad_norm_(inn.trainable_params, grad_clip)
inn.optimizer.step()
inn.optimizer.zero_grad()
if live_loss:
print(output_fmt_live.format(*([(time() - t_start) / 60.,
i_epoch, N_epochs,
i_batch, len(dataset.train_loader)]
+ [losses[l].item() for l in train_loss_names])),
flush=True, end='\r')
for l_name in train_loss_names:
running_avg[l_name].append(losses[l_name].item())
if not i_batch % interval_log:
for l_name in train_loss_names:
running_avg[l_name] = np.mean(running_avg[l_name])
val_losses = inn.validate(dataset.val_x, dataset.val_y)
for l_name in val_loss_names:
running_avg[l_name] = val_losses[l_name].item()
losses_display = [(time() - t_start) / 60.,
i_epoch, N_epochs,
i_batch, len(dataset.train_loader)]
losses_display += [running_avg[l] for l in plot_columns[3:]]
#TODO visdom?
log_write(output_fmt.format(*losses_display))
running_avg = {l: [] for l in train_loss_names}
sched.step()
if i_epoch > 2 and (val_losses['L_x_val'].item() > 1e5 or not np.isfinite(val_losses['L_x_val'].item())):
if high_loss:
raise RuntimeError("loss is astronomical")
else:
high_loss = True
else:
high_loss = False
if i_epoch > 0 and (i_epoch % interval_checkpoint) == 0:
inn.save(join(output_dir, f'model_{i_epoch}{ensemble_str}.pt'))
if (i_epoch % interval_figure) == 0 and not inn.feed_forward and not train_vib:
evaluation.val_plots(join(output_dir, f'figs_{i_epoch}{ensemble_str}.pdf'), inn, dataset)
except:
if save_on_crash:
inn.save(join(output_dir, f'model_ABORT{ensemble_str}.pt'))
raise
finally:
logfile.close()
try:
for k in list(inn.inn._buffers.keys()):
if 'tmp_var' in k:
del inn.inn._buffers[k]
except AttributeError:
            # Feed-forward nets don't have the weird FrEIA problems, skip
pass
inn.save(join(output_dir, f'model{ensemble_str}.pt'))
|
[
"[email protected]"
] | |
db1c8172d34f9db120085047d2d4df1f2e1ce3c5
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/backup/user_152/ch40_2020_09_28_14_38_11_448144.py
|
869700df39676380d5c581c9967c8ac41f46af87
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 144 |
py
|
def soma_valores(lista):
soma = 0
i = 0
size = len(lista)
while(i<size):
soma += lista[i]
i += 1
return soma
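

# Usage sketch (added for illustration):
assert soma_valores([1, 2, 3]) == 6
assert soma_valores([]) == 0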
|
[
"[email protected]"
] | |
f36445558f08904ce3cbc28219071f55910a3326
|
f07a42f652f46106dee4749277d41c302e2b7406
|
/Data Set/bug-fixing-5/04c0870f3abb72e4d68e5e712e5eb4445eff4161-<_get_lzma_file>-fix.py
|
406e9e4f070822a7fc5cd2a117bdddaaa5b300af
|
[] |
no_license
|
wsgan001/PyFPattern
|
e0fe06341cc5d51b3ad0fe29b84098d140ed54d1
|
cc347e32745f99c0cd95e79a18ddacc4574d7faa
|
refs/heads/main
| 2023-08-25T23:48:26.112133 | 2021-10-23T14:11:22 | 2021-10-23T14:11:22 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 504 |
py
|
def _get_lzma_file(lzma):
    """
    Importing the `LZMAFile` class from the `lzma` module.

    Returns
    -------
    class
        The `LZMAFile` class from the `lzma` module.

    Raises
    ------
    RuntimeError
        If the `lzma` module was not imported correctly, or didn't exist.
    """
    if lzma is None:
        raise RuntimeError(
            'lzma module not available. A Python re-install with the proper '
            'dependencies, might be required to solve this issue.'
        )
    return lzma.LZMAFile
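

# Usage sketch (added for illustration): the stdlib module satisfies the guard.
if __name__ == "__main__":
    import lzma
    assert _get_lzma_file(lzma) is lzma.LZMAFile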
|
[
"[email protected]"
] | |
f99a2d4d664426b40ad53acceb227493989c5cfa
|
d5905fa195a09883da60c584fd547e7cc5adab6a
|
/circhic/tests/test_utils.py
|
34c41f3fab1f561a54ee31f51e964051f2cea0fb
|
[
"BSD-3-Clause"
] |
permissive
|
ijunier/circhic_dev
|
1370182f5a92d32d384f5d99a0556836d88cccb8
|
d312cebd555058859daa7bf0bb5aabcc18ecd748
|
refs/heads/master
| 2023-01-07T20:43:22.449704 | 2020-07-20T13:20:41 | 2020-07-20T13:20:41 | 275,118,657 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 571 |
py
|
from circhic import utils
import numpy as np
import pytest
def test_convert_xy_to_thetar():
lengths = np.array([42])
random_state = np.random.RandomState(seed=42)
n = 100
x = random_state.randint(0, lengths.sum(), n)
y = random_state.randint(0, lengths.sum(), n)
    # Smoke test to check that the code runs
theta, r = utils.convert_xy_to_thetar((x, y), lengths)
with pytest.raises(ValueError):
utils.convert_xy_to_thetar((x, y[:-1]), lengths)
with pytest.raises(ValueError):
utils.convert_xy_to_thetar((x+42, y), lengths)
|
[
"[email protected]"
] | |
b17418f3c281dddaa920214c07c65cae375c55ba
|
1587aad7b5df3159015a06ec70137ca9cbdb74d3
|
/Fundamentals/Basics/comparearrays.py
|
af84baa6c18f4cfe6a49aef6c53112e5dedb44a5
|
[] |
no_license
|
brysonmckenzie/Python-Flask-Django
|
21d7302401859dc7a3ade677238f739f32e87aaa
|
38460ff4f321906b57c9c0167278e774e791a327
|
refs/heads/master
| 2021-01-18T16:37:19.737987 | 2017-08-16T08:16:35 | 2017-08-16T08:16:35 | 100,465,043 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 292 |
py
|
def CompareArrays(arr1, arr2):
    # Arrays of different lengths can never be equal.
    if len(arr1) != len(arr2):
        return False
    # Compare element by element; report the first mismatching pair.
    for a, b in zip(arr1, arr2):
        if a != b:
            print(a, b)
            return False
    return True
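

# Usage sketch (added for illustration):
assert CompareArrays([1, 2, 3], [1, 2, 3]) is True
assert CompareArrays([1, 2], [1, 2, 3]) is False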
|
[
"[email protected]"
] | |
75a8c49083279d509530d4fc722e78913c10ba38
|
8c2791898f9bd5640353a7ba8088b6263da0ffa3
|
/client/commands/__init__.py
|
8035b273e180ba4438600a4f4d31eb92dd6db582
|
[
"MIT"
] |
permissive
|
dalejung/pyre-check
|
570e123da8097cbceb331638ef915da410d05a85
|
1bcd0a68ff5dc6e4ab36992088de5a6cb5a82531
|
refs/heads/master
| 2020-03-25T22:43:20.049890 | 2018-08-10T04:46:04 | 2018-08-10T04:48:54 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 854 |
py
|
# Copyright (c) 2016-present, Facebook, Inc.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from .analyze import Analyze as Analyze # noqa
from .check import Check as Check # noqa
from .error_handling import ErrorHandling as ErrorHandling # noqa
from .incremental import Incremental as Incremental # noqa
from .initialize import Initialize as Initialize # noqa
from .kill import Kill as Kill # noqa
from .persistent import Persistent as Persistent # noqa
from .query import Query as Query # noqa
from .rage import Rage as Rage # noqa
from .restart import Restart as Restart # noqa
from .start import Start # noqa
from .stop import Stop as Stop # noqa
from .command import ( # noqa; noqa; noqa
ClientException as ClientException,
Command as Command,
)
|
[
"[email protected]"
] | |
5675bc2d9d63337fe3c0cd7ce20077b8d0f5b066
|
553965e00fe5d7ba217b399d1b36f603743c9724
|
/chromeos/components/file_manager/resources/gen_main_html.py
|
624c8f318a61226d9874bfdf729c6e84e40f4cb0
|
[
"BSD-3-Clause"
] |
permissive
|
zoracon/chromium
|
3892e77996c03807c7fb96f535d84745e6540a8c
|
eaa78fd27b11621c877c62e022f1c0c4f47c5c12
|
refs/heads/master
| 2023-01-02T17:18:46.663596 | 2020-11-18T18:56:44 | 2020-11-18T18:56:44 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,214 |
py
|
#!/usr/bin/env python
#
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generate SWA files app main.html from files app main.html"""
import fileinput
import optparse
import os
import shutil
import sys
_SWA = '<script type="module" src="chrome://file-manager/main.js"></script>'
def GenerateSwaMainHtml(source, target, root):
"""Copy source file to target, do SWA edits, then add BUILD time stamp."""
# Copy source (main.html) file to the target (main.html) file.
shutil.copyfile(source, target)
# Edit the target file.
for line in fileinput.input(target, inplace=True):
# Add _SWA <script> tag after the <head> tag.
if line.find('<head>') >= 0:
print line + ' ' + _SWA
# Add <meta> charset="utf-8" attribute.
elif line.find('<meta ') >= 0:
sys.stdout.write(line.replace('<meta ', '<meta charset="utf-8" '))
# Root rewrite files app <link> stylesheet href attribute.
elif line.find('<link rel="stylesheet"') >= 0:
if not 'href="chrome://' in line:
href = 'href="' + root + 'ui/file_manager/file_manager/'
sys.stdout.write(line.replace('href="', href))
else:
sys.stdout.write(line)
# Remove files app foreground/js <script> tags: SWA app must lazy-load
# them after the SWA app has initialized needed resources.
elif line.find('<script src="foreground/js/') == -1:
sys.stdout.write(line)
# Create a BUILD time stamp for the target file.
open(target + '.stamp', 'a').close()
def main(args):
parser = optparse.OptionParser()
parser.add_option('--source', help='Files app main.html source file.')
parser.add_option('--target', help='Target SWA main.html for output.')
parser.add_option('--root', help='Source root: chrome/src path.')
options, _ = parser.parse_args(args)
if options.source and options.target and options.root:
target = os.path.join(os.getcwd(), options.target)
GenerateSwaMainHtml(options.source, target, options.root)
return
raise SyntaxError('Usage: all arguments are required.')
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
[
"[email protected]"
] | |
4eae384c04fc6ece4f04f2b9d2b08fa2e38fe0ba
|
9716a77ef1d0ba5ef9a61be04f6229494744d5d5
|
/chapter06 정렬/수 정렬하기3.py
|
8e0a8c303ae70ce83339bf67079a86549562e250
|
[] |
no_license
|
korea-space-codingmonster/Algorithm_Study
|
98b00c81839cf8ac8365d3982c25650a21226ce9
|
8c92857e458994a2d1d77dc3ea0d4b645b8b6a4b
|
refs/heads/main
| 2023-06-03T20:00:52.915447 | 2021-06-20T05:51:47 | 2021-06-20T05:51:47 | 329,354,196 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 573 |
py
|
# Problem
# Given N numbers, write a program that sorts them in ascending order.
# Input
# The first line contains the count of numbers, N (1 ≤ N ≤ 10,000,000).
# Each of the following N lines contains one number, a natural number less
# than or equal to 10,000.
# Output
# Print the sorted numbers in ascending order, one per line, over N lines.
# Sample input 1
# 10
# 5
# 2
# 3
# 1
# 4
# 2
# 3
# 5
# 1
# 7
# Sample output 1
# 1
# 1
# 2
# 2
# 3
# 3
# 4
# 5
# 5
# 7
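
# A minimal counting-sort sketch (added for illustration; the original file
# contains only the problem statement). With N up to 10,000,000 and values
# capped at 10,000, counting occurrences runs in O(N + K) and avoids a
# comparison sort entirely.
import sys


def solve():
    data = sys.stdin.buffer.read().split()
    n = int(data[0])
    counts = [0] * 10001  # values are natural numbers <= 10,000
    for token in data[1:n + 1]:
        counts[int(token)] += 1
    out = []
    for value in range(1, 10001):
        out.extend([str(value)] * counts[value])
    sys.stdout.write("\n".join(out) + "\n")


if __name__ == "__main__":
    solve()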
|
[
"[email protected]"
] | |
7aa31c9b0250a3fa6b8760cf05b19f6f9ab425be
|
d7016f69993570a1c55974582cda899ff70907ec
|
/sdk/eventhub/azure-mgmt-eventhub/azure/mgmt/eventhub/v2022_01_01_preview/aio/operations/_private_endpoint_connections_operations.py
|
bea1e46c5c841f5c1c1884b44e2698c5f7252586
|
[
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] |
permissive
|
kurtzeborn/azure-sdk-for-python
|
51ca636ad26ca51bc0c9e6865332781787e6f882
|
b23e71b289c71f179b9cf9b8c75b1922833a542a
|
refs/heads/main
| 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 |
MIT
| 2022-07-19T08:05:23 | 2018-11-16T22:15:30 |
Python
|
UTF-8
|
Python
| false | false | 24,471 |
py
|
# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._private_endpoint_connections_operations import (
build_create_or_update_request,
build_delete_request,
build_get_request,
build_list_request,
)
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class PrivateEndpointConnectionsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.eventhub.v2022_01_01_preview.aio.EventHubManagementClient`'s
:attr:`private_endpoint_connections` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list(
self, resource_group_name: str, namespace_name: str, **kwargs: Any
) -> AsyncIterable["_models.PrivateEndpointConnection"]:
"""Gets the available PrivateEndpointConnections within a namespace.
.. seealso::
- https://msdn.microsoft.com/en-us/library/azure/mt639412.aspx
:param resource_group_name: Name of the resource group within the azure subscription. Required.
:type resource_group_name: str
:param namespace_name: The Namespace name. Required.
:type namespace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PrivateEndpointConnection or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.eventhub.v2022_01_01_preview.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-01-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-01-01-preview")
)
cls: ClsType[_models.PrivateEndpointConnectionListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
resource_group_name=resource_group_name,
namespace_name=namespace_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("PrivateEndpointConnectionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections"
}
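
    # Usage sketch (illustrative only; client construction is assumed and is
    # not part of this file):
    #
    #   async with EventHubManagementClient(credential, subscription_id) as client:
    #       async for connection in client.private_endpoint_connections.list(
    #               "my-resource-group", "my-namespace"):
    #           print(connection.name)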
@overload
async def create_or_update(
self,
resource_group_name: str,
namespace_name: str,
private_endpoint_connection_name: str,
parameters: _models.PrivateEndpointConnection,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PrivateEndpointConnection:
"""Creates or updates PrivateEndpointConnections of service namespace.
.. seealso::
- https://msdn.microsoft.com/en-us/library/azure/mt639408.aspx
:param resource_group_name: Name of the resource group within the azure subscription. Required.
:type resource_group_name: str
:param namespace_name: The Namespace name. Required.
:type namespace_name: str
:param private_endpoint_connection_name: The PrivateEndpointConnection name. Required.
:type private_endpoint_connection_name: str
:param parameters: Parameters supplied to update Status of PrivateEndPoint Connection to
namespace resource. Required.
:type parameters: ~azure.mgmt.eventhub.v2022_01_01_preview.models.PrivateEndpointConnection
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateEndpointConnection or the result of cls(response)
:rtype: ~azure.mgmt.eventhub.v2022_01_01_preview.models.PrivateEndpointConnection
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def create_or_update(
self,
resource_group_name: str,
namespace_name: str,
private_endpoint_connection_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> _models.PrivateEndpointConnection:
"""Creates or updates PrivateEndpointConnections of service namespace.
.. seealso::
- https://msdn.microsoft.com/en-us/library/azure/mt639408.aspx
:param resource_group_name: Name of the resource group within the azure subscription. Required.
:type resource_group_name: str
:param namespace_name: The Namespace name. Required.
:type namespace_name: str
:param private_endpoint_connection_name: The PrivateEndpointConnection name. Required.
:type private_endpoint_connection_name: str
:param parameters: Parameters supplied to update Status of PrivateEndPoint Connection to
namespace resource. Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateEndpointConnection or the result of cls(response)
:rtype: ~azure.mgmt.eventhub.v2022_01_01_preview.models.PrivateEndpointConnection
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def create_or_update(
self,
resource_group_name: str,
namespace_name: str,
private_endpoint_connection_name: str,
parameters: Union[_models.PrivateEndpointConnection, IO],
**kwargs: Any
) -> _models.PrivateEndpointConnection:
"""Creates or updates PrivateEndpointConnections of service namespace.
.. seealso::
- https://msdn.microsoft.com/en-us/library/azure/mt639408.aspx
:param resource_group_name: Name of the resource group within the azure subscription. Required.
:type resource_group_name: str
:param namespace_name: The Namespace name. Required.
:type namespace_name: str
:param private_endpoint_connection_name: The PrivateEndpointConnection name. Required.
:type private_endpoint_connection_name: str
:param parameters: Parameters supplied to update Status of PrivateEndPoint Connection to
namespace resource. Is either a PrivateEndpointConnection type or a IO type. Required.
:type parameters: ~azure.mgmt.eventhub.v2022_01_01_preview.models.PrivateEndpointConnection or
IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateEndpointConnection or the result of cls(response)
:rtype: ~azure.mgmt.eventhub.v2022_01_01_preview.models.PrivateEndpointConnection
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-01-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-01-01-preview")
)
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IO, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PrivateEndpointConnection")
request = build_create_or_update_request(
resource_group_name=resource_group_name,
namespace_name=namespace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self.create_or_update.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {}) # type: ignore
return deserialized # type: ignore
create_or_update.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
}
async def _delete_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, namespace_name: str, private_endpoint_connection_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-01-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-01-01-preview")
)
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_delete_request(
resource_group_name=resource_group_name,
namespace_name=namespace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
}
@distributed_trace_async
async def begin_delete(
self, resource_group_name: str, namespace_name: str, private_endpoint_connection_name: str, **kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes a Private Endpoint Connection.
.. seealso::
- https://msdn.microsoft.com/en-us/library/azure/mt639389.aspx
:param resource_group_name: Name of the resource group within the azure subscription. Required.
:type resource_group_name: str
:param namespace_name: The Namespace name. Required.
:type namespace_name: str
:param private_endpoint_connection_name: The PrivateEndpointConnection name. Required.
:type private_endpoint_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-01-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-01-01-preview")
)
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = await self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
namespace_name=namespace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_delete.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
}
@distributed_trace_async
async def get(
self, resource_group_name: str, namespace_name: str, private_endpoint_connection_name: str, **kwargs: Any
) -> _models.PrivateEndpointConnection:
"""Gets a description for the specified Private Endpoint Connection name.
.. seealso::
- https://msdn.microsoft.com/en-us/library/azure/mt639379.aspx
:param resource_group_name: Name of the resource group within the azure subscription. Required.
:type resource_group_name: str
:param namespace_name: The Namespace name. Required.
:type namespace_name: str
:param private_endpoint_connection_name: The PrivateEndpointConnection name. Required.
:type private_endpoint_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateEndpointConnection or the result of cls(response)
:rtype: ~azure.mgmt.eventhub.v2022_01_01_preview.models.PrivateEndpointConnection
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: Literal["2022-01-01-preview"] = kwargs.pop(
"api_version", _params.pop("api-version", "2022-01-01-preview")
)
cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
request = build_get_request(
resource_group_name=resource_group_name,
namespace_name=namespace_name,
private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.EventHub/namespaces/{namespaceName}/privateEndpointConnections/{privateEndpointConnectionName}"
}
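# --- Added usage sketch (not part of the generated SDK file above) ---
# A minimal illustration of driving the operations defined in this file through
# the public async management client. It assumes azure-mgmt-eventhub and
# azure-identity are installed; all resource names below are placeholders.
import asyncio

from azure.identity.aio import DefaultAzureCredential
from azure.mgmt.eventhub.aio import EventHubManagementClient


async def main() -> None:
    async with DefaultAzureCredential() as credential:
        async with EventHubManagementClient(credential, "<subscription-id>") as client:
            # Read one private endpoint connection, then delete it and wait
            # for the long-running operation to finish.
            conn = await client.private_endpoint_connections.get(
                "<resource-group>", "<namespace>", "<connection-name>"
            )
            print(conn.name)
            poller = await client.private_endpoint_connections.begin_delete(
                "<resource-group>", "<namespace>", "<connection-name>"
            )
            await poller.result()  # begin_delete resolves to None


if __name__ == "__main__":
    asyncio.run(main())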
|
[
"[email protected]"
] | |
96c4cfe9417124ff23fbdd2c85331526eb41d995
|
5373c570897992986b18c1f76b6a2e1482b9599d
|
/Exercícios/ex002.py
|
57ba81c90b3ead48d9b2d3ae1cc2f883f40089b2
|
[
"MIT"
] |
permissive
|
CarolineSantosAlves/Exercicios-Python
|
37ebe049919608ad6cf7f2558734f2f9b16082a1
|
3bbdc1fd996948142951e043175efd9d8a6c34f2
|
refs/heads/master
| 2022-09-08T20:29:26.375417 | 2020-05-30T23:44:04 | 2020-05-30T23:44:04 | 268,178,149 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 70 |
py
|
nome = input('Digite seu nome: ')  # "Enter your name: "
print('Bem vindo {}!'.format(nome))  # "Welcome {name}!"
|
[
"[email protected]"
] | |
6a712c9174d946d711b862d455e296f516466cbb
|
e7f67295e62fc5301ab23bce06c61f2311c2eeee
|
/mjml/helpers/py_utils.py
|
945f8f9a0e51353211751f6935e021af74d3e325
|
[
"MIT"
] |
permissive
|
bayesimpact/mjml-stub
|
94d10588359990cd58d2085429b19a3777c51f15
|
30bab3f2e197d2f940f58439f2e8cd9fadb58d48
|
refs/heads/main
| 2023-05-08T11:54:19.313877 | 2021-01-25T21:30:48 | 2021-01-25T21:30:48 | 344,026,118 | 0 | 0 |
MIT
| 2021-03-03T06:31:49 | 2021-03-03T06:31:48 | null |
UTF-8
|
Python
| false | false | 1,037 |
py
|
import re
__all__ = [
'is_nil',
'is_empty',
'is_not_empty',
'is_not_nil',
'omit',
'parse_float',
'parse_int',
'strip_unit',
]
def omit(attributes, keys):
if isinstance(keys, str):
keys = (keys, )
_attrs = dict(attributes)
for key in keys:
if key in _attrs:
_attrs.pop(key)
return _attrs
def parse_float(value_str):
    # match a leading, optionally signed decimal number (e.g. "1.5em" -> 1.5);
    # the dot is escaped and the stray "*" quantifier dropped so group(1) is
    # never None when a leading number is present
    match = re.search(r'^([-+]?\d+(\.\d+)?)', value_str)
    return float(match.group(1))
def parse_int(value_str):
    if isinstance(value_str, int):
        return value_str
    # match a leading, optionally signed integer (e.g. "25px" -> 25)
    match = re.search(r'^([-+]?\d+)', value_str)
    return int(match.group(1))
def strip_unit(value_str):
    # keep the leading integer, dropping any trailing unit (e.g. "600px" -> 600)
    match = re.search(r'^(\d+).*', value_str)
    return int(match.group(1))
def is_nil(v):
return (v is None)
def is_not_nil(v):
return not is_nil(v)
def is_empty(v):
if v is None:
return True
elif hasattr(v, 'strip'):
return not bool(v.strip())
return not bool(v)
def is_not_empty(v):
return not is_empty(v)
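# --- Added usage sketch (illustration only, not part of the original module) ---
# Quick checks showing what the helpers above do with typical CSS-ish values.
if __name__ == "__main__":
    attrs = {"padding": "10px", "align": "left", "border": "0"}
    assert omit(attrs, "align") == {"padding": "10px", "border": "0"}
    assert parse_int("25px") == 25        # leading integer only
    assert parse_float("1.5em") == 1.5    # leading decimal only
    assert strip_unit("600px") == 600
    assert is_empty("   ") and is_not_empty("x")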
|
[
"[email protected]"
] | |
b5897eec2be4340eb61e541ba9839cdc53a882d5
|
1ebe5a07e7f6260c2c2ceb6ca00dcf2a0341e544
|
/op_impl/built-in/ai_core/tbe/impl/yolo_v3_correct_region_box_v2.py
|
1a325ec9c0766aa9ab0d67f45dd302d7f7dc0876
|
[] |
no_license
|
gekowa/ascend-opp
|
f5e09905336d85f9974d555d03d37a75cb8185c1
|
5c28a2faf9d2a117ea6f0923efe35fcd53904dd2
|
refs/heads/master
| 2023-04-09T12:14:40.337104 | 2021-04-19T23:00:59 | 2021-04-19T23:00:59 | 359,620,865 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 55,592 |
py
|
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""
yolo_v3_correct_region_box_v2
"""
# pylint: disable=too-many-lines,import-error,no-self-use
import math
from te import tik
from te import platform as tbe_platform
from impl.constant_util import BLOCK_SIZE
from impl.constant_util import DATA_SIZE_TWO
from impl.constant_util import DATA_SIZE_FOUR
from impl.constant_util import VECTOR_BYTE_SIZE
from impl.constant_util import STRIDE_ONE
from impl.constant_util import DATA_TYPE_FP16
# reserve size for ub
RESERVE_SIZE = 16 * 1024
# repeat one
REPEAT_ONE = 1
# one nburst
NBURST_ONE = 1
# value one
VALUE_ONE = 1
# stride eight for dma
STRIDE_EIGHT = 8
# stride zero for dma
GAP_ZERO = 0
# sid for dma
SID = 0
# value zero
VALUE_ZERO = 0
# value two
VALUE_TWO = 2
# value three
VALUE_THREE = 3
# neg two
NEG_TWO = -2
# neg one
NEG_ONE = -1
# value half
VALUE_HALF = 0.5
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-public-methods
class CorrectBoxComputer():
"""
Function: store CorrectBoxComputer parameters
Modify : 2019-11-06
"""
def __init__(self, input_dict):
"""
init the CorrectBoxComputer parameters
Parameters
----------
        input_dict: a dict with the following keys:
                    box_info (a list),
                    biases (a list),
                    coords, boxes, classes, relative, obj_threshold,
                    classes_threshold, post_top_k, nms_threshold, pre_nms_topn,
                    max_box_number_per_batch, kernel_name; for more details,
                    please check the yolov3_detection_output function
Returns
-------
None
"""
self.instance = tik.Tik(tik.Dprofile())
self.one_max_size = (
tbe_platform.cce_conf.get_soc_spec(tbe_platform.cce_conf.UB_SIZE) -
RESERVE_SIZE) // 8
self.batch = input_dict.get("box_info")[0].get("shape")[0]
self.dtype = input_dict.get("box_info")[0].get("dtype")
self.classes = input_dict.get("classes")
self.boxes = input_dict.get("boxes")
self.relative = input_dict.get("relative")
self.coords = input_dict.get('coords')
self.obj_threshold = input_dict.get("obj_threshold")
self.classes_threshold = input_dict.get("classes_threshold")
self.post_top_k = input_dict.get("post_top_k")
self.nms_threshold = input_dict.get("nms_threshold")
self.pre_nms_topn = input_dict.get("pre_nms_topn")
self.max_box_number_per_batch = input_dict.get(
"max_box_number_per_batch")
self.kernel_name = input_dict.get("kernel_name")
self.biases = input_dict.get("biases")
self.resize_origin_img_to_net = input_dict.get("resize_origin_img_to_net")
self.input_dict = input_dict
self.dsize = DATA_SIZE_FOUR
if self.dtype == DATA_TYPE_FP16:
self.dsize = DATA_SIZE_TWO
self.len_32b = BLOCK_SIZE // self.dsize
self.mask = VECTOR_BYTE_SIZE // self.dsize
self.height = []
self.width = []
self.yolo_num = len(input_dict.get("box_info"))
for box_info in input_dict.get("box_info"):
self.height.append(box_info.get("shape")[2])
self.width.append(box_info.get("shape")[3])
self.img_info = self.instance.Tensor(self.dtype,
(self.batch * 4 + BLOCK_SIZE,),
scope=tik.scope_gm,
name="img_info")
self.coord_data = []
self.windex = []
self.hindex = []
self.obj_datas = []
self.classes_data = []
self.totalwh = 0
self.prepare_data()
# Intermediate Output
self.inter_coords = self.instance.Tensor(
self.dtype, self.get_shape(
(self.batch, 4, self.boxes * self.totalwh), True),
scope=tik.scope_gm, name="inter_coords", is_workspace=True)
self.inter_classes = self.instance.Tensor(
self.dtype, self.get_shape(
(self.batch, self.classes, self.boxes * self.totalwh), True),
scope=tik.scope_gm, name="inter_classes", is_workspace=True)
self.hwtail_len = self.height[-1] * self.width[-1] % (self.len_32b)
self.block_num, self.outer_loop, self.outer_tail = self.get_block_param()
def prepare_data(self):
"""
prepare tensors for op
:return: None
"""
for i in range(self.yolo_num):
self.totalwh += self.height[i] * self.width[i]
adj_hw = self.get_adj_hw(self.height[i], self.width[i])
self.coord_data.append(self.instance.Tensor(
self.dtype, (self.batch, self.boxes * 4, adj_hw),
scope=tik.scope_gm, name="coord_data" + str(i + 1)))
self.windex.append(self.instance.Tensor(
self.dtype, (adj_hw,), scope=tik.scope_gm,
name="windex" + str(i + 1)))
self.hindex.append(self.instance.Tensor(
self.dtype, (adj_hw,), scope=tik.scope_gm,
name="hindex" + str(i + 1)))
adj_hw = self.get_adj_hw(self.boxes * self.height[i], self.width[i])
self.obj_datas.append(self.instance.Tensor(
self.dtype, (self.batch, adj_hw), scope=tik.scope_gm,
name="obj_data" + str(i + 1)))
self.classes_data.append(self.instance.Tensor(
self.dtype, (self.batch, self.classes, adj_hw),
scope=tik.scope_gm, name="classes_data" + str(i + 1)))
def get_block_param(self):
"""
compute block parameters
Parameters
----------
None
Returns
-------
None
"""
block_num = tbe_platform.cce_conf.get_soc_spec(tbe_platform.cce_conf.CORE_NUM)
if block_num > self.batch:
outer_loop = 1
block_num = self.batch
outer_tail = 0
else:
outer_loop = self.batch // block_num
outer_tail = self.batch - block_num * outer_loop
return block_num, outer_loop, outer_tail
def get_adj_hw(self, height, width):
"""
        compute height * width rounded up to a 16-element boundary
        Parameters
        ----------
        height: box height
        width: box width
        Returns
        -------
        the aligned size of height * width
"""
return math.ceil((height * width + 16) / 16) * 16
def set_pre_nms_topn(self, pre_nms_topn):
"""
set pre_nms_topn
Parameters
----------
        pre_nms_topn: for each category, the number of boxes kept
                      before NMS processing; the maximum is 1024
Returns
-------
None
"""
self.pre_nms_topn = pre_nms_topn
def get_shape(self, old_shape, need_low_dim=False):
"""
compute shape
Parameters
----------
old_shape: shape before compute
need_low_dim: whether need low dim,true or false
Returns
-------
None
"""
old_shape = list(old_shape)
length = len(old_shape)
if length == 1:
old_shape[0] += BLOCK_SIZE
return tuple(old_shape)
if not need_low_dim:
size = self.dsize
for i in range(0, length):
size *= old_shape[i]
unit_rev = self.dsize
for i in range(1, length):
unit_rev *= old_shape[i]
else:
size = self.dsize * old_shape[length - 1]
unit_rev = size
if size % BLOCK_SIZE == 0:
rev = 0
else:
rev = BLOCK_SIZE // unit_rev + 1
old_shape[0] += rev
return tuple(old_shape)
def set_dsize(self, dsize):
"""
set dsize
Parameters
----------
dsize: dsize
Returns
-------
None
"""
self.dsize = dsize
def get_dtype(self):
"""
get dtype
Parameters
----------
None
Returns
-------
dtype:data type
"""
return self.dtype
def t_small_mov_to_gm(self, batch, param):
"""
data move of small shape
Parameters
----------
        batch: the index of the picture
param: param is a dict, the keys as follow:
tail_idx: a scalar,store tail_idx
box_id: a scalar,store box_id
burlen: a scalar,store burlen
last_32b: a tensor,store last_32b data
co_id: a scalar,store co_id
out_offset: a scalar,store out_offset
Returns
-------
None
"""
tail_idx = self.instance.Scalar(name="tail_idx", init_value=0)
if (param['h'] * param['w'] * self.dsize) % BLOCK_SIZE != 0:
with self.instance.if_scope(param['box_id'] == self.boxes - 1):
tail_idx.set_as(
(param['burlen'] - 2) * (BLOCK_SIZE // self.dsize)
+ (param['h'] * param['w']) % (BLOCK_SIZE // self.dsize))
param['burlen'].set_as(param['burlen'] - 1)
with self.instance.for_range(0,
BLOCK_SIZE // self.dsize) as loop:
tmp_scalar = self.instance.Scalar(self.dtype)
tmp_scalar.set_as(param['ub_b'][tail_idx + loop])
param['last_32b'][loop].set_as(tmp_scalar)
# move y to gm
self.instance.data_move(
self.inter_coords[
batch, param['co_id'], param['out_offset'] + param['w'] *
param['h'] * param['box_id']],
param['ub_b'], SID, NBURST_ONE, param['burlen'], GAP_ZERO,
GAP_ZERO)
if (param['h'] * param['w'] * self.dsize) % BLOCK_SIZE != 0:
with self.instance.if_scope(param['box_id'] == self.boxes - 1):
self.instance.data_move(
self.inter_coords[batch, param['co_id'],
param['out_offset'] +
param['w'] * param['h'] * param['box_id'] +
tail_idx],
param['last_32b'], SID, NBURST_ONE, VALUE_ONE, GAP_ZERO,
GAP_ZERO)
def get_faces_params(self, adj_hw, c_num):
"""
        compute how many faces fit in each loop for small shapes
        Parameters
        ----------
        adj_hw: the aligned length of boxes
        c_num: the length of the boxes' channel dim
Returns
-------
faces_in_one_loop, last_loop, loop
"""
if self.one_max_size // (adj_hw * self.dsize) > c_num:
loop = 1
last_loop = c_num
faces_in_one_loop = 0
else:
faces_in_one_loop = self.one_max_size // (adj_hw * self.dsize)
loop = c_num // faces_in_one_loop
faces_tail = c_num - faces_in_one_loop * loop
loop = loop if faces_tail == 0 else loop + 1
last_loop = faces_in_one_loop if faces_tail == 0 else faces_tail
return faces_in_one_loop, last_loop, loop
def newton_div(self, dst, divisor, dividend, repeat):
"""
use newton_div to improve performance
Parameters
----------
dst: vdiv's dest tensor
divisor: vdiv's src0 tensor
dividend: vdiv's src1 tensor
repeat: vdiv's needs repeat times
Returns
-------
None
"""
if tbe_platform.cce_conf.api_check_support("tik.vdiv", "float32"):
self.instance.vdiv(self.mask, dst, divisor, dividend, repeat,
STRIDE_ONE, STRIDE_ONE, STRIDE_ONE,
STRIDE_EIGHT, STRIDE_EIGHT, STRIDE_EIGHT)
else:
with self.instance.new_stmt_scope():
tmp_tensor = self.instance.Tensor(self.dtype, dividend.shape,
scope=tik.scope_ubuf,
name="tmp_tensor")
# 1/dividend
self.instance.vec_rec(self.mask, tmp_tensor, dividend, repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_mul(self.mask, dividend, dividend, tmp_tensor,
repeat,
STRIDE_EIGHT, STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_adds(self.mask, dividend, dividend, NEG_TWO,
repeat, STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_mul(self.mask, dividend, dividend, tmp_tensor,
repeat,
STRIDE_EIGHT, STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_muls(self.mask, dividend, dividend, NEG_ONE,
repeat, STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_mul(self.mask, dst, divisor, dividend, repeat,
STRIDE_EIGHT, STRIDE_EIGHT, STRIDE_EIGHT)
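                # Added note: the sequence above is one Newton-Raphson
                # refinement of the reciprocal x0 = vec_rec(dividend):
                # it computes x1 = -x0 * (d * x0 - 2) = x0 * (2 - d * x0),
                # then dst = divisor * x1 ~= divisor / dividend.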
def get_burlen(self, length):
"""
compute data move nburst
Parameters
----------
length: the number of elements need data move
Returns
-------
burlen: data move nburst
"""
if (length * self.dsize) % BLOCK_SIZE == 0:
return (length * self.dsize) // BLOCK_SIZE
return (length * self.dsize) // BLOCK_SIZE + 1
def get_repeat(self, length):
"""
compute vector instructs repeat times
Parameters
----------
length: the number of elements need data move
Returns
-------
repeats: vector instructs repeat times
"""
if (length * self.dsize) % VECTOR_BYTE_SIZE == 0:
return (length * self.dsize) // VECTOR_BYTE_SIZE
return (length * self.dsize) // VECTOR_BYTE_SIZE + 1
def get_x_y_params(self, img_info):
"""
compute x,y parameters
Parameters
----------
img_info: a tensor,store image's width and height
Returns
-------
x_vmuls_val: a scalar,store x_vmuls_val
y_vmuls_val: a scalar,store y_vmuls_val
x_vadds_val: a scalar,store x_vadds_val
y_vadds_val: a scalar,store y_vadds_val
"""
x_vmuls_val = self.instance.Scalar(self.dtype)
y_vmuls_val = self.instance.Scalar(self.dtype)
x_vadds_val = self.instance.Scalar(self.dtype)
y_vadds_val = self.instance.Scalar(self.dtype)
param = {}
with self.instance.new_stmt_scope():
param['ub_d'] = self.instance.Tensor(self.dtype,
(VECTOR_BYTE_SIZE,),
scope=tik.scope_ubuf,
name="ub_d")
param['ub_e'] = self.instance.Tensor(self.dtype,
(VECTOR_BYTE_SIZE,),
scope=tik.scope_ubuf,
name="ub_e")
param['ub_f'] = self.instance.Tensor(self.dtype,
(VECTOR_BYTE_SIZE,),
scope=tik.scope_ubuf,
name="ub_f")
param['ub_g'] = self.instance.Tensor(self.dtype,
(VECTOR_BYTE_SIZE,),
scope=tik.scope_ubuf,
name="ub_g")
param['lgt_tensor'] = self.instance.Tensor(self.dtype,
(VECTOR_BYTE_SIZE,),
scope=tik.scope_ubuf,
name="lgt_tensor")
param['ret_tensor'] = self.instance.Tensor(self.dtype,
(VECTOR_BYTE_SIZE,),
scope=tik.scope_ubuf,
name="ret_tensor")
new_h, new_w = self.get_new_h_w(img_info, param)
tmp_scalar = self.instance.Scalar(self.dtype)
# x vmuls param --> netw / new_w
tmp_scalar.set_as(img_info[1])
# ub_d netw
self.instance.vec_dup(self.mask, param['ub_d'], tmp_scalar,
REPEAT_ONE, STRIDE_EIGHT)
# ub_e new_w
self.instance.vec_dup(self.mask, param['ub_e'], new_w,
REPEAT_ONE, STRIDE_EIGHT)
# netw / new_w
self.newton_div(param['ub_d'], param['ub_d'], param['ub_e'],
REPEAT_ONE)
x_vmuls_val.set_as(param['ub_d'][0])
# x vadds param --> ((new_w - netw)/2.0/netw) * (netw / new_w)
# --> ((-1)*(netw / new_w) + 1)* 0.5
self.instance.vec_muls(self.mask, param['ub_d'], param['ub_d'],
NEG_ONE, REPEAT_ONE,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_adds(self.mask, param['ub_d'], param['ub_d'],
VALUE_ONE, REPEAT_ONE,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_muls(self.mask, param['ub_d'], param['ub_d'],
VALUE_HALF, REPEAT_ONE,
STRIDE_EIGHT, STRIDE_EIGHT)
x_vadds_val.set_as(param['ub_d'][0])
# y vmuls param --> neth / new_h
tmp_scalar.set_as(img_info[0])
# ub_d neth
self.instance.vec_dup(self.mask, param['ub_d'], tmp_scalar,
REPEAT_ONE, STRIDE_EIGHT)
# ub_e new_h
self.instance.vec_dup(self.mask, param['ub_e'], new_h,
REPEAT_ONE, STRIDE_EIGHT)
# neth / new_h
self.newton_div(param['ub_d'], param['ub_d'], param['ub_e'],
REPEAT_ONE)
y_vmuls_val.set_as(param['ub_d'][0])
# y vadds param --> ((-1)*(neth / new_h) + 1)* 0.5
self.instance.vec_muls(self.mask, param['ub_d'], param['ub_d'],
NEG_ONE, REPEAT_ONE,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_adds(self.mask, param['ub_d'], param['ub_d'],
VALUE_ONE, REPEAT_ONE,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_muls(self.mask, param['ub_d'], param['ub_d'],
VALUE_HALF, REPEAT_ONE,
STRIDE_EIGHT, STRIDE_EIGHT)
y_vadds_val.set_as(param['ub_d'][0])
return x_vmuls_val, x_vadds_val, y_vmuls_val, y_vadds_val
def get_new_h_w(self, img_info, param):
"""
compute boxes's height and width
Parameters
----------
img_info: a tensor,store image's width and height
param: a dict,the keys as follow:
ub_d: a middle tensor,used to compute boxes's height and width
ub_e:a middle tensor,used to compute boxes's height and width
ub_f:a middle tensor,used to compute boxes's height and width
ub_g:a middle tensor,used to compute boxes's height and width
ret_tensor:a middle tensor,used to compute boxes's
height and width
lgt_tensor:a middle tensor,used to compute boxes's
height and width
Returns
-------
new_h: a scalar,store new_h
new_w: a scalar,store new_w
"""
new_h = self.instance.Scalar(self.dtype)
new_w = self.instance.Scalar(self.dtype)
if self.resize_origin_img_to_net:
new_h.set_as(img_info[0])
new_w.set_as(img_info[1])
return new_h, new_w
tmp_scalar = self.instance.Scalar(self.dtype)
# if netw/w < neth/h
# vdup neth/h
tmp_scalar.set_as(img_info[0])
self.instance.vec_dup(self.mask, param['ub_d'], tmp_scalar, REPEAT_ONE,
STRIDE_EIGHT)
tmp_scalar.set_as(img_info[2])
self.instance.vec_dup(self.mask, param['ub_g'], tmp_scalar, REPEAT_ONE,
STRIDE_EIGHT)
self.newton_div(param['ub_d'], param['ub_d'], param['ub_g'], REPEAT_ONE)
# vdup netw/w
tmp_scalar.set_as(img_info[1])
self.instance.vec_dup(self.mask, param['ub_e'], tmp_scalar, REPEAT_ONE,
STRIDE_EIGHT)
tmp_scalar.set_as(img_info[3])
self.instance.vec_dup(self.mask, param['ub_g'], tmp_scalar, REPEAT_ONE,
STRIDE_EIGHT)
self.newton_div(param['ub_e'], param['ub_e'], param['ub_g'], REPEAT_ONE)
sel = self.instance.Tensor("uint16", (8, ), name="sel",
scope=tik.scope_ubuf)
self.instance.vec_dup(8, sel, 0, 1, 8)
self.instance.vec_cmpv_lt(sel, param['ub_e'], param['ub_d'], 1, 8, 8)
# get new w
tmp_scalar.set_as(img_info[1])
param['lgt_tensor'][0].set_as(tmp_scalar)
tmp_scalar.set_as(img_info[3])
self.instance.vec_muls(self.mask, param['ub_d'], param['ub_d'],
tmp_scalar, REPEAT_ONE, STRIDE_EIGHT,
STRIDE_EIGHT)
self.instance.vec_sel(self.mask, VALUE_ZERO, param['ret_tensor'], sel,
param['lgt_tensor'], param['ub_d'], STRIDE_ONE)
new_w.set_as(param['ret_tensor'][0])
# get new h
tmp_scalar.set_as(img_info[2])
self.instance.vec_muls(self.mask, param['ub_e'], param['ub_e'],
tmp_scalar, REPEAT_ONE, STRIDE_EIGHT,
STRIDE_EIGHT)
tmp_scalar.set_as(img_info[0])
param['lgt_tensor'][0].set_as(tmp_scalar)
self.instance.vec_sel(self.mask, VALUE_ZERO, param['ret_tensor'], sel,
param['ub_e'], param['lgt_tensor'], STRIDE_ONE)
new_h.set_as(param['ret_tensor'][0])
return new_h, new_w
def convert_biases_data(self, param):
"""
compute biases data
Parameters
----------
        param: a dict, the keys as follows:
               box_id: the id of the yolo output layer
biases: A list
Returns
-------
ub_bias: a tensor,store bias
"""
ub_bias = self.instance.Tensor(self.dtype, (VECTOR_BYTE_SIZE,),
scope=tik.scope_ubuf,
name="ub_bias")
t_scalar = self.instance.Scalar(self.dtype)
biases = self.input_dict['biases'][param['box_id'] - 1]
# set bias to ub
for i in range(0, self.boxes):
t_scalar.set_as(biases[2 * i])
ub_bias[2 * i].set_as(t_scalar)
t_scalar.set_as(biases[2 * i + 1])
ub_bias[2 * i + 1].set_as(t_scalar)
return ub_bias
def get_tiling_param(self, height, weight):
"""
        compute tiling parameters
Parameters
----------
height: box's height
weight: box's width
Returns
-------
mov_len: the number of elements of each loop
mov_loop: loop times
last_len: the number of elements of last loop
"""
max_size = self.one_max_size
mov_len = max_size // self.dsize
mov_loop = (height * weight) // (max_size // self.dsize)
mov_tail = height * weight - mov_len * mov_loop
mov_loop = mov_loop if mov_tail == 0 else mov_loop + 1
last_len = mov_len if mov_tail == 0 else mov_tail
return mov_len, mov_loop, last_len
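        # Worked example (added): with one_max_size = 65536 bytes and fp32 data
        # (dsize = 4), mov_len = 16384 elements; a 208 x 208 feature map
        # (43264 elements) yields mov_loop = 3 and last_len = 10496.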
def correct_box(self, batch, img_ub):
"""
compute correct_box
Parameters
----------
        batch: the index of the picture
img_ub: a tensor,store image info
Returns
-------
None
"""
out_offset = 0
for i in range(self.yolo_num):
with self.instance.new_stmt_scope():
self.handle_coords(i, batch, img_ub, out_offset)
out_offset += self.height[i] * self.width[i] * self.boxes
def handle_coords(self, idx, batch, img_ub, out_offset):
"""
compute box coords
Parameters
----------
        batch: the index of the picture
img_ub: a tensor,store image info
Returns
-------
None
"""
width = self.width[idx]
height = self.height[idx]
param = {}
param['out_offset'] = out_offset
param['w'] = self.width[idx]
param['h'] = self.height[idx]
param['in_data'] = self.coord_data[idx]
param['windex'] = self.windex[idx]
param['hindex'] = self.hindex[idx]
param['box_id'] = idx + 1
param['img_ub'] = img_ub
if width * height * self.dsize < self.one_max_size // 2:
self.small_surface_template(batch, param)
else:
self.big_surface_template(batch, param)
def big_surface_template(self, batch, param):
"""
compute big shape
Parameters
----------
        batch: the index of the picture
        param: a dict, the keys as follows:
mov_len: the number of elements of each data move
mov_loop: data move loop times
last_len: the number of elements of last_len data move
ub_bias: a tensor,store bias
x_vmuls_val: a scalar
x_vadds_val: a scalar
y_vmuls_val: a scalar
ub_a: a tensor,store middle compute data
ub_b: a tensor,store middle compute data
ub_c: a tensor,store middle compute data
last_32b: a tensor,store last_32b data
co_id: a scalar,store co_id
box_id: a scalar,store box_id
in_data: a tensor
Returns
-------
None
"""
param['mov_len'], param['mov_loop'], param[
'last_len'] = self.get_tiling_param(param['h'], param['w'])
param['ub_bias'] = self.convert_biases_data(param)
param['x_vmuls_val'], param['x_vadds_val'], param['y_vmuls_val'], param[
'y_vadds_val'] = self.get_x_y_params(param['img_ub'])
shape = self.one_max_size // self.dsize
with self.instance.for_range(0, self.boxes * self.coords) as cycle:
param['ub_a'] = self.instance.Tensor(self.dtype, (shape,),
scope=tik.scope_ubuf,
name="ub_a")
param['ub_b'] = self.instance.Tensor(self.dtype, (shape,),
scope=tik.scope_ubuf,
name="ub_b")
param['ub_c'] = self.instance.Tensor(self.dtype, (shape,),
scope=tik.scope_ubuf,
name="ub_c")
param['last_32b'] = self.instance.Tensor(self.dtype, (BLOCK_SIZE,),
scope=tik.scope_ubuf,
name="last_32b")
param['co_id'] = self.instance.Scalar()
param['box_id'] = self.instance.Scalar()
param['co_id'].set_as(cycle // self.boxes)
param['box_id'].set_as(cycle % self.boxes)
with self.instance.for_range(0, param['mov_loop']) as loop:
param['burlen'] = self.instance.Scalar(name="burlen")
repeat = self.instance.Scalar(name="repeat")
with self.instance.if_scope(loop == param['mov_loop'] - 1):
param['burlen'].set_as(self.get_burlen(param['last_len']))
repeat.set_as(self.get_repeat(param['last_len']))
with self.instance.else_scope():
param['burlen'].set_as(self.get_burlen(param['mov_len']))
repeat.set_as(self.get_repeat(param['mov_len']))
# move coords data to ub a
self.instance.data_move(param['ub_a'],
param['in_data'][
batch, cycle, param[
'mov_len'] * loop],
SID, NBURST_ONE,
param['burlen'], GAP_ZERO, GAP_ZERO)
self.compute_big_xy(batch, loop, param, repeat)
self.compute_big_hw(batch, loop, param, repeat)
def compute_big_hw(self, batch, loop, param, repeat):
"""
        compute big shape height and width
Parameters
----------
        batch: the index of the picture
        loop: loop index
        param: a dict, the keys as follows:
mov_len: the number of elements of each data move
mov_loop: data move loop times
last_len: the number of elements of last_len data move
ub_bias: a tensor,store bias
x_vmuls_val: a scalar
x_vadds_val: a scalar
y_vmuls_val: a scalar
ub_a: a tensor,store middle compute data
ub_b: a tensor,store middle compute data
ub_c: a tensor,store middle compute data
last_32b: a tensor,store last_32b data
co_id: a scalar,store co_id
box_id: a scalar,store box_id
in_data: a tensor
repeat: vector repeat times
Returns
-------
None
"""
tmp_scalar = self.instance.Scalar(self.dtype)
bias_value = self.instance.Scalar(self.dtype)
# h
with self.instance.if_scope(param['co_id'] == VALUE_TWO):
bias_value.set_as(
param['ub_bias'][VALUE_TWO * param['box_id'] + VALUE_ONE])
tmp_scalar.set_as(param['img_ub'][0])
self.instance.vec_dup(self.mask, param['ub_b'], tmp_scalar, repeat,
STRIDE_EIGHT)
self.instance.vec_exp(self.mask, param['ub_c'], param['ub_a'],
repeat, STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_muls(self.mask, param['ub_c'], param['ub_c'],
bias_value, repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.newton_div(param['ub_b'], param['ub_c'], param['ub_b'], repeat)
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
param['y_vmuls_val'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
if not self.relative:
tmp_scalar.set_as(param['img_ub'][2])
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
tmp_scalar, repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.data_mov_out(batch, loop, param)
# w
with self.instance.if_scope(param['co_id'] == VALUE_THREE):
bias_value.set_as(param['ub_bias'][VALUE_TWO * param['box_id']])
# img ub: neth,netw,scaleh,scalew
tmp_scalar.set_as(param['img_ub'][1])
self.instance.vec_dup(self.mask, param['ub_b'], tmp_scalar, repeat,
STRIDE_EIGHT)
self.instance.vec_exp(self.mask, param['ub_c'], param['ub_a'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_muls(self.mask, param['ub_c'], param['ub_c'],
bias_value, repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.newton_div(param['ub_b'], param['ub_c'], param['ub_b'], repeat)
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
param['x_vmuls_val'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
if not self.relative:
tmp_scalar.set_as(param['img_ub'][3])
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
tmp_scalar, repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.data_mov_out(batch, loop, param)
def compute_big_xy(self, batch, cycle, param, repeat):
"""
compute big shape of x,y
Parameters
----------
        batch: the index of the picture
        cycle: loop index
        param: a dict, the keys as follows:
mov_len: the number of elements of each data move
mov_loop: data move loop times
last_len: the number of elements of last_len data move
ub_bias: a tensor,store bias
x_vmuls_val: a scalar
x_vadds_val: a scalar
y_vmuls_val: a scalar
ub_a: a tensor,store middle compute data
ub_b: a tensor,store middle compute data
ub_c: a tensor,store middle compute data
last_32b: a tensor,store last_32b data
co_id: a scalar,store co_id
box_id: a scalar,store box_id
in_data: a tensor
repeat: vector repeat times
Returns
-------
None
"""
tmp_scalar = self.instance.Scalar(self.dtype)
# x
with self.instance.if_scope(param['co_id'] == VALUE_ZERO):
# move windex to ub b
self.instance.data_move(param['ub_b'],
param['windex'][cycle * param['mov_len']],
SID, NBURST_ONE, param['burlen'],
GAP_ZERO, GAP_ZERO)
# a = x + windex
self.instance.vec_add(self.mask, param['ub_a'], param['ub_a'],
param['ub_b'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT, STRIDE_EIGHT)
# a = (x + windex)*(1/lw)
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_a'],
(1.0 / param['w']), repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
param['x_vmuls_val'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_adds(self.mask, param['ub_b'], param['ub_b'],
param['x_vadds_val'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
if not self.relative:
tmp_scalar.set_as(param['img_ub'][3])
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
tmp_scalar, repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.data_mov_out(batch, cycle, param)
# y
with self.instance.if_scope(param['co_id'] == 1):
# move hindex to ub
self.instance.data_move(param['ub_b'],
param['hindex'][cycle * param['mov_len']],
SID,
NBURST_ONE,
param['burlen'],
GAP_ZERO, GAP_ZERO)
# a = y + hindex
self.instance.vec_add(self.mask, param['ub_b'], param['ub_a'],
param['ub_b'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT, STRIDE_EIGHT)
# a = (y + hindex)*(1/lh)
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
(1.0 / param['h']), repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
param['y_vmuls_val'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_adds(self.mask, param['ub_b'], param['ub_b'],
param['y_vadds_val'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
if not self.relative:
tmp_scalar.set_as(param['img_ub'][2])
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
tmp_scalar, repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.data_mov_out(batch, cycle, param)
def data_mov_out(self, batch, loop, param):
"""
move result to gm
Parameters
----------
        batch: the index of the picture
        loop: loop index
        param: a dict, the keys as follows:
mov_loop: data move loop times
burlen: data move nburst
h: height
        w: width
out_offset: a scalar,store out_offset
ub_a: a tensor,store middle compute data
ub_b: a tensor,store middle compute data
ub_c: a tensor,store middle compute data
last_32b: a tensor,store last_32b data
co_id: a scalar,store co_id
box_id: a scalar,store box_id
mov_len: the number of elements of each data move
Returns
-------
None
"""
if self.hwtail_len != VALUE_ZERO and param['h'] == self.height[-1] and \
param['w'] == self.width[-1]:
with self.instance.if_scope(loop == param['mov_loop'] - VALUE_ONE):
param['burlen'].set_as(param['burlen'] - VALUE_ONE)
with self.instance.if_scope(param['burlen'] > VALUE_ZERO):
self.instance.data_move(
self.inter_coords[batch, param['co_id'],
param['out_offset'] + param['w'] *
param['h'] * param['box_id'] +
param['mov_len'] * loop],
param['ub_b'], SID, NBURST_ONE,
param['burlen'], GAP_ZERO, GAP_ZERO)
param['burlen'].set_as(param['burlen'] + VALUE_ONE)
tail_idx = self.instance.Scalar(name="tail_idx")
tail_idx.set_as(param['last_len'] - self.len_32b)
self.instance.data_move(param['last_32b'], self.inter_coords[
batch, param['co_id'], param['out_offset'] + param['w'] * \
param['h'] * param['box_id'] + param['mov_len'] * loop + \
tail_idx],
SID, NBURST_ONE, VALUE_ONE, GAP_ZERO,
GAP_ZERO)
print("self.hwtail_len ", self.hwtail_len)
with self.instance.for_range(VALUE_ZERO,
self.hwtail_len) as cycle:
tmp_scalar = self.instance.Scalar(self.dtype)
tmp_scalar.set_as(param['ub_b'][param['last_len'] - \
self.hwtail_len + cycle])
param['last_32b'][self.len_32b - \
self.hwtail_len + \
cycle].set_as(tmp_scalar)
self.instance.data_move(self.inter_coords[batch, \
param['co_id'], param['out_offset'] + param['w'] * \
param['h'] * param['box_id'] + param['mov_len'] * \
loop + tail_idx], param['last_32b'], SID, NBURST_ONE,
VALUE_ONE, GAP_ZERO, GAP_ZERO)
with self.instance.else_scope():
dest = self.inter_coords[batch, param['co_id'], \
param['out_offset'] + \
param['w'] * param['h'] * \
param['box_id'] + \
param['mov_len'] * loop]
self.instance.data_move(dest, param['ub_b'], SID, NBURST_ONE,
param['burlen'], GAP_ZERO, GAP_ZERO)
else:
dest = self.inter_coords[batch, param['co_id'], param['out_offset'] \
+ param['w'] * param['h'] * param['box_id'] \
+ param['mov_len'] * loop]
self.instance.data_move(dest, param['ub_b'], SID, NBURST_ONE,
param['burlen'],
GAP_ZERO, GAP_ZERO)
def small_surface_template(self, batch, param):
"""
compute small shape
Parameters
----------
        batch: the index of the picture
        param: a dict, the keys as follows:
mov_loop: data move loop times
burlen: data move nburst
out_offset: a scalar,store out_offset
ub_a: a tensor,store middle compute data
ub_b: a tensor,store middle compute data
ub_c: a tensor,store middle compute data
last_32b: a tensor,store last_32b data
co_id: a scalar,store co_id
box_id: a scalar,store box_id
mov_len: the number of elements of each data move
x_vmuls_val: a scalar,store x_vmuls_val
y_vadds_val: a scalar,store y_vadds_val
y_vmuls_val: a scalar,store y_vmuls_val
adj_hw: a scalar,store adj_hw
Returns
-------
None
"""
param['ub_bias'] = self.convert_biases_data(param)
param['x_vmuls_val'], param['x_vadds_val'], param['y_vmuls_val'], param[
'y_vadds_val'] = self.get_x_y_params(param['img_ub'])
param['adj_hw'] = self.get_adj_hw(param['h'], param['w'])
param['faces_one_loop'], param['last_loop'], param[
'loop'] = self.get_faces_params(param['adj_hw'], 4 * self.boxes)
with self.instance.for_range(0, param['loop']) as loop_idx:
param['ub_a'] = self.instance.Tensor(self.dtype,
(
self.one_max_size // self.dsize,),
scope=tik.scope_ubuf,
name="ub_a")
param['ub_b'] = self.instance.Tensor(self.dtype,
(
self.one_max_size // self.dsize,),
scope=tik.scope_ubuf,
name="ub_b")
param['ub_c'] = self.instance.Tensor(self.dtype,
(
self.one_max_size // self.dsize,),
scope=tik.scope_ubuf,
name="ub_c")
param['last_32b'] = self.instance.Tensor(self.dtype,
(BLOCK_SIZE,),
scope=tik.scope_ubuf,
name="last_32b")
param['faces'] = self.instance.Scalar("int32")
with self.instance.if_scope(loop_idx != param['loop'] - 1):
param['faces'].set_as(param['faces_one_loop'])
with self.instance.else_scope():
param['faces'].set_as(param['last_loop'])
param['burlen'] = self.instance.Scalar()
param['burlen'].set_as(
(param['faces'] * param['adj_hw'] * self.dsize) // BLOCK_SIZE)
# move coords gm to ub_a
self.instance.data_move(param['ub_a'],
param['in_data'][
batch, param[
'faces_one_loop'] * loop_idx, 0],
SID,
NBURST_ONE, param['burlen'], GAP_ZERO,
GAP_ZERO)
with self.instance.for_range(0, param['faces'], thread_num=2) as cycle:
# Calculate the cindex.
start_idx = self.instance.Scalar()
start_idx.set_as(cycle * param['adj_hw'])
# Indicates the number of the box.
param['box_id'] = self.instance.Scalar()
param['box_id'].set_as(
(param['faces_one_loop'] * loop_idx + cycle) % self.boxes)
param['co_id'] = self.instance.Scalar()
param['co_id'].set_as(
(param['faces_one_loop'] * loop_idx + cycle) // self.boxes)
# burlen and repeat for move out
param['burlen'].set_as(self.get_burlen(param["h"]*param["w"]))
repeat = self.get_repeat(param["h"]*param["w"])
self.compute_small_xy(batch, param, repeat, start_idx)
self.compute_small_hw(batch, param, repeat, start_idx)
def compute_small_hw(self, batch, param, repeat, start_idx):
"""
        compute small shape of height and width
Parameters
----------
        batch: the index of the picture
        param: a dict, the keys as follows:
ub_a: a tensor,store middle compute data
ub_b: a tensor,store middle compute data
ub_c: a tensor,store middle compute data
last_32b: a tensor,store last_32b data
co_id: a scalar,store co_id
box_id: a scalar,store box_id
img_ub: a tensor,store img data
x_vmuls_val: a scalar,store x_vmuls_val
y_vmuls_val: a scalar,store y_vmuls_val
ub_bias: a tensor,store bias data
repeat: vector repeat times
start_idx: a scalar,store start_idx
Returns
-------
None
"""
tmp_scalar = self.instance.Scalar(self.dtype)
bias_value = self.instance.Scalar(self.dtype)
with self.instance.if_scope(param['co_id'] == VALUE_TWO):
bias_value.set_as(
param['ub_bias'][VALUE_TWO * param['box_id'] + VALUE_ONE])
tmp_scalar.set_as(param['img_ub'][0])
self.instance.vec_dup(self.mask, param['ub_b'], tmp_scalar, repeat,
STRIDE_EIGHT)
self.instance.vec_exp(self.mask, param['ub_c'],
param['ub_a'][start_idx], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_muls(self.mask, param['ub_c'], param['ub_c'],
bias_value, repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.newton_div(param['ub_b'], param['ub_c'], param['ub_b'],
repeat)
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
param['y_vmuls_val'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
if not self.relative:
tmp_scalar.set_as(param['img_ub'][2])
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
tmp_scalar, repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.t_small_mov_to_gm(batch, param)
with self.instance.if_scope(param['co_id'] == VALUE_THREE):
bias_value.set_as(param['ub_bias'][VALUE_TWO * param['box_id']])
tmp_scalar.set_as(param['img_ub'][1])
self.instance.vec_dup(self.mask, param['ub_b'], tmp_scalar, repeat,
STRIDE_EIGHT)
self.instance.vec_exp(self.mask, param['ub_c'],
param['ub_a'][start_idx], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_muls(self.mask, param['ub_c'], param['ub_c'],
bias_value, repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.newton_div(param['ub_b'], param['ub_c'], param['ub_b'],
repeat)
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
param['x_vmuls_val'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
if not self.relative:
tmp_scalar.set_as(param['img_ub'][3])
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
tmp_scalar, repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.t_small_mov_to_gm(batch, param)
def compute_small_xy(self, batch, param, repeat, start_idx):
"""
compute small shape of x,y
Parameters
----------
        batch: the index of the picture
        param: a dict, the keys as follows:
ub_a: a tensor,store middle compute data
ub_b: a tensor,store middle compute data
ub_c: a tensor,store middle compute data
last_32b: a tensor,store last_32b data
co_id: a scalar,store co_id
box_id: a scalar,store box_id
img_ub: a tensor,store img data
x_vmuls_val: a scalar,store x_vmuls_val
y_vmuls_val: a scalar,store y_vmuls_val
ub_bias: a tensor,store bias data
repeat: vector repeat times
start_idx: a scalar,store start_idx
Returns
-------
None
"""
tmp_scalar = self.instance.Scalar(self.dtype)
with self.instance.if_scope(param['co_id'] == VALUE_ZERO):
self.instance.data_move(param['ub_b'], param['windex'], SID,
NBURST_ONE, param['burlen'],
GAP_ZERO, GAP_ZERO)
self.instance.vec_add(self.mask, param['ub_b'], param['ub_a'][start_idx],
param['ub_b'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
(1.0 / param['w']), repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
param['x_vmuls_val'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_adds(self.mask, param['ub_b'], param['ub_b'],
param['x_vadds_val'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
if not self.relative:
tmp_scalar.set_as(param['img_ub'][3])
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
tmp_scalar, repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.t_small_mov_to_gm(batch, param)
with self.instance.if_scope(param['co_id'] == VALUE_ONE):
self.instance.data_move(param['ub_b'], param['hindex'], SID,
NBURST_ONE,
param['burlen'], GAP_ZERO, GAP_ZERO)
# a = y + hindex
self.instance.vec_add(self.mask, param['ub_b'], param['ub_a'][start_idx],
param['ub_b'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT, STRIDE_EIGHT)
# a = (y + hindex)*(1/lh)
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
(1.0 / param['h']), repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
param['y_vmuls_val'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.instance.vec_adds(self.mask, param['ub_b'], param['ub_b'],
param['y_vadds_val'], repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
if not self.relative:
tmp_scalar.set_as(param['img_ub'][2])
self.instance.vec_muls(self.mask, param['ub_b'], param['ub_b'],
tmp_scalar, repeat,
STRIDE_EIGHT, STRIDE_EIGHT)
self.t_small_mov_to_gm(batch, param)
|
[
"[email protected]"
] | |
bf096364438dde5831dbbf402a3bebf75076c262
|
a74b980fd95d5d810315f181449fc9d1710e6923
|
/savecode/threeyears/idownclient/scan/shodan/shodanbase.py
|
20cca24c06b1990ea28a1bb22d772c18772ab8d4
|
[
"Apache-2.0"
] |
permissive
|
cbbbbbbbb/sspywork
|
b70f5539203b47b21eec2f0514ddca155affc2b8
|
8f05a6b91fc205960edd57f9076facec04f49a1a
|
refs/heads/master
| 2023-03-22T19:45:13.024076 | 2021-03-08T01:24:21 | 2021-03-08T01:24:21 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 17,946 |
py
|
"""
We need to add the ability to download data for a specific country.
Previously this feature was a quick hack, but now it is genuinely needed,
so it is rewritten properly here,
and the shodan code is tidied up along the way.
"""
import re
import threading
import traceback
from abc import abstractmethod
import requests
from commonbaby.httpaccess import HttpAccess
from common.tools import sslparse
from idownclient.clientdatafeedback.scandatafeedback import (
PortInfo,
PortInfo_one,
HttpData,
SiteInfo_One,
Geoinfo,
)
from idownclient.config_spiders import shodanconf
from idownclient.scan.scanplugbase import ScanPlugBase
from .cmsfinger import cmsver
class ShodanBase(ScanPlugBase):
def __init__(self, task):
ScanPlugBase.__init__(self, task)
        # keep the parameters that rarely change in this constructor
self._basic_url = "https://api.shodan.io"
        self.apikey = shodanconf.get("apikey", None)
        if self.apikey is None:
            raise Exception("Shodan api key cannot be None")
        self.apikey = self.apikey.strip()
# cidr=net, app=product, ver=version, title=http.title
self.query = [
"after",
"asn",
"before",
"city",
"country",
"geo",
"has_ipv6",
"has_screenshot",
"hostname",
"isp",
"link",
"cidr",
"org",
"os",
"port",
"postal",
"app",
"state",
"ver",
"bitcoin.ip",
"bitcoin_ip_count",
"bitcoin.port",
"bitcoin.version",
"ip",
"http.component",
"http.component_category",
"http.html",
"http.status",
"title",
"ntp.ip",
"ntp.ip_count",
"ntp.more",
"ntp.port",
"ssl",
"ssl.alpn",
"ssl.chain_count",
"ssl.version",
"ssl.cert.alg",
"ssl.cert.expired",
"ssl.cert.extension",
"ssl.cert.serial",
"ssl.cert.pubkey.bits",
"ssl.cert.pubkey.type",
"ssl.cipher.version",
"ssl.cipher.bits",
"ssl.cipher.name",
"telnet.option",
"telnet.do",
"telnet.dont",
"telnet.will",
"telnet.wont",
]
        # retry count when a single page access fails
        self.errortimes = 0
        # number of consecutive pages that failed
        self.page_error_times = 0
        self.error_limit = shodanconf.get("error_times", 5)
        # switch for fetching the full data set; defaults to False, but this time all data is needed.
        # It is currently read straight from config; to expose the feature properly it should be
        # carried in the command. Keep it here for now and revisit later.
        self.__get_all_data_switch = shodanconf.get("get_all_data_switch", False)
        # file lock; take it around file-related operations
self.shodan_file_locker = threading.RLock()
self._cmsver_lower = {}
for k, v in cmsver.items():
self._cmsver_lower[k.lower()] = v
def _get_cmd_fileter(self, condition: list):
"""
        Get the query conditions from cmd.
        condition is the list of shodan filter keys allowed in the query;
        field-name conversion is done internally.
:return:
"""
filter_dict = self.task.cmd.stratagyscan.search.filter
        # build a special query string
query = ""
        # the product, if given, goes first
if filter_dict.__contains__("app"):
v = filter_dict.pop("app")
if v is not None and v != "":
query += f'"{v}"+'
for k, v in filter_dict.items():
if v != "" and v is not None and v is not False and k in condition:
                # no idea why an empty string gets passed even when there is no value; guard against it
                # convert field names internally to stay compatible with the standard search fields
if k == "cidr":
k = "net"
if k == "ver":
k = "version"
if k == "title":
k = "http.title"
                # boolean values need special handling
if v is True:
v = "true"
                # a few keys need special handling
if k == "ssl":
query += f"{k}+"
continue
query += f'{k}:"{v}"+'
return query.strip("+")
def _parse_geoinfo(self, asn, ginfodict: dict):
"""
        parse geoinfo
:param ginfodict:
:return:
"""
try:
ginfo = Geoinfo(asn)
city = {"names": {"en": ginfodict.get("city"), "zh-CN": None}}
            # shodan data is in English; these two fields don't seem to exist
# continent = {}
# province = {}
country = {
"code": ginfodict.get("country_code"),
"names": {"en": ginfodict.get("country_name"), "zh-CN": None},
}
location = {
"lat": ginfodict.get("latitude"),
"lon": ginfodict.get("longitude"),
}
ginfo.city = city
# ginfo.continent = continent
# ginfo.province = province
ginfo.country = country
ginfo.location = location
return ginfo
except:
self._logger.error(f"Get geoinfo error, err:{traceback.format_exc()}")
def _get_cms_ver(self, host: str, path: str, rgx: re.Pattern):
ver: str = None
try:
ha = HttpAccess()
# access home page to get cookie
url = "http://" + host.strip("/")
ha.getstring(
url,
headers="""
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9
Accept-Encoding: gzip, deflate
Accept-Language: en-US,en;q=0.9
Cache-Control: no-cache
Pragma: no-cache
Proxy-Connection: keep-alive
Upgrade-Insecure-Requests: 1
User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36""",
)
# access version page
url = "http://" + host.strip("/") + "/" + path.lstrip("/")
html = ha.getstring(
url,
headers="""
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9
Accept-Encoding: gzip, deflate
Accept-Language: en-US,en;q=0.9
Cache-Control: no-cache
Pragma: no-cache
Proxy-Connection: keep-alive
Upgrade-Insecure-Requests: 1
User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/81.0.4044.138 Safari/537.36""",
)
if html is None or html == "":
return ver
# <version>(.+)</version>
m: re.Match = re.search(rgx, html, re.S)
if m is None:
return ver
ver = m.group(1)
except Exception as e:
self._logger.error("Get joomla version faile: {} {}".format(host, e.args))
return ver
def _recognize_cms_ver(self, host: str, name: str) -> str:
"""recognize cms and version"""
ver: str = None
try:
if not self._cmsver_lower.__contains__(name.lower()):
return ver
path, rgx = self._cmsver_lower[name.lower()]
ver: str = self._get_cms_ver(host, path, rgx)
except Exception:
self._logger.error(
"Recognize cms err: host={} name={} err={}".format(
host, name, traceback.format_exc()
)
)
return ver
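        # Example (added, hypothetical cmsver entry): if cmsver contained
        #   {"Joomla": ("administrator/manifests/files/joomla.xml",
        #               r"<version>(.+)</version>")}
        # then _recognize_cms_ver("1.2.3.4", "Joomla") would fetch that path
        # over http and return the first captured group as the version.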
def _add_a_sit(self, ip, port):
"""
        try to fetch a site
"""
if int(port) == 443:
urls = [
f"https://{ip}/wordpress/wp-content/plugins/wp-file-manager/lib/php/connector.minimal.php",
f"https://{ip}/wordpress/wp-content/plugins/wp-file-manager/lib/php/connector.minimal.php",
]
else:
urls = [
f"http://{ip}/wordpress/wp-content/plugins/wp-file-manager/lib/php/connector.minimal.php",
f"http://{ip}/wordpress/wp-content/plugins/wp-file-manager/lib/php/connector.minimal.php",
]
try:
for url in urls:
try:
site_one = None
http_data = HttpData()
resp = requests.get(url, verify=False, timeout=5)
if resp.status_code == 200:
respheard = ""
for k, v in resp.headers.items():
respheard += f"{k}:{v}\n"
http_data.respbody = resp.text
http_data.respheader = respheard
site_one = SiteInfo_One(http_data)
site_one.site = url
print(f"Get {url} response success")
yield site_one
except:
print(f"Cannt connect to {url}")
except:
print(f"Cannt connect to url")
def _parse_siteone(self, port, banner, service, http: dict, ip):
"""
        parse a port_one entry
:return:
"""
pinf_one = None
try:
# port info
pinf_one = PortInfo_one(port, banner, service)
# site info
            # need to fetch http_data first
if http is not None:
http_data = HttpData()
http_data.respbody = http.get("html")
site_one = SiteInfo_One(http_data)
http_host = http.get("host")
site_one.site = http_host
site_one.location = http.get("location")
# site_one.server.append({'name': http.get('server'), 'version': None})
# site_one.waf.append({'name': http.get('waf'), 'version': None})
server = http.get("server")
if server is not None:
site_one.component.append(
{"name": server, "version": None, "category": "server"}
)
waf = http.get("waf")
if waf is not None:
site_one.component.append(
{"name": waf, "version": None, "category": "waf"}
)
# add redirects
redis = http.get("redirects", [])
for rs in redis:
site_one.redirects.append(rs.get("host") + rs.get("location", ""))
# ---------------------------------------------------------------------
# add favicon
favicon = http.get("favicon", {})
site_one.favicon = favicon
# ---------------------------------------------------------------
# component
cptinfo: dict = http.get("components", {})
for k, v in cptinfo.items():
categories = v.get("categories", [])
if len(categories) == 0 and k.lower() == "joomla":
categories = ["CMS"]
for el in categories:
version = None
if el.lower() == "cms":
ver = self._recognize_cms_ver(http_host, k)
if not ver is None:
version = ver
self._logger.debug(
"Got cms version: {}:{}".format(k, version)
)
site_one.component.extend(
[{"name": k, "version": version, "category": el}]
)
                # append site_one
pinf_one.append_siteinfo(site_one)
            # fetch data for specific targets (disabled)
# for aso in self._add_a_sit(ip, port):
# if aso is not None:
# pinf_one.append_siteinfo(aso)
except:
self._logger.error(f"Get a port info error, err:{traceback.format_exc()}")
return pinf_one
def _parse_port_vuln(self, vulns: dict) -> list:
"""
        parse the CVE vulnerability info inside port, with a bit of special handling;
        returns a list whose items each carry their CVE id
:param vulns:
:return:
"""
res = []
for k, v in vulns.items():
v["cve"] = k
res.append(v)
return res
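        # Example (added): {"CVE-2014-0160": {"cvss": 5.0}} becomes
        # [{"cvss": 5.0, "cve": "CVE-2014-0160"}].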
def _get_ssl_info(self, ssldata: str):
"""
        Get detailed ssl information. This shells out to the command line; if
        the program is not running inside a docker container it may fail to
        parse the ssl info, so try to run the program inside a container.
        openssl does the parsing, so access is serialized with a lock.
:param ssldata:
:return:
"""
res = ""
try:
with self.shodan_file_locker:
res = sslparse.parse_ssl_raw(ssldata)
except Exception:
self._logger.error(f"parser ssl error, err:{traceback.format_exc()}")
return res
def _parse_portinfo(self, dinfo: dict, hostnames, domains, ip, vulns=None):
"""
        parse portinfo
:param dinfo:
:param hostnames:
:param domains:
:param vulns:
:return:
"""
# port---------------------------------------------------------------------
pinf = PortInfo()
cpe: list = dinfo.get("cpe")
info = dinfo.get("info")
tags: list = dinfo.get("tags")
        # timestamp format is like 2019-08-14T23:32:22.144111
timestamp: str = dinfo.get("timestamp", None)
if timestamp is not None:
timestamp = timestamp.replace("T", " ")
app = dinfo.get("product")
banner = dinfo.get("data")
port = dinfo.get("port")
os = dinfo.get("os")
version = dinfo.get("version")
transport = dinfo.get("transport")
link = dinfo.get("link")
uptime = dinfo.get("uptime")
device = dinfo.get("devicetype")
        # port services
http: dict = dinfo.get("http", None)
ssl = dinfo.get("ssl", None)
ftp = dinfo.get("ftp", None)
ssh = dinfo.get("ssh", None)
opts = dinfo.get("opts", {})
service = dinfo.get("_shodan", {}).get("module")
        # -------------------------------------------------------------- grab whatever data is available here
        # instantiating port_one sets port, banner and service; this mainly parses siteone and returns a port_one object
pinf_one = self._parse_siteone(port, banner, service, http, ip)
if pinf_one is not None:
pinf_one.app = app
pinf_one.hostnames = hostnames
pinf_one.domains = domains
pinf_one.version = version
pinf_one.os = os
pinf_one.timestamp = timestamp
pinf_one.transport = transport
pinf_one.cpe = cpe
pinf_one.tags = tags
pinf_one.link = link
pinf_one.uptime = uptime
pinf_one.extrainfo = info
pinf_one.device = device
            # ------------------------------------------------ assign values to the port_one object
if ssl is not None:
pinf_one.append_sslinfo(ssl)
                # the ssl info also needs to be merged into the banner, hence this extra step
chain = ssl.get("chain", [])
for el in chain:
ssldata = el
sslinfo = self._get_ssl_info(ssldata)
banner += "\n"
banner += sslinfo
pinf_one.banner = banner
if ftp is not None:
pinf_one.append_ftpinfo(ftp)
if ssh is not None:
pinf_one.append_sshinfo(ssh)
if vulns is not None:
                # needs a bit of processing here; shodan-specific
v_data = self._parse_port_vuln(vulns)
pinf_one.append_vulns(v_data)
            # newly added: opts.screenshot
screenshot = opts.get("screenshot", None)
pinf_one.opt_set_screenshot(screenshot)
            # newly added: telnet, by judy 190814
telnet = opts.get("telnet", None)
pinf_one.opt_set_telnet(telnet)
            # ---------------------------------------- use the port object to append a port_one
pinf.append_one(pinf_one)
return pinf
def _download_data(self):
"""
继承base的下载接口,
但是这里可能需要分两步走了
:return:
"""
if not self.__get_all_data_switch:
            # search interface
for data in self._download_search_data():
yield data
else:
            # country-data download interface
for data in self._download_all_data():
yield data
@abstractmethod
def _download_search_data(self) -> iter:
"""
        Download the searched data.
        The volume is fairly small, but we must search for IPs and then fetch each IP's full data.
:return:
"""
return []
@abstractmethod
def _download_all_data(self) -> iter:
"""
        Download the complete data for one country.
        The data volume is fairly large.
:return:
"""
return []
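# --- Added sketch (illustration only): a minimal concrete subclass showing how
# the two abstract hooks might be filled in. The class name is hypothetical and
# the query/paging details are assumptions, not part of this module; only the
# /shodan/host/search endpoint is the standard Shodan REST API.
class ShodanSearchDemo(ShodanBase):
    def _download_search_data(self) -> iter:
        query = self._get_cmd_fileter(self.query)
        url = f"{self._basic_url}/shodan/host/search?key={self.apikey}&query={query}"
        resp = requests.get(url, timeout=30)
        for match in resp.json().get("matches", []):
            yield match

    def _download_all_data(self) -> iter:
        # A country-wide download would page through the same endpoint
        # (page=1, 2, ...) while honoring self.error_limit; omitted here.
        return []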
|
[
"[email protected]"
] | |
1351f5d54c8d7e52592aec025dbfeeaeec150d5f
|
89dedd7f3c7acc81d12e2bcb2e716f9af9e5fa04
|
/device/bluetooth/bluetooth.gyp
|
b90b4e8221d7a5518abf586078d0082a8db46bcb
|
[
"BSD-3-Clause"
] |
permissive
|
bino7/chromium
|
8d26f84a1b6e38a73d1b97fea6057c634eff68cb
|
4666a6bb6fdcb1114afecf77bdaa239d9787b752
|
refs/heads/master
| 2022-12-22T14:31:53.913081 | 2016-09-06T10:05:11 | 2016-09-06T10:05:11 | 67,410,510 | 1 | 3 |
BSD-3-Clause
| 2022-12-17T03:08:52 | 2016-09-05T10:11:59 | null |
UTF-8
|
Python
| false | false | 22,522 |
gyp
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chromium_code': 1,
},
'targets': [
{
# GN version: //device/bluetooth
'target_name': 'device_bluetooth',
'type': '<(component)',
'dependencies': [
'../../base/base.gyp:base',
'../../crypto/crypto.gyp:crypto',
'../../net/net.gyp:net',
'../../ui/base/ui_base.gyp:ui_base',
'bluetooth_strings.gyp:bluetooth_strings',
'uribeacon',
],
'defines': [
'DEVICE_BLUETOOTH_IMPLEMENTATION',
],
'sources': [
# Note: file list duplicated in GN build.
'android/bluetooth_jni_registrar.cc',
'android/bluetooth_jni_registrar.h',
'android/wrappers.cc',
'android/wrappers.h',
'bluetooth_adapter.cc',
'bluetooth_adapter.h',
'bluetooth_adapter_android.cc',
'bluetooth_adapter_android.h',
'bluetooth_adapter_factory.cc',
'bluetooth_adapter_factory.h',
'bluetooth_adapter_factory_wrapper.cc',
'bluetooth_adapter_factory_wrapper.h',
'bluetooth_adapter_mac.h',
'bluetooth_adapter_mac.mm',
'bluetooth_adapter_win.cc',
'bluetooth_adapter_win.h',
'bluetooth_advertisement.cc',
'bluetooth_advertisement.h',
'bluetooth_audio_sink.cc',
'bluetooth_audio_sink.h',
'bluetooth_channel_mac.mm',
'bluetooth_channel_mac.h',
'bluetooth_classic_device_mac.mm',
'bluetooth_classic_device_mac.h',
'bluetooth_classic_win.cc',
'bluetooth_classic_win.h',
'bluetooth_common.h',
'bluetooth_device.cc',
'bluetooth_device.h',
'bluetooth_device_android.h',
'bluetooth_device_android.cc',
'bluetooth_device_mac.mm',
'bluetooth_device_mac.h',
'bluetooth_device_win.cc',
'bluetooth_device_win.h',
'bluetooth_discovery_filter.cc',
'bluetooth_discovery_filter.h',
'bluetooth_discovery_manager_mac.mm',
'bluetooth_discovery_manager_mac.h',
'bluetooth_discovery_session.cc',
'bluetooth_discovery_session.h',
'bluetooth_discovery_session_outcome.h',
'bluetooth_gatt_characteristic.cc',
'bluetooth_gatt_characteristic.h',
'bluetooth_gatt_connection.cc',
'bluetooth_gatt_connection.h',
'bluetooth_gatt_descriptor.cc',
'bluetooth_gatt_descriptor.h',
'bluetooth_gatt_notify_session.cc',
'bluetooth_gatt_notify_session.h',
'bluetooth_gatt_service.cc',
'bluetooth_gatt_service.h',
'bluetooth_init_win.cc',
'bluetooth_init_win.h',
'bluetooth_l2cap_channel_mac.mm',
'bluetooth_l2cap_channel_mac.h',
'bluetooth_local_gatt_characteristic.cc',
'bluetooth_local_gatt_characteristic.h',
'bluetooth_local_gatt_descriptor.cc',
'bluetooth_local_gatt_descriptor.h',
'bluetooth_local_gatt_service.cc',
'bluetooth_local_gatt_service.h',
'bluetooth_low_energy_central_manager_delegate.mm',
'bluetooth_low_energy_central_manager_delegate.h',
'bluetooth_low_energy_defs_win.cc',
'bluetooth_low_energy_defs_win.h',
'bluetooth_low_energy_device_mac.h',
'bluetooth_low_energy_device_mac.mm',
'bluetooth_low_energy_discovery_manager_mac.h',
'bluetooth_low_energy_discovery_manager_mac.mm',
'bluetooth_low_energy_peripheral_delegate.mm',
'bluetooth_low_energy_peripheral_delegate.h',
'bluetooth_low_energy_win.cc',
'bluetooth_low_energy_win.h',
'bluetooth_remote_gatt_characteristic.cc',
'bluetooth_remote_gatt_characteristic.h',
'bluetooth_remote_gatt_characteristic_android.cc',
'bluetooth_remote_gatt_characteristic_android.h',
'bluetooth_remote_gatt_characteristic_mac.h',
'bluetooth_remote_gatt_characteristic_mac.mm',
'bluetooth_remote_gatt_characteristic_win.cc',
'bluetooth_remote_gatt_characteristic_win.h',
'bluetooth_remote_gatt_descriptor.cc',
'bluetooth_remote_gatt_descriptor.h',
'bluetooth_remote_gatt_descriptor_android.cc',
'bluetooth_remote_gatt_descriptor_android.h',
'bluetooth_remote_gatt_descriptor_win.cc',
'bluetooth_remote_gatt_descriptor_win.h',
'bluetooth_remote_gatt_service.cc',
'bluetooth_remote_gatt_service.h',
'bluetooth_remote_gatt_service_android.cc',
'bluetooth_remote_gatt_service_android.h',
'bluetooth_remote_gatt_service_mac.h',
'bluetooth_remote_gatt_service_mac.mm',
'bluetooth_remote_gatt_service_win.cc',
'bluetooth_remote_gatt_service_win.h',
'bluetooth_rfcomm_channel_mac.mm',
'bluetooth_rfcomm_channel_mac.h',
'bluetooth_service_record_win.cc',
'bluetooth_service_record_win.h',
'bluetooth_socket.cc',
'bluetooth_socket.h',
'bluetooth_socket_mac.h',
'bluetooth_socket_mac.mm',
'bluetooth_socket_net.cc',
'bluetooth_socket_net.h',
'bluetooth_socket_thread.cc',
'bluetooth_socket_thread.h',
'bluetooth_socket_win.cc',
'bluetooth_socket_win.h',
'bluetooth_task_manager_win.cc',
'bluetooth_task_manager_win.h',
'bluetooth_uuid.cc',
'bluetooth_uuid.h',
],
'conditions': [
['chromeos==1 or OS=="linux"', {
'conditions': [
['use_dbus==1', {
'defines': [
'DEVICE_BLUETOOTH_IMPLEMENTATION',
],
'sources': [
'bluez/bluetooth_adapter_bluez.cc',
'bluez/bluetooth_adapter_bluez.h',
'bluez/bluetooth_adapter_profile_bluez.cc',
'bluez/bluetooth_adapter_profile_bluez.h',
'bluez/bluetooth_advertisement_bluez.cc',
'bluez/bluetooth_advertisement_bluez.h',
'bluez/bluetooth_audio_sink_bluez.cc',
'bluez/bluetooth_audio_sink_bluez.h',
'bluez/bluetooth_device_bluez.cc',
'bluez/bluetooth_device_bluez.h',
'bluez/bluetooth_gatt_characteristic_bluez.cc',
'bluez/bluetooth_gatt_characteristic_bluez.h',
'bluez/bluetooth_gatt_connection_bluez.cc',
'bluez/bluetooth_gatt_connection_bluez.h',
'bluez/bluetooth_gatt_descriptor_bluez.cc',
'bluez/bluetooth_gatt_descriptor_bluez.h',
'bluez/bluetooth_gatt_service_bluez.cc',
'bluez/bluetooth_gatt_service_bluez.h',
'bluez/bluetooth_local_gatt_characteristic_bluez.cc',
'bluez/bluetooth_local_gatt_characteristic_bluez.h',
'bluez/bluetooth_local_gatt_descriptor_bluez.cc',
'bluez/bluetooth_local_gatt_descriptor_bluez.h',
'bluez/bluetooth_local_gatt_service_bluez.cc',
'bluez/bluetooth_local_gatt_service_bluez.h',
'bluez/bluetooth_pairing_bluez.cc',
'bluez/bluetooth_pairing_bluez.h',
'bluez/bluetooth_remote_gatt_characteristic_bluez.cc',
'bluez/bluetooth_remote_gatt_characteristic_bluez.h',
'bluez/bluetooth_remote_gatt_descriptor_bluez.cc',
'bluez/bluetooth_remote_gatt_descriptor_bluez.h',
'bluez/bluetooth_remote_gatt_service_bluez.cc',
'bluez/bluetooth_remote_gatt_service_bluez.h',
'bluez/bluetooth_service_attribute_value_bluez.cc',
'bluez/bluetooth_service_attribute_value_bluez.h',
'bluez/bluetooth_service_record_bluez.cc',
'bluez/bluetooth_service_record_bluez.h',
'bluez/bluetooth_socket_bluez.cc',
'bluez/bluetooth_socket_bluez.h',
'dbus/bluetooth_adapter_client.cc',
'dbus/bluetooth_adapter_client.h',
'dbus/bluetooth_le_advertising_manager_client.cc',
'dbus/bluetooth_le_advertising_manager_client.h',
'dbus/bluetooth_le_advertisement_service_provider.cc',
'dbus/bluetooth_le_advertisement_service_provider.h',
'dbus/bluetooth_agent_manager_client.cc',
'dbus/bluetooth_agent_manager_client.h',
'dbus/bluetooth_agent_service_provider.cc',
'dbus/bluetooth_agent_service_provider.h',
'dbus/bluetooth_dbus_client_bundle.cc',
'dbus/bluetooth_dbus_client_bundle.h',
'dbus/bluetooth_device_client.cc',
'dbus/bluetooth_device_client.h',
'dbus/bluetooth_gatt_application_service_provider.cc',
'dbus/bluetooth_gatt_application_service_provider.h',
'dbus/bluetooth_gatt_application_service_provider_impl.cc',
'dbus/bluetooth_gatt_application_service_provider_impl.h',
'dbus/bluetooth_gatt_attribute_helpers.cc',
'dbus/bluetooth_gatt_attribute_helpers.h',
'dbus/bluetooth_gatt_attribute_value_delegate.cc',
'dbus/bluetooth_gatt_attribute_value_delegate.h',
'dbus/bluetooth_gatt_characteristic_client.cc',
'dbus/bluetooth_gatt_characteristic_client.h',
'dbus/bluetooth_gatt_characteristic_delegate_wrapper.cc',
'dbus/bluetooth_gatt_characteristic_delegate_wrapper.h',
'dbus/bluetooth_gatt_characteristic_service_provider_impl.cc',
'dbus/bluetooth_gatt_characteristic_service_provider_impl.h',
'dbus/bluetooth_gatt_characteristic_service_provider.cc',
'dbus/bluetooth_gatt_characteristic_service_provider.h',
'dbus/bluetooth_gatt_descriptor_delegate_wrapper.cc',
'dbus/bluetooth_gatt_descriptor_delegate_wrapper.h',
'dbus/bluetooth_gatt_descriptor_client.cc',
'dbus/bluetooth_gatt_descriptor_client.h',
'dbus/bluetooth_gatt_descriptor_service_provider_impl.cc',
'dbus/bluetooth_gatt_descriptor_service_provider_impl.h',
'dbus/bluetooth_gatt_descriptor_service_provider.cc',
'dbus/bluetooth_gatt_descriptor_service_provider.h',
'dbus/bluetooth_gatt_manager_client.cc',
'dbus/bluetooth_gatt_manager_client.h',
'dbus/bluetooth_gatt_service_client.cc',
'dbus/bluetooth_gatt_service_client.h',
'dbus/bluetooth_gatt_service_service_provider_impl.cc',
'dbus/bluetooth_gatt_service_service_provider_impl.h',
'dbus/bluetooth_gatt_service_service_provider.cc',
'dbus/bluetooth_gatt_service_service_provider.h',
'dbus/bluetooth_input_client.cc',
'dbus/bluetooth_input_client.h',
'dbus/bluetooth_media_client.cc',
'dbus/bluetooth_media_client.h',
'dbus/bluetooth_media_endpoint_service_provider.cc',
'dbus/bluetooth_media_endpoint_service_provider.h',
'dbus/bluetooth_media_transport_client.cc',
'dbus/bluetooth_media_transport_client.h',
'dbus/bluetooth_profile_manager_client.cc',
'dbus/bluetooth_profile_manager_client.h',
'dbus/bluetooth_profile_service_provider.cc',
'dbus/bluetooth_profile_service_provider.h',
'dbus/bluez_dbus_client.h',
'dbus/bluez_dbus_manager.cc',
'dbus/bluez_dbus_manager.h',
'dbus/fake_bluetooth_adapter_client.cc',
'dbus/fake_bluetooth_adapter_client.h',
'dbus/fake_bluetooth_le_advertising_manager_client.cc',
'dbus/fake_bluetooth_le_advertising_manager_client.h',
'dbus/fake_bluetooth_le_advertisement_service_provider.cc',
'dbus/fake_bluetooth_le_advertisement_service_provider.h',
'dbus/fake_bluetooth_agent_manager_client.cc',
'dbus/fake_bluetooth_agent_manager_client.h',
'dbus/fake_bluetooth_agent_service_provider.cc',
'dbus/fake_bluetooth_agent_service_provider.h',
'dbus/fake_bluetooth_device_client.cc',
'dbus/fake_bluetooth_device_client.h',
'dbus/fake_bluetooth_gatt_application_service_provider.cc',
'dbus/fake_bluetooth_gatt_application_service_provider.h',
'dbus/fake_bluetooth_gatt_characteristic_client.cc',
'dbus/fake_bluetooth_gatt_characteristic_client.h',
'dbus/fake_bluetooth_gatt_characteristic_service_provider.cc',
'dbus/fake_bluetooth_gatt_characteristic_service_provider.h',
'dbus/fake_bluetooth_gatt_descriptor_client.cc',
'dbus/fake_bluetooth_gatt_descriptor_client.h',
'dbus/fake_bluetooth_gatt_descriptor_service_provider.cc',
'dbus/fake_bluetooth_gatt_descriptor_service_provider.h',
'dbus/fake_bluetooth_gatt_manager_client.cc',
'dbus/fake_bluetooth_gatt_manager_client.h',
'dbus/fake_bluetooth_gatt_service_client.cc',
'dbus/fake_bluetooth_gatt_service_client.h',
'dbus/fake_bluetooth_gatt_service_service_provider.cc',
'dbus/fake_bluetooth_gatt_service_service_provider.h',
'dbus/fake_bluetooth_input_client.cc',
'dbus/fake_bluetooth_input_client.h',
'dbus/fake_bluetooth_media_client.cc',
'dbus/fake_bluetooth_media_client.h',
'dbus/fake_bluetooth_media_endpoint_service_provider.cc',
'dbus/fake_bluetooth_media_endpoint_service_provider.h',
'dbus/fake_bluetooth_media_transport_client.cc',
'dbus/fake_bluetooth_media_transport_client.h',
'dbus/fake_bluetooth_profile_manager_client.cc',
'dbus/fake_bluetooth_profile_manager_client.h',
'dbus/fake_bluetooth_profile_service_provider.cc',
'dbus/fake_bluetooth_profile_service_provider.h',
],
'conditions': [
['OS=="linux"', {
'sources': [
'dbus/dbus_bluez_manager_wrapper_linux.cc',
'dbus/dbus_bluez_manager_wrapper_linux.h',
'dbus/dbus_thread_manager_linux.cc',
'dbus/dbus_thread_manager_linux.h',
]
}]
],
'dependencies': [
'../../build/linux/system.gyp:dbus',
'../../dbus/dbus.gyp:dbus',
],
'export_dependent_settings': [
'../../build/linux/system.gyp:dbus'
]
}, { # !use_dbus
'sources': [ 'bluetooth_adapter_stub.cc' ],
'conditions': [
['OS=="linux"', {
'sources': [
'dbus/dbus_bluez_manager_wrapper_linux.h',
'dbus/dbus_bluez_manager_wrapper_stub_linux.cc',
]
}],
]
}],
],
}],
['chromeos==1', {
'dependencies': [
'../../chromeos/chromeos.gyp:chromeos',
],
}],
['OS == "android"', {
'dependencies': [
'device_bluetooth_java',
'device_bluetooth_jni_headers',
],
}],
['OS=="win"', {
# The following two blocks are duplicated. They apply to static lib
# and shared lib configurations respectively.
'all_dependent_settings': { # For static lib, apply to dependents.
'msvs_settings': {
'VCLinkerTool': {
'DelayLoadDLLs': [
'BluetoothApis.dll',
# Despite MSDN stating that Bthprops.dll contains the
# symbols declared by bthprops.lib, they actually reside here:
'Bthprops.cpl',
'setupapi.dll',
],
'AdditionalDependencies': [
# Bthprops must be listed before BluetoothApis or else delay
# loading crashes.
'Bthprops.lib',
'BluetoothApis.lib',
],
},
},
},
'msvs_settings': { # For shared lib, apply to self.
'VCLinkerTool': {
'DelayLoadDLLs': [
'BluetoothApis.dll',
# Despite MSDN stating that Bthprops.dll contains the
# symbols declared by bthprops.lib, they actually reside here:
'Bthprops.cpl',
'setupapi.dll',
],
'AdditionalDependencies': [
# Bthprops must be listed before BluetoothApis or else delay
# loading crashes.
'Bthprops.lib',
'BluetoothApis.lib',
],
},
},
}],
['OS=="mac"', {
'link_settings': {
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/IOBluetooth.framework',
],
'conditions': [
['mac_sdk == "10.10"', {
'xcode_settings': {
# In the OSX 10.10 SDK, CoreBluetooth became a top level
# framework. Previously, it was nested in IOBluetooth. In
# order for Chrome to run on OSes older than OSX 10.10, the
# top level CoreBluetooth framework must be weakly linked.
'OTHER_LDFLAGS': [
'-weak_framework CoreBluetooth',
],
},
}],
],
},
}],
],
},
{
# GN version: //device/bluetooth/uribeacon
'target_name': 'uribeacon',
'type': 'static_library',
'dependencies': [
'../../base/base.gyp:base',
],
'sources': [
'uribeacon/uri_encoder.cc',
'uribeacon/uri_encoder.h'
]
},
{
# GN version: //device/bluetooth:mocks
'target_name': 'device_bluetooth_mocks',
'type': 'static_library',
'dependencies': [
'../../testing/gmock.gyp:gmock',
'device_bluetooth',
],
'include_dirs': [
'../../',
],
'sources': [
# Note: file list duplicated in GN build.
'test/mock_bluetooth_adapter.cc',
'test/mock_bluetooth_adapter.h',
'test/mock_bluetooth_advertisement.cc',
'test/mock_bluetooth_advertisement.h',
'test/mock_bluetooth_device.cc',
'test/mock_bluetooth_device.h',
'test/mock_bluetooth_discovery_session.cc',
'test/mock_bluetooth_discovery_session.h',
'test/mock_bluetooth_gatt_characteristic.cc',
'test/mock_bluetooth_gatt_characteristic.h',
'test/mock_bluetooth_gatt_connection.cc',
'test/mock_bluetooth_gatt_connection.h',
'test/mock_bluetooth_gatt_descriptor.cc',
'test/mock_bluetooth_gatt_descriptor.h',
'test/mock_bluetooth_gatt_notify_session.cc',
'test/mock_bluetooth_gatt_notify_session.h',
'test/mock_bluetooth_gatt_service.cc',
'test/mock_bluetooth_gatt_service.h',
'test/mock_bluetooth_socket.cc',
'test/mock_bluetooth_socket.h',
],
},
],
'conditions': [
['OS == "android"', {
'targets': [
{
'target_name': 'device_bluetooth_jni_headers',
'type': 'none',
'sources': [
'android/java/src/org/chromium/device/bluetooth/ChromeBluetoothAdapter.java',
'android/java/src/org/chromium/device/bluetooth/ChromeBluetoothDevice.java',
'android/java/src/org/chromium/device/bluetooth/ChromeBluetoothRemoteGattCharacteristic.java',
'android/java/src/org/chromium/device/bluetooth/ChromeBluetoothRemoteGattDescriptor.java',
'android/java/src/org/chromium/device/bluetooth/ChromeBluetoothRemoteGattService.java',
'android/java/src/org/chromium/device/bluetooth/Wrappers.java',
],
'variables': {
'jni_gen_package': 'device_bluetooth',
},
'includes': [ '../../build/jni_generator.gypi' ],
},
{
'target_name': 'device_bluetooth_java',
'type': 'none',
'dependencies': [
'../../base/base.gyp:base',
],
'variables': {
'java_in_dir': '../../device/bluetooth/android/java',
},
'includes': [ '../../build/java.gypi' ],
},
],
}],
['OS != "ios"', {
'targets': [
{
'target_name': 'bluetooth_interfaces_mojom',
'type': 'none',
'variables': {
'mojom_files': [
'public/interfaces/bluetooth_uuid.mojom',
],
'mojom_typemaps': [
'public/interfaces/bluetooth_uuid.typemap',
],
'use_new_wrapper_types': 'false',
},
'includes': [ '../../mojo/mojom_bindings_generator_explicit.gypi' ],
},
{
'target_name': 'bluetooth_interfaces_blink_mojom',
'type': 'none',
'variables': {
'mojom_files': [
'public/interfaces/bluetooth_uuid.mojom',
],
'for_blink': 'true',
'use_new_wrapper_types': 'false',
},
'includes': [ '../../mojo/mojom_bindings_generator_explicit.gypi' ],
},
{
'target_name': 'bluetooth_mojom',
'type': 'static_library',
'export_dependent_settings': [
'../../mojo/mojo_public.gyp:mojo_cpp_bindings',
],
'dependencies': [
'../../mojo/mojo_public.gyp:mojo_cpp_bindings',
'bluetooth_interfaces_blink_mojom',
'bluetooth_interfaces_mojom',
'device_bluetooth',
],
},
],
}],
],
}
|
[
"[email protected]"
] | |
619e77d5ddd1def133e212f22da11d5ffd178366
|
639b8dee007e50e64f1cbb94c2f06f114c7824c8
|
/config/config.py
|
25ecdf38a473f3adc73cbef56ebd86cde3e49f6a
|
[] |
no_license
|
mskoko/Voice-assistant
|
42f40b457f06a16e26b408e5a2368b0b69360634
|
d46f2cc13115c6e8261d8ce8c0534fb62692621f
|
refs/heads/master
| 2022-12-03T10:17:55.459744 | 2020-08-15T13:17:50 | 2020-08-15T13:17:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,990 |
py
|
# -*- coding: utf-8 -*-
import logging
import os
LOG_FILE = "voice_assistant.log"
logging.basicConfig(level=logging.DEBUG, filename=LOG_FILE,
format='%(asctime)s: %(levelname)s - %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p')
logger = logging
# logging - debug, info, warning, error, critical
PROVIDED_LANGUAGES = {
"english": "en-US",
"serbian": "sr-RS",
"default": "en-US"
}
LANGUAGES_IN_SERBIAN = os.path.abspath("data/languages/langs_in_serbian.json")
LANG_CODES = "data/languages/langs_codes.json"
RECOGNITION_METHODS = {
"bing": "recognize_bing",
"google": "recognize_google",
"google_cloud": "recognize_google_cloud",
"houndify": "recognize_houndify",
"ibm": "recognize_ibm",
"sphinx": "recognize_sphinx",
"wit": "recognize_wit",
"azure": "recognize_azure"
}
# tts audio config
PATH_TO_AUDIO_DIR = r"data/audio/"
DEFAULT_AUDIO_FILE = PATH_TO_AUDIO_DIR + "temporary.mp3"
# semantic processor
ENGLISH_DICTIONARY_PATH = "data/words/words-en.json"
SERBIAN_DICTIONARY_PATH = "data/words/words-sr.json"
# keywords
ENGLISH_KEYWORDS = "data/keywords/keywords-en.json"
SERBIAN_KEYWORDS = "data/keywords/keywords-sr.json"
LANG_KEYWORDS = {
"en": ENGLISH_KEYWORDS,
"sr": SERBIAN_KEYWORDS
}
# messages
CALL_MESSAGE = {"en": "I'm ready for a new command", "sr": "Spreman sam za novu komandu."}
# commands
COMMANDS = "data/commands/commands.json"
# TODO: refactor path variables to be in dictionary form for simpler usage
# action result statuses
SUCCESS = 1
FAIL = 0
FATAL = -1
# exception messages
GENERIC_MESSAGE_EN = "Some internal error occurred. Check the log for more details!"
GENERIC_MESSAGE_SR = "Došlo je do interne greške. Proverite log fajl za više detalja!"
EXCEPTION_MESSAGES = {
"KeyError": {
"en": GENERIC_MESSAGE_EN,
"sr": GENERIC_MESSAGE_SR
},
"TypeError": {
"en": GENERIC_MESSAGE_EN,
"sr": GENERIC_MESSAGE_SR
},
"ValueError": {
"en": GENERIC_MESSAGE_EN,
"sr": GENERIC_MESSAGE_SR
},
"AssertionError": {
"en": GENERIC_MESSAGE_EN,
"sr": GENERIC_MESSAGE_SR
},
"IndexError": {
"en": GENERIC_MESSAGE_EN,
"sr": GENERIC_MESSAGE_SR
},
"speech_recognition.UnknownValueError": {
"en": "Speech cannot be analyzed or recognized!",
"sr": "Vaš govor ne može biti obrađen ili prepoznat!"
},
"speech_recognition.RequestError": {
"en": "Request error problem. Check API limits and connectivity status!",
"sr": "Problemi sa slanjem zahteva. Proverite API limit i status mreže!"
},
# TODO: handle when error occurs in speaking module - how to inform user
"gtts.tts.gTTSError": {
"en": "I have a problem with speaking. Probably you reached out the API limit!",
"sr": "Imam problem sa govorom. Verovatno si probio API limit!"
},
"pyowm.exceptions.api_response_error.NotFoundError": {
"en": "The weather forecast cannot be estimated. I cannot find that location!",
"sr": "Ne mogu da procenim vremensku prognozu. Ne mogu da pronađem tu lokaciju!"
},
"pyowm.exceptions.api_call_error.APICallError": {
"en": "The weather forecast cannot be estimated. I cannot find that location!",
"sr": "Ne mogu da procenim vremensku prognozu. Ne mogu da pronađem tu lokaciju!"
},
"smtplib.SMTPAuthenticationError":
{
"en": "There is some problem with email authentication. Check your email address credentials.",
"sr": "Došlo je do problema sa autentifikacijom email naloga. Proveri kredencijale."
},
"smtplib.SMTPNotSupportedError": {
"en": "There is some problem with email settings configuration.",
"sr": "Postoje određeni problemi sa podešavanjima emaila."
},
"smtplib.SMTPHeloError": {
"en": "There is some problem with email settings configuration.",
"sr": "Postoje određeni problemi sa podešavanjima emaila."
},
"smtplib.SMTPDataError": {
"en": "There is some problem with email settings configuration.",
"sr": "Postoje određeni problemi sa podešavanjima emaila."
},
"smtplib.SMTPConnectError": {
"en": "There is some problem with an email connection.",
"sr": "Postoje određeni problemi sa konekcijom ka emaila serveru."
},
"smtplib.SMTPServerDisconnected": {
"en": "There is some problem with an email connection.",
"sr": "Postoje određeni problemi sa konekcijom ka emaila serveru."
},
"smtplib.SMTPSenderRefused": {
"en": "Sender's email settings are not valid.",
"sr": "Postoje određeni problemi sa podešavanjima emaila pošiljaoca."
},
"smtplib.SMTPRecipientsRefused": {
"en": "Recipient's email settings are not valid. Check the recipient's email address.",
"sr": "Postoje određeni problemi sa podešavanjima emaila primaoca. Proveri da li si uneo validnu adresu."
},
"wikipedia.exceptions.PageError": {
"en": "I cannot find anything on Wikipedia that suits your query. Try another one or try again with more precise"
" speech.",
"sr": "Nisam uspeo da nađem ništa na Vikipediji što odgovara tvom zahtevu. Probaj sa nekim drugim zahtevom ili probaj"
"ponovo, ali probaj da budeš precizniji u govoru."
},
"exceptions.exceptions.GoogleSearchException": {
"en": "Google search cannot find anything that suits your query.",
"sr": "Gugl pretraga nije našla ništa što odgovara tvom upitu."
},
"exceptions.exceptions.VoiceAssistantException": {
"en": "Fatal error. The application could not proceed.",
"sr": "Došlo je do fatalne interne greške. Aplikacija ne može nastaviti sa radom."
},
"exception": {
"en": GENERIC_MESSAGE_EN,
"sr": GENERIC_MESSAGE_SR
}
}
# credentials
OWM_API_KEY = "5ab8013f8b3c54d28b8f8035ffd40f0a"
OMDB_API_KEY = "56674ea0"
PROXY_MAIL = "[email protected]"
MAIL_PASSWORD = "mizcechlykbgsfhx"
# weather params
# NOTE: only some of params are included (most important ones)
WEATHER_PARAMS = {"clouds", "detailed_status", "dewpoint", "heat_index", "humidex", "humidity", "pressure", "rain",
"reference_time", "snow", "status", "sunrise_time", "sunset_time", "temperature",
"visibility_distance", "weather_code", "weather_icon_name", "weather_icon_url", "wind"}
# format <name in json response>: (json_subvalue or alias, child/alias, display name)
# hr stands for Croatian language because the OWM API doesn't support Serbian, so instead of Serbian (sr),
# Croatian language is used (hr)
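# e.g. "wind": ("speed", "child", ...) presumably means the value is read from
# the nested field response["wind"]["speed"], whereas "alias" entries are read
# directly under their own key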
WEATHER_PARAMETERS = {
"clouds": ("clouds", "alias", {"en": "clouds", "hr": "oblačnost"}),
"pressure": ("press", "child", {"en": "pressure", "hr": "pritisak"}),
"wind": ("speed", "child", {"en": "wind speed", "hr": "brzina vetra"}),
# "snow":("snow", "child", ),
"humidity": ("humidity", "alias", {"en": "humidity", "hr": "vlažnost vazduha"}),
"temperature_min": ("temp_min", "child", {"en": "minimum temperature", "hr": "minimalna dnevna temperatura"}),
"temperature_max": ("temp_max", "child", {"en": "maximum temperature", "hr": "maksimalna dnevna temperatura"}),
"temperature": ("temp", "child", {"en": "temperature", "hr": "prosečna dnevna temperatura"}),
"detailed_status": ("detailed_status", "alias", {"en": "detailed status", "hr": "detaljniji opis"}),
"reference_time": ("reference_time", "alias", {"en": "reference time", "hr": "trenutak merenja"})
# "rain":{}
}
# social networks urls
FACEBOOK_BASE_URL = "https://www.facebook.com/"
TWITTER_BASE_URL = "https://twitter.com/"
INSTAGRAM_BASE_URL = "https://instagram.com/"
LINKEDIN_BASE_URL = "https://www.linkedin.com/in/"
# serial communication params
SERIAL_PORT = "/dev/ttyUSB0"
DEFAULT_BAUD_RATE = 9600
|
[
"[email protected]"
] | |
754374375b34782f7c5e7f47fac2212c270272c6
|
cfdf0fde6786c160e059502d3c4d5f679d63867e
|
/myApp/apiApp/management/commands/fetch.py
|
d10c7e295cf0280c2ed66fa3d529329481a42c20
|
[
"MIT"
] |
permissive
|
mdmarjanhossain0/Django-Demo-for-client
|
30a89989c1e5446175377c174024ddc02cb3c660
|
5f345b2c9ec35f4d7210ce0de5fc7c2f36a33427
|
refs/heads/main
| 2023-07-13T23:38:34.992579 | 2021-08-18T13:54:19 | 2021-08-18T13:54:19 | 397,874,162 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,162 |
py
|
from django.core.management.base import BaseCommand
import django
import os
import requests
from apiApp import models
class Command(BaseCommand):
help = 'Fetch Data From API.'
def handle(self, *args, **kwargs):
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myApp.settings")
django.setup()
response = requests.get('https://restcountries.eu/rest/v2/all').json()
for data in response:
try:
table = models.ApiApp()
table.name = data.get('name')
                table.alpha2code = data.get('alpha2Code')  # REST Countries v2 returns camelCase keys
                table.alpha3code = data.get('alpha3Code')
table.capital = data.get('capital')
table.population = data.get('population')
table.timezones = data.get('timezones')
table.languages = data.get('languages')
table.borders = data.get('borders')
table.save()
print("Please Wait...")
except Exception as ex:
print(ex)
# print("Could Not Save: ", data.get('name'))
self.stdout.write("Action Completed!")
|
[
"[email protected]"
] | |
7d7df800f2f943ea6eb981ce5a811699b8b95cb8
|
21e1d00c48c1732cc44af077572299831b93ffc2
|
/PROBLEM_SOLVING/CODE_FORCE/P_200B.py
|
6fab219e0a60ddaf6b771195f4745a2e57bdf4c1
|
[] |
no_license
|
GolamRabbani20/PYTHON-A2Z
|
7be72041407e4417359b3a610ced0919f3939993
|
7c89223f253aa559fa15caacb89c68e0b78ff915
|
refs/heads/master
| 2023-05-09T00:43:03.012963 | 2021-05-26T07:56:56 | 2021-05-26T07:56:56 | 317,953,879 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 74 |
py
|
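# Codeforces 200B "Drinks": print the average of n integer percentages.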
n=int(input())
f=input().split()
s=0
for i in f:
s=s+int(i)
print(s/n)
|
[
"[email protected]"
] | |
8cb7bea7bd989fe62805917eb6e5763389439f49
|
a4ea28288898ff957d69f4fff2ea353aac09ec48
|
/commander/thirdparty/covertutils/handlers/responseonly.py
|
486eba43da1dfcdee6de07212316f213e5670f51
|
[
"Apache-2.0"
] |
permissive
|
how2how/ToyHome
|
d520b453b17a2bcd08e204cf209333cb02d2a3cf
|
4457b1d28e21ed6fd4ab980a0f7fed345c570ae3
|
refs/heads/master
| 2021-06-19T16:51:44.116241 | 2019-09-18T16:43:26 | 2019-09-18T16:43:26 | 148,563,602 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,364 |
py
|
from abc import ABCMeta, abstractmethod
from covertutils.handlers import BaseHandler
from covertutils.helpers import defaultArgMerging
class ResponseOnlyHandler( BaseHandler ) :
"""
This handler doesn't send messages with the `sendAdHoc` method. It implements a method `queueSend` to queue messages, and send them only if it is queried with a `request_data` message.
Can be nicely paired with :class:`covertutils.handlers.InterrogatingHandler` for a Client-Server approach.
"""
__metaclass__ = ABCMeta
Defaults = {'request_data' : 'X'}
def __init__( self, recv, send, orchestrator, **kw ) :
"""
:param str request_data: The data that, when received as message, a stored chunk will be sent.
"""
super(ResponseOnlyHandler, self).__init__( recv, send, orchestrator, **kw )
arguments = defaultArgMerging( self.Defaults, kw )
self.request_data = arguments['request_data']
self.preferred_send = self.queueSend
def onMessage( self, stream, message ) :
beacon = (message == self.request_data) # got a beacon message?
# if beacon :
# print "List of messages '%s' " % self.to_send_list
# if not self.readifyQueue() : return False
self.readifyQueue()
# print "Raw packets pending: %s" % len(self.to_send_raw)
if self.to_send_raw :
to_send = self.to_send_raw.pop(0)
self.send_function( to_send )
return True
return False
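
# Minimal usage sketch (hypothetical wiring; recv, send and orchestrator come
# from the surrounding covertutils setup, and the exact queueSend signature is
# assumed):
#   handler = MyResponseOnlyHandler(recv, send, orchestrator)
#   handler.queueSend("payload")   # queued, not sent yet
#   # the chunk is released only when a peer message equal to
#   # handler.request_data ("X" by default) arrives via onMessage().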
|
[
"[email protected]"
] | |
4482823c9b7d35b19f0b0ea846cfe8c197e4ca64
|
1c751c001357d23fe10e7a42490e3b76434dfa18
|
/tools/py/zzcat.py
|
ba0a398a4f941704bfc9fae8f1c75e1a41730fa8
|
[] |
no_license
|
pie-crust/etl
|
995925199a71b299544bfac1ed8f504f16fbadc2
|
14b19b542eaa69b8679ce7df4d9a5d2720b3c5c7
|
refs/heads/master
| 2022-12-12T18:40:31.866907 | 2019-10-14T15:46:16 | 2019-10-14T15:46:16 | 215,082,544 | 0 | 0 | null | 2022-12-08T05:22:54 | 2019-10-14T15:43:04 |
Python
|
UTF-8
|
Python
| false | false | 1,180 |
py
|
#!~/python27/bin/python
#s3://home-pmt-accounting-dev/racct/DY_Position_SD/file_0_100.2019-06-17.13_39_12.IQ.csv.gz test.csv.gz
from boto.s3.connection import S3Connection
import gzip
import csv
import io
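# One-shot read wrapper: once the underlying S3 key runs out of data, every
# further read() returns b'' so stream consumers see a clean, stable EOF.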
class ReadOnce(object):
def __init__(self, k):
self.key = k
self.has_read_once = False
def read(self, size=0):
if self.has_read_once:
return b''
data = self.key.read(size)
if not data:
self.has_read_once = True
return data
class ReadFromS3:
def __init__(self, options):
conn = S3Connection()
self.bucket = conn.get_bucket(options['s3']['bucket'], validate=False)
def stream_file(self, file):
key = self.bucket.get_key(file)
gz_file = gzip.GzipFile(fileobj=key, mode='r')
#print gz_file.read(100)
reader = csv.DictReader(io.TextIOWrapper(
gz_file, newline="", encoding="utf-8-sig"), delimiter='\t')
#for line in reader:
# print(line)
def main(options):
tsr = ReadFromS3(options)
    tsr.stream_file('racct/DY_Position_SD/file_0_100.2019-06-17.13_39_12.IQ.csv.gz')
if __name__ == "__main__":
options = {
's3':{
'user': 's_dev_racct',
#'key': ,
'bucket':'home-pmt-accounting-dev',
}
}
main(options)
|
[
"[email protected]"
] | |
8ea1f65d0ffcdf98d13b393d63c9cd93dfc1f5dd
|
ffb05b145989e01da075e2a607fb291955251f46
|
/pypers/meta/fill.py
|
ec92c0ba2e423d9f2c8c9731e9f9214ccb389d96
|
[] |
no_license
|
micheles/papers
|
a5e7f2fa0cf305cd3f8face7c7ecc0db70ce7cc7
|
be9070f8b7e8192b84a102444b1238266bdc55a0
|
refs/heads/master
| 2023-06-07T16:46:46.306040 | 2018-07-14T04:17:51 | 2018-07-14T04:17:51 | 32,264,461 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 100 |
py
|
import textwrap
text=file('meta.txt').read()
print textwrap.fill(text,70,replace_whitespace=False)
|
[
"[email protected]"
] | |
1259e96f551d528f6556f7d5d26d78d353838ab1
|
6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4
|
/YqeS2Ta52jyjS7cD7_4.py
|
b358e4ae709bb1d13c75abcf0c661fd77ecf4fc0
|
[] |
no_license
|
daniel-reich/ubiquitous-fiesta
|
26e80f0082f8589e51d359ce7953117a3da7d38c
|
9af2700dbe59284f5697e612491499841a6c126f
|
refs/heads/master
| 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 134 |
py
|
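# Trial-division primality test: 0 and 1 are not prime; any divisor in [2, n) rules n out.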
def is_prime(n):
if n == 0 or n == 1:
return False
for i in range(2,n):
if n % i == 0:
return False
return True
|
[
"[email protected]"
] | |
3e3fdbd3a6d4194fc93724c838eeb04603f31191
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/17/usersdata/71/6755/submittedfiles/lecker.py
|
f80187761dd3724711eed58af6e9bd198d3cdcc8
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 399 |
py
|
# -*- coding: utf-8 -*-
from __future__ import division
import math
a = input("Insira o valor de a: ")
b = input("Insira o valor de b: ")
c = input("Insira o valor de c: ")
d = input("Insira o valor de d: ")
if a>b and b>=c and c>=d:
print("S")
elif a<=b and b>c and c>=d:
print("S")
elif a<=b and b<c and c>d:
print("S")
elif a<=b and b<=c and c<d:
print("S")
else:
print("N")
|
[
"[email protected]"
] | |
b94bcefd6e49ffebaa03bbf8ba22b389bd93f07b
|
e511cdd3114e0d3611422e5f82eef7fc0f839402
|
/413.total_energy.py
|
afb959cb79900dd3b66005a4a6c18d96c9ef3b23
|
[] |
no_license
|
GiantMolecularCloud/NGC253-outflow-at-2pc
|
46898bb4d93ed1f30f5d24e6ee7d7efbe1d79a20
|
b7ee55cc2f4bebf5af8912a662765d5ceed16977
|
refs/heads/main
| 2023-02-26T23:32:05.408835 | 2021-02-05T16:39:22 | 2021-02-05T16:39:22 | 336,310,328 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 10,743 |
py
|
from __future__ import print_function
########################
# CASA ANALYSIS SCRIPT #
########################
# Get estimates for various quantities for disk (inside disk mask) and the rest (mainly outflows).
###################################################################################################
# import required modules
execfile('scripts/casa_imports.py')
execfile('NGC253/project_info.py')
###################################################################################################
# load sub project info
execfile(os.path.join(projectdir, 'info.py'))
os.system('mkdir -p '+ratedir)
###################################################################################################
# find major axis
#################
# Get the y coordinate that corresponds to the major axis.
def find_major_axis(dataset):
im = fits.open(os.path.join(ratedir, 'diskfit.total_model.regrid_'+dataset['line']+'.'+ax_type+'.rotate.fits'))[0]
im_wcs = WCS(im.header)
x,y = im_wcs.all_world2pix(kin_center.ra.value, kin_center.dec.value,1)
major_axis = int(np.round(y))
return major_axis
###################################################################################################
# get energy cube
#################
def get_energy_cube(dataset, ax_type='major', SNR=3.0):
# load data
mass_cube = fits.open(os.path.join(ratedir, dataset['cube']+'.ppV_mask_'+ax_type+'.non-disk.'+str(SNR)+'s.mass_cube.fits'))[0]
model = fits.open(os.path.join(ratedir, 'diskfit.total_model.regrid_'+dataset['line']+'.'+ax_type+'.rotate.fits'))[0]
# get pixel position of major axis
ma_y = find_major_axis(dataset)
# pixel to km conversion
pix_scale = u.Quantity(str(np.abs(mass_cube.header['cdelt1']))+mass_cube.header['cunit1'])
pix_to_km = ((distance*np.sin(pix_scale)).to(u.km)).value
# cube to store mass outflow rates
energy_cube = np.full_like(mass_cube.data, np.nan)
energy_cube_p = np.full_like(mass_cube.data, np.nan)
energy_cube_m = np.full_like(mass_cube.data, np.nan)
energy_header = copy.deepcopy(mass_cube.header)
del energy_header['history']
energy_header['bunit'] = 'erg'
# cube extent
nv, ny, nx = energy_cube.shape
for v in np.arange(nv):
velo = (u.Quantity(str((v-mass_cube.header['crpix3'])*mass_cube.header['cdelt3']+mass_cube.header['crval3'])+mass_cube.header['cunit3']).to(u.km/u.s)).value # km/s
for y in np.arange(ny):
for x in np.arange(nx):
print(dataset['line']+" "+ax_type+" "+str(SNR)+"sigma : pixel ["+str(v)+","+str(x)+","+str(y)+"] of ["+str(nv)+","+str(nx)+","+str(ny)+"]", end='\r')
mass = mass_cube.data[v,y,x]*1.98847542e+33 # Msun -> g
modv = model.data[ma_y,x]*1e5 # km/s -> cm/s
modv_p = model.data[ma_y+25,x]*1e5 # km/s -> cm/s
modv_m = model.data[ma_y-25,x]*1e5 # km/s -> cm/s
                # energies are stored in units of 1e51 erg so that the values fit in a 32-bit FITS image
energy = 0.5*mass*np.abs(velo-modv)**2 /1e51
energy_p = 0.5*mass*np.abs(velo-modv_p)**2 /1e51
energy_m = 0.5*mass*np.abs(velo-modv_m)**2 /1e51
energy_cube[v,y,x] = energy
energy_cube_p[v,y,x] = energy_p
energy_cube_m[v,y,x] = energy_m
print("\n")
# mask inf values if they happen somehow
energy_cube[np.isinf(energy_cube)] = np.nan
energy_cube_p[np.isinf(energy_cube_p)] = np.nan
energy_cube_m[np.isinf(energy_cube_m)] = np.nan
fits.writeto(os.path.join(ratedir, dataset['cube']+'.ppV_mask_'+ax_type+'.non-disk.'+str(SNR)+'s.energy_cube.fits'), data=energy_cube, header=energy_header, overwrite=True)
fits.writeto(os.path.join(ratedir, dataset['cube']+'.ppV_mask_'+ax_type+'.non-disk.'+str(SNR)+'s.energy_cube_p.fits'), data=energy_cube_p, header=energy_header, overwrite=True)
fits.writeto(os.path.join(ratedir, dataset['cube']+'.ppV_mask_'+ax_type+'.non-disk.'+str(SNR)+'s.energy_cube_m.fits'), data=energy_cube_m, header=energy_header, overwrite=True)
###################################################################################################
# get integrated outflow rate
#############################
def integrate_energy(dataset, ax_type='major', SNR=3.0):
energy_cube = fits.open(os.path.join(ratedir, dataset['cube']+'.ppV_mask_'+ax_type+'.non-disk.'+str(SNR)+'s.energy_cube.fits'))[0]
energy_cube_p = fits.open(os.path.join(ratedir, dataset['cube']+'.ppV_mask_'+ax_type+'.non-disk.'+str(SNR)+'s.energy_cube_p.fits'))[0]
energy_cube_m = fits.open(os.path.join(ratedir, dataset['cube']+'.ppV_mask_'+ax_type+'.non-disk.'+str(SNR)+'s.energy_cube_m.fits'))[0]
energy = np.nansum(energy_cube.data)
energy_p = np.nansum(energy_cube_p.data)
energy_m = np.nansum(energy_cube_m.data)
return [energy,energy_p,energy_m]
###################################################################################################
# get integrated outflow rate with defined region
####################################################
def integrate_energy_region(dataset, mask_file, ax_type='major', SNR=3.0):
randname = 'temp.'+str(int(np.random.rand()*1e4))
# regrid mask to match the cube
imregrid(imagename = mask_file,
template = os.path.join(dnddir, dataset['cube']+'.ppV_mask_'+ax_type+'.non-disk.3.0s.mom1'),
output = os.path.join(ratedir, randname+'.mask.regrid'),
overwrite = True
)
# rotate mask to match the rotated cube
rotate_by = str( 90*u.degree - disk_PA )
os.system('rm -rf '+os.path.join(ratedir, randname+'.mask.rotate'))
ia.open(os.path.join(ratedir, randname+'.mask.regrid'))
temp = ia.rotate(pa = rotate_by,
outfile = os.path.join(ratedir, randname+'.mask.rotate'),
overwrite = True
)
temp.close()
ia.done()
exportfits(imagename = os.path.join(ratedir, randname+'.mask.rotate'),
fitsimage = os.path.join(ratedir, randname+'.mask.rotate.fits'),
velocity = True,
optical = True,
dropstokes = True,
dropdeg = True,
overwrite = True
)
# multiply each channel with the mask
energy_cube = fits.open(os.path.join(ratedir, dataset['cube']+'.ppV_mask_'+ax_type+'.non-disk.'+str(SNR)+'s.energy_cube.fits'))[0]
mask = fits.open(os.path.join(ratedir, randname+'.mask.rotate.fits'))[0]
masked_rate = copy.deepcopy(energy_cube.data)
nv,ny,nx = energy_cube.data.shape
for v in np.arange(nv):
masked_rate[v] *= mask.data
os.system('rm -rf '+os.path.join(ratedir, randname+'.mask.rotate'))
os.system('rm -rf '+os.path.join(ratedir, randname+'.mask.rotate.fits'))
os.system('rm -rf '+os.path.join(ratedir, randname+'.mask.regrid'))
energy = np.nansum(masked_rate)
return energy
###################################################################################################
# get outflow energy
####################
# At 100sigma there are only a few very small regions of non-disk emission left in the CO(3-2) cube
# which cannot be rotated. Rotation loses emission by resampling the data leading to an empty image
# in that case. Thus try/except statements are needed to keep the code running.
# generate energy cubes in parallel
# This is pure python and can be done in parallel
def parallel_energy_cubes(inp):
try:
print("Executing: "+inp[0]['line']+" "+inp[1]+" "+str(inp[2]))
get_energy_cube(inp[0], ax_type=inp[1], SNR=inp[2])
except:
print("Failed: "+inp[0]['line']+" "+inp[1]+" "+str(inp[2]))
for dataset in datasets:
# for ax_type in ['major','minor']:
for ax_type in ['major']:
plist = [[dataset, ax_type, SNR] for SNR in SNRs]
pool = Pool(len(SNRs))
pool.map(parallel_energy_cubes, plist)
pool.close()
# sequentially read in energies
energies = {}
for dataset in datasets:
energies[dataset['line']] = {}
for ax_type in ['major','minor']:
energies[dataset['line']][ax_type] = {}
for SNR in SNRs:
try:
E, E_p, E_m = integrate_energy(dataset, ax_type=ax_type, SNR=SNR)
E_real_outflow = integrate_energy_region(dataset, os.path.join(ratedir, 'nondisk.'+dataset['line']+'.real_outflow_new.mask'), ax_type=ax_type, SNR=SNR)
E_real_superbubble = integrate_energy_region(dataset, os.path.join(ratedir, 'nondisk.'+dataset['line']+'.superbubble_new.mask'), ax_type=ax_type, SNR=SNR)
E_real_cospatial_disk = integrate_energy_region(dataset, os.path.join(ratedir, 'nondisk.'+dataset['line']+'.cospatial_disk_new.mask'), ax_type=ax_type, SNR=SNR)
except:
E, E_p, E_m = np.nan, np.nan, np.nan
E_real_outflow = np.nan
E_real_superbubble = np.nan
E_real_cospatial_disk = np.nan
energies[dataset['line']][ax_type][SNR] = [E,E_p,E_m,E_real_outflow,E_real_superbubble,E_real_cospatial_disk]
fnpickle(energies, 'energies.pickle')
###################################################################################################
# deprojected energies
######################
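# The energies are deprojected by 1/sin^2(i), presumably because only the
# line-of-sight component of the outflow velocity is measured; the inclination
# i is sampled over 48-108 degrees and the 16th/50th/81st percentiles are kept.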
dep_energies = {}
for dataset in datasets:
dep_energies[dataset['line']] = {}
for ax_type in ['major','minor']:
dep_energies[dataset['line']][ax_type] = {}
for SNR in SNRs:
dep_energies[dataset['line']][ax_type][SNR] = {}
E, E_p, E_m, E_outflow, E_bubble, E_codisk = energies[dataset['line']][ax_type][SNR]
dep_E = [ E/(np.sin(j*u.degree).value)**2 for j in np.arange(48,108) ]
dep_E_outflow = [ E_outflow/(np.sin(j*u.degree).value)**2 for j in np.arange(48,108) ]
dep_E_bubble = [ E_bubble/(np.sin(j*u.degree).value)**2 for j in np.arange(48,108) ]
dep_E_codisk = [ E_codisk/(np.sin(j*u.degree).value)**2 for j in np.arange(48,108) ]
dep_energies[dataset['line']][ax_type][SNR]['total'] = np.nanpercentile(dep_E, [16,50,81])
dep_energies[dataset['line']][ax_type][SNR]['outflow'] = np.nanpercentile(dep_E_outflow, [16,50,81])
dep_energies[dataset['line']][ax_type][SNR]['bubble'] = np.nanpercentile(dep_E_bubble, [16,50,81])
dep_energies[dataset['line']][ax_type][SNR]['codisk'] = np.nanpercentile(dep_E_codisk, [16,50,81])
fnpickle(dep_energies, 'energies_deprojected.pickle')
###################################################################################################
|
[
"[email protected]"
] | |
679e983d8074ebf8f7dbce8af00db87168d0c887
|
f9308d5a8efe2dbb48e9cc87cd06405b60a9dc7b
|
/samples/python/apidocs/ee_number_abs.py
|
f4daf40ec08bb1ff5e89839d54938f661958a4b3
|
[
"Apache-2.0",
"CC-BY-4.0"
] |
permissive
|
google/earthengine-community
|
4e054b421f66f03507d58668084aee981062fc24
|
ce931040c518860f8788b4888c0acfdebd2952fc
|
refs/heads/master
| 2023-09-01T14:47:54.812703 | 2023-08-31T23:01:00 | 2023-08-31T23:01:39 | 200,732,820 | 428 | 552 |
Apache-2.0
| 2023-09-13T21:46:51 | 2019-08-05T21:42:11 |
Jupyter Notebook
|
UTF-8
|
Python
| false | false | 899 |
py
|
# Copyright 2022 The Google Earth Engine Community Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START earthengine__apidocs__ee_number_abs]
print('Absolute value of -1:', ee.Number(-1).abs().getInfo()) # 1
print('Absolute value of 0:', ee.Number(0).abs().getInfo()) # 0
print('Absolute value of 2.3:', ee.Number(2.3).abs().getInfo()) # 2.3
# [END earthengine__apidocs__ee_number_abs]
|
[
"[email protected]"
] | |
236cd21d398219589f77bf3b49cc6d3600f4a6ae
|
21e73840ea6db347141b7c569bf9e70e617adbd4
|
/src/posts/admin.py
|
1093ba83e6e05259b916218134bc354c68e2f7b8
|
[] |
no_license
|
ratulkhan44/djangoBlog
|
4b41e011fddb3095ef5c5ccbab8f711d054ed019
|
1da4a2fe96ae34cebd8d286bc1ab27d1623a7045
|
refs/heads/master
| 2023-01-23T14:27:08.225993 | 2020-11-28T15:26:17 | 2020-11-28T15:26:17 | 316,205,197 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 257 |
py
|
from django.contrib import admin
from .models import Post, PostView, Like, Comment, User
# Register your models here.
admin.site.register(User)
admin.site.register(Post)
admin.site.register(PostView)
admin.site.register(Comment)
admin.site.register(Like)
|
[
"[email protected]"
] | |
b0a8b8742f5ffd386452964a27070ef0a186c91f
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_118/2541.py
|
2571b77cac41e1d084f5c0929f63a1dc9bec94f1
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,562 |
py
|
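# Code Jam 2013 "Fair and Square": counts the numbers in [A, B] that are
# palindromes and whose integer square roots are palindromes as well.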
'''
Created on 13/04/2013
@author: Rafael
'''
def cuadradoPalindromos(listaPalindromo):
listResult=[]
for i in listaPalindromo:
listResult.append(i*i)
return listResult
def hallaPalindromo(minNum, maxNum):
list = range(minNum, maxNum + 1)
listPalindromo = []
listaPalindromoCuadrado = []
for i in list:
cad = str(i)
bandera = False
if len(cad) == 1:
listPalindromo.append(i);
else:
if len(cad) % 2 == 0:
pivote = int(len(cad) / 2)
bandera = validaPalindromo(pivote, cad)
else:
pivote = int(len(cad) / 2)
bandera = validaPalindromo(pivote, cad[:pivote] + cad[pivote + 1:])
if(bandera == True):
listPalindromo.append(i)
listaPalindromoCuadrado = cuadradoPalindromos(listPalindromo)
listaPalindromoAux = []
for i in listaPalindromoCuadrado:
cad = str(i)
bandera = False
if len(cad) == 1:
listaPalindromoAux.append(i);
else:
if len(cad) % 2 == 0:
pivote = int(len(cad) / 2)
bandera = validaPalindromo(pivote, cad)
else:
pivote = int(len(cad) / 2)
bandera = validaPalindromo(pivote, cad[:pivote] + cad[pivote + 1:])
if(bandera == True):
listaPalindromoAux.append(i)
return len(listaPalindromoAux)
def validaPalindromo(pivote, cadena):
j = 0
for i in range(1, pivote + 1):
if(cadena[pivote - i] != cadena[pivote + j]):
return False
j += 1
return True
if __name__ == '__main__':
fileIn = open("C-small-attempt0.in")
iter = int (fileIn.readline())
l = range(iter)
listaEntrada = []
listaPalindromos = []
for i in l:
cad = fileIn.readline()
cad = cad[:-1]
auxList = cad.split(" ")
listaEntrada.append(auxList)
fileIn.close()
for i in listaEntrada:
list = []
min = pow(int(i[0]), 0.5)
if(min%1!=0):
min=min+1
min = "%.2f" % min
max = "%.2f" % pow(int(i[1]), 0.5)
listaPalindromos.append( hallaPalindromo(int(min[:-3]), int(max[:-3])))
fileOut = open("output.txt",'w')
for i in range(len(listaPalindromos)):
fileOut.writelines("Case #%d: %s" % (i + 1, listaPalindromos[i])+'\n')
fileOut.close()
|
[
"[email protected]"
] | |
535dc67b848397904e9589a841cb3eed33579914
|
d3efc82dfa61fb82e47c82d52c838b38b076084c
|
/crossmarketetf_bak/crossmarket_redemption_HA/YW_CETFSS_SHSH_067.py
|
4f03de4f7cd81d8e34ba46a0bc890d0b0db273ca
|
[] |
no_license
|
nantongzyg/xtp_test
|
58ce9f328f62a3ea5904e6ed907a169ef2df9258
|
ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f
|
refs/heads/master
| 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,173 |
py
|
#!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
sys.path.append("/home/yhl2/workspace/xtp_test")
from crossmarketetf.cetfservice.cetf_main_service import *
from crossmarketetf.cetfservice.cetf_get_components_asset import *
from crossmarketetf.cetfservice.cetf_utils import *
from mysql.QueryOrderErrorMsg import queryOrderErrorMsg
from service.mainService import *
from mysql.getUpOrDownPrice import getUpPrice
class YW_CETFSS_SHSH_067(xtp_test_case):
def test_YW_CETFSS_SHSH_067(self):
        # ----------- ETF creation (purchase) -------------
        title = ('Purchase ETF on day T - redeem the same-day purchased ETF on day T - sell the day-T ETF on day T')
        # Define the expected values for the current test case
        # Expected status: initial, unfilled, fully filled, invalid order, cancelled-invalid, internal cancel
        # xtp_ID and cancel_xtpID default to 0 and need not be changed
case_goal = {
'期望状态': '全成',
'errorID': 0,
'errorMSG': '',
'是否生成报单': '是',
'是否是撤废': '否',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title)
unit_info = {
            'ticker': '530690',  # ETF code
            'etf_unit': 1.0,  # number of ETF creation/redemption units
            'etf_unit_sell': 1.0,  # number of ETF units to sell
}
        # query component-stock positions before the ETF creation
component_stk_info = cetf_get_all_component_stk(Api,unit_info['ticker'])
        # query the ETF's minimum creation/redemption unit
unit_number = query_creation_redem_unit(unit_info['ticker'])
        # ETF creation/redemption quantity
quantity = int(unit_info['etf_unit'] * unit_number)
        # order parameters ------------------------------------------
wt_reqs = {
'business_type':
Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_ETF'],
'order_client_id':
2,
'market':
Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker':
unit_info['ticker'],
'side':
Api.const.XTP_SIDE_TYPE['XTP_SIDE_PURCHASE'],
'price_type':
Api.const.XTP_PRICE_TYPE['XTP_PRICE_LIMIT'],
'quantity':
quantity
}
g_func.cetf_parm_init(case_goal['期望状态'])
rs1 = cetf_service_test(Api, case_goal, wt_reqs,component_stk_info,)
etf_creation_log(case_goal, rs1)
self.assertEqual(rs1['用例测试结果'], True)
        # ----------- ETF redemption -------------
case_goal['期望状态'] = '废单'
case_goal['errorID'] = 11010121
case_goal['errorMSG'] = queryOrderErrorMsg(11010121)
        # query component-stock positions before the redemption
component_stk_info2 =cetf_get_all_component_stk(Api,unit_info['ticker'])
        # order parameters ------------------------------------------
wt_reqs = {
'business_type':
Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_ETF'],
'order_client_id':
2,
'market':
Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker':
unit_info['ticker'],
'side':
Api.const.XTP_SIDE_TYPE['XTP_SIDE_REDEMPTION'],
'price_type':
Api.const.XTP_PRICE_TYPE['XTP_PRICE_LIMIT'],
'quantity':
quantity
}
g_func.cetf_parm_init(case_goal['期望状态'])
rs2 = cetf_service_test(Api, case_goal, wt_reqs,component_stk_info2)
etf_creation_log(case_goal, rs2)
self.assertEqual(rs2['用例测试结果'], True)
        # -------- secondary market: sell the ETF -----------
case_goal['期望状态'] = '全成'
case_goal['errorID'] = 0
case_goal['errorMSG'] = ''
        # quantity of ETF sold on the secondary market
quantity = int(unit_info['etf_unit_sell'] * unit_number)
quantity_list = split_etf_quantity(quantity)
        # query the limit-up price
limitup_px = getUpPrice(unit_info['ticker'])
rs3 = {}
for etf_quantity in quantity_list:
wt_reqs_etf = {
'business_type':
Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_CASH'],
'order_client_id':
2,
'market':
Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
'ticker':
unit_info['ticker'],
'side':
Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
'price_type':
Api.const.XTP_PRICE_TYPE['XTP_PRICE_BEST5_OR_CANCEL'],
'price':
limitup_px,
'quantity':
etf_quantity
}
ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
rs3 = serviceTest(Api, case_goal, wt_reqs_etf)
if rs3['用例测试结果'] is False:
etf_sell_log(case_goal, rs3)
self.assertEqual(rs3['用例测试结果'], True)
return
etf_sell_log(case_goal, rs3)
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
ebd34678904d8f650aa47877f6328d14b8087b89
|
801f367bd19b8f2ab08669fd0a85aad7ace961ac
|
/project/experiments/exp_020_random_bodies_revisit/src/gym_envs/my_envs.py
|
4bdd9eeb25ef7e90771dc38a768f4ec97b664343
|
[
"MIT"
] |
permissive
|
Wendong-Huo/thesis-bodies
|
d91b694a6b1b6a911476573ed1ed27eb27fb000d
|
dceb8a36efd2cefc611f6749a52b56b9d3572f7a
|
refs/heads/main
| 2023-04-17T18:32:38.541537 | 2021-03-12T19:53:23 | 2021-03-12T19:53:23 | 623,471,326 | 1 | 0 | null | 2023-04-04T12:45:48 | 2023-04-04T12:45:47 | null |
UTF-8
|
Python
| false | false | 2,604 |
py
|
import numpy as np
import pybullet
from pybullet_envs.gym_locomotion_envs import WalkerBaseBulletEnv
from pybullet_envs.robot_locomotors import WalkerBase
class MyWalkerBase(WalkerBase):
def __init__(self, fn, robot_name, action_dim, obs_dim, power):
super().__init__(fn, robot_name, action_dim, obs_dim, power)
# e.g. fn = "".../300.xml"
self.robot_id = int(fn.split("/")[-1].split(".")[0])
class MyWalkerBaseBulletEnv(WalkerBaseBulletEnv):
def __init__(self, robot, render=False):
self._last_x = 0
self._history_x = []
self._history_dx = []
super().__init__(robot, render=render)
def step(self, a):
obs = super().step(a)
self.camera_adjust()
return obs
def reset(self):
self._history_x = []
self._history_dx = []
obs = super().reset()
self.pybullet = self._p
self.camera_angle = 0
return obs
def show_body_id(self):
if self._p:
self._p.addUserDebugText(f"{self.xml.split('/')[-1]}", [-0.5, 0, 1], [0, 0, 1])
def camera_adjust(self):
self.camera_simpy_follow_robot()
def camera_simpy_follow_robot(self, rotate=False):
if self._p:
self.camera_angle += 1
distance = 4
pitch = -10
if rotate:
yaw = self.camera_angle
else:
yaw = 0
_current_x = self.robot.body_xyz[0]
_current_y = self.robot.body_xyz[1]
lookat = [_current_x, _current_y, 0.7]
self._p.resetDebugVisualizerCamera(distance, yaw, pitch, lookat)
def camera_follow_robot(self):
if self._p:
distance = 4
pitch = -5
yaw = 0
# Smooth Camera
if len(self._history_x) > 0:
self._last_x = self._history_x[-1]
self._history_x.append(self.robot.body_xyz[0])
self._history_dx.append(self.robot.body_xyz[0] - self._last_x)
_average_speed = np.mean(self._history_dx) if len(self._history_dx) <= 11 else np.mean(self._history_dx[-10:])
_current_x = self._last_x + _average_speed
# print(_current_x, self.robot.body_xyz[0])
lookat = [_current_x, 0, 0.7]
self._p.resetDebugVisualizerCamera(distance, yaw, pitch, lookat)
def set_view(self):
if self._p:
distance = 3
pitch = -80
yaw = 0
lookat = [0, 0, 0]
self._p.resetDebugVisualizerCamera(distance, yaw, pitch, lookat)
|
[
"[email protected]"
] | |
3e3cd56dc57fe0dabd04a2540c24b971c3c33c09
|
80f1d4300aa4de8824ad76f3a2606e5ad020e638
|
/right-to-left.py
|
860ea3112096156c63467813db036b252ee3516c
|
[] |
no_license
|
deanstreet/checkio
|
cf14419e921a6ffa18466ba2f01968ec1de271e4
|
825f648c5cbb8d02fe26cd675ddc0913296c0417
|
refs/heads/master
| 2021-01-02T08:24:53.693247 | 2015-05-11T00:01:00 | 2015-05-11T00:01:00 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 615 |
py
|
def left_join(phrases):
"""
    Join strings and replace "right" with "left"
"""
return ','.join(map(lambda x: x.replace('right', 'left'), phrases))
if __name__ == '__main__':
#These "asserts" using only for self-checking and not necessary for auto-testing
assert left_join(("left", "right", "left", "stop")) == "left,left,left,stop", "All to left"
assert left_join(("bright aright", "ok")) == "bleft aleft,ok", "Bright Left"
assert left_join(("brightness wright",)) == "bleftness wleft", "One phrase"
assert left_join(("enough", "jokes")) == "enough,jokes", "Nothing to replace"
|
[
"[email protected]"
] | |
02a4a248f3ff1f507622c9bba64ddc55b45a3f52
|
f90341eea9ae5750ae34fee0fbab253c19b89ff0
|
/abilian/sbe/apps/documents/tests/test_parser.py
|
6cf799cb20ac801946a993d52476497b665d702e
|
[] |
no_license
|
0decimal0/abilian-sbe
|
f8e5d6ed1474484bbf35dc2a2d13029cbe47eb4e
|
bcf08c4aec1d761d2e525006fe2c1291cc00e0b8
|
refs/heads/master
| 2021-01-24T23:41:37.866768 | 2015-12-14T11:02:01 | 2015-12-14T11:02:08 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,839 |
py
|
from ..cmis.parser import Entry
XML_ENTRY = """\
<?xml version="1.0" encoding="utf-8"?>
<entry xmlns="http://www.w3.org/2005/Atom"
xmlns:app="http://www.w3.org/2007/app"
xmlns:cmisra="http://docs.oasis-open.org/ns/cmis/restatom/200908/">
<cmisra:object xmlns:cmis="http://docs.oasis-open.org/ns/cmis/core/200908/">
<cmis:properties>
<cmis:propertyString propertyDefinitionId="cmis:name">
<cmis:value>Toto Titi</cmis:value>
</cmis:propertyString>
<cmis:propertyId propertyDefinitionId="cmis:objectTypeId">
<cmis:value>cmis:folder</cmis:value>
</cmis:propertyId>
</cmis:properties>
</cmisra:object>
<title>Toto Titi</title>
</entry>
"""
XML_ENTRY_WITH_CONTENT = """\
<?xml version="1.0" encoding="utf-8"?>
<entry xmlns="http://www.w3.org/2005/Atom"
xmlns:app="http://www.w3.org/2007/app"
xmlns:cmisra="http://docs.oasis-open.org/ns/cmis/restatom/200908/">
<cmisra:content>
<cmisra:mediatype>text/plain</cmisra:mediatype>
<cmisra:base64>VGVzdCBjb250ZW50IHN0cmluZw==</cmisra:base64>
</cmisra:content>
<cmisra:object
xmlns:cmis="http://docs.oasis-open.org/ns/cmis/core/200908/">
<cmis:properties>
<cmis:propertyString propertyDefinitionId="cmis:name">
<cmis:value>testDocument</cmis:value>
</cmis:propertyString>
<cmis:propertyId propertyDefinitionId="cmis:objectTypeId">
<cmis:value>cmis:document</cmis:value>
</cmis:propertyId>
</cmis:properties>
</cmisra:object>
<title>testDocument</title>
</entry>
"""
def test_parse_folder_entry():
e = Entry(XML_ENTRY)
assert e.name == "Toto Titi"
assert e.type == "cmis:folder"
def test_parse_document_entry():
e = Entry(XML_ENTRY_WITH_CONTENT)
assert e.name == "testDocument"
assert e.type == "cmis:document"
assert e.content == "Test content string"
|
[
"[email protected]"
] | |
257323258e584c6ede80fc932efea1422a39ea6c
|
64267b1f7ca193b0fab949089b86bc7a60e5b859
|
/slehome/account/migrations/0007_auto_20150124_2357.py
|
9dc78adf7b4c13fd262250eef42336da3db7bb8e
|
[] |
no_license
|
hongdangodori/slehome
|
6a9f2b4526c2783932627b982df0540762570bff
|
3e558c78c3943dadf0ec485738a0cc98dea64353
|
refs/heads/master
| 2021-01-17T12:00:34.221088 | 2015-02-06T13:44:00 | 2015-02-06T13:44:00 | 28,847,585 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 531 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('account', '0006_auto_20150124_2347'),
]
operations = [
migrations.AlterField(
model_name='basicmemberinformation',
name='auth_key',
field=models.CharField(default='f26e2ff059e1c71478e358c3f6eb407f217bd163e39fc1b7ab7a53c10c918989', max_length=64),
preserve_default=True,
),
]
|
[
"[email protected]"
] | |
51e916768da88495398585f4f2062d91c0e36cfb
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03426/s835842924.py
|
6a7b12f94298b54775e9eaca3a758d891cefb3a6
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 702 |
py
|
import sys;input=sys.stdin.readline
H, W, D = map(int, input().split())
def dist(x, y):
return abs(x[0]-y[0]) + abs(x[1]-y[1])
X = []
for _ in range(H):
X.append(list(map(int, input().split())))
d = dict()
for i in range(H):
x = X[i]
for j in range(W):
d[x[j]] = (i, j)
# Ls[i-1] holds prefix sums of Manhattan distances along the chain
# i -> i+D -> i+2D -> ..., so each query reduces to a difference of prefix sums.
Ls = []
for i in range(1, D+1):
    tmp = [0]
    st = d[i]
    j = i
    while j in d:
        tmp.append(tmp[-1]+dist(st, d[j]))
        st = d[j]
        j += D
    Ls.append(tmp)
Q = int(input())
for _ in range(Q):
    x, y = map(int, input().split())
    a, b = divmod(x, D)
    c, _rem = divmod(y, D)  # avoid shadowing the position dict `d`
    if b > 0:
        print(Ls[b-1][c+1]-Ls[b-1][a+1])
    else:
        # b == 0 means x is in residue class D, whose chain is Ls[-1]
        print(Ls[b-1][c]-Ls[b-1][a])
|
[
"[email protected]"
] | |
98c348967b90cbac75baa5e05e551e2d52e3a16a
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p02714/s632667651.py
|
fa656fe0bf144d612f78ab8ca7788c47309163aa
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 261 |
py
|
from collections import *
N=int(input())
S=input()
c=Counter(S)
# all colour-distinct triples, then remove the evenly spaced ones
# (k - j == j - i), which the problem forbids even with distinct colours
ans=c['R']*c['G']*c['B']
for i in range(N):
for j in range(i+1,N):
if S[i]==S[j]:
continue
k=j*2-i
if k>=N or S[k]==S[i] or S[k]==S[j]:
continue
ans-=1
print(ans)
|
[
"[email protected]"
] | |
04d35b32d242e45e128b6a2f356d510d254e23f8
|
01de66bc478982722cb25120e9d15c39a235a051
|
/python3/Library/Database/__init__.py
|
d595d0b40a3f9816257ed0044b084ab44fd52335
|
[] |
no_license
|
ptracton/experimental
|
2503c2e8ead7e5719d7aee612fb2ba4d219e6c87
|
7d4a27126f7f2a93f7216b9ea4eed15789599bf3
|
refs/heads/master
| 2020-06-08T08:21:43.100928 | 2018-04-08T04:54:22 | 2018-04-08T04:54:22 | 7,012,360 | 4 | 7 | null | null | null | null |
UTF-8
|
Python
| false | false | 378 |
py
|
"""
This is the init for our Database Library.
"""
__all__ = ['Postgres']
import os
import sys
sys.path.append("/user/tractp1/scratch/src/experimental/python3/Library/Database")
sys.path.append("/home/ptracton/src/software/experimental/python3/Library/Database")
sys.path.append('c:\\Users\\tractp1\\src\\software\\experimental\\python3\\Library\\Database')
import Postgres
|
[
"[email protected]"
] | |
4f1c677f0d7ba0732c066ffc7c83a94bdc902f04
|
e73cd093ab804f3abe5efb14ce9af244c02ff5e1
|
/todolist/urls.py
|
52a6418268ecb901fd267833e3af073cfd1b8256
|
[] |
no_license
|
zhangfanjm/xadmin_Dj
|
7c1d1e6b52f1d91755deaea36b4a356782796d36
|
72adc022e7fbe19e34cdc1d10ec6c0e1e152dbed
|
refs/heads/master
| 2021-06-22T08:45:43.045362 | 2019-07-30T02:44:40 | 2019-07-30T02:44:40 | 199,424,964 | 0 | 0 | null | 2021-06-10T21:47:36 | 2019-07-29T09:47:08 |
Python
|
UTF-8
|
Python
| false | false | 237 |
py
|
from django.urls import path, include
from . import views
app_name = 'todolist'
urlpatterns = [
path('home/', views.home, name='主页'),
path('about/', views.about, name='关于'),
path('edit/', views.edit, name='编辑'),
]
|
[
"[email protected]"
] | |
d2cf755b7757dc878ef76f0dcbcfca0aaa9c2f2e
|
3f13885fdb0649374d866d24a43f86ccc6b4c782
|
/apps/dns_pod/serializers.py
|
2a47a921f91b8bb89af969adb71cb72ac1bcf18a
|
[] |
no_license
|
linkexf/oneops
|
426b271c00c5b4b4c55d1d91bf42030dab29623a
|
64a9c7fd949b6220234a276614ab6555dc8cc17c
|
refs/heads/master
| 2020-12-10T04:45:55.681731 | 2019-11-28T09:02:30 | 2019-11-28T09:02:30 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 408 |
py
|
from rest_framework import serializers
from dns_pod.models import Zone, Record
class ZoneSerializer(serializers.ModelSerializer):
class Meta:
model = Zone
fields = ('id', 'domain_name', 'type', 'comment')
class RecordSerializer(serializers.ModelSerializer):
class Meta:
model = Record
fields = ('id', 'zone', 'host', 'type', 'data', 'ttl', 'mx_priority', 'view')
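# Hedged usage sketch (not part of the original module): with a saved Zone row,
# ZoneSerializer renders it to primitives; the field values below are hypothetical.
# zone = Zone.objects.create(domain_name="example.com", type="master", comment="demo")
# ZoneSerializer(zone).data
# # -> {'id': 1, 'domain_name': 'example.com', 'type': 'master', 'comment': 'demo'}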
|
[
"[email protected]"
] | |
91499efe1318209699fc0774996e2d989fdce2cf
|
b46f5825b809c0166622149fc5561c23750b379c
|
/AppImageBuilder/app_dir/runtimes/classic/helpers/test_qt.py
|
32d64d7c94fe1937b457aa1232f754928e1b31ac
|
[
"MIT"
] |
permissive
|
gouchi/appimage-builder
|
22b85cb682f1b126515a6debd34874bd152a4211
|
40e9851c573179e066af116fb906e9cad8099b59
|
refs/heads/master
| 2022-09-28T09:46:11.783837 | 2020-06-07T19:44:48 | 2020-06-07T19:44:48 | 267,360,199 | 0 | 0 |
MIT
| 2020-05-27T15:42:25 | 2020-05-27T15:42:24 | null |
UTF-8
|
Python
| false | false | 2,176 |
py
|
# Copyright 2020 Alexis Lopez Zubieta
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
import unittest
from .qt import Qt
class QtHelperTestCase(unittest.TestCase):
def setUp(self) -> None:
self.app_dir_files = [
'/AppDir/usr/lib/x86_64-linux-gnu/libQt5Core.so.5',
'/AppDir/usr/lib/x86_64-linux-gnu/qt5/libexec/QtWebProcess',
'/AppDir/usr/lib/x86_64-linux-gnu/qt5/plugins/platforms/libqxcb.so',
'/AppDir/usr/lib/x86_64-linux-gnu/qt5/qml/org/kde/plasma/components/Label.qml',
'/AppDir/usr/share/qt5/translations/qtbase_en.qm'
]
self.app_dir = '/AppDir'
self.qt = Qt(self.app_dir, self.app_dir_files)
    def test_get_qt_conf_prefix_path(self):
        self.assertEqual(self.qt._get_qt_conf_prefix_path('/AppDir/lib/x86_64'), '../..')
def test_get_qt_libs_path(self):
self.assertEqual(self.qt._get_qt_libs_path(), 'usr/lib/x86_64-linux-gnu')
def test_get_qt_lib_exec_path(self):
self.assertEqual(self.qt._get_qt_lib_exec_path(), 'usr/lib/x86_64-linux-gnu/qt5/libexec')
def test_get_qt_plugins_path(self):
self.assertEqual(self.qt._get_qt_plugins_path(), 'usr/lib/x86_64-linux-gnu/qt5/plugins')
def test_get_qt_qml_path(self):
self.assertEqual(self.qt._get_qt_qml_path(), 'usr/lib/x86_64-linux-gnu/qt5/qml')
def test_get_qt_translations_path(self):
self.assertEqual(self.qt._get_qt_translations_path(), 'usr/share/qt5/translations')
def test_get_qt_data_dir(self):
self.assertEqual(self.qt._get_qt_data_dir(), 'usr/share/qt5')
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
d82556b2fa08b9b5bf669d8a5260876206ab8720
|
fc678a0a5ede80f593a29ea8f43911236ed1b862
|
/146-LRUCache.py
|
0a03d9ada59e10ced0060bfe5e5ab5d7ae005f87
|
[] |
no_license
|
dq-code/leetcode
|
4be0b1b154f8467aa0c07e08b5e0b6bd93863e62
|
14dcf9029486283b5e4685d95ebfe9979ade03c3
|
refs/heads/master
| 2020-12-13T15:57:30.171516 | 2017-11-07T17:43:19 | 2017-11-07T17:43:19 | 35,846,262 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,954 |
py
|
class Node(object):
def __init__(self, key, val):
self.key = key
self.val = val
self.next = None
self.prev = None
class DoubleLinkedList(object):
def __init__(self):
self.head = Node(0, 0)
self.tail = Node(0, 0)
self.tail.prev = self.head
self.head.next = self.tail
def remove(self, node):
tempPrev = node.prev
tempNext = node.next
tempPrev.next = tempNext
tempNext.prev = tempPrev
def addFirst(self, node):
curHead = self.head.next
self.head.next = node
node.next = curHead
curHead.prev = node
node.prev = self.head
def removeLast(self):
curTail = self.tail.prev
curTailPrev = curTail.prev
curTailPrev.next = self.tail
self.tail.prev = curTailPrev
return curTail
class LRUCache(object):
def __init__(self, capacity):
"""
:type capacity: int
"""
self.cache = DoubleLinkedList()
self.map = {}
self.capacity = capacity
def isFull(self):
return len(self.map) >= self.capacity
def get(self, key):
"""
:rtype: int
"""
if key in self.map:
self.cache.remove(self.map[key])
self.cache.addFirst(self.map[key])
return self.map[key].val
return -1
def set(self, key, value):
"""
:type key: int
:type value: int
:rtype: nothing
"""
if key in self.map:
self.map[key].val = value
self.cache.remove(self.map[key])
self.cache.addFirst(self.map[key])
else:
            if self.isFull():
del self.map[self.cache.tail.prev.key]
lastNode = self.cache.removeLast()
newNode = Node(key, value)
self.map[key] = newNode
self.cache.addFirst(newNode)
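# Hedged usage sketch (not part of the original submission): exercising the
# cache with capacity 2, following LeetCode 146 semantics.
if __name__ == '__main__':
    cache = LRUCache(2)
    cache.set(1, 1)
    cache.set(2, 2)
    assert cache.get(1) == 1   # key 1 becomes most recently used
    cache.set(3, 3)            # evicts key 2, the least recently used
    assert cache.get(2) == -1
    assert cache.get(3) == 3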
|
[
"[email protected]"
] | |
f8202b25cbf0d1cf4eb8728ee8dd4f2ac4be6fe0
|
d7cde25f64c784238919b5a4463aca8e98e1042d
|
/json2.py
|
17bd12a4f1a2f2ccaed3d62658b168eaa7f5ef28
|
[] |
no_license
|
kwoshvick/pythonscripts
|
6d684dbaaaa7d317c5393f7fd982e115770db3c4
|
1c56f21faab098752fcfdcbd2e28590e904a4e0c
|
refs/heads/master
| 2021-09-23T03:19:31.799045 | 2018-09-20T06:20:47 | 2018-09-20T06:20:47 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 513 |
py
|
__author__ = 'kwoshvick'
import urllib.request as rq
import json
list_numbers = list()
# http://python-data.dr-chuck.net/comments_42.json
# http://python-data.dr-chuck.net/comments_171968.json
url = input("Please Enter URL: ")
urlhandler = rq.urlopen(url).read().decode()
jsondata = json.loads(urlhandler)
#print(json.dumps(jsondata,indent=4))
comments = jsondata['comments']
#print(comments)
for numbers in comments:
num=int(numbers['count'])
list_numbers.append(num)
print(sum(list_numbers))
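# Hedged aside (not part of the original script): the loop above is equivalent
# to a single generator expression over the parsed comments.
# print(sum(int(c['count']) for c in comments))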
|
[
"[email protected]"
] | |
7473164a03951ff6879bfaa6df6e9a56ab92202d
|
75f6bbcdf10dec884202b3136feb0317842df55f
|
/apps/task/migrations/0014_taskscript_script_name.py
|
d71fecd9858efaf7f356441edd75b3326109e7d7
|
[] |
no_license
|
qt-pay/python-devops
|
bafa305fbcd7bef4498857ab75be7447bc1e0a42
|
60e9481ab84628cf817fde1c52f4a15d5085e503
|
refs/heads/main
| 2023-03-15T12:39:45.813287 | 2021-01-24T18:40:38 | 2021-01-24T18:40:38 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 466 |
py
|
# Generated by Django 2.2.2 on 2021-01-09 17:15
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('task', '0013_remove_taskscript_script_name'),
]
operations = [
migrations.AddField(
model_name='taskscript',
name='script_name',
field=models.CharField(blank=True, max_length=32, null=True, unique=True, verbose_name='脚本名称'),
),
]
|
[
"[email protected]"
] | |
2c5ba50c4354a7a56ca98be6d504781db5df0726
|
de24f83a5e3768a2638ebcf13cbe717e75740168
|
/moodledata/vpl_data/72/usersdata/161/32028/submittedfiles/tomadas.py
|
0c042de34b3f1ac31dff8e3c9989b37274d5862e
|
[] |
no_license
|
rafaelperazzo/programacao-web
|
95643423a35c44613b0f64bed05bd34780fe2436
|
170dd5440afb9ee68a973f3de13a99aa4c735d79
|
refs/heads/master
| 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 359 |
py
|
# -*- coding: utf-8 -*-
import math
#COMECE SEU CODIGO AQUI
T1=int(input('Informe o número de tomadas da primeira régua:'))
T2=int(input('Informe o número de tomadas da segunda régua:'))
T3=int(input('Informe o número de tomadas da terceira régua:'))
T4=int(input('Informe o número de tomadas da quarta régua:'))
T=(T1-1)+(T2-1)+(T3-1)+T4
print(T)
|
[
"[email protected]"
] | |
3687eb472a1646c1b493cf9ccb45d1cd84cf9d77
|
c1bd12405d244c5924a4b069286cd9baf2c63895
|
/azure-mgmt-network/azure/mgmt/network/v2017_09_01/models/connectivity_issue_py3.py
|
441d34f087704a1372caae1431882ebe92453a81
|
[
"MIT"
] |
permissive
|
lmazuel/azure-sdk-for-python
|
972708ad5902778004680b142874582a284a8a7c
|
b40e0e36cc00a82b7f8ca2fa599b1928240c98b5
|
refs/heads/master
| 2022-08-16T02:32:14.070707 | 2018-03-29T17:16:15 | 2018-03-29T17:16:15 | 21,287,134 | 1 | 3 |
MIT
| 2019-10-25T15:56:00 | 2014-06-27T19:40:56 |
Python
|
UTF-8
|
Python
| false | false | 2,129 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ConnectivityIssue(Model):
"""Information about an issue encountered in the process of checking for
connectivity.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar origin: The origin of the issue. Possible values include: 'Local',
'Inbound', 'Outbound'
:vartype origin: str or ~azure.mgmt.network.v2017_09_01.models.Origin
:ivar severity: The severity of the issue. Possible values include:
'Error', 'Warning'
:vartype severity: str or ~azure.mgmt.network.v2017_09_01.models.Severity
:ivar type: The type of issue. Possible values include: 'Unknown',
'AgentStopped', 'GuestFirewall', 'DnsResolution', 'SocketBind',
'NetworkSecurityRule', 'UserDefinedRoute', 'PortThrottled', 'Platform'
:vartype type: str or ~azure.mgmt.network.v2017_09_01.models.IssueType
:ivar context: Provides additional context on the issue.
:vartype context: list[dict[str, str]]
"""
_validation = {
'origin': {'readonly': True},
'severity': {'readonly': True},
'type': {'readonly': True},
'context': {'readonly': True},
}
_attribute_map = {
'origin': {'key': 'origin', 'type': 'str'},
'severity': {'key': 'severity', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'context': {'key': 'context', 'type': '[{str}]'},
}
def __init__(self, **kwargs) -> None:
super(ConnectivityIssue, self).__init__(**kwargs)
self.origin = None
self.severity = None
self.type = None
self.context = None
|
[
"[email protected]"
] | |
6f0abd001fc4ea2a018990b3eea17370b082a5ff
|
c1f09426670b5efe35956acd19c67a2de72af284
|
/python/5.concurrent/ZCoroutine/z_spider/1.pyquery.py
|
203181b9993d604b17a4b4e6131cdfed178857fc
|
[
"Apache-2.0"
] |
permissive
|
keasyops/BaseCode
|
388218d89d60b958c1fcc50eb15f29eafabaea1f
|
0255f498e1fe67ed2b3f66c84c96e44ef1f7d320
|
refs/heads/master
| 2023-05-08T05:08:39.754170 | 2021-05-26T10:48:01 | 2021-05-26T10:48:01 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,056 |
py
|
import asyncio
import aiohttp
from pyquery import PyQuery
error_urls = set()
# 获取页面html
async def fetch(session, url):
async with session.get(url) as response:
if response.status == 200:
return await response.text()
else:
error_urls.add(url) # 待处理的url集合
# 阻塞方法
def saves(results):
with open("www.biquge.cm.txt", "a+", encoding="utf-8") as fs:
fs.writelines(results)
print("ok")
async def main():
async with aiohttp.ClientSession() as session:
        html = await fetch(session, "http://www.biquge.cm/12/12097/")
        if html is None:  # fetch failed; the url is already in error_urls
            return
        pq = PyQuery(html)
results = [
item.text() + ":" + item.attr("href") + "\n"
for item in pq.items("dd a")
]
# print(pq("dd a").text())
# 兼容阻塞旧代码
await asyncio.get_running_loop().run_in_executor(None, saves, results)
if __name__ == "__main__":
import time
start_time = time.time()
asyncio.run(main())
print(time.time() - start_time)
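# Hedged aside (not part of the original script): on Python 3.9+ the
# run_in_executor call above can be written as
#     await asyncio.to_thread(saves, results)
# which wraps the same blocking file write without touching the running loop.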
|
[
"[email protected]"
] | |
f28a1c6306277227ba9d8d0c585124aee5fa06f9
|
54277288865f738e44d7be1d6b41b19c63af267e
|
/configs/srtcp/r2plus1d_18_got10k_syn3x/finetune_hmdb51.py
|
8bec58ef23f6b18b9d12f72d1e6dcdf965326cd4
|
[] |
no_license
|
scenarios/SR-SVRL
|
7b41d29e16cff3020f333efc28a624d85bba4537
|
26e89ecb29355635b10a355f2f16f1b5db9c4e9b
|
refs/heads/master
| 2023-02-26T06:16:13.314491 | 2021-01-30T16:30:57 | 2021-01-30T16:30:57 | 307,295,720 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 324 |
py
|
_base_ = '../r3d_18_got10k_syn3x/finetune_hmdb51.py'
work_dir = './output/tcp/r2plus1d_18_got10k_syn3x/finetune_hmdb51/'
model = dict(
backbone=dict(
type='R2Plus1D',
pretrained='./output/tcp/r2plus1d_18_got10k_syn3x/pretraining/epoch_300.pth',
),
cls_head=dict(
num_classes=51
)
)
|
[
"[email protected]"
] | |
5ed876f70146b5735041edb9aa96dd8f7a27affe
|
62bdd0d6ea614613eda76cba1862effb86f2acb7
|
/dj4e_2/myarts/models.py
|
31a9407d60ee6ee586a73e216aa66fb8edfa720d
|
[] |
no_license
|
aman007shrestha/Django4everybody
|
bb2d1df8d681ddac369b1bdde13aff2b9bf08148
|
659bfd579b13d5d7b59022dec3dd3c14c7c37608
|
refs/heads/main
| 2023-01-21T06:39:48.560094 | 2020-12-02T06:55:20 | 2020-12-02T06:55:20 | 317,775,981 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 631 |
py
|
from django.db import models
from django.core.validators import MinLengthValidator
from django.contrib.auth.models import User
from django.conf import settings
# Create your models here.
class Article(models.Model):
title = models.CharField(
max_length=200,
validators=[MinLengthValidator(2, "Title must be greater than a character")]
)
text = models.TextField()
owner = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
created_at = models.DateTimeField('Published Date', auto_now_add=True)
updated_at = models.DateTimeField("Last Updated", auto_now=True)
def __str__(self):
return self.title
|
[
"[email protected]"
] | |
76d1a103a3d9419a8f07e15c521acb164732079c
|
6169a0af24553278c9493c9ac14d2351e9085afd
|
/tests/system/providers/google/cloud/gcs/example_firestore.py
|
9be3b8dd8dcf3d071209a5d3b5743a6375bbd08b
|
[
"Apache-2.0",
"BSD-3-Clause",
"MIT"
] |
permissive
|
Nextdoor/airflow
|
c994f8fbaf48bebd891300f44dd78a58fd0b057b
|
863ec46e25ea49d6d5b006d8fd3a83f50aa9db79
|
refs/heads/master
| 2023-06-12T19:25:58.052324 | 2023-01-20T17:43:14 | 2023-01-20T17:43:14 | 54,076,271 | 7 | 8 |
Apache-2.0
| 2023-06-05T20:38:53 | 2016-03-17T00:34:45 |
Python
|
UTF-8
|
Python
| false | false | 6,327 |
py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example Airflow DAG that shows interactions with Google Cloud Firestore.
Prerequisites
=============
This example uses two Google Cloud projects:
* ``GCP_PROJECT_ID`` - It contains a bucket and a firestore database.
* ``G_FIRESTORE_PROJECT_ID`` - It contains the Data Warehouse based on the BigQuery service.
Saving in a bucket should be possible from the ``G_FIRESTORE_PROJECT_ID`` project.
Reading from a bucket should be possible from the ``GCP_PROJECT_ID`` project.
The bucket and dataset should be located in the same region.
If you want to run this example, you must do the following:
1. Create Google Cloud project and enable the BigQuery API
2. Create the Firebase project
3. Create a bucket in the same location as the Firebase project
4. Grant Firebase admin account permissions to manage BigQuery. This is required to create a dataset.
5. Create a bucket in the Firebase project.
6. Give the Firebase admin read/write access to the bucket from step 5.
7. Create collection in the Firestore database.
"""
from __future__ import annotations
import os
from datetime import datetime
from urllib.parse import urlsplit
from airflow import models
from airflow.providers.google.cloud.operators.bigquery import (
BigQueryCreateEmptyDatasetOperator,
BigQueryCreateExternalTableOperator,
BigQueryDeleteDatasetOperator,
BigQueryInsertJobOperator,
)
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
from airflow.providers.google.firebase.operators.firestore import CloudFirestoreExportDatabaseOperator
from airflow.utils.trigger_rule import TriggerRule
ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
DAG_ID = "example_google_firestore"
GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-gcp-project")
FIRESTORE_PROJECT_ID = os.environ.get("G_FIRESTORE_PROJECT_ID", "example-firebase-project")
BUCKET_NAME = f"bucket_{DAG_ID}_{ENV_ID}"
DATASET_NAME = f"dataset_{DAG_ID}_{ENV_ID}"
EXPORT_DESTINATION_URL = os.environ.get("GCP_FIRESTORE_ARCHIVE_URL", "gs://INVALID BUCKET NAME/namespace/")
EXPORT_PREFIX = urlsplit(EXPORT_DESTINATION_URL).path
EXPORT_COLLECTION_ID = os.environ.get("GCP_FIRESTORE_COLLECTION_ID", "firestore_collection_id")
DATASET_LOCATION = os.environ.get("GCP_FIRESTORE_DATASET_LOCATION", "EU")
if BUCKET_NAME is None:
raise ValueError("Bucket name is required. Please set GCP_FIRESTORE_ARCHIVE_URL env variable.")
with models.DAG(
DAG_ID,
start_date=datetime(2021, 1, 1),
schedule="@once",
catchup=False,
tags=["example", "firestore"],
) as dag:
create_bucket = GCSCreateBucketOperator(task_id="create_bucket", bucket_name=BUCKET_NAME)
create_dataset = BigQueryCreateEmptyDatasetOperator(
task_id="create_dataset",
dataset_id=DATASET_NAME,
location=DATASET_LOCATION,
project_id=GCP_PROJECT_ID,
)
# [START howto_operator_export_database_to_gcs]
export_database_to_gcs = CloudFirestoreExportDatabaseOperator(
task_id="export_database_to_gcs",
project_id=FIRESTORE_PROJECT_ID,
body={"outputUriPrefix": EXPORT_DESTINATION_URL, "collectionIds": [EXPORT_COLLECTION_ID]},
)
# [END howto_operator_export_database_to_gcs]
# [START howto_operator_create_external_table_multiple_types]
create_external_table_multiple_types = BigQueryCreateExternalTableOperator(
task_id="create_external_table",
bucket=BUCKET_NAME,
table_resource={
"tableReference": {
"projectId": GCP_PROJECT_ID,
"datasetId": DATASET_NAME,
"tableId": "firestore_data",
},
"schema": {
"fields": [
{"name": "name", "type": "STRING"},
{"name": "post_abbr", "type": "STRING"},
]
},
"externalDataConfiguration": {
"sourceFormat": "DATASTORE_BACKUP",
"compression": "NONE",
"csvOptions": {"skipLeadingRows": 1},
},
},
)
# [END howto_operator_create_external_table_multiple_types]
read_data_from_gcs_multiple_types = BigQueryInsertJobOperator(
task_id="execute_query",
configuration={
"query": {
"query": f"SELECT COUNT(*) FROM `{GCP_PROJECT_ID}.{DATASET_NAME}.firestore_data`",
"useLegacySql": False,
}
},
)
delete_dataset = BigQueryDeleteDatasetOperator(
task_id="delete_dataset",
dataset_id=DATASET_NAME,
project_id=GCP_PROJECT_ID,
delete_contents=True,
trigger_rule=TriggerRule.ALL_DONE,
)
delete_bucket = GCSDeleteBucketOperator(
task_id="delete_bucket", bucket_name=BUCKET_NAME, trigger_rule=TriggerRule.ALL_DONE
)
(
# TEST SETUP
create_bucket
>> create_dataset
# TEST BODY
>> export_database_to_gcs
>> create_external_table_multiple_types
>> read_data_from_gcs_multiple_types
# TEST TEARDOWN
>> delete_dataset
>> delete_bucket
)
from tests.system.utils.watcher import watcher
# This test needs watcher in order to properly mark success/failure
# when "tearDown" task with trigger rule is part of the DAG
list(dag.tasks) >> watcher()
from tests.system.utils import get_test_run # noqa: E402
# Needed to run the example DAG with pytest (see: tests/system/README.md#run_via_pytest)
test_run = get_test_run(dag)
|
[
"[email protected]"
] | |
7ff31e753171c8ee1b134daddd4f357f9b1005f2
|
95386eb0d35216dec743388b2718da15f61b608d
|
/NETCONF/CODE/cisco_XR_netconf_2.py
|
51c0d0e66d9773f20f106e54327e21ec87ee7c61
|
[] |
no_license
|
Preet2fun/NetworkProgramming
|
930c48601d7f5199510479126806be298ccfcca5
|
78bf55d05574d85c373ad88df10c5df4139ed178
|
refs/heads/master
| 2020-05-17T23:54:48.517255 | 2019-09-30T09:43:45 | 2019-09-30T09:43:45 | 184,040,833 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 807 |
py
|
#! /usr/bin/python3.6
from ncclient import manager
from cisco_device import IOS_XR
import sys
import xmltodict
import logging
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)s: %(message)s', stream=sys.stdout)
with open("ietf_fileter_interface.xml") as filter_file:
    netconf_filter = filter_file.read()
print(netconf_filter)
with manager.connect(host=IOS_XR['address'],port=IOS_XR['port'],username=IOS_XR['username'],\
password=IOS_XR['password'],hostkey_verify=False,device_params={'name': 'default'},allow_agent=False, look_for_keys=False) as m:
netconf_reply = m.get(netconf_filter)
interface_dict = xmltodict.parse(netconf_reply)
print(interface_dict)
m.close_session()
#except Exception as e:
# print ("Encountered folloing error..")
# print (e)
# sys.exit()
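# Hedged illustration (not shipped with this script): the subtree filter file
# loaded above might select ietf-interfaces state like this, assuming the
# device implements the standard ietf-interfaces YANG model.
# <filter>
#   <interfaces-state xmlns="urn:ietf:params:xml:ns:yang:ietf-interfaces"/>
# </filter>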
|
[
"[email protected]"
] | |
25ad11b9bb7c1e93a1ad963baf5a210ef888e6fb
|
2d5d13c4bdc64202a520f32e7d4a44bb75e2004f
|
/week-02/d01/variable_mutation.py
|
22d74862ed356281daca16cc1e29a0bcfc7e14c3
|
[] |
no_license
|
green-fox-academy/andrasnyarai
|
43b32d5cc4ad3792ef8d621328f9593fc9623e0b
|
19759a146ba2f63f1c3e4e51160e6111ca0ee9c3
|
refs/heads/master
| 2021-09-07T16:19:34.636119 | 2018-02-26T00:38:00 | 2018-02-26T00:38:00 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 845 |
py
|
a = 3
# make it bigger by 10
print(a + 10)
b = 100
# make it smaller by 7
print(b - 7)
c = 44
# please double c's value
print(c * 2)
d = 125
# please divide by 5 d's value
print(d // 5)
e = 8
# please cube e's value
print(e ** 3)
f1 = 123
f2 = 345
# tell if f1 is bigger than f2 (print a boolean)
print(f1 > f2)
g1 = 350
g2 = 200
# tell if the double of g2 is bigger than g1 (print a boolean)
print((g2 * 2) > g1)
h = 1357988018575474
# tell if it has 11 as a divisor (print a boolean)
print(h % 11 == 0)
i1 = 10
i2 = 3
# tell if i1 is higher than i2 squared and smaller than i2 cubed (print a boolean)
print(i2 ** 2 < i1 < i2 ** 3)
j = 1521
# tell if j is divisible by 3 or 5 (print a boolean)
print(j % 3 == 0 or j % 5 == 0)
k = "Apple"
# fill the k variable with its content 4 times
print(k * 4)
|
[
"[email protected]"
] | |
4843baf002f6dae0fe5cb1470e278cd63ae8bfc9
|
fbdc36fe99d2f49b150b8cb8d2f09fcb10bd7ca4
|
/pytorch/train.py
|
6e947e04b93dd6dfe8522b3940eada1aa41fe75f
|
[
"MIT"
] |
permissive
|
MiaoDragon/completion3d
|
9ff17cfba8706991f5fe88f6d270007eba06481b
|
95430f16ae73c5b27180b542cf1c56f87b4cdbaf
|
refs/heads/master
| 2020-06-14T06:01:22.167439 | 2019-07-29T01:58:19 | 2019-07-29T01:58:19 | 194,927,243 | 0 | 0 | null | 2019-07-02T20:01:59 | 2019-07-02T20:01:59 | null |
UTF-8
|
Python
| false | false | 5,000 |
py
|
"""
"""
#from builtins import range
import os
import sys
sys.path.append(os.getcwd())
import _init_paths
from PointNetFCAE import *
#from modules.emd import EMDModule
from tools.obs_data_loader import load_dataset
from tools.import_tool import fileImport
import argparse
import torch
from chamfer_distance import ChamferDistance as chamfer
import torch.nn as nn
import numpy as np
import os
import pickle
#from tools.path_data_loader import load_dataset_end2end
from torch.autograd import Variable
import time
def to_var(x, volatile=False):
if torch.cuda.is_available():
x = x.cuda()
return Variable(x, volatile=volatile)
def main(args):
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" # see issue #152
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
importer = fileImport()
env_data_path = args.env_data_path
path_data_path = args.path_data_path
pcd_data_path = args.pointcloud_data_path
# append all envs and obstacles
#envs_files = os.listdir(env_data_path)
#envs_files = ['trainEnvironments.pkl']
envs_files = ['trainEnvironmentsLarge.pkl']
#envs_files = ['trainEnvironments.pkl']
obstacles = []
for envs_file in envs_files:
envs = importer.environments_import(env_data_path + envs_file)
print("Loading obstacle data...\n")
obs = load_dataset(envs, pcd_data_path, importer)
obstacles.append(obs)
obstacles = np.stack(obstacles).astype(float)[0].reshape(len(obs),-1,3)
print(obstacles.shape)
print("Loaded dataset, targets, and pontcloud obstacle vectors: ")
print("\n")
if not os.path.exists(args.trained_model_path):
os.makedirs(args.trained_model_path)
# Build the models
net = PointNetFCAE(code_ntfs=1024, num_points=len(obstacles[0]), output_channels=3)
if torch.cuda.is_available():
net.cuda()
# Loss and Optimizer
params = list(net.parameters())
optimizer = torch.optim.Adam(params, lr=args.learning_rate)
total_loss = []
epoch = 1
sm = 100 # start saving models after 100 epochs
criterion = chamfer()
print("Starting epochs...\n")
# epoch=1
done = False
for epoch in range(args.num_epochs):
# while (not done)
# every time use a new obstacle
start = time.time()
print("epoch" + str(epoch))
avg_loss = 0
for i in range(0, len(obstacles), args.batch_size):
# Forward, Backward and Optimize
# zero gradients
net.zero_grad()
# convert to pytorch tensors and Varialbes
bobs = torch.from_numpy(obstacles[i:i+args.batch_size]).type(torch.FloatTensor)
#bobs = to_var(bobs).view(len(bobs), -1, 3).permute(0,2,1)
bobs = to_var(bobs)
# forward pass through encoder
bt = net(bobs)
# compute overall loss and backprop all the way
loss1, loss2 = criterion(bobs, bt)
#loss1, loss2 = criterion(bobs, bt)
print('loss1')
print(loss1)
print('loss2')
print(loss2)
loss = torch.mean(loss1) + torch.mean(loss2)
print('loss:')
print(loss)
avg_loss = avg_loss+loss.data
loss.backward()
optimizer.step()
print("--average loss:")
# Save the models
if epoch == sm:
print("\nSaving model\n")
print("time: " + str(time.time() - start))
torch.save(net.state_dict(), os.path.join(
args.trained_model_path, 'pointnet_'+str(epoch)+'.pkl'))
#if (epoch != 1):
sm = sm+100 # save model after every 50 epochs from 100 epoch ownwards
torch.save(total_loss, 'total_loss.dat')
    print(net.state_dict())
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--env_data_path', type=str, default='./env/environment_data/')
parser.add_argument('--path_data_path', type=str, default='./data/train/paths/')
parser.add_argument('--pointcloud_data_path', type=str, default='./data/train/pcd/')
parser.add_argument('--trained_model_path', type=str, default='./models/sample_train/', help='path for saving trained models')
parser.add_argument('--batch_size', type=int, default=100)
parser.add_argument('--learning_rate', type=float, default=0.001)
parser.add_argument('--num_epochs', type=int, default=200)
parser.add_argument('--enc_input_size', type=int, default=16053)
parser.add_argument('--enc_output_size', type=int, default=60)
parser.add_argument('--mlp_input_size', type=int, default=74)
parser.add_argument('--mlp_output_size', type=int, default=7)
parser.add_argument('--train_ratio', type=float, default=0.8)
parser.add_argument('--envs_file', type=str, default='trainEnvironments.pkl')
parser.add_argument('--path_data_file', type=str, default='trainPaths.pkl')
args = parser.parse_args()
main(args)
|
[
"[email protected]"
] | |
82e3b37bdb2ad82c4f96ac43d5740de6895f4863
|
3df19730d14ee920f9efb5949a39b2b7ce03890b
|
/layerviewer/normalize.py
|
2ce0e883731472496aff3e3df6a0238e3c4bff69
|
[] |
no_license
|
shalevy1/ivigraph
|
6948f66ffa06e5132d5884658d1e78122ab6604c
|
4e8c29d92d36cee27de3f9100d20df5c8ce706c7
|
refs/heads/master
| 2023-03-16T15:44:06.248049 | 2013-08-05T16:40:25 | 2013-08-05T16:40:25 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 236 |
py
|
def norm01(dataIn,channelWise=False):
    # cast to float so the in-place divisions below are well defined for
    # integer inputs as well
    out = dataIn.astype(float)
if channelWise==False:
out-=out.min()
out/=out.max()
else :
for c in range(dataIn.shape[2]):
out[:,:,c]-=out[:,:,c].min()
out[:,:,c]/=out[:,:,c].max()
return out
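# Hedged usage sketch (not part of the original module): per-channel
# normalization of a random float image; assumes numpy arrays shaped (H, W, C).
if __name__ == "__main__":
    import numpy as np
    img = np.random.rand(4, 4, 3) * 255.0
    out = norm01(img, channelWise=True)
    print(out.min(), out.max())  # expect 0.0 and 1.0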
|
[
"[email protected]"
] | |
fa561650bdf461db8a95273cbd2fbb8bdc7b0e54
|
191a7f83d964f74a2b3c7faeb4fc47d9c63d521f
|
/.history/main_20210529105730.py
|
dfdfad8f4b990aa7ac7b90bed6d22e6fbfa94693
|
[] |
no_license
|
AndreLiu1225/Kinder-Values-Survey
|
2a317feee8d5b17c27da2b2116742656e35d8ab9
|
090c27da0c822abb7dfc0ec6e13ae1b3dcb7bbf3
|
refs/heads/master
| 2023-05-03T00:26:00.481423 | 2021-06-04T03:24:19 | 2021-06-04T03:24:19 | 371,989,154 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,582 |
py
|
from flask import Flask, render_template, redirect, url_for, flash, request
from flask_sqlalchemy import SQLAlchemy
from flask_wtf import FlaskForm
from wtforms import StringField, TextField, SubmitField, IntegerField, SelectField, RadioField
from wtforms.validators import DataRequired, Email, EqualTo, Length, ValidationError
import datetime
import plotly.graph_objs as go
app = Flask(__name__)
app.config['SECRET_KEY'] = "0c8973c8a5e001bb0c816a7b56c84f3a"
app.config['SQLALCHEMY_DATABASE_URI'] = "sqlite:///site.db"
db = SQLAlchemy(app)
class Survey(db.Model):
age = db.Column(db.Integer, nullable=False, primary_key=True)
email = db.Column(db.String(50), unique=True, nullable=False)
profession = db.Column(db.String(50), nullable=False)
power = db.Column(db.Integer, nullable=False)
tradition = db.Column(db.Integer, nullable=False)
achievement = db.Column(db.Integer, nullable=False)
stimulation = db.Column(db.Integer, nullable=False)
hedonism = db.Column(db.Integer, nullable=False)
conformity = db.Column(db.Integer, nullable=False)
self_direction = db.Column(db.Integer, nullable=False)
benevolence = db.Column(db.Integer, nullable=False)
universalism = db.Column(db.Integer, nullable=False)
date_posted = db.Column(db.DateTime, nullable=False, default=datetime.datetime.utcnow)
def __repr__(self):
return f"Survey('{self.age}', '{self.name}', '{self.date_posted}')"
# @staticmethod
# def is_email_in_database(email):
# return True if Survey.query.filter_by(email=email).first() else False
class MCQ(FlaskForm):
email = StringField("What is your email?", validators=[DataRequired(), Email(message=('Not a valid email address')), Length(max=50)])
age = IntegerField("Please enter your age", validators=[DataRequired()])
profession = StringField("What is your profession?", validators=[DataRequired(), Length(max=30)])
power = IntegerField("Do you desire a higher social status and dominance over others? (4- It is my utmost priority, 3-It is important, 2-Doesn't bother me, 1-Not even a thought)", validators=[DataRequired()])
tradition = IntegerField("Do you care about preserving traditions? (4- It is my utmost priority, 3-It is important, 2-Doesn't bother me, 1-Not even a thought)", validators=[DataRequired()])
achievement = IntegerField("Is achievement according to social standards important? (4- It is my utmost priority, 3-It is important, 2-Doesn't bother me, 1-Not even a thought)", validators=[DataRequired()])
stimulation = IntegerField("Do you prefer novel and exciting challenges in life? (4- It is my utmost priority, 3-It is important, 2-Doesn't bother me, 1-Not even a thought)", validators=[DataRequired()])
hedonism = IntegerField("Is personal gratification the most important? (4- It is my utmost priority, 3-It is important, 2-Doesn't bother me, 1-Not even a thought)", validators=[DataRequired()])
conformity = IntegerField("Do you think restraint of actions against social norms is important? (4- It is my utmost priority, 3-It is important, 2-Doesn't bother me, 1-Not even a thought)", validators=[DataRequired()])
self_direction = IntegerField("Do you think independent thought and action are important (4- It is my utmost priority, 3-It is important, 2-Doesn't bother me, 1-Not even a thought)", validators=[DataRequired()])
benevolence = IntegerField("Are preserving and enhancing the welfare of your friends and family the most important? (4- It is my utmost priority, 3-It is important, 2-Doesn't bother me, 1-Not even a thought)", validators=[DataRequired()])
universalism = IntegerField("I find it important to understand, tolerate, appreciate and protect all ethnicities and people. (4- It is my utmost priority, 3-It is important, 2-Doesn't bother me, 1-Not even a thought)", validators=[DataRequired()])
submit = SubmitField("Submit")
@app.route('/', methods=['POST','GET'])
def values_quiz():
form = MCQ()
if form.validate_on_submit():
post = Survey(age=form.age.data, email=form.email.data, profession=form.profession.data, power=form.power.data,
tradition=form.tradition.data, achievement=form.achievement.data, stimulation=form.stimulation.data,
hedonism=form.hedonism.data, conformity=form.conformity.data, self_direction=form.self_direction.data,
benevolence=form.benevolence.data, universalism=form.universalism.data)
# if Survey.is_email_in_database(form.email.data):
# flash(f"The user with {form.email.data} has already filled the survey", "danger")
db.session.add(post)
db.session.commit()
flash(f'Survey is completed by {form.email.data}', 'success')
return redirect(url_for('data_dashboard'))
    elif request.method == 'POST':
        flash('Ensure all questions are answered correctly', 'warning')
return render_template('MCQ.html', form=form)
@app.route('/results', methods=['POST','GET'])
def data_dashboard():
# power = request.form.get('power')
# tradition = request.form.get('tradition')
# achievement = request.form.get('achievement')
# stimulation = request.form.get('stimulation')
# hedonism = request.form.get('hedonism')
# conformity = request.form.get('conformity')
# self_direction = request.form.get('self_direction')
# benevolence = request.form.get('benevolence')
# universalism = request.form.get('universalism')
return render_template('data_dashboard.html')
if __name__ == "__main__":
app.run(debug=True)
|
[
"[email protected]"
] | |
cdfb8014bcedb551132758dec0df2ed87e21ee59
|
e9c9e38ed91969df78bbd7f9ca2a0fdb264d8ddb
|
/lib/python3.8/site-packages/ansible_collections/fortinet/fortimanager/plugins/modules/fmgr_user_radius_dynamicmapping_accountingserver.py
|
b9d570727b13fbb84e13fdb83e2a6d5f9444efec
|
[] |
no_license
|
Arceusir/PRELIM_SKILLS_EXAM
|
882fcf2868926f0bbfe1fb18d50e5fe165936c02
|
b685c5b28d058f59de2875c7579739c545df2e0c
|
refs/heads/master
| 2023-08-15T07:30:42.303283 | 2021-10-09T01:27:19 | 2021-10-09T01:27:19 | 415,167,192 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 12,540 |
py
|
#!/usr/bin/python
from __future__ import absolute_import, division, print_function
# Copyright 2019-2021 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fmgr_user_radius_dynamicmapping_accountingserver
short_description: Additional accounting servers.
description:
- This module is able to configure a FortiManager device.
- Examples include all parameters and values which need to be adjusted to data sources before usage.
version_added: "2.10"
author:
- Link Zheng (@chillancezen)
- Jie Xue (@JieX19)
- Frank Shen (@fshen01)
- Hongbin Lu (@fgtdev-hblu)
notes:
- Running in workspace locking mode is supported in this FortiManager module, the top
level parameters workspace_locking_adom and workspace_locking_timeout help do the work.
- To create or update an object, use state present directive.
- To delete an object, use state absent directive.
- Normally, running one module can fail when a non-zero rc is returned. you can also override
the conditions to fail or succeed with parameters rc_failed and rc_succeeded
options:
enable_log:
description: Enable/Disable logging for task
required: false
type: bool
default: false
proposed_method:
description: The overridden method for the underlying Json RPC request
required: false
type: str
choices:
- update
- set
- add
bypass_validation:
description: only set to True when module schema diffs with FortiManager API structure, module continues to execute without validating parameters
required: false
type: bool
default: false
workspace_locking_adom:
description: the adom to lock for FortiManager running in workspace mode, the value can be global and others including root
required: false
type: str
workspace_locking_timeout:
description: the maximum time in seconds to wait for other user to release the workspace lock
required: false
type: int
default: 300
state:
description: the directive to create, update or delete an object
type: str
required: true
choices:
- present
- absent
rc_succeeded:
description: the rc codes list with which the conditions to succeed will be overriden
type: list
required: false
rc_failed:
description: the rc codes list with which the conditions to fail will be overriden
type: list
required: false
adom:
description: the parameter (adom) in requested url
type: str
required: true
radius:
description: the parameter (radius) in requested url
type: str
required: true
dynamic_mapping:
description: the parameter (dynamic_mapping) in requested url
type: str
required: true
user_radius_dynamicmapping_accountingserver:
description: the top level parameters set
required: false
type: dict
suboptions:
id:
type: int
description: no description
interface:
type: str
description: no description
interface-select-method:
type: str
description: no description
choices:
- 'auto'
- 'sdwan'
- 'specify'
port:
type: int
description: no description
secret:
description: no description
type: str
server:
type: str
description: no description
source-ip:
type: str
description: no description
status:
type: str
description: no description
choices:
- 'disable'
- 'enable'
'''
EXAMPLES = '''
- hosts: fortimanager-inventory
collections:
- fortinet.fortimanager
connection: httpapi
vars:
ansible_httpapi_use_ssl: True
ansible_httpapi_validate_certs: False
ansible_httpapi_port: 443
tasks:
- name: Additional accounting servers.
fmgr_user_radius_dynamicmapping_accountingserver:
bypass_validation: False
workspace_locking_adom: <value in [global, custom adom including root]>
workspace_locking_timeout: 300
rc_succeeded: [0, -2, -3, ...]
rc_failed: [-2, -3, ...]
adom: <your own value>
radius: <your own value>
dynamic_mapping: <your own value>
state: <value in [present, absent]>
user_radius_dynamicmapping_accountingserver:
id: <value of integer>
interface: <value of string>
interface-select-method: <value in [auto, sdwan, specify]>
port: <value of integer>
secret: <value of string>
server: <value of string>
source-ip: <value of string>
status: <value in [disable, enable]>
'''
RETURN = '''
request_url:
description: The full url requested
returned: always
type: str
sample: /sys/login/user
response_code:
description: The status of api request
returned: always
type: int
sample: 0
response_message:
description: The descriptive message of the api response
type: str
returned: always
sample: OK.
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import NAPIManager
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_galaxy_version
from ansible_collections.fortinet.fortimanager.plugins.module_utils.napi import check_parameter_bypass
def main():
jrpc_urls = [
'/pm/config/global/obj/user/radius/{radius}/dynamic_mapping/{dynamic_mapping}/accounting-server',
'/pm/config/adom/{adom}/obj/user/radius/{radius}/dynamic_mapping/{dynamic_mapping}/accounting-server'
]
perobject_jrpc_urls = [
'/pm/config/global/obj/user/radius/{radius}/dynamic_mapping/{dynamic_mapping}/accounting-server/{accounting-server}',
'/pm/config/adom/{adom}/obj/user/radius/{radius}/dynamic_mapping/{dynamic_mapping}/accounting-server/{accounting-server}'
]
url_params = ['adom', 'radius', 'dynamic_mapping']
module_primary_key = 'id'
module_arg_spec = {
'enable_log': {
'type': 'bool',
'required': False,
'default': False
},
'proposed_method': {
'type': 'str',
'required': False,
'choices': [
'set',
'update',
'add'
]
},
'bypass_validation': {
'type': 'bool',
'required': False,
'default': False
},
'workspace_locking_adom': {
'type': 'str',
'required': False
},
'workspace_locking_timeout': {
'type': 'int',
'required': False,
'default': 300
},
'rc_succeeded': {
'required': False,
'type': 'list'
},
'rc_failed': {
'required': False,
'type': 'list'
},
'state': {
'type': 'str',
'required': True,
'choices': [
'present',
'absent'
]
},
'adom': {
'required': True,
'type': 'str'
},
'radius': {
'required': True,
'type': 'str'
},
'dynamic_mapping': {
'required': True,
'type': 'str'
},
'user_radius_dynamicmapping_accountingserver': {
'required': False,
'type': 'dict',
'revision': {
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'options': {
'id': {
'required': True,
'revision': {
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'int'
},
'interface': {
'required': False,
'revision': {
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
},
'interface-select-method': {
'required': False,
'revision': {
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'choices': [
'auto',
'sdwan',
'specify'
],
'type': 'str'
},
'port': {
'required': False,
'revision': {
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'int'
},
'secret': {
'required': False,
'revision': {
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
},
'server': {
'required': False,
'revision': {
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
},
'source-ip': {
'required': False,
'revision': {
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'type': 'str'
},
'status': {
'required': False,
'revision': {
'6.4.2': True,
'6.4.5': True,
'7.0.0': True
},
'choices': [
'disable',
'enable'
],
'type': 'str'
}
}
}
}
params_validation_blob = []
check_galaxy_version(module_arg_spec)
module = AnsibleModule(argument_spec=check_parameter_bypass(module_arg_spec, 'user_radius_dynamicmapping_accountingserver'),
supports_check_mode=False)
fmgr = None
if module._socket_path:
connection = Connection(module._socket_path)
connection.set_option('enable_log', module.params['enable_log'] if 'enable_log' in module.params else False)
fmgr = NAPIManager(jrpc_urls, perobject_jrpc_urls, module_primary_key, url_params, module, connection, top_level_schema_name='data')
fmgr.validate_parameters(params_validation_blob)
fmgr.process_curd(argument_specs=module_arg_spec)
else:
module.fail_json(msg='MUST RUN IN HTTPAPI MODE')
module.exit_json(meta=module.params)
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
8a4bc42d9cafe0d8a4ab3a08aefcb128066c613a
|
d6eca1b4b056beb41ac494db7399e1f146099c97
|
/alien_invasion/exercise/key.py
|
7a13a30d303781ea41b65e756d2a29ca1604771a
|
[] |
no_license
|
liangsongyou/python-crash-course-code
|
15090b48d77de1115bfaaaa6e5638a9bb9b3c7cc
|
f369e18030f2aafe358dd0fab1e479ca7bf4ceb8
|
refs/heads/master
| 2021-05-08T06:42:29.147923 | 2017-08-11T06:41:30 | 2017-08-11T06:41:30 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 429 |
py
|
import sys
import pygame
def main():
"""Main loop of the window."""
pygame.init()
scr_size = (1386,700)
screen = pygame.display.set_mode(scr_size)
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
elif event.type == pygame.KEYDOWN:
print('{}'.format(event.key))
pygame.display.flip()
main()
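# Hedged aside (not part of the original exercise): event.key is a numeric
# pygame keycode; pygame.key.name(event.key) would print a readable name
# such as 'a' or 'space' instead of the raw integer.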
|
[
"[email protected]"
] | |
e051541904d7e0186fdaa038986ca03eacfcd709
|
608df16a4271aac72e15e5031601a5495ea27d6b
|
/Projects/dhl/env/bin/wheel
|
b90fc275d4c6b7143a86c237df4cc2e13b4bf859
|
[] |
no_license
|
HackV7/k-means_clustering
|
2895108a57a60beffd8b525cbd972fe644f36404
|
5e40cece8037ef6ec499af00a7e0c3911e21d764
|
refs/heads/master
| 2021-07-09T06:27:00.543658 | 2017-10-08T03:56:35 | 2017-10-08T03:56:35 | 106,089,624 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 246 |
#!/home/vishrut/Desktop/Projects/dhl/env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from wheel.tool import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
|
[
"[email protected]"
] | ||
c5f1f29dadca98aef9c9bf1afbe50a266d59b0a4
|
55ab64b67d8abc02907eb43a54ff6c326ded6b72
|
/scripts/addon_library/local/RetopoFlow/addon_common/common/updater_core.py
|
9b22e9e85aa82bc9c2b1df0f0149ef92ac3fb991
|
[
"MIT",
"GPL-3.0-only",
"GPL-1.0-or-later",
"GPL-3.0-or-later"
] |
permissive
|
Tilapiatsu/blender-custom_config
|
2f03b0bb234c3b098d2830732296d199c91147d0
|
00e14fc190ebff66cf50ff911f25cf5ad3529f8f
|
refs/heads/master
| 2023-08-16T14:26:39.990840 | 2023-08-16T01:32:41 | 2023-08-16T01:32:41 | 161,249,779 | 6 | 2 |
MIT
| 2023-04-12T05:33:59 | 2018-12-10T23:25:14 |
Python
|
UTF-8
|
Python
| false | false | 64,117 |
py
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
"""
See documentation for usage
https://github.com/CGCookie/blender-addon-updater
"""
__version__ = "1.1.0"
import errno
import traceback
import platform
import ssl
import urllib.request
import urllib
import os
import json
import zipfile
import shutil
import threading
import fnmatch
from datetime import datetime, timedelta
# Blender imports, used in limited cases.
import bpy
import addon_utils
# -----------------------------------------------------------------------------
# The main class
# -----------------------------------------------------------------------------
class SingletonUpdater:
"""Addon updater service class.
This is the singleton class to instance once and then reference where
needed throughout the addon. It implements all the interfaces for running
updates.
"""
def __init__(self):
self._engine = GithubEngine()
self._user = None
self._repo = None
self._website = None
self._current_version = None
self._subfolder_path = None
self._tags = list()
self._tag_latest = None
self._tag_names = list()
self._latest_release = None
self._use_releases = False
self._include_branches = False
self._include_branch_list = ['master']
self._include_branch_auto_check = False
self._manual_only = False
self._version_min_update = None
self._version_max_update = None
# By default, backup current addon on update/target install.
self._backup_current = True
self._backup_ignore_patterns = None
# Set patterns the files to overwrite during an update.
self._overwrite_patterns = ["*.py", "*.pyc"]
self._remove_pre_update_patterns = list()
# By default, don't auto disable+re-enable the addon after an update,
# as this is less stable/often won't fully reload all modules anyways.
self._auto_reload_post_update = False
# Settings for the frequency of automated background checks.
self._check_interval_enabled = False
self._check_interval_months = 0
self._check_interval_days = 7
self._check_interval_hours = 0
self._check_interval_minutes = 0
# runtime variables, initial conditions
self._verbose = False
self._use_print_traces = True
self._fake_install = False
self._async_checking = False # only true when async daemon started
self._update_ready = None
self._update_link = None
self._update_version = None
self._source_zip = None
self._check_thread = None
self._select_link = None
self.skip_tag = None
# Get data from the running blender module (addon).
# Note: this file _could_ be nested
addon_module_path = __package__.split('.')
root_path = os.path.abspath(os.path.join(os.path.dirname(__file__), *(['..'] * (len(addon_module_path)-1))))
self._addon = addon_module_path[0].lower()
self._addon_package = addon_module_path[0] # must not change!
self._updater_path = os.path.join(root_path, 'updater_tmp')
self._addon_root = root_path
self._json = dict()
self._error = None
self._error_msg = None
self._prefiltered_tag_count = 0
# UI properties, not used within this module but still useful to have.
# to verify a valid import, in place of placeholder import
self.show_popups = True # UI uses to show popups or not.
self.invalid_updater = False
# pre-assign basic select-link function
def select_link_function(self, tag):
return tag["zipball_url"]
self._select_link = select_link_function
def print_trace(self):
"""Print handled exception details when use_print_traces is set"""
if self._use_print_traces:
traceback.print_exc()
def print_verbose(self, msg):
"""Print out a verbose logging message if verbose is true."""
if not self._verbose:
return
print("{} addon: ".format(self.addon) + msg)
# -------------------------------------------------------------------------
# Getters and setters
# -------------------------------------------------------------------------
@property
def addon(self):
return self._addon
@addon.setter
def addon(self, value):
self._addon = str(value)
@property
def api_url(self):
return self._engine.api_url
@api_url.setter
def api_url(self, value):
if not self.check_is_url(value):
raise ValueError("Not a valid URL: " + value)
self._engine.api_url = value
@property
def async_checking(self):
return self._async_checking
@property
def auto_reload_post_update(self):
return self._auto_reload_post_update
@auto_reload_post_update.setter
def auto_reload_post_update(self, value):
try:
self._auto_reload_post_update = bool(value)
except:
raise ValueError("auto_reload_post_update must be a boolean value")
@property
def backup_current(self):
return self._backup_current
@backup_current.setter
def backup_current(self, value):
if value is None:
self._backup_current = False
else:
self._backup_current = value
@property
def backup_ignore_patterns(self):
return self._backup_ignore_patterns
@backup_ignore_patterns.setter
def backup_ignore_patterns(self, value):
if value is None:
self._backup_ignore_patterns = None
elif not isinstance(value, list):
raise ValueError("Backup pattern must be in list format")
else:
self._backup_ignore_patterns = value
@property
def check_interval(self):
return (self._check_interval_enabled,
self._check_interval_months,
self._check_interval_days,
self._check_interval_hours,
self._check_interval_minutes)
@property
def current_version(self):
return self._current_version
@current_version.setter
def current_version(self, tuple_values):
if tuple_values is None:
self._current_version = None
return
elif type(tuple_values) is not tuple:
try:
tuple(tuple_values)
except:
raise ValueError(
"current_version must be a tuple of integers")
for i in tuple_values:
if type(i) is not int:
raise ValueError(
"current_version must be a tuple of integers")
self._current_version = tuple(tuple_values)
@property
def engine(self):
return self._engine.name
@engine.setter
def engine(self, value):
engine = value.lower()
if engine == "github":
self._engine = GithubEngine()
elif engine == "gitlab":
self._engine = GitlabEngine()
elif engine == "bitbucket":
self._engine = BitbucketEngine()
else:
raise ValueError("Invalid engine selection")
@property
def error(self):
return self._error
@property
def error_msg(self):
return self._error_msg
@property
def fake_install(self):
return self._fake_install
@fake_install.setter
def fake_install(self, value):
if not isinstance(value, bool):
raise ValueError("fake_install must be a boolean value")
self._fake_install = bool(value)
# not currently used
@property
def include_branch_auto_check(self):
return self._include_branch_auto_check
@include_branch_auto_check.setter
def include_branch_auto_check(self, value):
try:
self._include_branch_auto_check = bool(value)
except:
raise ValueError("include_branch_autocheck must be a boolean")
@property
def include_branch_list(self):
return self._include_branch_list
@include_branch_list.setter
def include_branch_list(self, value):
try:
if value is None:
self._include_branch_list = ['master']
elif not isinstance(value, list) or len(value) == 0:
raise ValueError(
"include_branch_list should be a list of valid branches")
else:
self._include_branch_list = value
except:
raise ValueError(
"include_branch_list should be a list of valid branches")
@property
def include_branches(self):
return self._include_branches
@include_branches.setter
def include_branches(self, value):
try:
self._include_branches = bool(value)
except:
raise ValueError("include_branches must be a boolean value")
@property
def json(self):
if len(self._json) == 0:
self.set_updater_json()
return self._json
@property
def latest_release(self):
if self._latest_release is None:
return None
return self._latest_release
@property
def manual_only(self):
return self._manual_only
@manual_only.setter
def manual_only(self, value):
try:
self._manual_only = bool(value)
except:
raise ValueError("manual_only must be a boolean value")
@property
def overwrite_patterns(self):
return self._overwrite_patterns
@overwrite_patterns.setter
def overwrite_patterns(self, value):
if value is None:
self._overwrite_patterns = ["*.py", "*.pyc"]
elif not isinstance(value, list):
raise ValueError("overwrite_patterns needs to be in a list format")
else:
self._overwrite_patterns = value
@property
def private_token(self):
return self._engine.token
@private_token.setter
def private_token(self, value):
if value is None:
self._engine.token = None
else:
self._engine.token = str(value)
@property
def remove_pre_update_patterns(self):
return self._remove_pre_update_patterns
@remove_pre_update_patterns.setter
def remove_pre_update_patterns(self, value):
if value is None:
self._remove_pre_update_patterns = list()
elif not isinstance(value, list):
raise ValueError(
"remove_pre_update_patterns needs to be in a list format")
else:
self._remove_pre_update_patterns = value
@property
def repo(self):
return self._repo
@repo.setter
def repo(self, value):
try:
self._repo = str(value)
except:
raise ValueError("repo must be a string value")
@property
def select_link(self):
return self._select_link
@select_link.setter
def select_link(self, value):
# ensure it is a function assignment, with signature:
# input self, tag; returns link name
if not hasattr(value, "__call__"):
raise ValueError("select_link must be a function")
self._select_link = value
@property
def stage_path(self):
return self._updater_path
@stage_path.setter
def stage_path(self, value):
if value is None:
self.print_verbose("Aborting assigning stage_path, it's null")
return
        elif not os.path.exists(value):
            try:
                os.makedirs(value)
            except:
                self.print_verbose("Error trying to create staging path")
self.print_trace()
return
self._updater_path = value
@property
def subfolder_path(self):
return self._subfolder_path
@subfolder_path.setter
def subfolder_path(self, value):
self._subfolder_path = value
@property
def tags(self):
if len(self._tags) == 0:
return list()
tag_names = list()
for tag in self._tags:
tag_names.append(tag["name"])
return tag_names
@property
def tag_latest(self):
if self._tag_latest is None:
return None
return self._tag_latest["name"]
@property
def update_link(self):
return self._update_link
@property
def update_ready(self):
return self._update_ready
@property
def update_version(self):
return self._update_version
@property
def use_releases(self):
return self._use_releases
@use_releases.setter
def use_releases(self, value):
try:
self._use_releases = bool(value)
except:
raise ValueError("use_releases must be a boolean value")
@property
def user(self):
return self._user
@user.setter
def user(self, value):
try:
self._user = str(value)
except:
raise ValueError("User must be a string value")
@property
def verbose(self):
return self._verbose
@verbose.setter
def verbose(self, value):
try:
self._verbose = bool(value)
self.print_verbose("Verbose is enabled")
except:
raise ValueError("Verbose must be a boolean value")
@property
def use_print_traces(self):
return self._use_print_traces
@use_print_traces.setter
def use_print_traces(self, value):
try:
self._use_print_traces = bool(value)
except:
raise ValueError("use_print_traces must be a boolean value")
@property
def version_max_update(self):
return self._version_max_update
@version_max_update.setter
def version_max_update(self, value):
if value is None:
self._version_max_update = None
return
if not isinstance(value, tuple):
raise ValueError("Version maximum must be a tuple")
for subvalue in value:
if type(subvalue) is not int:
raise ValueError("Version elements must be integers")
self._version_max_update = value
@property
def version_min_update(self):
return self._version_min_update
@version_min_update.setter
def version_min_update(self, value):
if value is None:
self._version_min_update = None
return
if not isinstance(value, tuple):
raise ValueError("Version minimum must be a tuple")
for subvalue in value:
            if type(subvalue) is not int:
raise ValueError("Version elements must be integers")
self._version_min_update = value
@property
def website(self):
return self._website
@website.setter
def website(self, value):
if not self.check_is_url(value):
raise ValueError("Not a valid URL: " + value)
self._website = value
# -------------------------------------------------------------------------
# Parameter validation related functions
# -------------------------------------------------------------------------
@staticmethod
def check_is_url(url):
if not ("http://" in url or "https://" in url):
return False
if "." not in url:
return False
return True
def _get_tag_names(self):
tag_names = list()
self.get_tags()
for tag in self._tags:
tag_names.append(tag["name"])
return tag_names
def set_check_interval(self, enabled=False,
months=0, days=14, hours=0, minutes=0):
"""Set the time interval between automated checks, and if enabled.
        Defaults to enabled=False so that frequency is not enforced; when
        enabled, the default interval is two weeks.
"""
if type(enabled) is not bool:
raise ValueError("Enable must be a boolean value")
if type(months) is not int:
raise ValueError("Months must be an integer value")
if type(days) is not int:
raise ValueError("Days must be an integer value")
if type(hours) is not int:
raise ValueError("Hours must be an integer value")
if type(minutes) is not int:
raise ValueError("Minutes must be an integer value")
if not enabled:
self._check_interval_enabled = False
else:
self._check_interval_enabled = True
self._check_interval_months = months
self._check_interval_days = days
self._check_interval_hours = hours
self._check_interval_minutes = minutes
def __repr__(self):
return "<Module updater from {a}>".format(a=__file__)
def __str__(self):
return "Updater, with user: {a}, repository: {b}, url: {c}".format(
a=self._user, b=self._repo, c=self.form_repo_url())
# -------------------------------------------------------------------------
# API-related functions
# -------------------------------------------------------------------------
def form_repo_url(self):
return self._engine.form_repo_url(self)
def form_tags_url(self):
return self._engine.form_tags_url(self)
def form_branch_url(self, branch):
return self._engine.form_branch_url(branch, self)
def get_tags(self):
request = self.form_tags_url()
self.print_verbose("Getting tags from server")
# get all tags, internet call
all_tags = self._engine.parse_tags(self.get_api(request), self)
if all_tags is not None:
self._prefiltered_tag_count = len(all_tags)
else:
self._prefiltered_tag_count = 0
all_tags = list()
# pre-process to skip tags
if self.skip_tag is not None:
self._tags = [tg for tg in all_tags if not self.skip_tag(self, tg)]
else:
self._tags = all_tags
# get additional branches too, if needed, and place in front
# Does NO checking here whether branch is valid
if self._include_branches:
temp_branches = self._include_branch_list.copy()
temp_branches.reverse()
for branch in temp_branches:
request = self.form_branch_url(branch)
include = {
"name": branch.title(),
"zipball_url": request
}
                self._tags = [include] + self._tags  # prepend to front
if self._tags is None:
# some error occurred
self._tag_latest = None
self._tags = list()
elif self._prefiltered_tag_count == 0 and not self._include_branches:
self._tag_latest = None
if self._error is None: # if not None, could have had no internet
self._error = "No releases found"
self._error_msg = "No releases or tags found in repository"
self.print_verbose("No releases or tags found in repository")
elif self._prefiltered_tag_count == 0 and self._include_branches:
if not self._error:
self._tag_latest = self._tags[0]
branch = self._include_branch_list[0]
self.print_verbose("{} branch found, no releases: {}".format(
branch, self._tags[0]))
elif ((len(self._tags) - len(self._include_branch_list) == 0
and self._include_branches)
or (len(self._tags) == 0 and not self._include_branches)
and self._prefiltered_tag_count > 0):
self._tag_latest = None
self._error = "No releases available"
self._error_msg = "No versions found within compatible version range"
self.print_verbose(self._error_msg)
else:
if not self._include_branches:
self._tag_latest = self._tags[0]
self.print_verbose(
"Most recent tag found:" + str(self._tags[0]['name']))
else:
# Don't return branch if in list.
n = len(self._include_branch_list)
self._tag_latest = self._tags[n] # guaranteed at least len()=n+1
self.print_verbose(
"Most recent tag found:" + str(self._tags[n]['name']))
def get_raw(self, url):
"""All API calls to base url."""
request = urllib.request.Request(url)
try:
context = ssl._create_unverified_context()
except:
            # Some Blender-packaged Python versions don't have this; it is
            # largely useful for local network setups, otherwise minimal impact.
context = None
# Setup private request headers if appropriate.
if self._engine.token is not None:
if self._engine.name == "gitlab":
request.add_header('PRIVATE-TOKEN', self._engine.token)
else:
self.print_verbose("Tokens not setup for engine yet")
# Always set user agent.
request.add_header(
'User-Agent', "Python/" + str(platform.python_version()))
# Run the request.
try:
if context:
result = urllib.request.urlopen(request, context=context)
else:
result = urllib.request.urlopen(request)
except urllib.error.HTTPError as e:
if str(e.code) == "403":
self._error = "HTTP error (access denied)"
self._error_msg = str(e.code) + " - server error response"
print(self._error, self._error_msg)
else:
self._error = "HTTP error"
self._error_msg = str(e.code)
print(self._error, self._error_msg)
self.print_trace()
self._update_ready = None
except urllib.error.URLError as e:
reason = str(e.reason)
if "TLSV1_ALERT" in reason or "SSL" in reason.upper():
self._error = "Connection rejected, download manually"
self._error_msg = reason
print(self._error, self._error_msg)
else:
self._error = "URL error, check internet connection"
self._error_msg = reason
print(self._error, self._error_msg)
self.print_trace()
self._update_ready = None
return None
else:
result_string = result.read()
result.close()
return result_string.decode()
def get_api(self, url):
"""Result of all api calls, decoded into json format."""
get = None
get = self.get_raw(url)
if get is not None:
try:
return json.JSONDecoder().decode(get)
except Exception as e:
self._error = "API response has invalid JSON format"
                self._error_msg = str(e)
self._update_ready = None
print(self._error, self._error_msg)
self.print_trace()
return None
else:
return None
def stage_repository(self, url):
"""Create a working directory and download the new files"""
local = os.path.join(self._updater_path, "update_staging")
error = None
# Make/clear the staging folder, to ensure the folder is always clean.
self.print_verbose(
"Preparing staging folder for download:\n" + str(local))
if os.path.isdir(local):
try:
shutil.rmtree(local)
os.makedirs(local)
except:
error = "failed to remove existing staging directory"
self.print_trace()
else:
try:
os.makedirs(local)
except:
error = "failed to create staging directory"
self.print_trace()
if error is not None:
self.print_verbose("Error: Aborting update, " + error)
self._error = "Update aborted, staging path error"
self._error_msg = "Error: {}".format(error)
return False
if self._backup_current:
self.create_backup()
self.print_verbose("Now retrieving the new source zip")
self._source_zip = os.path.join(local, "source.zip")
self.print_verbose("Starting download update zip")
try:
request = urllib.request.Request(url)
context = ssl._create_unverified_context()
# Setup private token if appropriate.
if self._engine.token is not None:
if self._engine.name == "gitlab":
request.add_header('PRIVATE-TOKEN', self._engine.token)
else:
self.print_verbose(
"Tokens not setup for selected engine yet")
# Always set user agent
request.add_header(
'User-Agent', "Python/" + str(platform.python_version()))
self.url_retrieve(urllib.request.urlopen(request, context=context),
self._source_zip)
# Add additional checks on file size being non-zero.
self.print_verbose("Successfully downloaded update zip")
return True
except Exception as e:
self._error = "Error retrieving download, bad link?"
self._error_msg = "Error: {}".format(e)
print("Error retrieving download, bad link?")
print("Error: {}".format(e))
self.print_trace()
return False
def create_backup(self):
"""Save a backup of the current installed addon prior to an update."""
self.print_verbose("Backing up current addon folder")
local = os.path.join(self._updater_path, "backup")
tempdest = os.path.join(
self._addon_root, os.pardir, self._addon + "_updater_backup_temp")
self.print_verbose("Backup destination path: " + str(local))
if os.path.isdir(local):
try:
shutil.rmtree(local)
except:
self.print_verbose(
"Failed to removed previous backup folder, continuing")
self.print_trace()
# Remove the temp folder.
# Shouldn't exist but could if previously interrupted.
if os.path.isdir(tempdest):
try:
shutil.rmtree(tempdest)
except:
self.print_verbose(
"Failed to remove existing temp folder, continuing")
self.print_trace()
# Make a full addon copy, temporarily placed outside the addon folder.
if self._backup_ignore_patterns is not None:
try:
shutil.copytree(self._addon_root, tempdest,
ignore=shutil.ignore_patterns(
*self._backup_ignore_patterns))
except:
print("Failed to create backup, still attempting update.")
self.print_trace()
return
else:
try:
shutil.copytree(self._addon_root, tempdest)
except:
print("Failed to create backup, still attempting update.")
self.print_trace()
return
shutil.move(tempdest, local)
# Save the date for future reference.
now = datetime.now()
self._json["backup_date"] = "{m}-{d}-{yr}".format(
m=now.strftime("%B"), d=now.day, yr=now.year)
self.save_updater_json()
def restore_backup(self):
"""Restore the last backed up addon version, user initiated only"""
self.print_verbose("Restoring backup, backing up current addon folder")
backuploc = os.path.join(self._updater_path, "backup")
tempdest = os.path.join(
self._addon_root, os.pardir, self._addon + "_updater_backup_temp")
tempdest = os.path.abspath(tempdest)
        # Move the backup contents back into place, rather than copying.
shutil.move(backuploc, tempdest)
shutil.rmtree(self._addon_root)
os.rename(tempdest, self._addon_root)
self._json["backup_date"] = ""
self._json["just_restored"] = True
self._json["just_updated"] = True
self.save_updater_json()
self.reload_addon()
def unpack_staged_zip(self, clean=False):
"""Unzip the downloaded file, and validate contents"""
if not os.path.isfile(self._source_zip):
self.print_verbose("Error, update zip not found")
self._error = "Install failed"
self._error_msg = "Downloaded zip not found"
return -1
# Clear the existing source folder in case previous files remain.
outdir = os.path.join(self._updater_path, "source")
try:
shutil.rmtree(outdir)
self.print_verbose("Source folder cleared")
except:
self.print_trace()
# Create parent directories if needed, would not be relevant unless
# installing addon into another location or via an addon manager.
try:
os.mkdir(outdir)
except Exception as err:
print("Error occurred while making extract dir:")
print(str(err))
self.print_trace()
self._error = "Install failed"
self._error_msg = "Failed to make extract directory"
return -1
if not os.path.isdir(outdir):
print("Failed to create source directory")
self._error = "Install failed"
self._error_msg = "Failed to create extract directory"
return -1
self.print_verbose(
"Begin extracting source from zip:" + str(self._source_zip))
zfile = zipfile.ZipFile(self._source_zip, "r")
if not zfile:
self._error = "Install failed"
self._error_msg = "Resulting file is not a zip, cannot extract"
self.print_verbose(self._error_msg)
return -1
# Now extract directly from the first subfolder (not root)
# this avoids adding the first subfolder to the path length,
# which can be too long if the download has the SHA in the name.
zsep = '/' # Not using os.sep, always the / value even on windows.
for name in zfile.namelist():
if zsep not in name:
continue
            top_folder = name[:name.index(zsep) + 1]
            if name == top_folder:
                continue  # skip the top-level folder entry itself
sub_path = name[name.index(zsep) + 1:]
if name.endswith(zsep):
try:
os.mkdir(os.path.join(outdir, sub_path))
self.print_verbose(
"Extract - mkdir: " + os.path.join(outdir, sub_path))
except OSError as exc:
if exc.errno != errno.EEXIST:
self._error = "Install failed"
self._error_msg = "Could not create folder from zip"
self.print_trace()
return -1
else:
with open(os.path.join(outdir, sub_path), "wb") as outfile:
data = zfile.read(name)
outfile.write(data)
self.print_verbose(
"Extract - create: " + os.path.join(outdir, sub_path))
self.print_verbose("Extracted source")
unpath = os.path.join(self._updater_path, "source")
if not os.path.isdir(unpath):
self._error = "Install failed"
self._error_msg = "Extracted path does not exist"
print("Extracted path does not exist: ", unpath)
return -1
        if self._subfolder_path:
            self._subfolder_path = self._subfolder_path.replace(
                '/', os.path.sep)
            self._subfolder_path = self._subfolder_path.replace(
                '\\', os.path.sep)
# Either directly in root of zip/one subfolder, or use specified path.
if not os.path.isfile(os.path.join(unpath, "__init__.py")):
dirlist = os.listdir(unpath)
if len(dirlist) > 0:
if self._subfolder_path == "" or self._subfolder_path is None:
unpath = os.path.join(unpath, dirlist[0])
else:
unpath = os.path.join(unpath, self._subfolder_path)
# Smarter check for additional sub folders for a single folder
# containing the __init__.py file.
if not os.path.isfile(os.path.join(unpath, "__init__.py")):
print("Not a valid addon found")
print("Paths:")
print(dirlist)
self._error = "Install failed"
self._error_msg = "No __init__ file found in new source"
return -1
# Merge code with the addon directory, using blender default behavior,
# plus any modifiers indicated by user (e.g. force remove/keep).
self.deep_merge_directory(self._addon_root, unpath, clean)
# Now save the json state.
# Change to True to trigger the handler on other side if allowing
# reloading within same blender session.
self._json["just_updated"] = True
self.save_updater_json()
self.reload_addon()
self._update_ready = False
return 0
def deep_merge_directory(self, base, merger, clean=False):
"""Merge folder 'merger' into 'base' without deleting existing"""
if not os.path.exists(base):
self.print_verbose("Base path does not exist:" + str(base))
return -1
elif not os.path.exists(merger):
self.print_verbose("Merger path does not exist")
return -1
# Path to be aware of and not overwrite/remove/etc.
staging_path = os.path.join(self._updater_path, "update_staging")
        # If clean install is enabled, clear existing files ahead of time.
        # Note: this will not delete the updater json, updater folder, or
        # staging folder, but will delete all other folders/files in the
        # addon directory.
error = None
if clean:
try:
# Implement clearing of all folders/files, except the updater
# folder and updater json.
# Careful, this deletes entire subdirectories recursively...
# Make sure that base is not a high level shared folder, but
# is dedicated just to the addon itself.
self.print_verbose(
"clean=True, clearing addon folder to fresh install state")
# Remove root files and folders (except update folder).
files = [f for f in os.listdir(base)
if os.path.isfile(os.path.join(base, f))]
folders = [f for f in os.listdir(base)
if os.path.isdir(os.path.join(base, f))]
for f in files:
os.remove(os.path.join(base, f))
self.print_verbose(
"Clean removing file {}".format(os.path.join(base, f)))
for f in folders:
                    if os.path.join(base, f) == self._updater_path:
continue
shutil.rmtree(os.path.join(base, f))
self.print_verbose(
"Clean removing folder and contents {}".format(
os.path.join(base, f)))
except Exception as err:
error = "failed to create clean existing addon folder"
print(error, str(err))
self.print_trace()
# Walk through the base addon folder for rules on pre-removing
# but avoid removing/altering backup and updater file.
for path, dirs, files in os.walk(base):
# Prune ie skip updater folder.
dirs[:] = [d for d in dirs
if os.path.join(path, d) not in [self._updater_path]]
for file in files:
for pattern in self.remove_pre_update_patterns:
if fnmatch.filter([file], pattern):
try:
fl = os.path.join(path, file)
os.remove(fl)
self.print_verbose("Pre-removed file " + file)
except OSError:
print("Failed to pre-remove " + file)
self.print_trace()
# Walk through the temp addon sub folder for replacements
# this implements the overwrite rules, which apply after
# the above pre-removal rules. This also performs the
# actual file copying/replacements.
for path, dirs, files in os.walk(merger):
# Verify structure works to prune updater sub folder overwriting.
dirs[:] = [d for d in dirs
if os.path.join(path, d) not in [self._updater_path]]
rel_path = os.path.relpath(path, merger)
dest_path = os.path.join(base, rel_path)
if not os.path.exists(dest_path):
os.makedirs(dest_path)
for file in files:
# Bring in additional logic around copying/replacing.
# Blender default: overwrite .py's, don't overwrite the rest.
dest_file = os.path.join(dest_path, file)
                src_file = os.path.join(path, file)
# Decide to replace if file already exists, and copy new over.
if os.path.isfile(dest_file):
# Otherwise, check each file for overwrite pattern match.
replaced = False
for pattern in self._overwrite_patterns:
if fnmatch.filter([file], pattern):
replaced = True
break
if replaced:
os.remove(dest_file)
                        os.rename(src_file, dest_file)
self.print_verbose(
"Overwrote file " + os.path.basename(dest_file))
else:
self.print_verbose(
"Pattern not matched to {}, not overwritten".format(
os.path.basename(dest_file)))
else:
# File did not previously exist, simply move it over.
                    os.rename(src_file, dest_file)
self.print_verbose(
"New file " + os.path.basename(dest_file))
# now remove the temp staging folder and downloaded zip
try:
shutil.rmtree(staging_path)
except:
error = ("Error: Failed to remove existing staging directory, "
"consider manually removing ") + staging_path
self.print_verbose(error)
self.print_trace()
def reload_addon(self):
# if post_update false, skip this function
# else, unload/reload addon & trigger popup
if not self._auto_reload_post_update:
print("Restart blender to reload addon and complete update")
return
self.print_verbose("Reloading addon...")
addon_utils.modules(refresh=True)
bpy.utils.refresh_script_paths()
# not allowed in restricted context, such as register module
# toggle to refresh
if "addon_disable" in dir(bpy.ops.wm): # 2.7
bpy.ops.wm.addon_disable(module=self._addon_package)
bpy.ops.wm.addon_refresh()
bpy.ops.wm.addon_enable(module=self._addon_package)
print("2.7 reload complete")
else: # 2.8
bpy.ops.preferences.addon_disable(module=self._addon_package)
bpy.ops.preferences.addon_refresh()
bpy.ops.preferences.addon_enable(module=self._addon_package)
print("2.8 reload complete")
# -------------------------------------------------------------------------
# Other non-api functions and setups
# -------------------------------------------------------------------------
def clear_state(self):
self._update_ready = None
self._update_link = None
self._update_version = None
self._source_zip = None
self._error = None
self._error_msg = None
    def url_retrieve(self, url_file, filepath):
        """Custom urlretrieve implementation"""
        chunk = 1024 * 8
        with open(filepath, "wb") as f:
            while True:
                data = url_file.read(chunk)
                if not data:
                    break
                f.write(data)
def version_tuple_from_text(self, text):
"""Convert text into a tuple of numbers (int).
Should go through string and remove all non-integers, and for any
given break split into a different section.
"""
if text is None:
return ()
segments = list()
tmp = ''
for char in str(text):
if not char.isdigit():
if len(tmp) > 0:
segments.append(int(tmp))
tmp = ''
else:
tmp += char
if len(tmp) > 0:
segments.append(int(tmp))
if len(segments) == 0:
self.print_verbose("No version strings found text: " + str(text))
if not self._include_branches:
return ()
else:
                return text  # branch name passes through as-is
return tuple(segments)
def check_for_update_async(self, callback=None):
"""Called for running check in a background thread"""
is_ready = (
self._json is not None
and "update_ready" in self._json
and self._json["version_text"] != dict()
and self._json["update_ready"])
if is_ready:
self._update_ready = True
self._update_link = self._json["version_text"]["link"]
self._update_version = str(self._json["version_text"]["version"])
# Cached update.
callback(True)
return
# do the check
if not self._check_interval_enabled:
return
elif self._async_checking:
self.print_verbose("Skipping async check, already started")
# already running the bg thread
elif self._update_ready is None:
print("{} updater: Running background check for update".format(
self.addon))
self.start_async_check_update(False, callback)
def check_for_update_now(self, callback=None):
self._error = None
self._error_msg = None
self.print_verbose(
"Check update pressed, first getting current status")
if self._async_checking:
self.print_verbose("Skipping async check, already started")
return # already running the bg thread
elif self._update_ready is None:
self.start_async_check_update(True, callback)
else:
self._update_ready = None
self.start_async_check_update(True, callback)
def check_for_update(self, now=False):
"""Check for update not in a syncrhonous manner.
This function is not async, will always return in sequential fashion
but should have a parent which calls it in another thread.
"""
self.print_verbose("Checking for update function")
# clear the errors if any
self._error = None
self._error_msg = None
# avoid running again in, just return past result if found
# but if force now check, then still do it
if self._update_ready is not None and not now:
return (self._update_ready,
self._update_version,
self._update_link)
if self._current_version is None:
raise ValueError("current_version not yet defined")
if self._repo is None:
raise ValueError("repo not yet defined")
if self._user is None:
raise ValueError("username not yet defined")
self.set_updater_json() # self._json
if not now and not self.past_interval_timestamp():
self.print_verbose(
"Aborting check for updated, check interval not reached")
return (False, None, None)
# check if using tags or releases
# note that if called the first time, this will pull tags from online
if self._fake_install:
self.print_verbose(
"fake_install = True, setting fake version as ready")
self._update_ready = True
self._update_version = "(999,999,999)"
self._update_link = "http://127.0.0.1"
return (self._update_ready,
self._update_version,
self._update_link)
# Primary internet call, sets self._tags and self._tag_latest.
self.get_tags()
self._json["last_check"] = str(datetime.now())
self.save_updater_json()
# Can be () or ('master') in addition to branches, and version tag.
new_version = self.version_tuple_from_text(self.tag_latest)
if len(self._tags) == 0:
self._update_ready = False
self._update_version = None
self._update_link = None
return (False, None, None)
if not self._include_branches:
link = self.select_link(self, self._tags[0])
else:
n = len(self._include_branch_list)
if len(self._tags) == n:
# effectively means no tags found on repo
# so provide the first one as default
link = self.select_link(self, self._tags[0])
else:
link = self.select_link(self, self._tags[n])
if new_version == ():
self._update_ready = False
self._update_version = None
self._update_link = None
return (False, None, None)
elif str(new_version).lower() in self._include_branch_list:
# Handle situation where master/whichever branch is included
# however, this code effectively is not triggered now
# as new_version will only be tag names, not branch names.
if not self._include_branch_auto_check:
# Don't offer update as ready, but set the link for the
# default branch for installing.
self._update_ready = False
self._update_version = new_version
self._update_link = link
self.save_updater_json()
return (True, new_version, link)
else:
# Bypass releases and look at timestamp of last update from a
# branch compared to now, see if commit values match or not.
raise ValueError("include_branch_autocheck: NOT YET DEVELOPED")
else:
# Situation where branches not included.
if new_version > self._current_version:
self._update_ready = True
self._update_version = new_version
self._update_link = link
self.save_updater_json()
return (True, new_version, link)
# If no update, set ready to False from None to show it was checked.
self._update_ready = False
self._update_version = None
self._update_link = None
return (False, None, None)
def set_tag(self, name):
"""Assign the tag name and url to update to"""
tg = None
for tag in self._tags:
if name == tag["name"]:
tg = tag
break
if tg:
new_version = self.version_tuple_from_text(self.tag_latest)
self._update_version = new_version
self._update_link = self.select_link(self, tg)
elif self._include_branches and name in self._include_branch_list:
# scenario if reverting to a specific branch name instead of tag
tg = name
link = self.form_branch_url(tg)
self._update_version = name # this will break things
self._update_link = link
if not tg:
raise ValueError("Version tag not found: " + name)
def run_update(self, force=False, revert_tag=None, clean=False, callback=None):
"""Runs an install, update, or reversion of an addon from online source
Arguments:
force: Install assigned link, even if self.update_ready is False
revert_tag: Version to install, if none uses detected update link
            clean: If True, clear the addon folder to a fresh install state
                before merging in the new files (see deep_merge_directory)
callback: used to run function on update completion
"""
self._json["update_ready"] = False
self._json["ignore"] = False # clear ignore flag
self._json["version_text"] = dict()
if revert_tag is not None:
self.set_tag(revert_tag)
self._update_ready = True
# clear the errors if any
self._error = None
self._error_msg = None
self.print_verbose("Running update")
if self._fake_install:
# Change to True, to trigger the reload/"update installed" handler.
self.print_verbose("fake_install=True")
self.print_verbose(
"Just reloading and running any handler triggers")
self._json["just_updated"] = True
self.save_updater_json()
if self._backup_current is True:
self.create_backup()
self.reload_addon()
self._update_ready = False
res = True # fake "success" zip download flag
elif not force:
if not self._update_ready:
self.print_verbose("Update stopped, new version not ready")
if callback:
callback(
self._addon_package,
"Update stopped, new version not ready")
return "Update stopped, new version not ready"
elif self._update_link is None:
# this shouldn't happen if update is ready
self.print_verbose("Update stopped, update link unavailable")
if callback:
callback(self._addon_package,
"Update stopped, update link unavailable")
return "Update stopped, update link unavailable"
if revert_tag is None:
self.print_verbose("Staging update")
else:
self.print_verbose("Staging install")
res = self.stage_repository(self._update_link)
if not res:
print("Error in staging repository: " + str(res))
if callback is not None:
callback(self._addon_package, self._error_msg)
return self._error_msg
res = self.unpack_staged_zip(clean)
if res < 0:
if callback:
callback(self._addon_package, self._error_msg)
return res
else:
if self._update_link is None:
self.print_verbose("Update stopped, could not get link")
return "Update stopped, could not get link"
self.print_verbose("Forcing update")
res = self.stage_repository(self._update_link)
if not res:
print("Error in staging repository: " + str(res))
if callback:
callback(self._addon_package, self._error_msg)
return self._error_msg
res = self.unpack_staged_zip(clean)
if res < 0:
return res
# would need to compare against other versions held in tags
# run the front-end's callback if provided
if callback:
callback(self._addon_package)
# return something meaningful, 0 means it worked
return 0
def past_interval_timestamp(self):
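        # Returns True when a check is due: interval checking disabled, no
        # prior check recorded, or the configured offset has elapsed.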
if not self._check_interval_enabled:
return True # ie this exact feature is disabled
if "last_check" not in self._json or self._json["last_check"] == "":
return True
now = datetime.now()
last_check = datetime.strptime(
self._json["last_check"], "%Y-%m-%d %H:%M:%S.%f")
offset = timedelta(
days=self._check_interval_days + 30 * self._check_interval_months,
hours=self._check_interval_hours,
minutes=self._check_interval_minutes)
delta = (now - offset) - last_check
if delta.total_seconds() > 0:
self.print_verbose("Time to check for updates!")
return True
self.print_verbose("Determined it's not yet time to check for updates")
return False
def get_json_path(self):
"""Returns the full path to the JSON state file used by this updater.
Will also rename old file paths to addon-specific path if found.
"""
json_path = os.path.join(
self._updater_path,
"{}_updater_status.json".format(self._addon_package))
old_json_path = os.path.join(self._updater_path, "updater_status.json")
# Rename old file if it exists.
try:
os.rename(old_json_path, json_path)
except FileNotFoundError:
pass
except Exception as err:
print("Other OS error occurred while trying to rename old JSON")
print(err)
self.print_trace()
return json_path
def set_updater_json(self):
"""Load or initialize JSON dictionary data for updater state"""
if self._updater_path is None:
raise ValueError("updater_path is not defined")
elif not os.path.isdir(self._updater_path):
os.makedirs(self._updater_path)
jpath = self.get_json_path()
if os.path.isfile(jpath):
with open(jpath) as data_file:
self._json = json.load(data_file)
self.print_verbose("Read in JSON settings from file")
else:
self._json = {
"last_check": "",
"backup_date": "",
"update_ready": False,
"ignore": False,
"just_restored": False,
"just_updated": False,
"version_text": dict()
}
self.save_updater_json()
def save_updater_json(self):
"""Trigger save of current json structure into file within addon"""
if self._update_ready:
if isinstance(self._update_version, tuple):
self._json["update_ready"] = True
self._json["version_text"]["link"] = self._update_link
self._json["version_text"]["version"] = self._update_version
else:
self._json["update_ready"] = False
self._json["version_text"] = dict()
else:
self._json["update_ready"] = False
self._json["version_text"] = dict()
jpath = self.get_json_path()
if not os.path.isdir(os.path.dirname(jpath)):
print("State error: Directory does not exist, cannot save json: ",
os.path.basename(jpath))
return
try:
with open(jpath, 'w') as outf:
data_out = json.dumps(self._json, indent=4)
outf.write(data_out)
except:
print("Failed to open/save data to json: ", jpath)
self.print_trace()
self.print_verbose("Wrote out updater JSON settings with content:")
self.print_verbose(str(self._json))
def json_reset_postupdate(self):
self._json["just_updated"] = False
self._json["update_ready"] = False
self._json["version_text"] = dict()
self.save_updater_json()
def json_reset_restore(self):
self._json["just_restored"] = False
self._json["update_ready"] = False
self._json["version_text"] = dict()
self.save_updater_json()
self._update_ready = None # Reset so you could check update again.
def ignore_update(self):
self._json["ignore"] = True
self.save_updater_json()
# -------------------------------------------------------------------------
# ASYNC related methods
# -------------------------------------------------------------------------
def start_async_check_update(self, now=False, callback=None):
"""Start a background thread which will check for updates"""
if self._async_checking:
return
self.print_verbose("Starting background checking thread")
check_thread = threading.Thread(target=self.async_check_update,
args=(now, callback,))
check_thread.daemon = True
self._check_thread = check_thread
check_thread.start()
def async_check_update(self, now, callback=None):
"""Perform update check, run as target of background thread"""
self._async_checking = True
self.print_verbose("Checking for update now in background")
try:
self.check_for_update(now=now)
except Exception as exception:
print("Checking for update error:")
print(exception)
self.print_trace()
if not self._error:
self._update_ready = False
self._update_version = None
self._update_link = None
self._error = "Error occurred"
self._error_msg = "Encountered an error while checking for updates"
self._async_checking = False
self._check_thread = None
        if callback:
            self.print_verbose("Finished check update, doing callback")
            callback(self._update_ready)
        else:
            self.print_verbose(
                "BG thread: Finished check update, no callback")
def stop_async_check_update(self):
"""Method to give impression of stopping check for update.
Currently does nothing but allows user to retry/stop blocking UI from
hitting a refresh button. This does not actually stop the thread, as it
will complete after the connection timeout regardless. If the thread
does complete with a successful response, this will be still displayed
on next UI refresh (ie no update, or update available).
"""
if self._check_thread is not None:
self.print_verbose("Thread will end in normal course.")
# however, "There is no direct kill method on a thread object."
# better to let it run its course
# self._check_thread.stop()
self._async_checking = False
self._error = None
self._error_msg = None
# -----------------------------------------------------------------------------
# Updater Engines
# -----------------------------------------------------------------------------
class BitbucketEngine:
"""Integration to Bitbucket API for git-formatted repositories"""
def __init__(self):
self.api_url = 'https://api.bitbucket.org'
self.token = None
self.name = "bitbucket"
def form_repo_url(self, updater):
return "{}/2.0/repositories/{}/{}".format(
self.api_url, updater.user, updater.repo)
def form_tags_url(self, updater):
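        # "?sort=-name" asks the Bitbucket API to return tags in descending
        # name order, so newer version tags come first.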
return self.form_repo_url(updater) + "/refs/tags?sort=-name"
def form_branch_url(self, branch, updater):
return self.get_zip_url(branch, updater)
def get_zip_url(self, name, updater):
return "https://bitbucket.org/{user}/{repo}/get/{name}.zip".format(
user=updater.user,
repo=updater.repo,
name=name)
def parse_tags(self, response, updater):
if response is None:
return list()
return [
{
"name": tag["name"],
"zipball_url": self.get_zip_url(tag["name"], updater)
} for tag in response["values"]]
class GithubEngine:
"""Integration to Github API"""
def __init__(self):
self.api_url = 'https://api.github.com'
self.token = None
self.name = "github"
def form_repo_url(self, updater):
return "{}/repos/{}/{}".format(
self.api_url, updater.user, updater.repo)
def form_tags_url(self, updater):
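        # e.g. https://api.github.com/repos/<user>/<repo>/releases (or /tags).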
if updater.use_releases:
return "{}/releases".format(self.form_repo_url(updater))
else:
return "{}/tags".format(self.form_repo_url(updater))
def form_branch_list_url(self, updater):
return "{}/branches".format(self.form_repo_url(updater))
def form_branch_url(self, branch, updater):
return "{}/zipball/{}".format(self.form_repo_url(updater), branch)
def parse_tags(self, response, updater):
if response is None:
return list()
return response
class GitlabEngine:
"""Integration to GitLab API"""
def __init__(self):
self.api_url = 'https://gitlab.com'
self.token = None
self.name = "gitlab"
def form_repo_url(self, updater):
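        # Note: the GitLab v4 API expects the numeric project ID (or the
        # URL-encoded namespace/path) in updater.repo, not a bare repo name.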
return "{}/api/v4/projects/{}".format(self.api_url, updater.repo)
def form_tags_url(self, updater):
return "{}/repository/tags".format(self.form_repo_url(updater))
def form_branch_list_url(self, updater):
# does not validate branch name.
return "{}/repository/branches".format(
self.form_repo_url(updater))
def form_branch_url(self, branch, updater):
        # Note: a branch name could clash with a tag name; if it does, this
        # URL downloads the tag zip instead of the branch zip. Fetching the
        # branch directly would need a different, commit-based path.
return "{}/repository/archive.zip?sha={}".format(
self.form_repo_url(updater), branch)
def get_zip_url(self, sha, updater):
return "{base}/repository/archive.zip?sha={sha}".format(
base=self.form_repo_url(updater),
sha=sha)
# def get_commit_zip(self, id, updater):
# return self.form_repo_url(updater)+"/repository/archive.zip?sha:"+id
def parse_tags(self, response, updater):
if response is None:
return list()
return [
{
"name": tag["name"],
"zipball_url": self.get_zip_url(tag["commit"]["id"], updater)
} for tag in response]
# -----------------------------------------------------------------------------
# The module-shared class instance,
# should be what's imported to other files
# -----------------------------------------------------------------------------
Updater = SingletonUpdater()
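
# A minimal configuration sketch with hypothetical values. An addon would
# typically set these in its updater operations module after importing the
# shared instance above (names like "example-user" are placeholders):
#
#     from .addon_updater import Updater as updater
#     updater.engine = "github"            # or "gitlab" / "bitbucket"
#     updater.user = "example-user"        # placeholder account name
#     updater.repo = "example-addon"       # placeholder repository name
#     updater.current_version = (1, 0, 0)  # usually taken from bl_info
#     updater.set_check_interval(enabled=True, days=7)
#     updater.check_for_update_async()     # non-blocking background check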