blob_id (string, lengths 40–40) | directory_id (string, lengths 40–40) | path (string, lengths 3–616) | content_id (string, lengths 40–40) | detected_licenses (list, lengths 0–112) | license_type (string, 2 classes) | repo_name (string, lengths 5–115) | snapshot_id (string, lengths 40–40) | revision_id (string, lengths 40–40) | branch_name (string, 777 classes) | visit_date (timestamp[us], 2015-08-06 10:31:46 – 2023-09-06 10:44:38) | revision_date (timestamp[us], 1970-01-01 02:38:32 – 2037-05-03 13:00:00) | committer_date (timestamp[us], 1970-01-01 02:38:32 – 2023-09-06 01:08:06) | github_id (int64, 4.92k–681M, nullable) | star_events_count (int64, 0–209k) | fork_events_count (int64, 0–110k) | gha_license_id (string, 22 classes) | gha_event_created_at (timestamp[us], 2012-06-04 01:52:49 – 2023-09-14 21:59:50, nullable) | gha_created_at (timestamp[us], 2008-05-22 07:58:19 – 2023-08-21 12:35:19, nullable) | gha_language (string, 149 classes) | src_encoding (string, 26 classes) | language (string, 1 value) | is_vendor (bool, 2 classes) | is_generated (bool, 2 classes) | length_bytes (int64, 3–10.2M) | extension (string, 188 classes) | content (string, lengths 3–10.2M) | authors (list, lengths 1–1) | author_id (string, lengths 1–132) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2caae5f00b788e0f8bb0d2dcb65a49a0c99739a9 | 1eaf0d12c9739c1c8be295800cc0cb0a0d9db8e2 | /eval/simulation.py | 171288c84661f4c35f46d532d71f1986050fce86 | [] | no_license | juhuntenburg/nonlinear_coreg | 5b3b95ba14a038c950e409bb9c0199828eb57422 | c4faa41c5c1401eaf6955557750f3d9834dfa271 | refs/heads/master | 2021-01-10T09:25:09.714712 | 2016-01-28T07:34:46 | 2016-01-28T07:34:46 | 50,565,020 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,002 | py | # see readme file for how to get the simulated data
#import libraries
from nipype.pipeline.engine import Node, Workflow
from nipype.interfaces import Function
from nipype.utils.filemanip import filename_to_list
import nipype.interfaces.utility as util
import nipype.interfaces.fsl as fsl
import nipype.interfaces.ants as ants
import nipype.interfaces.c3 as c3
import nipype.interfaces.freesurfer as fs
from epi_t1_nonlinear import create_epi_t1_nonlinear_pipeline
import os
def create_simulation_workflow(name):
    pe_voxel_size = 2.3  # has to be changed in the shiftmap scaling node directly (the '-mul 2.3' op_string below)!
unwarp_direction='y-'
# workflow
simulation = Workflow(name=name)
# inputnode
inputnode=Node(util.IdentityInterface(fields=['dwell',
'anat_head',
'distorted_epi',
'fmap_mask',
'fmap_unmasked',
'shiftmap',
'freesurfer_dir',
'freesurfer_id',
'norm_lin',
'norm_invwarp',
'nonreg_mask'
]),
name='inputnode')
# outputnode
outputnode=Node(util.IdentityInterface(fields=['lin_coreg',
'nonlin_coreg',
'fmap_coreg',
'nonlin_field',
'nonlin_field_masked',
'fmap_field_masked',
]),
name='outputnode')
# epi to t1 nonlinear core workflow
nonreg= create_epi_t1_nonlinear_pipeline('nonreg')
simulation.connect([(inputnode, nonreg, [('distorted_epi', 'inputnode.realigned_epi'),
('freesurfer_dir','inputnode.fs_subjects_dir'),
('freesurfer_id', 'inputnode.fs_subject_id')])])
simulation.connect([(inputnode, nonreg, [('norm_lin', 'inputnode.norm_lin'),
('norm_invwarp', 'inputnode.norm_invwarp'),
('nonreg_mask', 'inputnode.fmap_mask')])])
#make list from transforms
def makelist(string1, string2):
transformlist=[string1, string2]
return transformlist
transformlist = Node(interface=Function(input_names=['string1', 'string2'],output_names=['transformlist'],
function=makelist),name='transformlist')
simulation.connect([(nonreg, transformlist, [('outputnode.nonlin_epi2anat', 'string2'),
('outputnode.lin_anat2epi', 'string1')])])
# apply linear part of warp
applylin = Node(ants.ApplyTransforms(dimension=3,
output_image='lin_coreg.nii.gz',
invert_transform_flags=[True]
),
'applylin')
simulation.connect([(inputnode, applylin, [('distorted_epi', 'input_image'),
('anat_head', 'reference_image')]),
(nonreg, applylin, [('outputnode.lin_anat2epi', 'transforms')]),
(applylin, outputnode, [('output_image', 'lin_coreg')])])
# apply nonlinear warp
applynonlin = Node(ants.ApplyTransforms(dimension=3,
output_image='nonlin_coreg.nii.gz',
invert_transform_flags=[True,False]
),
'applynonlin')
simulation.connect([(inputnode, applynonlin, [('distorted_epi', 'input_image'),
('anat_head', 'reference_image')]),
(transformlist, applynonlin, [('transformlist', 'transforms')]),
(applynonlin, outputnode, [('output_image', 'nonlin_coreg')])])
# reduce ants field
def reduce_ants_field(in_file,out_file):
import nibabel as nb
import os
full_file = nb.load(in_file)
data = full_file.get_data()
reduced_file=nb.Nifti1Image(data[:,:,:,0,1], full_file.get_affine())
nb.save(reduced_file, out_file)
return os.path.abspath(out_file)
reduce_ants = Node(util.Function(input_names=['in_file', 'out_file'],
output_names=['out_file'],
function=reduce_ants_field),
name='reduce_field')
reduce_ants.inputs.out_file='nonlin_field.nii.gz'
simulation.connect([(nonreg, reduce_ants, [('outputnode.nonlin_epi2anat', 'in_file')]),
(reduce_ants, outputnode, [('out_file', 'nonlin_field')])])
# mask ants field
mask= Node(fsl.ImageMaths(op_string='-mul',
out_file='nonlin_field_masked.nii.gz'),
name='mask_fields')
simulation.connect([(inputnode, mask, [('fmap_mask', 'in_file2')]),
(reduce_ants, mask, [('out_file', 'in_file')]),
(mask, outputnode, [('out_file', 'nonlin_field_masked')])])
# mask and scale shiftmap
shiftmap = Node(fsl.MultiImageMaths(op_string='-mul %s -mul 2.3',
out_file='original_field_masked.nii.gz'),
name='shiftmap')
simulation.connect([(inputnode, shiftmap, [('shiftmap', 'in_file'),
(('fmap_mask',filename_to_list), 'operand_files')]),
(shiftmap, outputnode, [('out_file', 'fmap_field_masked')])
])
# re-unwarp using the fmap
fmap = Node(fsl.FUGUE(unwarp_direction=unwarp_direction,
smooth3d=2.0,
save_shift=True,
unwarped_file='fmap_unwarped.nii.gz',
shift_out_file='fmap_field.nii.gz'),
name='fmap')
# function to get dwelltime as float from dwell string variable
def dwell2dwelltime(dwell):
dwellstring='0.'+dwell
dwelltime=float(dwellstring)
return dwelltime
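    # Worked example of the conversion above: a dwell string of "00056"
    # becomes "0.00056" and is passed to FUGUE as dwell_time in seconds.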
simulation.connect([(inputnode, fmap, [(('dwell', dwell2dwelltime), 'dwell_time'),
('fmap_unmasked','fmap_in_file' ),
('distorted_epi', 'in_file'),
('fmap_mask', 'mask_file')])])
# register fieldmap corrected to anatomy with bbregister
bbregister = Node(interface=fs.BBRegister(init='fsl',
contrast_type='t2',
out_fsl_file = True),
name='bbregister')
simulation.connect([(fmap, bbregister, [('unwarped_file', 'source_file')]),
(inputnode, bbregister, [('freesurfer_dir','subjects_dir'),
('freesurfer_id', 'subject_id')])])
# convert shiftmap and coreg matrix to one warpfield and apply
convertwarp = Node(fsl.utils.ConvertWarp(shiftdir=unwarp_direction,
relout=True,
out_field='fmap_fullwarp.nii.gz'),
name='convertwarp')
applywarp_fmap = Node(fsl.ApplyWarp(interp='trilinear',
relwarp=True,
out_file='fmap_coreg.nii.gz'),
name='applywarp_fmap')
simulation.connect([(inputnode, convertwarp, [('anat_head', 'reference')]),
(fmap, convertwarp, [('shift_out_file', 'shiftmap')]),
(bbregister, convertwarp, [('out_fsl_file', 'postmat')]),
(inputnode, applywarp_fmap, [('distorted_epi', 'in_file'),
('anat_head', 'ref_file')]),
(convertwarp, applywarp_fmap, [('out_field', 'field_file')]),
(applywarp_fmap, outputnode, [('out_file', 'fmap_coreg')])])
return simulation
| [
"[email protected]"
] | |
7454cf6efe86e121133d4f676780dde446ac0859 | 06fe8a3bb7971066a204be73731a9af3e67edbb9 | /soccer/gameplay/tests/test_constants.py | 1493f44ca313729a8e14841601d4ca525ae01788 | [
"Apache-2.0"
] | permissive | sgadgil6/robocup-software | 130099715dafd80155bf07966f186f036280455a | 2b647345f0cdcc50021558b5cccf109239d3e954 | refs/heads/master | 2020-12-26T04:38:28.196256 | 2015-09-29T23:33:58 | 2015-09-29T23:33:58 | 43,398,854 | 1 | 0 | null | 2015-09-29T22:49:27 | 2015-09-29T22:49:27 | null | UTF-8 | Python | false | false | 477 | py | import unittest
import constants
import robocup
class TestConstants(unittest.TestCase):
def test_our_goal_zone(self):
# right in the center of the goal zone
in_zone = robocup.Point(0, constants.Field.PenaltyDist / 2.0)
out_zone = robocup.Point(0, constants.Field.Length / 2.0)
self.assertTrue(constants.Field.OurGoalZoneShape.contains_point(in_zone))
# self.assertFalse(constants.Field.OurGoalZoneShape.contains_point(out_zone))
| [
"[email protected]"
] | |
6599fda660a020fbc1972dae24d4e1cb898e6c27 | 466660115eafd99b72f81339d86c5bcbf4c7efb0 | /codes/15/spectrum_50HzRepeat.py | b04979b971b4acfdd85218fcd1fe02f82e9fb818 | [] | no_license | CoryVegan/scipybook2 | c2bb68c169c632ab389600034beb33ac921b0ba1 | a8fd295c2f2d7ee18f351e5622ca7eeb4649ee50 | refs/heads/master | 2020-03-23T14:50:28.056482 | 2017-08-25T06:00:00 | 2018-06-02T14:18:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 242 | py | # -*- coding: utf-8 -*-
import numpy as np
import pylab as pl
t = np.arange(0, 1.0, 1.0/8000)
x = np.sin(2*np.pi*50*t)[:512]
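# At fs = 8000 Hz a 50 Hz sine has 8000/50 = 160 samples per period, so this
# 512-sample block covers 3.2 periods; repeating it below therefore creates
# discontinuities at the joins (the spectral-leakage effect this demo shows).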
pl.figure(figsize=(8,3))
pl.plot(np.hstack([x,x,x]))
pl.xlabel("取样点")
pl.subplots_adjust(bottom=0.15)
pl.show() | [
"[email protected]"
] | |
9361603f524a0d1e30f0a17a54c5c6ff4b05b2a6 | a5ad207961fddfb0bab8c7471b6f91b69865e0fc | /app/agis/models/enormal/menu.py | 09ca2c47c0709f53c80e61db7cfffc14356895c6 | [
"LicenseRef-scancode-public-domain"
] | permissive | yotech/agis | b2465fc340e366fbe0267c4000bb0ae728386399 | 4abbecd175337d4942ac133847ce8fc870670571 | refs/heads/master | 2020-12-15T09:10:58.076323 | 2016-05-10T14:36:54 | 2016-05-10T14:36:54 | 32,155,630 | 0 | 2 | null | 2016-05-10T14:36:54 | 2015-03-13T12:52:01 | Python | UTF-8 | Python | false | false | 687 | py | # -*- coding: utf-8 -*-
from gluon import *
from agiscore.gui.mic import Accion
if not request.ajax and request.controller != 'appadmin':
    # import only if needed
from agiscore.gui.escuela import escuela_menu
from agiscore.gui.unidad_organica import unidad_menu
from agiscore.gui.enormal import enormal_menu
    # build the menu in order
evento_id = request.args(0)
ev = db.evento(evento_id)
if ev is not None:
ano = db.ano_academico(ev.ano_academico_id)
unidad = db.unidad_organica(ano.unidad_organica_id)
response.menu += escuela_menu()
response.menu += unidad_menu(unidad.id)
response.menu += enormal_menu(ev.id)
| [
"[email protected]"
] | |
63f4970d203971e6b87ce1413f3580da08d7436c | 5f9bdfc588331ef610ba01d7ef90c4f8a96fdc63 | /plot.py | 3d8ca3134805bf8f081d2405810a8936d42eb86c | [] | no_license | imrehg/fuel | 1c3c73c79f0aaf500a6024bd4fc32980987201c7 | 41c2afa3506172eab31ac1618e6ea3706277fef1 | refs/heads/master | 2016-09-10T09:58:26.424418 | 2011-05-05T06:10:21 | 2011-05-05T06:10:21 | 1,702,343 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,270 | py | import numpy as np
import pylab as pl
from matplotlib.transforms import offset_copy
filename = "data.csv"
types = {'names' : ['code', 'name', 'gdp', 'ppp', 'oilout', 'oilin', 'price'],
'formats' : ['S30', 'S30', 'f4', 'f4', 'f4', 'f4', 'f4']
}
data = np.loadtxt(filename, delimiter=",", dtype=types)
fig = pl.figure(figsize=(10,10))
ax = pl.subplot(1,1,1)
transOffset = offset_copy(ax.transData, fig=fig,
x = 0.05, y=0.10, units='inches')
# for i in xrange(len(data['code'])):
# if data['price'][i] <= 0 or data['gdp'][i] <= 0:
# continue
# if data['oilout'][i] > data['oilin'][i]:
# fuel = False
# else:
# fuel = True
# symbol = "kx" if fuel else 'ko'
# pl.plot(np.log(data['gdp'][i]), data['price'][i], symbol)
# # pl.text(data[i,0], data[i,4], '%.1f' % (fuel), transform=transOffset)
# pl.text(np.log(data['gdp'][i]), data['price'][i], data['name'][i], transform=transOffset)
total = []
for i in xrange(len(data['code'])):
if data['price'][i] > 0:
total += [(data['code'][i], data['price'][i])]
total2 = sorted(total, key= lambda x: x[1])
for j, v in enumerate(total2):
    pl.plot(j, v[1], 'ko')  # explicit marker needed: a single point with the default line style draws nothing
pl.text(j, v[1], v[0], transform=transOffset)
pl.show()
| [
"[email protected]"
] | |
ef3f4ca1f02f57a1d00a845f054e57b10411e8c5 | 25b914aecd6b0cb49294fdc4f2efcfdf5803cc36 | /homeassistant/components/trafikverket_weatherstation/config_flow.py | 103af1c7eb4157ad3f6eb2f593bd8fa981f47a36 | [
"Apache-2.0"
] | permissive | jason0x43/home-assistant | 9114decaa8f7c2f1582f84e79dc06736b402b008 | 8bf6aba1cf44ee841de063755c935ea78040f399 | refs/heads/dev | 2023-03-04T01:14:10.257593 | 2022-01-01T12:11:56 | 2022-01-01T12:11:56 | 230,622,861 | 1 | 1 | Apache-2.0 | 2023-02-22T06:15:07 | 2019-12-28T14:45:43 | Python | UTF-8 | Python | false | false | 2,802 | py | """Adds config flow for Trafikverket Weather integration."""
from __future__ import annotations
from pytrafikverket.trafikverket_weather import TrafikverketWeather
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_API_KEY
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from .const import CONF_STATION, DOMAIN
DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_STATION): cv.string,
}
)
class TVWeatherConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Trafikverket Weatherstation integration."""
VERSION = 1
entry: config_entries.ConfigEntry
async def validate_input(self, sensor_api: str, station: str) -> str:
"""Validate input from user input."""
web_session = async_get_clientsession(self.hass)
weather_api = TrafikverketWeather(web_session, sensor_api)
try:
await weather_api.async_get_weather(station)
except ValueError as err:
return str(err)
return "connected"
async def async_step_import(self, config: dict):
"""Import a configuration from config.yaml."""
self.context.update(
{"title_placeholders": {CONF_STATION: f"YAML import {DOMAIN}"}}
)
self._async_abort_entries_match({CONF_STATION: config[CONF_STATION]})
return await self.async_step_user(user_input=config)
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
name = user_input[CONF_STATION]
api_key = user_input[CONF_API_KEY]
station = user_input[CONF_STATION]
validate = await self.validate_input(api_key, station)
if validate == "connected":
return self.async_create_entry(
title=name,
data={
CONF_API_KEY: api_key,
CONF_STATION: station,
},
)
if validate == "Source: Security, message: Invalid authentication":
errors["base"] = "invalid_auth"
elif validate == "Could not find a weather station with the specified name":
errors["base"] = "invalid_station"
elif validate == "Found multiple weather stations with the specified name":
errors["base"] = "more_stations"
else:
errors["base"] = "cannot_connect"
return self.async_show_form(
step_id="user",
data_schema=DATA_SCHEMA,
errors=errors,
)
| [
"[email protected]"
] | |
b464fca1bb2e46c78e2e37b913633e930aa13bb9 | 1b3fc35ada474601a76de3c2908524336d6ca420 | /day10/my/Meizitu/Meizitu/start.py | 9b9063bac0c96d2345c8dab199a5ff64631d28ad | [] | no_license | dqsdatalabs/Internet-worm | db3677e65d11542887adcde7719b7652757a3e32 | 62f38f58b4fa7643c482077f5ae18fff6fd81915 | refs/heads/master | 2022-01-16T14:29:52.184528 | 2018-12-25T08:46:08 | 2018-12-25T08:46:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | from scrapy import cmdline
# cmdline.execute('scrapy crawl meinvtu'.split())
cmdline.execute('scrapy crawl meinvtu2'.split())
# cmdline.execute('scrapy crawl meinvtu3'.split()) | [
"[email protected]"
] | |
11f96be98b5c5a354373930a6d5cc0d583e6f56f | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/4/jg4.py | 512215a3807d0213823f517008cc4bdcbfc83ab2 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'jG4':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
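# Example (hypothetical) input file line for this mini-interpreter:
#   jG4 " hello world "
# The quotes must be standalone tokens; the line above prints: hello world
# Note: this file uses Python 2 syntax (print statement).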
"[email protected]"
] | |
895f55c73c05abed86e355cd53adbf875d22d6f5 | c46ef0ccf030cee783a75d549e3c9bc0810579ff | /tutorial-contents/405_DQN_Reinforcement_learning.py | 20fa80cdeeb85f8daeac040cef09380718a56006 | [
"MIT"
] | permissive | cocodee/PyTorch-Tutorial | 37827b3daa5de1dee6ca174161d948a1933d4453 | a7b14b80913485735a3ee87da6998a511a1f1950 | refs/heads/master | 2021-07-19T16:01:13.106848 | 2017-10-26T22:06:10 | 2017-10-26T22:06:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,348 | py | """
View more, visit my tutorial page: https://morvanzhou.github.io/tutorials/
My Youtube Channel: https://www.youtube.com/user/MorvanZhou
More about Reinforcement learning: https://morvanzhou.github.io/tutorials/machine-learning/reinforcement-learning/
Dependencies:
torch: 0.2
gym: 0.8.1
numpy
"""
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as F
import numpy as np
import gym
# Hyper Parameters
BATCH_SIZE = 32
LR = 0.01 # learning rate
EPSILON = 0.9 # greedy policy
GAMMA = 0.9 # reward discount
TARGET_REPLACE_ITER = 100 # target update frequency
MEMORY_CAPACITY = 2000
env = gym.make('CartPole-v0')
env = env.unwrapped
N_ACTIONS = env.action_space.n
N_STATES = env.observation_space.shape[0]
class Net(nn.Module):
def __init__(self, ):
super(Net, self).__init__()
self.fc1 = nn.Linear(N_STATES, 10)
self.fc1.weight.data.normal_(0, 0.1) # initialization
self.out = nn.Linear(10, N_ACTIONS)
self.out.weight.data.normal_(0, 0.1) # initialization
def forward(self, x):
x = self.fc1(x)
x = F.relu(x)
actions_value = self.out(x)
return actions_value
class DQN(object):
def __init__(self):
self.eval_net, self.target_net = Net(), Net()
self.learn_step_counter = 0 # for target updating
self.memory_counter = 0 # for storing memory
self.memory = np.zeros((MEMORY_CAPACITY, N_STATES * 2 + 2)) # initialize memory
self.optimizer = torch.optim.Adam(self.eval_net.parameters(), lr=LR)
self.loss_func = nn.MSELoss()
def choose_action(self, x):
x = Variable(torch.unsqueeze(torch.FloatTensor(x), 0))
# input only one sample
if np.random.uniform() < EPSILON: # greedy
actions_value = self.eval_net.forward(x)
action = torch.max(actions_value, 1)[1].data.numpy()[0, 0] # return the argmax
else: # random
action = np.random.randint(0, N_ACTIONS)
return action
def store_transition(self, s, a, r, s_):
transition = np.hstack((s, [a, r], s_))
# replace the old memory with new memory
index = self.memory_counter % MEMORY_CAPACITY
self.memory[index, :] = transition
self.memory_counter += 1
def learn(self):
# target parameter update
if self.learn_step_counter % TARGET_REPLACE_ITER == 0:
self.target_net.load_state_dict(self.eval_net.state_dict())
self.learn_step_counter += 1
# sample batch transitions
sample_index = np.random.choice(MEMORY_CAPACITY, BATCH_SIZE)
b_memory = self.memory[sample_index, :]
b_s = Variable(torch.FloatTensor(b_memory[:, :N_STATES]))
b_a = Variable(torch.LongTensor(b_memory[:, N_STATES:N_STATES+1].astype(int)))
b_r = Variable(torch.FloatTensor(b_memory[:, N_STATES+1:N_STATES+2]))
b_s_ = Variable(torch.FloatTensor(b_memory[:, -N_STATES:]))
# q_eval w.r.t the action in experience
q_eval = self.eval_net(b_s).gather(1, b_a) # shape (batch, 1)
q_next = self.target_net(b_s_).detach() # detach from graph, don't backpropagate
q_target = b_r + GAMMA * q_next.max(1)[0].view(BATCH_SIZE, 1) # shape (batch, 1)
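        # Bellman target: r + gamma * max_a' Q_target(s', a'); bootstrapping
        # from the periodically-updated target network rather than the eval
        # net stabilises learning.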
loss = self.loss_func(q_eval, q_target)
self.optimizer.zero_grad()
loss.backward()
self.optimizer.step()
dqn = DQN()
print('\nCollecting experience...')
for i_episode in range(400):
s = env.reset()
ep_r = 0
while True:
env.render()
a = dqn.choose_action(s)
# take action
s_, r, done, info = env.step(a)
# modify the reward
x, x_dot, theta, theta_dot = s_
r1 = (env.x_threshold - abs(x)) / env.x_threshold - 0.8
r2 = (env.theta_threshold_radians - abs(theta)) / env.theta_threshold_radians - 0.5
r = r1 + r2
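        # Shaped reward: r1 is highest when the cart is near the track centre
        # and r2 when the pole is upright, giving a denser learning signal
        # than CartPole's default +1 per step.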
dqn.store_transition(s, a, r, s_)
ep_r += r
if dqn.memory_counter > MEMORY_CAPACITY:
dqn.learn()
if done:
print('Ep: ', i_episode,
'| Ep_r: ', round(ep_r, 2))
if done:
break
s = s_ | [
"[email protected]"
] | |
3d2649b1dc0da59e87d9650fdc443d6ac3042872 | 521efcd158f4c69a686ed1c63dd8e4b0b68cc011 | /tests/test_utils/timetables.py | 838ecb5323451c89d32a0c27b6e248c5acbe51f3 | [
"BSD-3-Clause",
"MIT",
"Apache-2.0"
] | permissive | coutureai/RaWorkflowOrchestrator | 33fd8e253bfea2f9a82bb122ca79e8cf9dffb003 | cd3ea2579dff7bbab0d6235fcdeba2bb9edfc01f | refs/heads/main | 2022-10-01T06:24:18.560652 | 2021-12-29T04:52:56 | 2021-12-29T04:52:56 | 184,547,783 | 5 | 12 | Apache-2.0 | 2022-11-04T00:02:55 | 2019-05-02T08:38:38 | Python | UTF-8 | Python | false | false | 1,746 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import settings
from airflow.timetables.base import Timetable
from airflow.timetables.interval import CronDataIntervalTimetable, DeltaDataIntervalTimetable
def cron_timetable(expr: str) -> CronDataIntervalTimetable:
return CronDataIntervalTimetable(expr, settings.TIMEZONE)
def delta_timetable(delta) -> DeltaDataIntervalTimetable:
return DeltaDataIntervalTimetable(delta)
class CustomSerializationTimetable(Timetable):
def __init__(self, value: str):
self.value = value
@classmethod
def deserialize(cls, data):
return cls(data["value"])
def __eq__(self, other) -> bool:
"""Only for testing purposes."""
if not isinstance(other, CustomSerializationTimetable):
return False
return self.value == other.value
def serialize(self):
return {"value": self.value}
@property
def summary(self):
return f"{type(self).__name__}({self.value!r})"
| [
"[email protected]"
] | |
615d6cfeafcfe95a0b3e8aefee6f571baf69f697 | dc9d2f036ef72f254db5d0ba9e4cc8dcd95aa4aa | /WebCrawler by Bucky.py | f19ac7e98901e43ff512090c967cf9b3627184c6 | [] | no_license | YhHoo/Python-Tutorials | d43171a21fb8d51271c66bea6f2566ff3effc832 | 9774dd5b3f0b9d0126d4a2bcbac347348e914b71 | refs/heads/master | 2021-09-03T21:23:22.099224 | 2018-01-12T03:33:23 | 2018-01-12T03:33:23 | 110,725,041 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 833 | py | '''
THIS IS BUCKY WEB CRAWLER EXAMPLES
'''
'''
HERE PART 1: Fetch all the href link in <a> tag of the page
'''
import requests
from bs4 import BeautifulSoup
def trade_spider():
url = 'http://www.ckmusic.com.my' # parent URL, rmb don't end with '/'
source_code = requests.get(url) # connect to the web page in url, store all source into source_code
plain_text = source_code.text # convert the source code to just Text
soup = BeautifulSoup(plain_text, "html.parser") # convert it to beautifulSoup Object, in order to use web crawler
for link in soup.findAll('a', {'class': 'prodItemPic'}): # look for tag <a>, where class='prodItemPic'
href = url + link.get('href') # from Tag, only take the href item
print(href)
trade_spider()
input("Code by YH, Press Enter to Terminate =)")
| [
"[email protected]"
] | |
f2276f7e65fd1cded1b291ed6e217a163050705d | baafc9731096b0d51885c4e793dfe14aa779a6a4 | /experiment_domainadapt_meanteacher.py | 4619ba038a8fd6e2f607fafcfc578e087db0bb01 | [
"MIT"
] | permissive | AliaksandrSiarohin/self-ensemble-visual-domain-adapt | ee095a059e588cd84eda1eff733122c736ff6670 | 0d8d036bb8fe65ae2dc8e713d383742c93521cc9 | refs/heads/master | 2020-04-06T15:42:06.353628 | 2018-11-14T18:00:01 | 2018-11-14T18:00:01 | 157,589,370 | 0 | 1 | MIT | 2018-11-14T17:58:33 | 2018-11-14T17:58:33 | null | UTF-8 | Python | false | false | 24,376 | py | """
Incorporates mean teacher, from:
Mean teachers are better role models: Weight-averaged consistency targets improve semi-supervised deep learning results
Antti Tarvainen, Harri Valpola
https://arxiv.org/abs/1703.01780
"""
import click
@click.command()
@click.option('--exp', type=click.Choice(['svhn_mnist', 'mnist_svhn',
'svhn_mnist_rgb', 'mnist_svhn_rgb',
'cifar_stl', 'stl_cifar',
'mnist_usps', 'usps_mnist',
'syndigits_svhn',
'synsigns_gtsrb',
]), default='mnist_svhn',
help='experiment to run')
@click.option('--arch', type=click.Choice([
'',
'mnist-bn-32-64-256',
'grey-32-64-128-gp', 'grey-32-64-128-gp-wn', 'grey-32-64-128-gp-nonorm',
'rgb-128-256-down-gp', 'resnet18-32',
'rgb40-48-96-192-384-gp', 'rgb40-96-192-384-gp',
]), default='', help='network architecture')
@click.option('--loss', type=click.Choice(['var', 'bce']), default='var',
help='augmentation variance loss function')
@click.option('--double_softmax', is_flag=True, default=False, help='apply softmax twice to compute supervised loss')
@click.option('--confidence_thresh', type=float, default=0.96837722, help='augmentation var loss confidence threshold')
@click.option('--rampup', type=int, default=0, help='ramp-up length')
@click.option('--teacher_alpha', type=float, default=0.99, help='Teacher EMA alpha (decay)')
@click.option('--unsup_weight', type=float, default=3.0, help='unsupervised loss weight')
@click.option('--cls_balance', type=float, default=0.005,
help='Weight of class balancing component of unsupervised loss')
@click.option('--cls_balance_loss', type=click.Choice(['bce', 'log', 'bug']), default='bce',
help='Class balancing loss function')
@click.option('--combine_batches', is_flag=True, default=False,
help='Build batches from both source and target samples')
@click.option('--learning_rate', type=float, default=0.001, help='learning rate (Adam)')
@click.option('--standardise_samples', default=False, is_flag=True, help='standardise samples (0 mean unit var)')
@click.option('--src_affine_std', type=float, default=0.1, help='src aug xform: random affine transform std-dev')
@click.option('--src_xlat_range', type=float, default=2.0, help='src aug xform: translation range')
@click.option('--src_hflip', default=False, is_flag=True, help='src aug xform: enable random horizontal flips')
@click.option('--src_intens_flip', is_flag=True, default=False,
help='src aug colour; enable intensity flip')
@click.option('--src_intens_scale_range', type=str, default='',
help='src aug colour; intensity scale range `low:high` (-1.5:1.5 for mnist-svhn)')
@click.option('--src_intens_offset_range', type=str, default='',
help='src aug colour; intensity offset range `low:high` (-0.5:0.5 for mnist-svhn)')
@click.option('--src_gaussian_noise_std', type=float, default=0.1,
help='std aug: standard deviation of Gaussian noise to add to samples')
@click.option('--tgt_affine_std', type=float, default=0.1, help='tgt aug xform: random affine transform std-dev')
@click.option('--tgt_xlat_range', type=float, default=2.0, help='tgt aug xform: translation range')
@click.option('--tgt_hflip', default=False, is_flag=True, help='tgt aug xform: enable random horizontal flips')
@click.option('--tgt_intens_flip', is_flag=True, default=False,
help='tgt aug colour; enable intensity flip')
@click.option('--tgt_intens_scale_range', type=str, default='',
help='tgt aug colour; intensity scale range `low:high` (-1.5:1.5 for mnist-svhn)')
@click.option('--tgt_intens_offset_range', type=str, default='',
help='tgt aug colour; intensity offset range `low:high` (-0.5:0.5 for mnist-svhn)')
@click.option('--tgt_gaussian_noise_std', type=float, default=0.1,
help='tgt aug: standard deviation of Gaussian noise to add to samples')
@click.option('--num_epochs', type=int, default=200, help='number of epochs')
@click.option('--batch_size', type=int, default=64, help='mini-batch size')
@click.option('--epoch_size', type=click.Choice(['large', 'small', 'target']), default='target',
help='epoch size is either that of the smallest dataset, the largest, or the target')
@click.option('--seed', type=int, default=0, help='random seed (0 for time-based)')
@click.option('--log_file', type=str, default='', help='log file path (none to disable)')
@click.option('--model_file', type=str, default='', help='model file path')
@click.option('--device', type=int, default=0, help='Device')
def experiment(exp, arch, loss, double_softmax, confidence_thresh, rampup, teacher_alpha,
unsup_weight, cls_balance, cls_balance_loss,
combine_batches,
learning_rate, standardise_samples,
src_affine_std, src_xlat_range, src_hflip,
src_intens_flip, src_intens_scale_range, src_intens_offset_range, src_gaussian_noise_std,
tgt_affine_std, tgt_xlat_range, tgt_hflip,
tgt_intens_flip, tgt_intens_scale_range, tgt_intens_offset_range, tgt_gaussian_noise_std,
num_epochs, batch_size, epoch_size, seed,
log_file, model_file, device):
settings = locals().copy()
import os
import sys
import pickle
import cmdline_helpers
if log_file == '':
log_file = 'output_aug_log_{}.txt'.format(exp)
elif log_file == 'none':
log_file = None
if log_file is not None:
if os.path.exists(log_file):
print('Output log file {} already exists'.format(log_file))
return
use_rampup = rampup > 0
src_intens_scale_range_lower, src_intens_scale_range_upper, src_intens_offset_range_lower, src_intens_offset_range_upper = \
cmdline_helpers.intens_aug_options(src_intens_scale_range, src_intens_offset_range)
tgt_intens_scale_range_lower, tgt_intens_scale_range_upper, tgt_intens_offset_range_lower, tgt_intens_offset_range_upper = \
cmdline_helpers.intens_aug_options(tgt_intens_scale_range, tgt_intens_offset_range)
import time
import math
import numpy as np
from batchup import data_source, work_pool
import data_loaders
import standardisation
import network_architectures
import augmentation
import torch, torch.cuda
from torch import nn
from torch.nn import functional as F
import optim_weight_ema
with torch.cuda.device(device):
pool = work_pool.WorkerThreadPool(2)
n_chn = 0
if exp == 'svhn_mnist':
d_source = data_loaders.load_svhn(zero_centre=False, greyscale=True)
d_target = data_loaders.load_mnist(invert=False, zero_centre=False, pad32=True, val=False)
elif exp == 'mnist_svhn':
d_source = data_loaders.load_mnist(invert=False, zero_centre=False, pad32=True)
d_target = data_loaders.load_svhn(zero_centre=False, greyscale=True, val=False)
elif exp == 'svhn_mnist_rgb':
d_source = data_loaders.load_svhn(zero_centre=False, greyscale=False)
d_target = data_loaders.load_mnist(invert=False, zero_centre=False, pad32=True, val=False, rgb=True)
elif exp == 'mnist_svhn_rgb':
d_source = data_loaders.load_mnist(invert=False, zero_centre=False, pad32=True, rgb=True)
d_target = data_loaders.load_svhn(zero_centre=False, greyscale=False, val=False)
elif exp == 'cifar_stl':
d_source = data_loaders.load_cifar10(range_01=False)
d_target = data_loaders.load_stl(zero_centre=False, val=False)
elif exp == 'stl_cifar':
d_source = data_loaders.load_stl(zero_centre=False)
d_target = data_loaders.load_cifar10(range_01=False, val=False)
elif exp == 'mnist_usps':
d_source = data_loaders.load_mnist(zero_centre=False)
d_target = data_loaders.load_usps(zero_centre=False, scale28=True, val=False)
elif exp == 'usps_mnist':
d_source = data_loaders.load_usps(zero_centre=False, scale28=True)
d_target = data_loaders.load_mnist(zero_centre=False, val=False)
elif exp == 'syndigits_svhn':
d_source = data_loaders.load_syn_digits(zero_centre=False)
d_target = data_loaders.load_svhn(zero_centre=False, val=False)
elif exp == 'synsigns_gtsrb':
d_source = data_loaders.load_syn_signs(zero_centre=False)
d_target = data_loaders.load_gtsrb(zero_centre=False, val=False)
else:
print('Unknown experiment type \'{}\''.format(exp))
return
# Delete the training ground truths as we should not be using them
del d_target.train_y
if standardise_samples:
standardisation.standardise_dataset(d_source)
standardisation.standardise_dataset(d_target)
n_classes = d_source.n_classes
print('Loaded data')
if arch == '':
if exp in {'mnist_usps', 'usps_mnist'}:
arch = 'mnist-bn-32-64-256'
if exp in {'svhn_mnist', 'mnist_svhn'}:
arch = 'grey-32-64-128-gp'
if exp in {'cifar_stl', 'stl_cifar', 'syndigits_svhn', 'svhn_mnist_rgb', 'mnist_svhn_rgb'}:
arch = 'rgb-128-256-down-gp'
if exp in {'synsigns_gtsrb'}:
arch = 'rgb40-96-192-384-gp'
net_class, expected_shape = network_architectures.get_net_and_shape_for_architecture(arch)
if expected_shape != d_source.train_X.shape[1:]:
print('Architecture {} not compatible with experiment {}; it needs samples of shape {}, '
'data has samples of shape {}'.format(arch, exp, expected_shape, d_source.train_X.shape[1:]))
return
student_net = net_class(n_classes).cuda()
teacher_net = net_class(n_classes).cuda()
student_params = list(student_net.parameters())
teacher_params = list(teacher_net.parameters())
for param in teacher_params:
param.requires_grad = False
student_optimizer = torch.optim.Adam(student_params, lr=learning_rate)
teacher_optimizer = optim_weight_ema.WeightEMA(teacher_params, student_params, alpha=teacher_alpha)
classification_criterion = nn.CrossEntropyLoss()
print('Built network')
src_aug = augmentation.ImageAugmentation(
src_hflip, src_xlat_range, src_affine_std,
intens_flip=src_intens_flip,
intens_scale_range_lower=src_intens_scale_range_lower, intens_scale_range_upper=src_intens_scale_range_upper,
intens_offset_range_lower=src_intens_offset_range_lower,
intens_offset_range_upper=src_intens_offset_range_upper,
gaussian_noise_std=src_gaussian_noise_std
)
tgt_aug = augmentation.ImageAugmentation(
tgt_hflip, tgt_xlat_range, tgt_affine_std,
intens_flip=tgt_intens_flip,
intens_scale_range_lower=tgt_intens_scale_range_lower, intens_scale_range_upper=tgt_intens_scale_range_upper,
intens_offset_range_lower=tgt_intens_offset_range_lower,
intens_offset_range_upper=tgt_intens_offset_range_upper,
gaussian_noise_std=tgt_gaussian_noise_std
)
if combine_batches:
def augment(X_sup, y_src, X_tgt):
X_src_stu, X_src_tea = src_aug.augment_pair(X_sup)
X_tgt_stu, X_tgt_tea = tgt_aug.augment_pair(X_tgt)
return X_src_stu, X_src_tea, y_src, X_tgt_stu, X_tgt_tea
else:
def augment(X_src, y_src, X_tgt):
X_src = src_aug.augment(X_src)
X_tgt_stu, X_tgt_tea = tgt_aug.augment_pair(X_tgt)
return X_src, y_src, X_tgt_stu, X_tgt_tea
rampup_weight_in_list = [0]
cls_bal_fn = network_architectures.get_cls_bal_function(cls_balance_loss)
def compute_aug_loss(stu_out, tea_out):
# Augmentation loss
if use_rampup:
unsup_mask = None
conf_mask_count = None
unsup_mask_count = None
else:
conf_tea = torch.max(tea_out, 1)[0]
unsup_mask = conf_mask = torch.gt(conf_tea, confidence_thresh).float()
unsup_mask_count = conf_mask_count = torch.sum(conf_mask)
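                # Only target samples whose teacher max-probability exceeds
                # confidence_thresh contribute to the consistency loss; the
                # mask counts are also reported and drive model selection.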
if loss == 'bce':
aug_loss = network_architectures.robust_binary_crossentropy(stu_out, tea_out)
else:
d_aug_loss = stu_out - tea_out
aug_loss = d_aug_loss * d_aug_loss
aug_loss = torch.mean(aug_loss, 1)
if use_rampup:
unsup_loss = torch.mean(aug_loss) * rampup_weight_in_list[0]
else:
unsup_loss = torch.mean(aug_loss * unsup_mask)
# Class balance loss
if cls_balance > 0.0:
# Compute per-sample average predicated probability
# Average over samples to get average class prediction
avg_cls_prob = torch.mean(stu_out, 0)
# Compute loss
equalise_cls_loss = cls_bal_fn(avg_cls_prob, float(1.0 / n_classes))
equalise_cls_loss = torch.mean(equalise_cls_loss) * n_classes
if use_rampup:
equalise_cls_loss = equalise_cls_loss * rampup_weight_in_list[0]
else:
if rampup == 0:
equalise_cls_loss = equalise_cls_loss * torch.mean(unsup_mask, 0)
unsup_loss += equalise_cls_loss * cls_balance
return unsup_loss, conf_mask_count, unsup_mask_count
if combine_batches:
def f_train(X_src0, X_src1, y_src, X_tgt0, X_tgt1):
X_src0 = torch.autograd.Variable(torch.from_numpy(X_src0).cuda())
X_src1 = torch.autograd.Variable(torch.from_numpy(X_src1).cuda())
y_src = torch.autograd.Variable(torch.from_numpy(y_src).long().cuda())
X_tgt0 = torch.autograd.Variable(torch.from_numpy(X_tgt0).cuda())
X_tgt1 = torch.autograd.Variable(torch.from_numpy(X_tgt1).cuda())
n_samples = X_src0.size()[0]
n_total = n_samples + X_tgt0.size()[0]
student_optimizer.zero_grad()
student_net.train(mode=True)
teacher_net.train(mode=True)
# Concatenate source and target mini-batches
X0 = torch.cat([X_src0, X_tgt0], 0)
X1 = torch.cat([X_src1, X_tgt1], 0)
student_logits_out = student_net(X0)
student_prob_out = F.softmax(student_logits_out)
src_logits_out = student_logits_out[:n_samples]
src_prob_out = student_prob_out[:n_samples]
teacher_logits_out = teacher_net(X1)
teacher_prob_out = F.softmax(teacher_logits_out)
# Supervised classification loss
if double_softmax:
clf_loss = classification_criterion(src_prob_out, y_src)
else:
clf_loss = classification_criterion(src_logits_out, y_src)
unsup_loss, conf_mask_count, unsup_mask_count = compute_aug_loss(student_prob_out, teacher_prob_out)
loss_expr = clf_loss + unsup_loss * unsup_weight
loss_expr.backward()
student_optimizer.step()
teacher_optimizer.step()
outputs = [float(clf_loss.data.cpu().numpy()) * n_samples, float(unsup_loss.data.cpu().numpy()) * n_total]
if not use_rampup:
mask_count = conf_mask_count.data.cpu()[0] * 0.5
unsup_count = unsup_mask_count.data.cpu()[0] * 0.5
outputs.append(mask_count)
outputs.append(unsup_count)
return tuple(outputs)
else:
def f_train(X_src, y_src, X_tgt0, X_tgt1):
X_src = torch.autograd.Variable(torch.from_numpy(X_src).cuda())
y_src = torch.autograd.Variable(torch.from_numpy(y_src).long().cuda())
X_tgt0 = torch.autograd.Variable(torch.from_numpy(X_tgt0).cuda())
X_tgt1 = torch.autograd.Variable(torch.from_numpy(X_tgt1).cuda())
student_optimizer.zero_grad()
student_net.train(mode=True)
teacher_net.train(mode=True)
src_logits_out = student_net(X_src)
student_tgt_logits_out = student_net(X_tgt0)
student_tgt_prob_out = F.softmax(student_tgt_logits_out)
teacher_tgt_logits_out = teacher_net(X_tgt1)
teacher_tgt_prob_out = F.softmax(teacher_tgt_logits_out)
# Supervised classification loss
if double_softmax:
clf_loss = classification_criterion(F.softmax(src_logits_out), y_src)
else:
clf_loss = classification_criterion(src_logits_out, y_src)
unsup_loss, conf_mask_count, unsup_mask_count = compute_aug_loss(student_tgt_prob_out, teacher_tgt_prob_out)
loss_expr = clf_loss + unsup_loss * unsup_weight
loss_expr.backward()
student_optimizer.step()
teacher_optimizer.step()
n_samples = X_src.size()[0]
outputs = [float(clf_loss.data.cpu().numpy()) * n_samples, float(unsup_loss.data.cpu().numpy()) * n_samples]
if not use_rampup:
mask_count = conf_mask_count.data.cpu()[0]
unsup_count = unsup_mask_count.data.cpu()[0]
outputs.append(mask_count)
outputs.append(unsup_count)
return tuple(outputs)
print('Compiled training function')
def f_pred_src(X_sup):
X_var = torch.autograd.Variable(torch.from_numpy(X_sup).cuda())
student_net.train(mode=False)
teacher_net.train(mode=False)
return (F.softmax(student_net(X_var)).data.cpu().numpy(), F.softmax(teacher_net(X_var)).data.cpu().numpy())
def f_pred_tgt(X_sup):
X_var = torch.autograd.Variable(torch.from_numpy(X_sup).cuda())
student_net.train(mode=False)
teacher_net.train(mode=False)
return (F.softmax(student_net(X_var)).data.cpu().numpy(), F.softmax(teacher_net(X_var)).data.cpu().numpy())
def f_eval_src(X_sup, y_sup):
y_pred_prob_stu, y_pred_prob_tea = f_pred_src(X_sup)
y_pred_stu = np.argmax(y_pred_prob_stu, axis=1)
y_pred_tea = np.argmax(y_pred_prob_tea, axis=1)
return (float((y_pred_stu != y_sup).sum()), float((y_pred_tea != y_sup).sum()))
def f_eval_tgt(X_sup, y_sup):
y_pred_prob_stu, y_pred_prob_tea = f_pred_tgt(X_sup)
y_pred_stu = np.argmax(y_pred_prob_stu, axis=1)
y_pred_tea = np.argmax(y_pred_prob_tea, axis=1)
return (float((y_pred_stu != y_sup).sum()), float((y_pred_tea != y_sup).sum()))
print('Compiled evaluation function')
# Setup output
def log(text):
print(text)
if log_file is not None:
with open(log_file, 'a') as f:
f.write(text + '\n')
f.flush()
f.close()
cmdline_helpers.ensure_containing_dir_exists(log_file)
# Report setttings
log('Settings: {}'.format(', '.join(['{}={}'.format(key, settings[key]) for key in sorted(list(settings.keys()))])))
# Report dataset size
log('Dataset:')
log('SOURCE Train: X.shape={}, y.shape={}'.format(d_source.train_X.shape, d_source.train_y.shape))
log('SOURCE Test: X.shape={}, y.shape={}'.format(d_source.test_X.shape, d_source.test_y.shape))
log('TARGET Train: X.shape={}'.format(d_target.train_X.shape))
log('TARGET Test: X.shape={}, y.shape={}'.format(d_target.test_X.shape, d_target.test_y.shape))
print('Training...')
sup_ds = data_source.ArrayDataSource([d_source.train_X, d_source.train_y], repeats=-1)
tgt_train_ds = data_source.ArrayDataSource([d_target.train_X], repeats=-1)
train_ds = data_source.CompositeDataSource([sup_ds, tgt_train_ds]).map(augment)
train_ds = pool.parallel_data_source(train_ds)
if epoch_size == 'large':
n_samples = max(d_source.train_X.shape[0], d_target.train_X.shape[0])
elif epoch_size == 'small':
n_samples = min(d_source.train_X.shape[0], d_target.train_X.shape[0])
elif epoch_size == 'target':
n_samples = d_target.train_X.shape[0]
n_train_batches = n_samples // batch_size
source_test_ds = data_source.ArrayDataSource([d_source.test_X, d_source.test_y])
target_test_ds = data_source.ArrayDataSource([d_target.test_X, d_target.test_y])
if seed != 0:
shuffle_rng = np.random.RandomState(seed)
else:
shuffle_rng = np.random
train_batch_iter = train_ds.batch_iterator(batch_size=batch_size, shuffle=shuffle_rng)
best_teacher_model_state = {k: v.cpu().numpy() for k, v in teacher_net.state_dict().items()}
best_conf_mask_rate = 0.0
best_src_test_err = 1.0
for epoch in range(num_epochs):
t1 = time.time()
if use_rampup:
if epoch < rampup:
p = max(0.0, float(epoch)) / float(rampup)
p = 1.0 - p
rampup_value = math.exp(-p * p * 5.0)
else:
rampup_value = 1.0
rampup_weight_in_list[0] = rampup_value
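                # Gaussian ramp-up exp(-5 * (1 - epoch/rampup)^2), as used in
                # temporal-ensembling-style semi-supervised training: the
                # unsupervised weight rises smoothly from ~0.007 to 1.0.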
train_res = data_source.batch_map_mean(f_train, train_batch_iter, n_batches=n_train_batches)
train_clf_loss = train_res[0]
if combine_batches:
unsup_loss_string = 'unsup (both) loss={:.6f}'.format(train_res[1])
else:
unsup_loss_string = 'unsup (tgt) loss={:.6f}'.format(train_res[1])
src_test_err_stu, src_test_err_tea = source_test_ds.batch_map_mean(f_eval_src, batch_size=batch_size * 2)
tgt_test_err_stu, tgt_test_err_tea = target_test_ds.batch_map_mean(f_eval_tgt, batch_size=batch_size * 2)
if use_rampup:
unsup_loss_string = '{}, rampup={:.3%}'.format(unsup_loss_string, rampup_value)
if src_test_err_stu < best_src_test_err:
best_src_test_err = src_test_err_stu
best_teacher_model_state = {k: v.cpu().numpy() for k, v in teacher_net.state_dict().items()}
improve = '*** '
else:
improve = ''
else:
conf_mask_rate = train_res[-2]
unsup_mask_rate = train_res[-1]
if conf_mask_rate > best_conf_mask_rate:
best_conf_mask_rate = conf_mask_rate
improve = '*** '
best_teacher_model_state = {k: v.cpu().numpy() for k, v in teacher_net.state_dict().items()}
else:
improve = ''
unsup_loss_string = '{}, conf mask={:.3%}, unsup mask={:.3%}'.format(
unsup_loss_string, conf_mask_rate, unsup_mask_rate)
t2 = time.time()
log('{}Epoch {} took {:.2f}s: TRAIN clf loss={:.6f}, {}; '
'SRC TEST ERR={:.3%}, TGT TEST student err={:.3%}, TGT TEST teacher err={:.3%}'.format(
improve, epoch, t2 - t1, train_clf_loss, unsup_loss_string, src_test_err_stu, tgt_test_err_stu, tgt_test_err_tea))
# Save network
if model_file != '':
cmdline_helpers.ensure_containing_dir_exists(model_file)
with open(model_file, 'wb') as f:
pickle.dump(best_teacher_model_state, f)
if __name__ == '__main__':
experiment() | [
"[email protected]"
] | |
f246c19999b8be870b9e29f6507c314d5fea8821 | 5ffc3111779894e3ff161c21933f585acac36721 | /2020F_hw6_submissions/shangstacy/StacyShangCh7P2.py | f668df6e9959d44525ae8e081e5c344848bf84ad | [] | no_license | Eric-Wonbin-Sang/CS110Manager | ac9b8efa5179fdc240e60736d685b2e850447b39 | 31b594b91c8ccd740a7915fb982cc7e7bc280346 | refs/heads/main | 2023-02-09T13:44:27.628976 | 2021-01-04T08:25:29 | 2021-01-04T08:25:29 | 321,807,341 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,118 | py | # StacyShangCh7P2.py
# CS 110 A HW 6
# Stacy Shang
# I pledge my honor that I have abided by the Stevens Honor System -Stacy
# This program accepts a date and checks whether or not the date is valid
def main():
print("This program checks the validity of an inputted date.")
date = input("Enter the date in the form of dd/mm/yy: ")
# dd, mm, yy = date.split("/")
# dd = int(dd)
# mm = int(mm)
# yy = int(yy)
inputDate = date.split("/")
    dd = int(inputDate[0])
    mm = int(inputDate[1])
    yy = int(inputDate[2])
    if(mm==1 or mm==3 or mm==5 or mm==7 or mm==8 or mm==10 or mm==12):
maxim = 31
elif(mm==4 or mm==6 or mm==9 or mm==11):
maxim = 30
    else:
        maxim = 28  # February; leap years are not handled
if(mm<1 or mm>12):
print("Date is invalid")
elif(dd<1 or dd>maxim):
print("Date is invalid")
elif(dd==maxim and mm!=12):
dd=1
        mm = mm + 1
print("The date is:", dd,mm,yy)
elif(dd==31 and mm==12):
dd=1
mm=1
yy=yy+1
print("The date is:", dd,mm,yy)
else:
dd=dd+1
print("The date is:", dd,mm,yy)
main()
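# Worked example: entering 28/2/21 rolls over February and prints "The date is: 1 3 21".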
| [
"[email protected]"
] | |
32cfdd87226d303193c8392e399fa29e2acb19e7 | 00d7e9321d418a2d9a607fb9376b862119f2bd4e | /sandbox/demo_turtle_undo.py | 931cafb0e9c9c9f5496dcd7d5da4abb55455705a | [
"MIT"
] | permissive | baluneboy/pims | 92b9b1f64ed658867186e44b92526867696e1923 | 5a07e02588b1b7c8ebf7458b10e81b8ecf84ad13 | refs/heads/master | 2021-11-16T01:55:39.223910 | 2021-08-13T15:19:48 | 2021-08-13T15:19:48 | 33,029,780 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 217 | py | #!/usr/bin/env python
from turtle import *
for i in range(10):
forward(100)
left(90)
forward(10)
left(90)
forward(100)
right(90)
forward(10)
right(90)
for i in range(30):
undo()
| [
"none"
] | none |
30a4f20cc66cd435aa5f1c61e2618945db9d0dca | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03775/s238849097.py | e1fbd653ac8d41a71208142afafc985c89f140a4 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 190 | py | import math
N = int(input())
ans = len(str(N))
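# F(i, N//i) is the larger digit count of a factor pair; the pair (1, N)
# gives the upper bound len(str(N)), which the divisor loop below improves.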
for i in range(1, int(math.sqrt(N))+1):
if N % i == 0:
j = N // i
k = max(len(str(i)), len(str(j)))
ans = min(ans, k)
print(ans) | [
"[email protected]"
] | |
208cb47479c2c4a5bef8b3ece331d195ca00839b | 70cdf0741a22c678401a306229003bf036ffe5a6 | /ocbind/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/__init__.py | f819af22ccad6a5cc866476ee6a03e4789bfe20d | [] | no_license | zsblevins/nanog81-hackathon | 5001e034339d6b0c6452ae2474f06916bcd715cf | 1b64fd207dd69837f947094fbd6d6c1cea3a1070 | refs/heads/main | 2023-03-03T09:39:28.460000 | 2021-02-15T13:41:38 | 2021-02-15T13:41:38 | 336,698,856 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 55,942 | py | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import state
from . import subtlvs
from . import undefined_subtlvs
class instance(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isis-neighbor-attribute/neighbors/neighbor/instances/instance. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Instance of TLV-222 between the originating
and remote IS.
"""
__slots__ = ('_path_helper', '_extmethods', '__id','__state','__subtlvs','__undefined_subtlvs',)
_yang_name = 'instance'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__subtlvs = YANGDynClass(base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__undefined_subtlvs = YANGDynClass(base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
      return ['network-instances', 'network-instance', 'protocols', 'protocol', 'isis', 'levels', 'level', 'link-state-database', 'lsp', 'tlvs', 'tlv', 'mt-isis-neighbor-attribute', 'neighbors', 'neighbor', 'instances', 'instance']
def _get_id(self):
"""
Getter method for id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/id (leafref)
YANG Description: Reference to the unique identifier for the
instance of the multi-topology IS neighbor
TLV instance.
"""
return self.__id
def _set_id(self, v, load=False):
"""
Setter method for id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/id (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_id() directly.
YANG Description: Reference to the unique identifier for the
instance of the multi-topology IS neighbor
TLV instance.
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """id must be of a type compatible with leafref""",
'defined-type': "leafref",
'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)""",
})
self.__id = t
if hasattr(self, '_set'):
self._set()
def _unset_id(self):
self.__id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/state (container)
YANG Description: State parameters of MT neighbor.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters of MT neighbor.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_subtlvs(self):
"""
Getter method for subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/subtlvs (container)
YANG Description: This container describes IS Neighbor sub-TLVs.
"""
return self.__subtlvs
def _set_subtlvs(self, v, load=False):
"""
Setter method for subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/subtlvs (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_subtlvs is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_subtlvs() directly.
YANG Description: This container describes IS Neighbor sub-TLVs.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """subtlvs must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__subtlvs = t
if hasattr(self, '_set'):
self._set()
def _unset_subtlvs(self):
self.__subtlvs = YANGDynClass(base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_undefined_subtlvs(self):
"""
Getter method for undefined_subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/undefined_subtlvs (container)
YANG Description: This container describes undefined ISIS TLVs.
"""
return self.__undefined_subtlvs
def _set_undefined_subtlvs(self, v, load=False):
"""
Setter method for undefined_subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/undefined_subtlvs (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_undefined_subtlvs is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_undefined_subtlvs() directly.
YANG Description: This container describes undefined ISIS TLVs.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """undefined_subtlvs must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__undefined_subtlvs = t
if hasattr(self, '_set'):
self._set()
def _unset_undefined_subtlvs(self):
self.__undefined_subtlvs = YANGDynClass(base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
id = __builtin__.property(_get_id)
state = __builtin__.property(_get_state)
subtlvs = __builtin__.property(_get_subtlvs)
undefined_subtlvs = __builtin__.property(_get_undefined_subtlvs)
_pyangbind_elements = OrderedDict([('id', id), ('state', state), ('subtlvs', subtlvs), ('undefined_subtlvs', undefined_subtlvs), ])
from . import state
from . import subtlvs
from . import undefined_subtlvs
class instance(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isis-neighbor-attribute/neighbors/neighbor/instances/instance. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Instance of TLV-222 between the originating
and remote IS.
"""
__slots__ = ('_path_helper', '_extmethods', '__id','__state','__subtlvs','__undefined_subtlvs',)
_yang_name = 'instance'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__subtlvs = YANGDynClass(base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__undefined_subtlvs = YANGDynClass(base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
      return ['network-instances', 'network-instance', 'protocols', 'protocol', 'isis', 'levels', 'level', 'link-state-database', 'lsp', 'tlvs', 'tlv', 'mt-isis-neighbor-attribute', 'neighbors', 'neighbor', 'instances', 'instance']
def _get_id(self):
"""
Getter method for id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/id (leafref)
YANG Description: Reference to the unique identifier for the
instance of the multi-topology IS neighbor
TLV instance.
"""
return self.__id
def _set_id(self, v, load=False):
"""
Setter method for id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/id (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_id() directly.
YANG Description: Reference to the unique identifier for the
instance of the multi-topology IS neighbor
TLV instance.
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """id must be of a type compatible with leafref""",
'defined-type': "leafref",
'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)""",
})
self.__id = t
if hasattr(self, '_set'):
self._set()
def _unset_id(self):
self.__id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/state (container)
YANG Description: State parameters of MT neighbor.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters of MT neighbor.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_subtlvs(self):
"""
Getter method for subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/subtlvs (container)
YANG Description: This container describes IS Neighbor sub-TLVs.
"""
return self.__subtlvs
def _set_subtlvs(self, v, load=False):
"""
Setter method for subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/subtlvs (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_subtlvs is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_subtlvs() directly.
YANG Description: This container describes IS Neighbor sub-TLVs.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """subtlvs must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__subtlvs = t
if hasattr(self, '_set'):
self._set()
def _unset_subtlvs(self):
self.__subtlvs = YANGDynClass(base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_undefined_subtlvs(self):
"""
Getter method for undefined_subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/undefined_subtlvs (container)
YANG Description: This container describes undefined ISIS TLVs.
"""
return self.__undefined_subtlvs
def _set_undefined_subtlvs(self, v, load=False):
"""
Setter method for undefined_subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/undefined_subtlvs (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_undefined_subtlvs is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_undefined_subtlvs() directly.
YANG Description: This container describes undefined ISIS TLVs.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """undefined_subtlvs must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__undefined_subtlvs = t
if hasattr(self, '_set'):
self._set()
def _unset_undefined_subtlvs(self):
self.__undefined_subtlvs = YANGDynClass(base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
id = __builtin__.property(_get_id)
state = __builtin__.property(_get_state)
subtlvs = __builtin__.property(_get_subtlvs)
undefined_subtlvs = __builtin__.property(_get_undefined_subtlvs)
_pyangbind_elements = OrderedDict([('id', id), ('state', state), ('subtlvs', subtlvs), ('undefined_subtlvs', undefined_subtlvs), ])
from . import state
from . import subtlvs
from . import undefined_subtlvs
class instance(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isis-neighbor-attribute/neighbors/neighbor/instances/instance. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Instance of TLV-222 between the originating
and remote IS.
"""
__slots__ = ('_path_helper', '_extmethods', '__id','__state','__subtlvs','__undefined_subtlvs',)
_yang_name = 'instance'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__subtlvs = YANGDynClass(base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__undefined_subtlvs = YANGDynClass(base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['network-instances', 'network-instance', 'protocols', 'protocol', 'isis', 'levels', 'level', 'link-state-database', 'lsp', 'tlvs', 'tlv', 'mt-isis-neighbor-attribute', 'neighbors', 'neighbor', 'instances', 'instance']
def _get_id(self):
"""
Getter method for id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/id (leafref)
YANG Description: Reference to the unique identifier for the
instance of the multi-topology IS neighbor
TLV instance.
"""
return self.__id
def _set_id(self, v, load=False):
"""
Setter method for id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/id (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_id() directly.
YANG Description: Reference to the unique identifier for the
instance of the multi-topology IS neighbor
TLV instance.
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """id must be of a type compatible with leafref""",
'defined-type': "leafref",
'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)""",
})
self.__id = t
if hasattr(self, '_set'):
self._set()
def _unset_id(self):
self.__id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/state (container)
YANG Description: State parameters of MT neighbor.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters of MT neighbor.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_subtlvs(self):
"""
Getter method for subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/subtlvs (container)
YANG Description: This container describes IS Neighbor sub-TLVs.
"""
return self.__subtlvs
def _set_subtlvs(self, v, load=False):
"""
Setter method for subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/subtlvs (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_subtlvs is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_subtlvs() directly.
YANG Description: This container describes IS Neighbor sub-TLVs.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """subtlvs must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__subtlvs = t
if hasattr(self, '_set'):
self._set()
def _unset_subtlvs(self):
self.__subtlvs = YANGDynClass(base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_undefined_subtlvs(self):
"""
Getter method for undefined_subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/undefined_subtlvs (container)
YANG Description: This container describes undefined ISIS TLVs.
"""
return self.__undefined_subtlvs
def _set_undefined_subtlvs(self, v, load=False):
"""
Setter method for undefined_subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/undefined_subtlvs (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_undefined_subtlvs is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_undefined_subtlvs() directly.
YANG Description: This container describes undefined ISIS TLVs.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """undefined_subtlvs must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__undefined_subtlvs = t
if hasattr(self, '_set'):
self._set()
def _unset_undefined_subtlvs(self):
self.__undefined_subtlvs = YANGDynClass(base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
id = __builtin__.property(_get_id)
state = __builtin__.property(_get_state)
subtlvs = __builtin__.property(_get_subtlvs)
undefined_subtlvs = __builtin__.property(_get_undefined_subtlvs)
_pyangbind_elements = OrderedDict([('id', id), ('state', state), ('subtlvs', subtlvs), ('undefined_subtlvs', undefined_subtlvs), ])
from . import state
from . import subtlvs
from . import undefined_subtlvs
class instance(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isis-neighbor-attribute/neighbors/neighbor/instances/instance. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Instance of TLV-222 between the originating
and remote IS.
"""
__slots__ = ('_path_helper', '_extmethods', '__id','__state','__subtlvs','__undefined_subtlvs',)
_yang_name = 'instance'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__subtlvs = YANGDynClass(base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__undefined_subtlvs = YANGDynClass(base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return ['network-instances', 'network-instance', 'protocols', 'protocol', 'isis', 'levels', 'level', 'link-state-database', 'lsp', 'tlvs', 'tlv', 'mt-isis-neighbor-attribute', 'neighbors', 'neighbor', 'instances', 'instance']
def _get_id(self):
"""
Getter method for id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/id (leafref)
YANG Description: Reference to the unique identifier for the
instance of the multi-topology IS neighbor
TLV instance.
"""
return self.__id
def _set_id(self, v, load=False):
"""
Setter method for id, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/id (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_id is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_id() directly.
YANG Description: Reference to the unique identifier for the
instance of the multi-topology IS neighbor
TLV instance.
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """id must be of a type compatible with leafref""",
'defined-type': "leafref",
'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)""",
})
self.__id = t
if hasattr(self, '_set'):
self._set()
def _unset_id(self):
self.__id = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/state (container)
YANG Description: State parameters of MT neighbor.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters of MT neighbor.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_subtlvs(self):
"""
Getter method for subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/subtlvs (container)
YANG Description: This container describes IS Neighbor sub-TLVs.
"""
return self.__subtlvs
def _set_subtlvs(self, v, load=False):
"""
Setter method for subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/subtlvs (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_subtlvs is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_subtlvs() directly.
YANG Description: This container describes IS Neighbor sub-TLVs.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """subtlvs must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__subtlvs = t
if hasattr(self, '_set'):
self._set()
def _unset_subtlvs(self):
self.__subtlvs = YANGDynClass(base=subtlvs.subtlvs, is_container='container', yang_name="subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_undefined_subtlvs(self):
"""
Getter method for undefined_subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/undefined_subtlvs (container)
YANG Description: This container describes undefined ISIS TLVs.
"""
return self.__undefined_subtlvs
def _set_undefined_subtlvs(self, v, load=False):
"""
Setter method for undefined_subtlvs, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isis_neighbor_attribute/neighbors/neighbor/instances/instance/undefined_subtlvs (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_undefined_subtlvs is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_undefined_subtlvs() directly.
YANG Description: This container describes undefined ISIS TLVs.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """undefined_subtlvs must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__undefined_subtlvs = t
if hasattr(self, '_set'):
self._set()
def _unset_undefined_subtlvs(self):
self.__undefined_subtlvs = YANGDynClass(base=undefined_subtlvs.undefined_subtlvs, is_container='container', yang_name="undefined-subtlvs", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
id = __builtin__.property(_get_id)
state = __builtin__.property(_get_state)
subtlvs = __builtin__.property(_get_subtlvs)
undefined_subtlvs = __builtin__.property(_get_undefined_subtlvs)
_pyangbind_elements = OrderedDict([('id', id), ('state', state), ('subtlvs', subtlvs), ('undefined_subtlvs', undefined_subtlvs), ])
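# --- Minimal usage sketch (not part of the pyangbind-generated output) ------
# Instantiate the last `instance` class defined above and populate it through
# the private setters, as the docstrings describe for config-false data. This
# assumes the generated sibling modules (`state`, `subtlvs`,
# `undefined_subtlvs`) import cleanly, i.e. the module is run inside its
# package (e.g. with `python -m <package>.instance`).
if __name__ == '__main__':
    inst = instance()
    inst._set_id(u'1')    # no enclosing list here, so the key guard does not fire
    print(inst.id)        # -> 1
    print(inst._path())   # schema path of this container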
# ---- aagusti/zosipkd :: /osipkd/views/akuntansi/ak_jurnal_item.py ----
import os
import uuid
from osipkd.tools import row2dict, xls_reader
from datetime import datetime
from sqlalchemy import not_, func
from sqlalchemy.orm import aliased
from pyramid.view import (view_config,)
from pyramid.httpexceptions import ( HTTPFound, )
import colander
from deform import (Form, widget, ValidationFailure, )
from osipkd.models import DBSession
from osipkd.models.apbd import Jurnal, JurnalItem
from osipkd.models.pemda_model import Rekening, Sap, RekeningSap
from osipkd.models.apbd_anggaran import KegiatanSub, Kegiatan, KegiatanItem
from datatables import ColumnDT, DataTables
from osipkd.views.base_view import BaseViews
SESS_ADD_FAILED = 'Failed to add ak-jurnal-item'
SESS_EDIT_FAILED = 'Failed to edit ak-jurnal-item'
class view_ak_jurnal_item(BaseViews):
########
# List #
########
@view_config(route_name='ak-jurnal-item', renderer='templates/ak-jurnal-item/list.pt',
permission='read')
def view_list(self):
ses = self.request.session
req = self.request
params = req.params
url_dict = req.matchdict
return dict(project='EIS')
##########
# Action #
##########
@view_config(route_name='ak-jurnal-item-act', renderer='json',
permission='read')
def ak_jurnal_item_act(self):
ses = self.request.session
req = self.request
params = req.params
url_dict = req.matchdict
pk_id = 'id' in params and params['id'] and int(params['id']) or 0
if url_dict['act']=='grid':
jurnal_id = url_dict['jurnal_id'].isdigit() and url_dict['jurnal_id'] or 0
columns = []
columns.append(ColumnDT('id'))
columns.append(ColumnDT('sapkd'))
columns.append(ColumnDT('sapnm'))
columns.append(ColumnDT('amount', filter=self._number_format))
columns.append(ColumnDT('notes'))
columns.append(ColumnDT('rekkd'))
columns.append(ColumnDT('reknm'))
columns.append(ColumnDT('kegiatan_sub_id'))
columns.append(ColumnDT('rekening_id'))
columns.append(ColumnDT('jurnal_id'))
columns.append(ColumnDT('subkd'))
columns.append(ColumnDT('subnm'))
rek = aliased(Rekening)
sap = aliased(Sap)
sub = aliased(KegiatanSub)
query = DBSession.query(JurnalItem.id,
sap.kode.label('sapkd'),
sap.nama.label('sapnm'),
JurnalItem.amount,
JurnalItem.notes,
rek.kode.label('rekkd'),
rek.nama.label('reknm'),
JurnalItem.kegiatan_sub_id,
JurnalItem.rekening_id,
JurnalItem.jurnal_id,
sub.kode.label('subkd'),
sub.nama.label('subnm'),
).join(Jurnal,
).outerjoin(rek, JurnalItem.rekening_id == rek.id
).outerjoin(sap, JurnalItem.sap_id == sap.id
).outerjoin(sub, JurnalItem.kegiatan_sub_id == sub.id
).filter(JurnalItem.jurnal_id==jurnal_id,
JurnalItem.jurnal_id==Jurnal.id,
).group_by(JurnalItem.id,
sap.kode,
sap.nama,
JurnalItem.amount,
JurnalItem.notes,
rek.kode,
rek.nama,
JurnalItem.kegiatan_sub_id,
JurnalItem.rekening_id,
JurnalItem.jurnal_id,
sub.kode,
sub.nama,
)
rowTable = DataTables(req, JurnalItem, query, columns)
return rowTable.output_result()
    ################
    #  Quick Add   #
    ################
@view_config(route_name='ak-jurnal-item-add', renderer='json',
permission='add')
def view_add(self):
ses = self.request.session
req = self.request
params = req.params
url_dict = req.matchdict
jurnal_id = 'jurnal_id' in url_dict and url_dict['jurnal_id'] or 0
controls = dict(req.POST.items())
jurnal_item_id = 'jurnal_item_id' in controls and controls['jurnal_item_id'] or 0
if jurnal_item_id:
row = DBSession.query(JurnalItem)\
.join(Jurnal)\
.filter(JurnalItem.id==jurnal_item_id,
Jurnal.unit_id==ses['unit_id'],
JurnalItem.jurnal_id==jurnal_id).first()
if not row:
return {"success": False, 'msg':'Jurnal tidak ditemukan'}
else:
row = JurnalItem()
row.jurnal_id = jurnal_id
row.kegiatan_sub_id = controls['kegiatan_sub_id'] or 0
row.rekening_id = controls['rekening_id'] or 0
row.sap_id = controls['sap_id'] or 0
        try:
            row.amount = controls['amount'].replace('.', '')
            row.notes = controls['notes']
            DBSession.add(row)
            DBSession.flush()
            return {"success": True, 'id': row.id, "msg": 'Data added successfully'}
        except:
            return {'success': False, 'msg': 'Failed to add data'}
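    # Hypothetical client request for the quick-add endpoint above (field names
    # come from the handler; the URL pattern is an assumption, see the route
    # sketch at the end of this module):
    #
    #   POST /jurnal/42/item/add
    #   kegiatan_sub_id=3&rekening_id=7&sap_id=9&amount=1.500.000&notes=deposit
    #
    # `amount` may use dots as thousands separators; the handler strips them.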
def query_id(self):
return DBSession.query(JurnalItem).filter(JurnalItem.id==self.request.matchdict['id'],
JurnalItem.jurnal_id==self.request.matchdict['jurnal_id'])
def id_not_found(self):
        msg = 'Jurnal Item ID %s not found.' % self.request.matchdict['id']
return {'success': False, 'msg':msg}
########
# Edit #
########
@view_config(route_name='ak-jurnal-item-edit', renderer='json',
permission='edit')
def view_edit(self):
request = self.request
row = self.query_id().first()
if not row:
            return self.id_not_found()
form = self.get_form(EditSchema)
if request.POST:
if 'simpan' in request.POST:
controls = request.POST.items()
                try:
                    c = form.validate(controls)
                except ValidationFailure:
                    return dict(form=form)
                # EditSchema, save_request() and route_list() are assumed to be
                # project-level helpers defined elsewhere in this package.
                save_request(dict(controls), row)
            return route_list()
elif SESS_EDIT_FAILED in request.session:
return self.session_failed(SESS_EDIT_FAILED)
values = row.to_dict()
r=DBSession.query(Rekening).filter(Rekening.id==row.rekening_id).first()
if r:
values['rekening_kd'] = r.kode
values['rekening_nm'] = r.nama
else:
values['rekening_id'] = 0
values['rekening_kd'] = ""
values['rekening_nm'] = ""
a=DBSession.query(KegiatanSub).filter(KegiatanSub.id==row.kegiatan_sub_id).first()
if a:
values['kegiatan_sub_kd'] = a.kode
values['kegiatan_sub_nm'] = a.nama
else:
values['kegiatan_sub_id'] = 0
values['kegiatan_sub_kd'] = ""
values['kegiatan_sub_nm'] = ""
aa=DBSession.query(Sap).filter(Sap.id==row.sap_id).first()
if aa:
values['sap_kd'] = aa.kode
values['sap_nm'] = aa.nama
else:
values['sap_id'] = 0
values['sap_kd'] = ""
values['sap_nm'] = ""
form.set_appstruct(values)
return dict(form=form)
##########
# Delete #
##########
@view_config(route_name='ak-jurnal-item-delete', renderer='json',
permission='delete')
def view_delete(self):
request = self.request
ses = self.session
q = self.query_id().join(Jurnal).filter(Jurnal.unit_id==ses['unit_id'])
row = q.first()
if not row:
return self.id_not_found()
q = self.query_id()
q.delete()
DBSession.flush()
        return {'success': True, 'msg': 'Data deleted successfully'}
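# Hypothetical route registration that the @view_config decorators above rely
# on; route names are taken from the decorators, the URL patterns are
# assumptions (the real ones live in the project's Pyramid configuration):
#
#   config.add_route('ak-jurnal-item',        '/jurnal/{jurnal_id}/item')
#   config.add_route('ak-jurnal-item-act',    '/jurnal/{jurnal_id}/item/{act}')
#   config.add_route('ak-jurnal-item-add',    '/jurnal/{jurnal_id}/item/add')
#   config.add_route('ak-jurnal-item-edit',   '/jurnal/{jurnal_id}/item/{id}/edit')
#   config.add_route('ak-jurnal-item-delete', '/jurnal/{jurnal_id}/item/{id}/delete')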
# ---- hideoussquid/aureus-core-gui :: /contrib/devtools/symbol-check.py (MIT) ----
#!/usr/bin/env python
# Copyright (c) 2014 Wladimir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
A script to check that the (Linux) executables produced by gitian only contain
allowed gcc, glibc and libstdc++ version symbols. This makes sure they are
still compatible with the minimum supported Linux distribution versions.
Example usage:
find ../gitian-builder/build -type f -executable | xargs python contrib/devtools/symbol-check.py
'''
from __future__ import division, print_function, unicode_literals
import subprocess
import re
import sys
import os
# Debian 6.0.9 (Squeeze) has:
#
# - g++ version 4.4.5 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=g%2B%2B)
# - libc version 2.11.3 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=libc6)
# - libstdc++ version 4.4.5 (https://packages.debian.org/search?suite=default§ion=all&arch=any&searchon=names&keywords=libstdc%2B%2B6)
#
# Ubuntu 10.04.4 (Lucid Lynx) has:
#
# - g++ version 4.4.3 (http://packages.ubuntu.com/search?keywords=g%2B%2B&searchon=names&suite=lucid§ion=all)
# - libc version 2.11.1 (http://packages.ubuntu.com/search?keywords=libc6&searchon=names&suite=lucid§ion=all)
# - libstdc++ version 4.4.3 (http://packages.ubuntu.com/search?suite=lucid§ion=all&arch=any&keywords=libstdc%2B%2B&searchon=names)
#
# Taking the minimum of these as our target.
#
# According to GNU ABI document (http://gcc.gnu.org/onlinedocs/libstdc++/manual/abi.html) this corresponds to:
# GCC 4.4.0: GCC_4.4.0
# GCC 4.4.2: GLIBCXX_3.4.13, CXXABI_1.3.3
# (glibc) GLIBC_2_11
#
# Keys are bytes so that lookups match the bytes symbol names returned by
# read_symbols() (also correct under Python 3).
MAX_VERSIONS = {
    b'GCC':     (4,4,0),
    b'CXXABI':  (1,3,3),
    b'GLIBCXX': (3,4,13),
    b'GLIBC':   (2,11)
}
# See here for a description of _IO_stdin_used:
# https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=634261#109
# Ignore symbols that are exported as part of every executable
IGNORE_EXPORTS = {
b'_edata', b'_end', b'_init', b'__bss_start', b'_fini', b'_IO_stdin_used'
}
READELF_CMD = os.getenv('READELF', '/usr/bin/readelf')
CPPFILT_CMD = os.getenv('CPPFILT', '/usr/bin/c++filt')
# Allowed NEEDED libraries
ALLOWED_LIBRARIES = {
# aureusd and aureus-qt
b'libgcc_s.so.1', # GCC base support
b'libc.so.6', # C library
b'libpthread.so.0', # threading
b'libanl.so.1', # DNS resolve
b'libm.so.6', # math library
b'librt.so.1', # real-time (clock)
b'ld-linux-x86-64.so.2', # 64-bit dynamic linker
b'ld-linux.so.2', # 32-bit dynamic linker
# aureus-qt only
b'libX11-xcb.so.1', # part of X11
b'libX11.so.6', # part of X11
b'libxcb.so.1', # part of X11
b'libfontconfig.so.1', # font support
b'libfreetype.so.6', # font parsing
b'libdl.so.2' # programming interface to dynamic linker
}
class CPPFilt(object):
'''
Demangle C++ symbol names.
Use a pipe to the 'c++filt' command.
'''
def __init__(self):
self.proc = subprocess.Popen(CPPFILT_CMD, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
def __call__(self, mangled):
self.proc.stdin.write(mangled + b'\n')
self.proc.stdin.flush()
return self.proc.stdout.readline().rstrip()
def close(self):
self.proc.stdin.close()
self.proc.stdout.close()
self.proc.wait()
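# Minimal usage sketch for CPPFilt (assumes `c++filt` is available at
# CPPFILT_CMD; the mangled name is only an example):
#
#   cppfilt = CPPFilt()
#   print(cppfilt(b'_ZSt4cout'))   # -> b'std::cout'
#   cppfilt.close()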
def read_symbols(executable, imports=True):
'''
Parse an ELF executable and return a list of (symbol,version) tuples
for dynamic, imported symbols.
'''
p = subprocess.Popen([READELF_CMD, '--dyn-syms', '-W', executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Could not read symbols for %s: %s' % (executable, stderr.strip()))
syms = []
for line in stdout.split(b'\n'):
line = line.split()
if len(line)>7 and re.match(b'[0-9]+:$', line[0]):
(sym, _, version) = line[7].partition(b'@')
is_import = line[6] == b'UND'
if version.startswith(b'@'):
version = version[1:]
if is_import == imports:
syms.append((sym, version))
return syms
def check_version(max_versions, version):
    if b'_' in version:
        (lib, _, ver) = version.rpartition(b'_')
    else:
        lib = version
        ver = b'0'  # bytes, so the split below also works under Python 3
    ver = tuple([int(x) for x in ver.split(b'.')])
    if lib not in max_versions:
        return False
    return ver <= max_versions[lib]
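# Illustrative checks of the comparison above (safe to delete):
# GLIBC_2.11 equals the maximum allowed version, while GLIBCXX_3.4.14 is one
# minor version newer than the 3.4.13 cutoff.
assert check_version(MAX_VERSIONS, b'GLIBC_2.11')
assert not check_version(MAX_VERSIONS, b'GLIBCXX_3.4.14')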
def read_libraries(filename):
p = subprocess.Popen([READELF_CMD, '-d', '-W', filename], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate()
if p.returncode:
raise IOError('Error opening file')
libraries = []
for line in stdout.split(b'\n'):
tokens = line.split()
if len(tokens)>2 and tokens[1] == b'(NEEDED)':
match = re.match(b'^Shared library: \[(.*)\]$', b' '.join(tokens[2:]))
if match:
libraries.append(match.group(1))
else:
raise ValueError('Unparseable (NEEDED) specification')
return libraries
if __name__ == '__main__':
cppfilt = CPPFilt()
retval = 0
for filename in sys.argv[1:]:
# Check imported symbols
for sym,version in read_symbols(filename, True):
if version and not check_version(MAX_VERSIONS, version):
print('%s: symbol %s from unsupported version %s' % (filename, cppfilt(sym).decode('utf-8'), version.decode('utf-8')))
retval = 1
# Check exported symbols
for sym,version in read_symbols(filename, False):
if sym in IGNORE_EXPORTS:
continue
print('%s: export of symbol %s not allowed' % (filename, cppfilt(sym).decode('utf-8')))
retval = 1
# Check dependency libraries
for library_name in read_libraries(filename):
if library_name not in ALLOWED_LIBRARIES:
print('%s: NEEDED library %s is not allowed' % (filename, library_name.decode('utf-8')))
retval = 1
exit(retval)
# ---- ricardobtxr/experiment-scripts :: /exps-gsn-edf/gsn-edf_ut=3.5_rd=0.5_rw=0.04_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=47/params.py ----
{'cpus': 4,
'duration': 30,
'final_util': '3.532381',
'max_util': '3.5',
'periods': 'harmonic-2',
'release_master': False,
'res_distr': '0.5',
'res_nmb': '4',
'res_weight': '0.04',
'scheduler': 'GSN-EDF',
'trial': 47,
'utils': 'uni-medium-3'}
# ---- guillefix/simpbias-tools :: /py_scripts/sample_nets_discrete_weights.py ----
import tensorflow as tf
import numpy as np
# from sklearn.metrics import confusion_matrix
# import time
# from datetime import timedelta
import math
from collections import Counter
from KC_LZ import calc_KC
import pickle
import sys
np.set_printoptions(threshold=sys.maxsize)  # np.nan is rejected as a threshold by newer numpy
#(idx, N) = [int(ar) for ar in sys.argv[1:]]
idx = int(sys.argv[1])
input_dim=7
hidden_layer_dim=20
hidden_layer2_dim=20
hidden_layer_dim2 = hidden_layer2_dim
output_dim=1
## VARIABLE declarations
#for many paralell networks
from math import sqrt
W1=[]
b1=[]
W2=[]
b2=[]
W3=[]
b3=[]
variables = []
paral_nets = 5000
a=sqrt(3)*sqrt(2)
b=sqrt(3)
discrete_step=1
for i in range(paral_nets):
scope_name = "net"+str(i)
with tf.variable_scope(scope_name):
W1.append(tf.Variable(np.random.choice(np.arange(-10,10,discrete_step,dtype=np.float16),(input_dim,hidden_layer_dim))))
b1.append(tf.Variable(np.random.choice(np.arange(-10,10,discrete_step,dtype=np.float16),(hidden_layer_dim))))
W2.append(tf.Variable(np.random.choice(np.arange(-10,10,discrete_step,dtype=np.float16),(hidden_layer_dim,hidden_layer_dim2))))
b2.append(tf.Variable(np.random.choice(np.arange(-10,10,discrete_step,dtype=np.float16),(hidden_layer_dim2))))
# W2.append(tf.Variable(tf.random_uniform([hidden_layer_dim,output_dim],-a/sqrt(hidden_layer_dim),a/sqrt(hidden_layer_dim))))
# b2.append(tf.Variable(tf.random_uniform([output_dim],-a/sqrt(hidden_layer_dim),a/sqrt(hidden_layer_dim))))
W3.append(tf.Variable(np.random.choice(np.arange(-10,10,discrete_step,dtype=np.float16),(hidden_layer_dim2,output_dim))))
b3.append(tf.Variable(np.random.choice(np.arange(-10,10,discrete_step,dtype=np.float16),(output_dim))))
# W1.append(tf.Variable(tf.random_normal([input_dim,hidden_layer_dim],stddev=1/sqrt(input_dim))))
# b1.append(tf.Variable(tf.random_normal([hidden_layer_dim],stddev=1/sqrt(input_dim))))
# W2.append(tf.Variable(tf.random_normal([hidden_layer_dim,hidden_layer2_dim],stddev=1/sqrt(hidden_layer_dim))))
# b2.append(tf.Variable(tf.random_normal([hidden_layer2_dim],stddev=1/sqrt(hidden_layer_dim))))
# W3.append(tf.Variable(tf.random_normal([hidden_layer2_dim,output_dim],stddev=1/sqrt(hidden_layer2_dim))))
# b3.append(tf.Variable(tf.random_normal([output_dim],stddev=1/sqrt(hidden_layer2_dim))))
variables.append(tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=scope_name))
x = tf.placeholder(tf.float16, shape=[None, input_dim], name='x')
## NETWORK construction
outputs = []
for i in range(paral_nets):
h = tf.matmul(x, W1[i]) + b1[i]
# h = tf.matmul(x, W1[i])
# h = tf.sign(h)
h = tf.nn.relu(h)
h2 = tf.matmul(h, W2[i]) + b2[i]
# h2 = tf.sign(h2)
h2 = tf.nn.relu(h2)
logits = tf.matmul(h2, W3[i]) + b3[i]
# logits = tf.matmul(h, W2[i]) + b2[i]
# logits = tf.matmul(h, W2[i])
o = tf.sign(logits)
# outputs.append((o+1)/2)
outputs.append(tf.reduce_join(tf.reduce_join(tf.as_string(tf.cast((o+1)//2,tf.int8)), 0),0))
session = tf.Session()
train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
param_shape = []
val_placeholders = []
ops=[]
param_size=[]
for i,var in enumerate(train_vars):
param_shape.append(tuple(var.get_shape().as_list()))
param_size.append(np.prod(var.get_shape().as_list()))
val_placeholders.append(tf.placeholder(tf.float16, shape = param_shape[i], name="val_"+str(i)))
ops.append(var.assign_add(val_placeholders[i]))
def get_param_vec():
params = [p.flatten() for p in session.run(train_vars)]
return np.concatenate(params)
def update_params(params_change):
j = 0
change_feed_dict = {}
for i,var in enumerate(train_vars):
#print(i)
val_change = params_change[j:j+param_size[i]]
j += param_size[i]
val_change = val_change.reshape(param_shape[i])
change_feed_dict["val_"+str(i)+":0"]=val_change
session.run(ops,feed_dict=change_feed_dict)
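# How the two helpers above fit together (a sketch using only names defined
# in this file): get_param_vec() flattens the trainable variables of all
# parallel nets into one 1-D vector, and update_params(delta) adds a vector
# of the same length back onto those variables, e.g.
#
#   delta = np.zeros_like(get_param_vec())
#   delta[0] += discrete_step      # nudge one weight of the first net
#   update_params(delta)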
inputs = [[float(xx) for xx in "{0:07b}".format(i)] for i in range(2**7)]
# N=10
#cnt = Counter()
#weights = {}
#for i in range(N):
# if i%(N/100) == 0:
session.run(tf.global_variables_initializer())
fs = session.run(outputs, feed_dict={x:inputs})
#phenos = [[] for i in range(paral_nets)]
phenos = [fs[i] for i in range(paral_nets)]
#varss = session.run(variables,feed_dict={x:inputs})
#for i,f in enumerate(fs):
# if f in weights:
# weights[f].append(varss[i])
# else:
# weights[f]=[varss[i]]
robs=[0.0 for x in phenos]
cnt = Counter(fs)
#phenos = [x+[fs[i]] for i,x in enumerate(phenos)]
param_num=input_dim*hidden_layer_dim + hidden_layer_dim + hidden_layer_dim*hidden_layer2_dim + hidden_layer2_dim + hidden_layer2_dim*output_dim + output_dim
change_vec_ind = np.zeros(param_num)
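# Robustness estimate: perturb each of the param_num parameters by one
# discrete_step (applied to every parallel net at once), count how often the
# output function (phenotype) is unchanged, then undo the perturbation.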
for i in range(param_num):
print(str(i+1)+"/"+str(param_num))
change_vec_ind[i] = discrete_step
change_vec = np.concatenate([change_vec_ind for j in range(paral_nets)])
update_params(change_vec)
#session.run(tf.global_variables_initializer())
fs = session.run(outputs, feed_dict={x:inputs})
#phenos = [x+[fs[j]] for j,x in enumerate(phenos)]
robs = [xx+(1.0 if fs[j]==phenos[j] else 0.0) for j,xx in enumerate(robs)]
change_vec_ind[i] = -discrete_step
change_vec = np.concatenate([change_vec_ind for j in range(paral_nets)])
update_params(change_vec)
change_vec_ind[i] = 0
#robs=[]
freqs=[]
for i,p in enumerate(phenos):
robs[i] = robs[i]/param_num
freqs.append(cnt[p])
pickle.dump(cnt, open( str(idx)+"_cnt_"+str(paral_nets)+"_"+str(input_dim)+"_"+str(hidden_layer_dim)+"_"+str(hidden_layer2_dim)+"_"+str(output_dim)+"_"+str(discrete_step)+"_relu.p", "wb" ), -1)
pickle.dump(phenos, open( str(idx)+"_phenos_"+str(paral_nets)+"_"+str(input_dim)+"_"+str(hidden_layer_dim)+"_"+str(hidden_layer2_dim)+"_"+str(output_dim)+"_"+str(discrete_step)+"_relu.p", "wb" ), -1)
pickle.dump(robs, open( str(idx)+"_robs_"+str(paral_nets)+"_"+str(input_dim)+"_"+str(hidden_layer_dim)+"_"+str(hidden_layer2_dim)+"_"+str(output_dim)+"_"+str(discrete_step)+"_relu.p", "wb" ), -1)
pickle.dump(freqs, open( str(idx)+"_freqs"+str(paral_nets)+"_"+str(input_dim)+"_"+str(hidden_layer_dim)+"_"+str(hidden_layer2_dim)+"_"+str(output_dim)+"_"+str(discrete_step)+"_relu.p", "wb" ), -1)
#pickle.dump(weights, open( str(idx)+"_weights_"+str(N*paral_nets)+"_"+str(input_dim)+"_"+str(hidden_layer_dim)+"_"+str(hidden_layer2_dim)+"_"+"sallinputs_relu.p", "wb" ), -1)
#with open(str(idx)+"_comp_freq_7_20_20_1_relu", "w") as f:
# for fun,val in cnt.most_common():
# f.write(str(calc_KC(str(fun)))+"\t"+str(val)+"\n")
| [
"[email protected]"
] | |
70715af74016fb193e1dc17cc87fb1632d93526a | cedf3c65061222b3099852f27dde39ff0dfe492b | /blog/migrations/0006_auto_20200101_2255.py | 41d4c082f868854719fb951f9bc736479520f459 | [] | no_license | smrkhan123/myblog | 1fd87b2c528cb2edccaf3f60c66a8c298774447a | 8d0c33fcc74c582f21a32150d3460cce62ad6dd2 | refs/heads/master | 2020-12-08T21:50:32.352065 | 2020-01-10T18:23:56 | 2020-01-10T18:23:56 | 233,105,398 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 353 | py | # Generated by Django 2.2.4 on 2020-01-01 17:25
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('blog', '0005_comment'),
]
operations = [
migrations.RenameField(
model_name='comment',
old_name='comment',
new_name='comments',
),
]
| [
"[email protected]"
] | |
146c641d33adeb6defa68ddc3f71c766beb13d5a | fa571a842f04bcbc77ff203a5ed6f6ee776eed6d | /codes/tuple10.py | 8744bc79540b217dd7df2fc0a2637049f35feee4 | [] | no_license | krishna-rawat-hp/PythonProgramming | b25c0916475724e6d2de4b7d59cf40b5b5e8330b | d24df17ca6aff9271c44ef8c73b80c00cd065ded | refs/heads/master | 2023-03-11T19:24:34.529059 | 2021-02-27T14:09:22 | 2021-02-27T14:09:22 | 282,611,873 | 0 | 0 | null | 2020-07-26T10:38:54 | 2020-07-26T08:52:49 | null | UTF-8 | Python | false | false | 441 | py | # Python tuple Built-in function
tup1 = (1,2,3,4,5)
# Example-1 length of tuple
print("Length of tuple: ",len(tup1))
# Example-2 min value in tuple
print("Minimum value in tuple: ",min(tup1))
# Example-3 Max value in tuple
print("Maximum value in tuple: ",max(tup1))
# Example-4 tuple() constructor in Python tuples
text = "Krishna"        # renamed from `str` to avoid shadowing the built-in
tupstr = tuple(text)
print("simple string: ", type(text), text)
print("tuple of string: ", type(tupstr), tupstr)
| [
"[email protected]"
] | |
84694bc46f6a3fdcf329eda57ccb7f50aaa76f3c | 8f9ea3f14bdf2187de759939b2bbc87fe68ccfc0 | /tensorflow/python/estimator/model_fn.py | 3edf9fe940b19c7a0b1a7c21a9674189faba5acb | [
"Apache-2.0"
] | permissive | davidstanke/bazel-mvn-demo | 4ea43f0ba293a28b916a27eab5f0812e9b753c2c | cff14dddce15ea7152988da576673bd15bab6c6e | refs/heads/master | 2022-10-20T07:52:29.651851 | 2018-11-22T13:17:51 | 2018-11-22T13:17:51 | 157,782,756 | 2 | 0 | Apache-2.0 | 2022-10-04T23:47:05 | 2018-11-15T22:54:09 | C++ | UTF-8 | Python | false | false | 15,435 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Classes and methods related to model_fn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import six
from tensorflow.python.estimator.export.export_output import ExportOutput
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.training import monitored_session
from tensorflow.python.training import session_run_hook
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export
@tf_export('estimator.ModeKeys')
class ModeKeys(object):
"""Standard names for model modes.
The following standard keys are defined:
* `TRAIN`: training mode.
* `EVAL`: evaluation mode.
* `PREDICT`: inference mode.
"""
TRAIN = 'train'
EVAL = 'eval'
PREDICT = 'infer'
LOSS_METRIC_KEY = 'loss'
AVERAGE_LOSS_METRIC_KEY = 'average_loss'
# Mapping of the modes to appropriate tag_constants that are used for saving.
EXPORT_TAG_MAP = {
ModeKeys.PREDICT: [tag_constants.SERVING],
ModeKeys.TRAIN: [tag_constants.TRAINING],
ModeKeys.EVAL: [tag_constants.EVAL],
}
@tf_export('estimator.EstimatorSpec')
class EstimatorSpec(
collections.namedtuple('EstimatorSpec', [
'mode', 'predictions', 'loss', 'train_op', 'eval_metric_ops',
'export_outputs', 'training_chief_hooks', 'training_hooks', 'scaffold',
'evaluation_hooks', 'prediction_hooks'
])):
"""Ops and objects returned from a `model_fn` and passed to an `Estimator`.
`EstimatorSpec` fully defines the model to be run by an `Estimator`.
"""
def __new__(cls,
mode,
predictions=None,
loss=None,
train_op=None,
eval_metric_ops=None,
export_outputs=None,
training_chief_hooks=None,
training_hooks=None,
scaffold=None,
evaluation_hooks=None,
prediction_hooks=None):
"""Creates a validated `EstimatorSpec` instance.
Depending on the value of `mode`, different arguments are required. Namely
* For `mode == ModeKeys.TRAIN`: required fields are `loss` and `train_op`.
* For `mode == ModeKeys.EVAL`: required field is `loss`.
* For `mode == ModeKeys.PREDICT`: required fields are `predictions`.
model_fn can populate all arguments independent of mode. In this case, some
arguments will be ignored by an `Estimator`. E.g. `train_op` will be
ignored in eval and infer modes. Example:
```python
def my_model_fn(mode, features, labels):
predictions = ...
loss = ...
train_op = ...
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op)
```
Alternatively, model_fn can just populate the arguments appropriate to the
given mode. Example:
```python
def my_model_fn(mode, features, labels):
if (mode == tf.estimator.ModeKeys.TRAIN or
mode == tf.estimator.ModeKeys.EVAL):
loss = ...
else:
loss = None
if mode == tf.estimator.ModeKeys.TRAIN:
train_op = ...
else:
train_op = None
if mode == tf.estimator.ModeKeys.PREDICT:
predictions = ...
else:
predictions = None
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op)
```
Args:
mode: A `ModeKeys`. Specifies if this is training, evaluation or
prediction.
predictions: Predictions `Tensor` or dict of `Tensor`.
loss: Training loss `Tensor`. Must be either scalar, or with shape `[1]`.
train_op: Op for the training step.
eval_metric_ops: Dict of metric results keyed by name. The values of the
dict are the results of calling a metric function, namely a
        `(metric_tensor, update_op)` tuple. `metric_tensor` should be evaluated
        without any impact on state (typically a pure computation based on
        variables). For example, it should not trigger the `update_op`
        or require any input fetching.
export_outputs: Describes the output signatures to be exported to
`SavedModel` and used during serving.
A dict `{name: output}` where:
* name: An arbitrary name for this output.
* output: an `ExportOutput` object such as `ClassificationOutput`,
`RegressionOutput`, or `PredictOutput`.
Single-headed models only need to specify one entry in this dictionary.
Multi-headed models should specify one entry for each head, one of
which must be named using
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY.
training_chief_hooks: Iterable of `tf.train.SessionRunHook` objects to
run on the chief worker during training.
training_hooks: Iterable of `tf.train.SessionRunHook` objects to run
on all workers during training.
scaffold: A `tf.train.Scaffold` object that can be used to set
initialization, saver, and more to be used in training.
evaluation_hooks: Iterable of `tf.train.SessionRunHook` objects to
run during evaluation.
prediction_hooks: Iterable of `tf.train.SessionRunHook` objects to
run during predictions.
Returns:
A validated `EstimatorSpec` object.
Raises:
ValueError: If validation fails.
TypeError: If any of the arguments is not the expected type.
"""
# Validate train_op.
if train_op is None:
if mode == ModeKeys.TRAIN:
raise ValueError('Missing train_op.')
else:
_check_is_tensor_or_operation(train_op, 'train_op')
# Validate loss.
if loss is None:
if mode in (ModeKeys.TRAIN, ModeKeys.EVAL):
raise ValueError('Missing loss.')
else:
loss = _check_is_tensor(loss, 'loss')
loss_shape = loss.get_shape()
if loss_shape.num_elements() not in (None, 1):
raise ValueError('Loss must be scalar, given: {}'.format(loss))
if not loss_shape.is_compatible_with(tensor_shape.scalar()):
loss = array_ops.reshape(loss, [])
# Validate predictions.
if predictions is None:
if mode == ModeKeys.PREDICT:
raise ValueError('Missing predictions.')
predictions = {}
else:
if isinstance(predictions, dict):
predictions = {
k: _check_is_tensor(v, 'predictions[{}]'.format(k))
for k, v in six.iteritems(predictions)
}
else:
predictions = _check_is_tensor(predictions, 'predictions')
# Validate eval_metric_ops.
if eval_metric_ops is None:
eval_metric_ops = {}
else:
if not isinstance(eval_metric_ops, dict):
raise TypeError(
'eval_metric_ops must be a dict, given: {}'.format(eval_metric_ops))
for key, metric_value_and_update in six.iteritems(eval_metric_ops):
if (not isinstance(metric_value_and_update, tuple) or
len(metric_value_and_update) != 2):
raise TypeError(
'Values of eval_metric_ops must be (metric_value, update_op) '
'tuples, given: {} for key: {}'.format(
metric_value_and_update, key))
metric_value, metric_update = metric_value_and_update
for metric_value_member in nest.flatten(metric_value):
# Allow (possibly nested) tuples for metric values, but require that
# each of them be Tensors or Operations.
_check_is_tensor_or_operation(metric_value_member,
'eval_metric_ops[{}]'.format(key))
_check_is_tensor_or_operation(metric_update,
'eval_metric_ops[{}]'.format(key))
# Validate export_outputs.
if export_outputs is not None:
if not isinstance(export_outputs, dict):
raise TypeError('export_outputs must be dict, given: {}'.format(
export_outputs))
for v in six.itervalues(export_outputs):
if not isinstance(v, ExportOutput):
raise TypeError(
'Values in export_outputs must be ExportOutput objects. '
'Given: {}'.format(export_outputs))
# Note export_outputs is allowed to be empty.
if len(export_outputs) == 1:
(key, value), = export_outputs.items()
if key != signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY:
export_outputs[
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY] = value
if len(export_outputs) > 1:
if (signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY
not in export_outputs):
raise ValueError(
'Multiple export_outputs were provided, but none of them is '
'specified as the default. Do this by naming one of them with '
'signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY.')
# Validate that all tensors and ops are from the default graph.
default_graph = ops.get_default_graph()
# We enumerate possible error causes here to aid in debugging.
error_message_template = (
'{0} with "{1}" must be from the default graph. '
'Possible causes of this error include: \n\n'
'1) {0} was created outside the context of the default graph.'
'\n\n'
'2) The object passed through to EstimatorSpec was not created '
'in the most recent call to "model_fn".')
if isinstance(predictions, dict):
for key, value in six.iteritems(predictions):
if value.graph is not default_graph:
raise ValueError(error_message_template.format(
'prediction values',
'{0}: {1}'.format(key, value.name)))
elif predictions is not None:
# 'predictions' must be a single Tensor.
if predictions.graph is not default_graph:
raise ValueError(error_message_template.format(
'prediction values', predictions.name))
if loss is not None and loss.graph is not default_graph:
raise ValueError(error_message_template.format('loss', loss.name))
if train_op is not None and train_op.graph is not default_graph:
raise ValueError(error_message_template.format('train_op', train_op.name))
for key, value in list(six.iteritems(eval_metric_ops)):
values = nest.flatten(value)
for value in values:
if value.graph is not default_graph:
raise ValueError(error_message_template.format(
'eval_metric_ops',
'{0}: {1}'.format(key, value.name)))
# Validate hooks.
training_chief_hooks = tuple(training_chief_hooks or [])
training_hooks = tuple(training_hooks or [])
evaluation_hooks = tuple(evaluation_hooks or [])
prediction_hooks = tuple(prediction_hooks or [])
for hook in (training_hooks + training_chief_hooks + evaluation_hooks +
prediction_hooks):
if not isinstance(hook, session_run_hook.SessionRunHook):
raise TypeError(
'All hooks must be SessionRunHook instances, given: {}'.format(
hook))
scaffold = scaffold or monitored_session.Scaffold()
# Validate scaffold.
if not isinstance(scaffold, monitored_session.Scaffold):
raise TypeError(
'scaffold must be tf.train.Scaffold. Given: {}'.format(scaffold))
return super(EstimatorSpec, cls).__new__(
cls,
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op,
eval_metric_ops=eval_metric_ops,
export_outputs=export_outputs,
training_chief_hooks=training_chief_hooks,
training_hooks=training_hooks,
scaffold=scaffold,
evaluation_hooks=evaluation_hooks,
prediction_hooks=prediction_hooks)
def _replace(self, **kwds):
"""Return a new EstimatorSpec replacing specified fields with new values."""
if 'mode' in kwds:
if self.mode != kwds['mode']:
raise ValueError('mode of EstimatorSpec cannot be changed.')
new_fields = map(kwds.pop, self._fields, list(self))
return EstimatorSpec(*new_fields)
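  # Illustrative note (not from the TensorFlow source): given a spec,
  # spec._replace(loss=new_loss) returns a copy with only `loss` swapped,
  # while spec._replace(mode=...) with a different mode raises ValueError
  # per the check above.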
class _TPUEstimatorSpec(collections.namedtuple('TPUEstimatorSpec', [
'mode',
'predictions',
'loss',
'train_op',
'eval_metrics',
'export_outputs',
'scaffold_fn',
'host_call'])):
"""Ops and objects returned from a `model_fn` and passed to `TPUEstimator`.
This is a simplified implementation of `tf.contrib.tpu.EstimatorSpec`. See
tensorflow/contrib/tpu/python/tpu/tpu_estimator.py for more detailed
documentation.
"""
def __new__(cls,
mode,
predictions=None,
loss=None,
train_op=None,
eval_metrics=None,
export_outputs=None,
scaffold_fn=None,
host_call=None):
"""Creates a `_TPUEstimatorSpec` instance."""
return super(_TPUEstimatorSpec, cls).__new__(cls,
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op,
eval_metrics=eval_metrics,
export_outputs=export_outputs,
scaffold_fn=scaffold_fn,
host_call=host_call)
def as_estimator_spec(self):
"""Creates an equivalent `EstimatorSpec` used by CPU train/eval."""
if not self.eval_metrics:
eval_metric_ops = None
else:
metric_fn, tensors = self.eval_metrics
eval_metric_ops = metric_fn(**tensors)
return EstimatorSpec(mode=self.mode,
predictions=self.predictions,
loss=self.loss,
train_op=self.train_op,
eval_metric_ops=eval_metric_ops,
export_outputs=self.export_outputs)
def _check_is_tensor_or_operation(x, name):
if not (isinstance(x, ops.Operation) or isinstance(x, ops.Tensor)):
raise TypeError('{} must be Operation or Tensor, given: {}'.format(name, x))
def _check_is_tensor(x, tensor_name):
"""Returns `x` if it is a `Tensor`, raises TypeError otherwise."""
if not isinstance(x, ops.Tensor):
raise TypeError('{} must be Tensor, given: {}'.format(tensor_name, x))
return x
| [
"[email protected]"
] | |
f9ab128fb82107ad73c13d8bff645ad4cfd837d4 | 2f63688febd21dc3ae6b19abfa79ad313c820154 | /AlgoExpert/coding_interview_questions/Dynamic_Programming/Max_Subset_Sum_No_Adjacent.py | 35e8fc100f69daaeac1de7c8a7e1b7ba9ce4c161 | [] | no_license | novayo/LeetCode | cadd03587ee4ed6e35f60294070165afc1539ac8 | 54d0b3c237e0ffed8782915d6b75b7c6a0fe0de7 | refs/heads/master | 2023-08-14T00:35:15.528520 | 2023-07-30T05:56:05 | 2023-07-30T05:56:05 | 200,248,146 | 8 | 1 | null | 2022-11-19T04:37:54 | 2019-08-02T14:24:19 | Python | UTF-8 | Python | false | false | 387 | py | '''
main idea: dynamic programming, keeping only the best sums for the last three positions
time complexity: O(n)
space complexity: O(1)
- where n is the length of the input array
'''
def maxSubsetSumNoAdjacent(array):
# Write your code here.
if not array:
return 0
if len(array) <= 2:
return max(array)
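    # a, b, c hold the best sums that *include* array[i-3], array[i-2] and
    # array[i-1] respectively.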
a = array[0]
b = array[1]
c = a + array[2]
for i in range(3, len(array)):
d = array[i] + max(a, b)
a = b
b = c
c = d
return max(a, b, c)
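# Quick sanity check (illustrative input, not part of the original solution):
# the best non-adjacent subset of [75, 105, 120, 75, 90, 135] is
# 75 + 120 + 135 = 330.
if __name__ == '__main__':
    assert maxSubsetSumNoAdjacent([75, 105, 120, 75, 90, 135]) == 330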
| [
"[email protected]"
] | |
6098d1665fc44c5b9392cbb7fc9e9de0a2f639aa | c4c159a21d2f1ea0d7dfaa965aeff01c8ef70dce | /flask/flaskenv/Lib/site-packages/tensorflow/python/keras/engine/base_layer_utils.py | 9ff1ab45c3de06a9b41edbb8de2197633c8c4ac7 | [] | no_license | AhsonAslam/webapi | 54cf7466aac4685da1105f9fb84c686e38f92121 | 1b2bfa4614e7afdc57c9210b0674506ea70b20b5 | refs/heads/master | 2020-07-27T06:05:36.057953 | 2019-09-17T06:35:33 | 2019-09-17T06:35:33 | 208,895,450 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | version https://git-lfs.github.com/spec/v1
oid sha256:a687562a5029ffbfb9a948ea45a12f73bca931230a551f19634b253752a45fe1
size 21452
| [
"github@cuba12345"
] | github@cuba12345 |
817c752e00db2148a4dd5635779329f98737565d | 52a3beeb07ad326115084a47a9e698efbaec054b | /horizon/.venv/lib/python2.7/site-packages/openstackclient/identity/v3/policy.py | 74a783b06a575e12b7a843a14ab6debc95d39f85 | [
"Apache-2.0"
] | permissive | bopopescu/sample_scripts | 3dade0710ecdc8f9251dc60164747830f8de6877 | f9edce63c0a4d636f672702153662bd77bfd400d | refs/heads/master | 2022-11-17T19:19:34.210886 | 2018-06-11T04:14:27 | 2018-06-11T04:14:27 | 282,088,840 | 0 | 0 | null | 2020-07-24T00:57:31 | 2020-07-24T00:57:31 | null | UTF-8 | Python | false | false | 5,046 | py | # Copyright 2012-2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Identity v3 Policy action implementations"""
import six
import sys
from openstackclient.common import command
from openstackclient.common import utils
from openstackclient.i18n import _
class CreatePolicy(command.ShowOne):
"""Create new policy"""
def get_parser(self, prog_name):
parser = super(CreatePolicy, self).get_parser(prog_name)
parser.add_argument(
'--type',
metavar='<type>',
default="application/json",
help=_('New MIME type of the policy rules file '
'(defaults to application/json)'),
)
parser.add_argument(
'rules',
metavar='<filename>',
help=_('New serialized policy rules file'),
)
return parser
def take_action(self, parsed_args):
blob = utils.read_blob_file_contents(parsed_args.rules)
identity_client = self.app.client_manager.identity
policy = identity_client.policies.create(
blob=blob, type=parsed_args.type
)
policy._info.pop('links')
policy._info.update({'rules': policy._info.pop('blob')})
return zip(*sorted(six.iteritems(policy._info)))
class DeletePolicy(command.Command):
"""Delete policy"""
def get_parser(self, prog_name):
parser = super(DeletePolicy, self).get_parser(prog_name)
parser.add_argument(
'policy',
metavar='<policy>',
help=_('Policy to delete'),
)
return parser
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
identity_client.policies.delete(parsed_args.policy)
class ListPolicy(command.Lister):
"""List policies"""
def get_parser(self, prog_name):
parser = super(ListPolicy, self).get_parser(prog_name)
parser.add_argument(
'--long',
action='store_true',
default=False,
help=_('List additional fields in output'),
)
return parser
def take_action(self, parsed_args):
if parsed_args.long:
columns = ('ID', 'Type', 'Blob')
column_headers = ('ID', 'Type', 'Rules')
else:
columns = ('ID', 'Type')
column_headers = columns
data = self.app.client_manager.identity.policies.list()
return (column_headers,
(utils.get_item_properties(
s, columns,
formatters={},
) for s in data))
class SetPolicy(command.Command):
"""Set policy properties"""
def get_parser(self, prog_name):
parser = super(SetPolicy, self).get_parser(prog_name)
parser.add_argument(
'policy',
metavar='<policy>',
help=_('Policy to modify'),
)
parser.add_argument(
'--type',
metavar='<type>',
help=_('New MIME type of the policy rules file'),
)
parser.add_argument(
'--rules',
metavar='<filename>',
help=_('New serialized policy rules file'),
)
return parser
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
blob = None
if parsed_args.rules:
blob = utils.read_blob_file_contents(parsed_args.rules)
kwargs = {}
if blob:
kwargs['blob'] = blob
if parsed_args.type:
kwargs['type'] = parsed_args.type
if not kwargs:
sys.stdout.write(_('Policy not updated, no arguments present\n'))
return
identity_client.policies.update(parsed_args.policy, **kwargs)
class ShowPolicy(command.ShowOne):
"""Display policy details"""
def get_parser(self, prog_name):
parser = super(ShowPolicy, self).get_parser(prog_name)
parser.add_argument(
'policy',
metavar='<policy>',
help=_('Policy to display'),
)
return parser
def take_action(self, parsed_args):
identity_client = self.app.client_manager.identity
policy = utils.find_resource(identity_client.policies,
parsed_args.policy)
policy._info.pop('links')
policy._info.update({'rules': policy._info.pop('blob')})
return zip(*sorted(six.iteritems(policy._info)))
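# Rough sketch of the CLI these classes back (argument names are taken from
# the parsers above; file names are placeholders):
#
#   openstack policy create --type application/json rules.json
#   openstack policy list --long
#   openstack policy show <policy>
#   openstack policy set --rules new_rules.json <policy>
#   openstack policy delete <policy>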
| [
"[email protected]"
] | |
42499fc2f051b56c4c4ec3113f61c2d0327f6fe2 | e9fa26be4d9e7a725d3f889b6aa9cf5029f47c2d | /lib/sqlalchemy/sql/selectable.py | 590c4d32dd9059e0da44d631c08a57ab22ee146c | [
"BSD-3-Clause"
] | permissive | ahriman-ru/azure-test | 2e0ee70a8b453534cf82e44860f04dddd3ab8095 | 3c377163192139d046b4ec20ce5b44a0492aecd8 | refs/heads/master | 2021-07-17T03:53:14.025065 | 2017-10-20T12:04:11 | 2017-10-20T12:04:11 | 100,267,395 | 0 | 0 | null | 2017-08-14T12:54:01 | 2017-08-14T12:54:01 | null | UTF-8 | Python | false | false | 131,244 | py | # sql/selectable.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""The :class:`.FromClause` class of SQL expression elements, representing
SQL tables and derived rowsets.
"""
from .elements import ClauseElement, TextClause, ClauseList, \
and_, Grouping, UnaryExpression, literal_column, BindParameter
from .elements import _clone, \
_literal_as_text, _interpret_as_column_or_from, _expand_cloned,\
_select_iterables, _anonymous_label, _clause_element_as_expr,\
_cloned_intersection, _cloned_difference, True_, \
_literal_as_label_reference, _literal_and_labels_as_label_reference
from .base import Immutable, Executable, _generative, \
ColumnCollection, ColumnSet, _from_objects, Generative
from . import type_api
from .. import inspection
from .. import util
from .. import exc
from operator import attrgetter
from . import operators
import operator
import collections
from .annotation import Annotated
import itertools
from sqlalchemy.sql.visitors import Visitable
def _interpret_as_from(element):
insp = inspection.inspect(element, raiseerr=False)
if insp is None:
if isinstance(element, util.string_types):
util.warn_limited(
"Textual SQL FROM expression %(expr)r should be "
"explicitly declared as text(%(expr)r), "
"or use table(%(expr)r) for more specificity",
{"expr": util.ellipses_string(element)})
return TextClause(util.text_type(element))
try:
return insp.selectable
except AttributeError:
raise exc.ArgumentError("FROM expression expected")
def _interpret_as_select(element):
element = _interpret_as_from(element)
if isinstance(element, Alias):
element = element.original
if not isinstance(element, SelectBase):
element = element.select()
return element
class _OffsetLimitParam(BindParameter):
@property
def _limit_offset_value(self):
return self.effective_value
def _offset_or_limit_clause(element, name=None, type_=None):
"""Convert the given value to an "offset or limit" clause.
This handles incoming integers and converts to an expression; if
an expression is already given, it is passed through.
"""
if element is None:
return None
elif hasattr(element, '__clause_element__'):
return element.__clause_element__()
elif isinstance(element, Visitable):
return element
else:
value = util.asint(element)
return _OffsetLimitParam(name, value, type_=type_, unique=True)
def _offset_or_limit_clause_asint(clause, attrname):
"""Convert the "offset or limit" clause of a select construct to an
integer.
This is only possible if the value is stored as a simple bound parameter.
Otherwise, a compilation error is raised.
"""
if clause is None:
return None
try:
value = clause._limit_offset_value
except AttributeError:
raise exc.CompileError(
"This SELECT structure does not use a simple "
"integer value for %s" % attrname)
else:
return util.asint(value)
def subquery(alias, *args, **kwargs):
r"""Return an :class:`.Alias` object derived
from a :class:`.Select`.
name
alias name
\*args, \**kwargs
all other arguments are delivered to the
:func:`select` function.
"""
return Select(*args, **kwargs).alias(alias)
def alias(selectable, name=None, flat=False):
"""Return an :class:`.Alias` object.
An :class:`.Alias` represents any :class:`.FromClause`
with an alternate name assigned within SQL, typically using the ``AS``
clause when generated, e.g. ``SELECT * FROM table AS aliasname``.
Similar functionality is available via the
:meth:`~.FromClause.alias` method
available on all :class:`.FromClause` subclasses.
When an :class:`.Alias` is created from a :class:`.Table` object,
this has the effect of the table being rendered
as ``tablename AS aliasname`` in a SELECT statement.
For :func:`.select` objects, the effect is that of creating a named
subquery, i.e. ``(select ...) AS aliasname``.
The ``name`` parameter is optional, and provides the name
to use in the rendered SQL. If blank, an "anonymous" name
will be deterministically generated at compile time.
Deterministic means the name is guaranteed to be unique against
other constructs used in the same statement, and will also be the
same name for each successive compilation of the same statement
object.
:param selectable: any :class:`.FromClause` subclass,
such as a table, select statement, etc.
:param name: string name to be assigned as the alias.
If ``None``, a name will be deterministically generated
at compile time.
:param flat: Will be passed through to if the given selectable
is an instance of :class:`.Join` - see :meth:`.Join.alias`
for details.
.. versionadded:: 0.9.0
"""
return _interpret_as_from(selectable).alias(name=name, flat=flat)
def lateral(selectable, name=None):
"""Return a :class:`.Lateral` object.
:class:`.Lateral` is an :class:`.Alias` subclass that represents
a subquery with the LATERAL keyword applied to it.
The special behavior of a LATERAL subquery is that it appears in the
FROM clause of an enclosing SELECT, but may correlate to other
FROM clauses of that SELECT. It is a special case of subquery
only supported by a small number of backends, currently more recent
PostgreSQL versions.
.. versionadded:: 1.1
.. seealso::
:ref:`lateral_selects` - overview of usage.
"""
return _interpret_as_from(selectable).lateral(name=name)
def tablesample(selectable, sampling, name=None, seed=None):
"""Return a :class:`.TableSample` object.
:class:`.TableSample` is an :class:`.Alias` subclass that represents
a table with the TABLESAMPLE clause applied to it.
:func:`~.expression.tablesample`
is also available from the :class:`.FromClause` class via the
:meth:`.FromClause.tablesample` method.
The TABLESAMPLE clause allows selecting a randomly selected approximate
percentage of rows from a table. It supports multiple sampling methods,
most commonly BERNOULLI and SYSTEM.
e.g.::
from sqlalchemy import func
selectable = people.tablesample(
func.bernoulli(1),
name='alias',
seed=func.random())
stmt = select([selectable.c.people_id])
Assuming ``people`` with a column ``people_id``, the above
statement would render as::
SELECT alias.people_id FROM
people AS alias TABLESAMPLE bernoulli(:bernoulli_1)
REPEATABLE (random())
.. versionadded:: 1.1
:param sampling: a ``float`` percentage between 0 and 100 or
:class:`.functions.Function`.
:param name: optional alias name
:param seed: any real-valued SQL expression. When specified, the
REPEATABLE sub-clause is also rendered.
"""
return _interpret_as_from(selectable).tablesample(
sampling, name=name, seed=seed)
class Selectable(ClauseElement):
"""mark a class as being selectable"""
__visit_name__ = 'selectable'
is_selectable = True
@property
def selectable(self):
return self
class HasPrefixes(object):
_prefixes = ()
@_generative
def prefix_with(self, *expr, **kw):
r"""Add one or more expressions following the statement keyword, i.e.
SELECT, INSERT, UPDATE, or DELETE. Generative.
This is used to support backend-specific prefix keywords such as those
provided by MySQL.
E.g.::
stmt = table.insert().prefix_with("LOW_PRIORITY", dialect="mysql")
Multiple prefixes can be specified by multiple calls
to :meth:`.prefix_with`.
:param \*expr: textual or :class:`.ClauseElement` construct which
will be rendered following the INSERT, UPDATE, or DELETE
keyword.
:param \**kw: A single keyword 'dialect' is accepted. This is an
optional string dialect name which will
limit rendering of this prefix to only that dialect.
"""
dialect = kw.pop('dialect', None)
if kw:
raise exc.ArgumentError("Unsupported argument(s): %s" %
",".join(kw))
self._setup_prefixes(expr, dialect)
def _setup_prefixes(self, prefixes, dialect=None):
self._prefixes = self._prefixes + tuple(
[(_literal_as_text(p, warn=False), dialect) for p in prefixes])
class HasSuffixes(object):
_suffixes = ()
@_generative
def suffix_with(self, *expr, **kw):
r"""Add one or more expressions following the statement as a whole.
This is used to support backend-specific suffix keywords on
certain constructs.
E.g.::
stmt = select([col1, col2]).cte().suffix_with(
"cycle empno set y_cycle to 1 default 0", dialect="oracle")
Multiple suffixes can be specified by multiple calls
to :meth:`.suffix_with`.
:param \*expr: textual or :class:`.ClauseElement` construct which
will be rendered following the target clause.
:param \**kw: A single keyword 'dialect' is accepted. This is an
optional string dialect name which will
limit rendering of this suffix to only that dialect.
"""
dialect = kw.pop('dialect', None)
if kw:
raise exc.ArgumentError("Unsupported argument(s): %s" %
",".join(kw))
self._setup_suffixes(expr, dialect)
def _setup_suffixes(self, suffixes, dialect=None):
self._suffixes = self._suffixes + tuple(
[(_literal_as_text(p, warn=False), dialect) for p in suffixes])
class FromClause(Selectable):
"""Represent an element that can be used within the ``FROM``
clause of a ``SELECT`` statement.
The most common forms of :class:`.FromClause` are the
:class:`.Table` and the :func:`.select` constructs. Key
features common to all :class:`.FromClause` objects include:
* a :attr:`.c` collection, which provides per-name access to a collection
of :class:`.ColumnElement` objects.
* a :attr:`.primary_key` attribute, which is a collection of all those
:class:`.ColumnElement` objects that indicate the ``primary_key`` flag.
* Methods to generate various derivations of a "from" clause, including
:meth:`.FromClause.alias`, :meth:`.FromClause.join`,
:meth:`.FromClause.select`.
"""
__visit_name__ = 'fromclause'
named_with_column = False
_hide_froms = []
_is_join = False
_is_select = False
_is_from_container = False
_textual = False
"""a marker that allows us to easily distinguish a :class:`.TextAsFrom`
or similar object from other kinds of :class:`.FromClause` objects."""
schema = None
"""Define the 'schema' attribute for this :class:`.FromClause`.
This is typically ``None`` for most objects except that of
:class:`.Table`, where it is taken as the value of the
:paramref:`.Table.schema` argument.
"""
def _translate_schema(self, effective_schema, map_):
return effective_schema
_memoized_property = util.group_expirable_memoized_property(["_columns"])
@util.deprecated(
'1.1',
message="``FromClause.count()`` is deprecated. Counting "
"rows requires that the correct column expression and "
"accommodations for joins, DISTINCT, etc. must be made, "
"otherwise results may not be what's expected. "
"Please use an appropriate ``func.count()`` expression "
"directly.")
@util.dependencies("sqlalchemy.sql.functions")
def count(self, functions, whereclause=None, **params):
"""return a SELECT COUNT generated against this
:class:`.FromClause`.
The function generates COUNT against the
first column in the primary key of the table, or against
the first column in the table overall. Explicit use of
``func.count()`` should be preferred::
row_count = conn.scalar(
select([func.count('*')]).select_from(table)
)
.. seealso::
:data:`.func`
"""
if self.primary_key:
col = list(self.primary_key)[0]
else:
col = list(self.columns)[0]
return Select(
[functions.func.count(col).label('tbl_row_count')],
whereclause,
from_obj=[self],
**params)
def select(self, whereclause=None, **params):
"""return a SELECT of this :class:`.FromClause`.
.. seealso::
:func:`~.sql.expression.select` - general purpose
method which allows for arbitrary column lists.
"""
return Select([self], whereclause, **params)
def join(self, right, onclause=None, isouter=False, full=False):
"""Return a :class:`.Join` from this :class:`.FromClause`
to another :class:`FromClause`.
E.g.::
from sqlalchemy import join
j = user_table.join(address_table,
user_table.c.id == address_table.c.user_id)
stmt = select([user_table]).select_from(j)
would emit SQL along the lines of::
SELECT user.id, user.name FROM user
JOIN address ON user.id = address.user_id
:param right: the right side of the join; this is any
:class:`.FromClause` object such as a :class:`.Table` object, and
may also be a selectable-compatible object such as an ORM-mapped
class.
:param onclause: a SQL expression representing the ON clause of the
join. If left at ``None``, :meth:`.FromClause.join` will attempt to
join the two tables based on a foreign key relationship.
:param isouter: if True, render a LEFT OUTER JOIN, instead of JOIN.
:param full: if True, render a FULL OUTER JOIN, instead of LEFT OUTER
JOIN. Implies :paramref:`.FromClause.join.isouter`.
.. versionadded:: 1.1
.. seealso::
:func:`.join` - standalone function
:class:`.Join` - the type of object produced
"""
return Join(self, right, onclause, isouter, full)
def outerjoin(self, right, onclause=None, full=False):
"""Return a :class:`.Join` from this :class:`.FromClause`
to another :class:`FromClause`, with the "isouter" flag set to
True.
E.g.::
from sqlalchemy import outerjoin
j = user_table.outerjoin(address_table,
user_table.c.id == address_table.c.user_id)
The above is equivalent to::
j = user_table.join(
address_table,
user_table.c.id == address_table.c.user_id,
isouter=True)
:param right: the right side of the join; this is any
:class:`.FromClause` object such as a :class:`.Table` object, and
may also be a selectable-compatible object such as an ORM-mapped
class.
:param onclause: a SQL expression representing the ON clause of the
join. If left at ``None``, :meth:`.FromClause.join` will attempt to
join the two tables based on a foreign key relationship.
:param full: if True, render a FULL OUTER JOIN, instead of
LEFT OUTER JOIN.
.. versionadded:: 1.1
.. seealso::
:meth:`.FromClause.join`
:class:`.Join`
"""
return Join(self, right, onclause, True, full)
def alias(self, name=None, flat=False):
"""return an alias of this :class:`.FromClause`.
This is shorthand for calling::
from sqlalchemy import alias
a = alias(self, name=name)
See :func:`~.expression.alias` for details.
"""
return Alias(self, name)
def lateral(self, name=None):
"""Return a LATERAL alias of this :class:`.FromClause`.
The return value is the :class:`.Lateral` construct also
provided by the top-level :func:`~.expression.lateral` function.
.. versionadded:: 1.1
.. seealso::
:ref:`lateral_selects` - overview of usage.
"""
return Lateral(self, name)
def tablesample(self, sampling, name=None, seed=None):
"""Return a TABLESAMPLE alias of this :class:`.FromClause`.
The return value is the :class:`.TableSample` construct also
provided by the top-level :func:`~.expression.tablesample` function.
.. versionadded:: 1.1
.. seealso::
:func:`~.expression.tablesample` - usage guidelines and parameters
"""
return TableSample(self, sampling, name, seed)
def is_derived_from(self, fromclause):
"""Return True if this FromClause is 'derived' from the given
FromClause.
An example would be an Alias of a Table is derived from that Table.
"""
# this is essentially an "identity" check in the base class.
# Other constructs override this to traverse through
# contained elements.
return fromclause in self._cloned_set
def _is_lexical_equivalent(self, other):
"""Return True if this FromClause and the other represent
the same lexical identity.
This tests if either one is a copy of the other, or
if they are the same via annotation identity.
"""
return self._cloned_set.intersection(other._cloned_set)
@util.dependencies("sqlalchemy.sql.util")
def replace_selectable(self, sqlutil, old, alias):
"""replace all occurrences of FromClause 'old' with the given Alias
object, returning a copy of this :class:`.FromClause`.
"""
return sqlutil.ClauseAdapter(alias).traverse(self)
def correspond_on_equivalents(self, column, equivalents):
"""Return corresponding_column for the given column, or if None
search for a match in the given dictionary.
"""
col = self.corresponding_column(column, require_embedded=True)
if col is None and col in equivalents:
for equiv in equivalents[col]:
nc = self.corresponding_column(equiv, require_embedded=True)
if nc:
return nc
return col
def corresponding_column(self, column, require_embedded=False):
"""Given a :class:`.ColumnElement`, return the exported
:class:`.ColumnElement` object from this :class:`.Selectable`
which corresponds to that original
:class:`~sqlalchemy.schema.Column` via a common ancestor
column.
:param column: the target :class:`.ColumnElement` to be matched
:param require_embedded: only return corresponding columns for
the given :class:`.ColumnElement`, if the given
:class:`.ColumnElement` is actually present within a sub-element
of this :class:`.FromClause`. Normally the column will match if
it merely shares a common ancestor with one of the exported
columns of this :class:`.FromClause`.
"""
def embedded(expanded_proxy_set, target_set):
for t in target_set.difference(expanded_proxy_set):
if not set(_expand_cloned([t])
).intersection(expanded_proxy_set):
return False
return True
# don't dig around if the column is locally present
if self.c.contains_column(column):
return column
col, intersect = None, None
target_set = column.proxy_set
cols = self.c._all_columns
for c in cols:
expanded_proxy_set = set(_expand_cloned(c.proxy_set))
i = target_set.intersection(expanded_proxy_set)
if i and (not require_embedded
or embedded(expanded_proxy_set, target_set)):
if col is None:
# no corresponding column yet, pick this one.
col, intersect = c, i
elif len(i) > len(intersect):
# 'c' has a larger field of correspondence than
# 'col'. i.e. selectable.c.a1_x->a1.c.x->table.c.x
# matches a1.c.x->table.c.x better than
# selectable.c.x->table.c.x does.
col, intersect = c, i
elif i == intersect:
# they have the same field of correspondence. see
# which proxy_set has fewer columns in it, which
# indicates a closer relationship with the root
# column. Also take into account the "weight"
# attribute which CompoundSelect() uses to give
# higher precedence to columns based on vertical
# position in the compound statement, and discard
# columns that have no reference to the target
# column (also occurs with CompoundSelect)
col_distance = util.reduce(
operator.add,
[sc._annotations.get('weight', 1) for sc in
col.proxy_set if sc.shares_lineage(column)])
c_distance = util.reduce(
operator.add,
[sc._annotations.get('weight', 1) for sc in
c.proxy_set if sc.shares_lineage(column)])
if c_distance < col_distance:
col, intersect = c, i
return col
@property
def description(self):
"""a brief description of this FromClause.
Used primarily for error message formatting.
"""
return getattr(self, 'name', self.__class__.__name__ + " object")
def _reset_exported(self):
"""delete memoized collections when a FromClause is cloned."""
self._memoized_property.expire_instance(self)
@_memoized_property
def columns(self):
"""A named-based collection of :class:`.ColumnElement` objects
maintained by this :class:`.FromClause`.
The :attr:`.columns`, or :attr:`.c` collection, is the gateway
to the construction of SQL expressions using table-bound or
other selectable-bound columns::
select([mytable]).where(mytable.c.somecolumn == 5)
"""
if '_columns' not in self.__dict__:
self._init_collections()
self._populate_column_collection()
return self._columns.as_immutable()
@_memoized_property
def primary_key(self):
"""Return the collection of Column objects which comprise the
primary key of this FromClause."""
self._init_collections()
self._populate_column_collection()
return self.primary_key
@_memoized_property
def foreign_keys(self):
"""Return the collection of ForeignKey objects which this
FromClause references."""
self._init_collections()
self._populate_column_collection()
return self.foreign_keys
c = property(attrgetter('columns'),
doc="An alias for the :attr:`.columns` attribute.")
_select_iterable = property(attrgetter('columns'))
def _init_collections(self):
assert '_columns' not in self.__dict__
assert 'primary_key' not in self.__dict__
assert 'foreign_keys' not in self.__dict__
self._columns = ColumnCollection()
self.primary_key = ColumnSet()
self.foreign_keys = set()
@property
def _cols_populated(self):
return '_columns' in self.__dict__
def _populate_column_collection(self):
"""Called on subclasses to establish the .c collection.
Each implementation has a different way of establishing
this collection.
"""
def _refresh_for_new_column(self, column):
"""Given a column added to the .c collection of an underlying
selectable, produce the local version of that column, assuming this
selectable ultimately should proxy this column.
this is used to "ping" a derived selectable to add a new column
to its .c. collection when a Column has been added to one of the
Table objects it ultimtely derives from.
If the given selectable hasn't populated its .c. collection yet,
it should at least pass on the message to the contained selectables,
but it will return None.
This method is currently used by Declarative to allow Table
columns to be added to a partially constructed inheritance
mapping that may have already produced joins. The method
isn't public right now, as the full span of implications
and/or caveats aren't yet clear.
It's also possible that this functionality could be invoked by
default via an event, which would require that
selectables maintain a weak referencing collection of all
derivations.
"""
if not self._cols_populated:
return None
elif (column.key in self.columns and
self.columns[column.key] is column):
return column
else:
return None
class Join(FromClause):
"""represent a ``JOIN`` construct between two :class:`.FromClause`
elements.
The public constructor function for :class:`.Join` is the module-level
:func:`.join()` function, as well as the :meth:`.FromClause.join` method
of any :class:`.FromClause` (e.g. such as :class:`.Table`).
.. seealso::
:func:`.join`
:meth:`.FromClause.join`
"""
__visit_name__ = 'join'
_is_join = True
def __init__(self, left, right, onclause=None, isouter=False, full=False):
"""Construct a new :class:`.Join`.
The usual entrypoint here is the :func:`~.expression.join`
function or the :meth:`.FromClause.join` method of any
:class:`.FromClause` object.
"""
self.left = _interpret_as_from(left)
self.right = _interpret_as_from(right).self_group()
if onclause is None:
self.onclause = self._match_primaries(self.left, self.right)
else:
self.onclause = onclause
self.isouter = isouter
self.full = full
@classmethod
def _create_outerjoin(cls, left, right, onclause=None, full=False):
"""Return an ``OUTER JOIN`` clause element.
The returned object is an instance of :class:`.Join`.
Similar functionality is also available via the
:meth:`~.FromClause.outerjoin()` method on any
:class:`.FromClause`.
:param left: The left side of the join.
:param right: The right side of the join.
:param onclause: Optional criterion for the ``ON`` clause, is
derived from foreign key relationships established between
left and right otherwise.
To chain joins together, use the :meth:`.FromClause.join` or
:meth:`.FromClause.outerjoin` methods on the resulting
:class:`.Join` object.
"""
return cls(left, right, onclause, isouter=True, full=full)
@classmethod
def _create_join(cls, left, right, onclause=None, isouter=False,
full=False):
"""Produce a :class:`.Join` object, given two :class:`.FromClause`
expressions.
E.g.::
j = join(user_table, address_table,
user_table.c.id == address_table.c.user_id)
stmt = select([user_table]).select_from(j)
would emit SQL along the lines of::
SELECT user.id, user.name FROM user
JOIN address ON user.id = address.user_id
Similar functionality is available given any
:class:`.FromClause` object (e.g. such as a :class:`.Table`) using
the :meth:`.FromClause.join` method.
:param left: The left side of the join.
:param right: the right side of the join; this is any
:class:`.FromClause` object such as a :class:`.Table` object, and
may also be a selectable-compatible object such as an ORM-mapped
class.
:param onclause: a SQL expression representing the ON clause of the
join. If left at ``None``, :meth:`.FromClause.join` will attempt to
join the two tables based on a foreign key relationship.
:param isouter: if True, render a LEFT OUTER JOIN, instead of JOIN.
:param full: if True, render a FULL OUTER JOIN, instead of JOIN.
.. versionadded:: 1.1
.. seealso::
:meth:`.FromClause.join` - method form, based on a given left side
:class:`.Join` - the type of object produced
"""
return cls(left, right, onclause, isouter, full)
@property
def description(self):
return "Join object on %s(%d) and %s(%d)" % (
self.left.description,
id(self.left),
self.right.description,
id(self.right))
def is_derived_from(self, fromclause):
return fromclause is self or \
self.left.is_derived_from(fromclause) or \
self.right.is_derived_from(fromclause)
def self_group(self, against=None):
return FromGrouping(self)
@util.dependencies("sqlalchemy.sql.util")
def _populate_column_collection(self, sqlutil):
columns = [c for c in self.left.columns] + \
[c for c in self.right.columns]
self.primary_key.extend(sqlutil.reduce_columns(
(c for c in columns if c.primary_key), self.onclause))
self._columns.update((col._label, col) for col in columns)
self.foreign_keys.update(itertools.chain(
*[col.foreign_keys for col in columns]))
def _refresh_for_new_column(self, column):
col = self.left._refresh_for_new_column(column)
if col is None:
col = self.right._refresh_for_new_column(column)
if col is not None:
if self._cols_populated:
self._columns[col._label] = col
self.foreign_keys.update(col.foreign_keys)
if col.primary_key:
self.primary_key.add(col)
return col
return None
def _copy_internals(self, clone=_clone, **kw):
self._reset_exported()
self.left = clone(self.left, **kw)
self.right = clone(self.right, **kw)
self.onclause = clone(self.onclause, **kw)
def get_children(self, **kwargs):
return self.left, self.right, self.onclause
def _match_primaries(self, left, right):
if isinstance(left, Join):
left_right = left.right
else:
left_right = None
return self._join_condition(left, right, a_subset=left_right)
@classmethod
def _join_condition(cls, a, b, ignore_nonexistent_tables=False,
a_subset=None,
consider_as_foreign_keys=None):
"""create a join condition between two tables or selectables.
e.g.::
join_condition(tablea, tableb)
would produce an expression along the lines of::
tablea.c.id==tableb.c.tablea_id
The join is determined based on the foreign key relationships
between the two selectables. If there are multiple ways
to join, or no way to join, an error is raised.
:param ignore_nonexistent_tables: Deprecated - this
flag is no longer used. Only resolution errors regarding
the two given tables are propagated.
:param a_subset: An optional expression that is a sub-component
of ``a``. An attempt will be made to join to just this sub-component
first before looking at the full ``a`` construct, and if found
will be successful even if there are other ways to join to ``a``.
This allows the "right side" of a join to be passed thereby
providing a "natural join".
"""
constraints = cls._joincond_scan_left_right(
a, a_subset, b, consider_as_foreign_keys)
if len(constraints) > 1:
cls._joincond_trim_constraints(
a, b, constraints, consider_as_foreign_keys)
if len(constraints) == 0:
if isinstance(b, FromGrouping):
hint = " Perhaps you meant to convert the right side to a "\
"subquery using alias()?"
else:
hint = ""
raise exc.NoForeignKeysError(
"Can't find any foreign key relationships "
"between '%s' and '%s'.%s" %
(a.description, b.description, hint))
crit = [(x == y) for x, y in list(constraints.values())[0]]
if len(crit) == 1:
return (crit[0])
else:
return and_(*crit)
@classmethod
def _joincond_scan_left_right(
cls, a, a_subset, b, consider_as_foreign_keys):
constraints = collections.defaultdict(list)
for left in (a_subset, a):
if left is None:
continue
for fk in sorted(
b.foreign_keys,
key=lambda fk: fk.parent._creation_order):
if consider_as_foreign_keys is not None and \
fk.parent not in consider_as_foreign_keys:
continue
try:
col = fk.get_referent(left)
except exc.NoReferenceError as nrte:
if nrte.table_name == left.name:
raise
else:
continue
if col is not None:
constraints[fk.constraint].append((col, fk.parent))
if left is not b:
for fk in sorted(
left.foreign_keys,
key=lambda fk: fk.parent._creation_order):
if consider_as_foreign_keys is not None and \
fk.parent not in consider_as_foreign_keys:
continue
try:
col = fk.get_referent(b)
except exc.NoReferenceError as nrte:
if nrte.table_name == b.name:
raise
else:
continue
if col is not None:
constraints[fk.constraint].append((col, fk.parent))
if constraints:
break
return constraints
@classmethod
def _joincond_trim_constraints(
cls, a, b, constraints, consider_as_foreign_keys):
# more than one constraint matched. narrow down the list
# to include just those FKCs that match exactly to
# "consider_as_foreign_keys".
if consider_as_foreign_keys:
for const in list(constraints):
if set(f.parent for f in const.elements) != set(
consider_as_foreign_keys):
del constraints[const]
# if still multiple constraints, but
# they all refer to the exact same end result, use it.
if len(constraints) > 1:
dedupe = set(tuple(crit) for crit in constraints.values())
if len(dedupe) == 1:
key = list(constraints)[0]
constraints = {key: constraints[key]}
if len(constraints) != 1:
raise exc.AmbiguousForeignKeysError(
"Can't determine join between '%s' and '%s'; "
"tables have more than one foreign key "
"constraint relationship between them. "
"Please specify the 'onclause' of this "
"join explicitly." % (a.description, b.description))
def select(self, whereclause=None, **kwargs):
r"""Create a :class:`.Select` from this :class:`.Join`.
The equivalent long-hand form, given a :class:`.Join` object
``j``, is::
from sqlalchemy import select
j = select([j.left, j.right], **kw).\
where(whereclause).\
select_from(j)
:param whereclause: the WHERE criterion that will be sent to
the :func:`select()` function
:param \**kwargs: all other kwargs are sent to the
underlying :func:`select()` function.
"""
collist = [self.left, self.right]
return Select(collist, whereclause, from_obj=[self], **kwargs)
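# Hedged usage sketch for :meth:`.Join.select`; ``a`` and ``b`` are
# assumed to be Table objects related by a foreign key:
#
#   j = a.join(b)
#   stmt = j.select(b.c.a_id > 5)
#   # SELECT a.id, b.a_id, ... FROM a JOIN b ON a.id = b.a_id
#   # WHERE b.a_id > :a_id_1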
@property
def bind(self):
return self.left.bind or self.right.bind
@util.dependencies("sqlalchemy.sql.util")
def alias(self, sqlutil, name=None, flat=False):
r"""return an alias of this :class:`.Join`.
The default behavior here is to first produce a SELECT
construct from this :class:`.Join`, then to produce an
:class:`.Alias` from that. So given a join of the form::
j = table_a.join(table_b, table_a.c.id == table_b.c.a_id)
The JOIN by itself would look like::
table_a JOIN table_b ON table_a.id = table_b.a_id
Whereas the alias of the above, ``j.alias()``, would in a
SELECT context look like::
(SELECT table_a.id AS table_a_id, table_b.id AS table_b_id,
table_b.a_id AS table_b_a_id
FROM table_a
JOIN table_b ON table_a.id = table_b.a_id) AS anon_1
The equivalent long-hand form, given a :class:`.Join` object
``j``, is::
from sqlalchemy import select, alias
j = alias(
select([j.left, j.right]).\
select_from(j).\
with_labels(True).\
correlate(False),
name=name
)
The selectable produced by :meth:`.Join.alias` features the same
columns as that of the two individual selectables presented under
a single name - the individual columns are "auto-labeled", meaning
the ``.c.`` collection of the resulting :class:`.Alias` represents
the names of the individual columns using a
``<tablename>_<columnname>`` scheme::
j.c.table_a_id
j.c.table_b_a_id
:meth:`.Join.alias` also features an alternate
option for aliasing joins which produces no enclosing SELECT and
does not normally apply labels to the column names. The
``flat=True`` option will call :meth:`.FromClause.alias`
against the left and right sides individually.
Using this option, no new ``SELECT`` is produced; instead,
from a construct such as::
j = table_a.join(table_b, table_a.c.id == table_b.c.a_id)
j = j.alias(flat=True)
we get a result like this::
table_a AS table_a_1 JOIN table_b AS table_b_1 ON
table_a_1.id = table_b_1.a_id
The ``flat=True`` argument is also propagated to the contained
selectables, so that a composite join such as::
j = table_a.join(
table_b.join(table_c,
table_b.c.id == table_c.c.b_id),
table_b.c.a_id == table_a.c.id
).alias(flat=True)
Will produce an expression like::
table_a AS table_a_1 JOIN (
table_b AS table_b_1 JOIN table_c AS table_c_1
ON table_b_1.id = table_c_1.b_id
) ON table_a_1.id = table_b_1.a_id
The standalone :func:`~.expression.alias` function as well as the
base :meth:`.FromClause.alias` method also support the ``flat=True``
argument as a no-op, so that the argument can be passed to the
``alias()`` method of any selectable.
.. versionadded:: 0.9.0 Added the ``flat=True`` option to create
"aliases" of joins without enclosing inside of a SELECT
subquery.
:param name: name given to the alias.
:param flat: if True, produce an alias of the left and right
sides of this :class:`.Join` and return the join of those
two selectables. This produces join expression that does not
include an enclosing SELECT.
.. versionadded:: 0.9.0
.. seealso::
:func:`~.expression.alias`
"""
if flat:
assert name is None, "Can't send name argument with flat"
left_a, right_a = self.left.alias(flat=True), \
self.right.alias(flat=True)
adapter = sqlutil.ClauseAdapter(left_a).\
chain(sqlutil.ClauseAdapter(right_a))
return left_a.join(right_a, adapter.traverse(self.onclause),
isouter=self.isouter, full=self.full)
else:
return self.select(use_labels=True, correlate=False).alias(name)
@property
def _hide_froms(self):
return itertools.chain(*[_from_objects(x.left, x.right)
for x in self._cloned_set])
@property
def _from_objects(self):
return [self] + \
self.onclause._from_objects + \
self.left._from_objects + \
self.right._from_objects
class Alias(FromClause):
"""Represents an table or selectable alias (AS).
Represents an alias, as typically applied to any table or
sub-select within a SQL statement using the ``AS`` keyword (or
without the keyword on certain databases such as Oracle).
This object is constructed from the :func:`~.expression.alias` module-level
function as well as the :meth:`.FromClause.alias` method available
on all :class:`.FromClause` subclasses.
"""
__visit_name__ = 'alias'
named_with_column = True
_is_from_container = True
def __init__(self, selectable, name=None):
baseselectable = selectable
while isinstance(baseselectable, Alias):
baseselectable = baseselectable.element
self.original = baseselectable
self.supports_execution = baseselectable.supports_execution
if self.supports_execution:
self._execution_options = baseselectable._execution_options
self.element = selectable
if name is None:
if self.original.named_with_column:
name = getattr(self.original, 'name', None)
name = _anonymous_label(
'%%(%d %s)s' % (id(self), name or 'anon'))
self.name = name
def self_group(self, target=None):
if isinstance(target, CompoundSelect) and \
isinstance(self.original, Select) and \
self.original._needs_parens_for_grouping():
return FromGrouping(self)
return super(Alias, self).self_group(target)
@property
def description(self):
if util.py3k:
return self.name
else:
return self.name.encode('ascii', 'backslashreplace')
def as_scalar(self):
try:
return self.element.as_scalar()
except AttributeError:
raise AttributeError("Element %s does not support "
"'as_scalar()'" % self.element)
def is_derived_from(self, fromclause):
if fromclause in self._cloned_set:
return True
return self.element.is_derived_from(fromclause)
def _populate_column_collection(self):
for col in self.element.columns._all_columns:
col._make_proxy(self)
def _refresh_for_new_column(self, column):
col = self.element._refresh_for_new_column(column)
if col is not None:
if not self._cols_populated:
return None
else:
return col._make_proxy(self)
else:
return None
def _copy_internals(self, clone=_clone, **kw):
# don't apply anything to an aliased Table
# for now. May want to drive this from
# the given **kw.
if isinstance(self.element, TableClause):
return
self._reset_exported()
self.element = clone(self.element, **kw)
baseselectable = self.element
while isinstance(baseselectable, Alias):
baseselectable = baseselectable.element
self.original = baseselectable
def get_children(self, column_collections=True, **kw):
if column_collections:
for c in self.c:
yield c
yield self.element
@property
def _from_objects(self):
return [self]
@property
def bind(self):
return self.element.bind
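# Hedged usage sketch for :class:`.Alias`; ``users`` is an assumed Table:
#
#   from sqlalchemy import select
#   u1 = users.alias('u1')                     # renders: users AS u1
#   stmt = select([u1.c.name]).where(u1.c.id == 7)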
class Lateral(Alias):
"""Represent a LATERAL subquery.
This object is constructed from the :func:`~.expression.lateral` module-level
function as well as the :meth:`.FromClause.lateral` method available
on all :class:`.FromClause` subclasses.
While LATERAL is part of the SQL standard, currently only more recent
PostgreSQL versions provide support for this keyword.
.. versionadded:: 1.1
.. seealso::
:ref:`lateral_selects` - overview of usage.
"""
__visit_name__ = 'lateral'
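# Hedged usage sketch (PostgreSQL-oriented; ``customers`` and ``orders``
# are assumed Tables):
#
#   from sqlalchemy import select, true
#   subq = select([orders.c.amount]).\
#       where(orders.c.customer_id == customers.c.id).\
#       lateral('customer_orders')
#   stmt = select([customers.c.name, subq.c.amount]).\
#       select_from(customers.join(subq, true()))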
class TableSample(Alias):
"""Represent a TABLESAMPLE clause.
This object is constructed from the :func:`~.expression.tablesample` module-level
function as well as the :meth:`.FromClause.tablesample` method available
on all :class:`.FromClause` subclasses.
.. versionadded:: 1.1
.. seealso::
:func:`~.expression.tablesample`
"""
__visit_name__ = 'tablesample'
def __init__(self, selectable, sampling,
name=None,
seed=None):
self.sampling = sampling
self.seed = seed
super(TableSample, self).__init__(selectable, name=name)
@util.dependencies("sqlalchemy.sql.functions")
def _get_method(self, functions):
if isinstance(self.sampling, functions.Function):
return self.sampling
else:
return functions.func.system(self.sampling)
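# Hedged usage sketch; ``people`` is an assumed Table:
#
#   from sqlalchemy import tablesample, select, func
#   sampled = tablesample(people, func.bernoulli(10), seed=func.random())
#   stmt = select([sampled.c.name])
#   # ... FROM people TABLESAMPLE bernoulli(:bernoulli_1)
#   #     REPEATABLE (random())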
class CTE(Generative, HasSuffixes, Alias):
"""Represent a Common Table Expression.
The :class:`.CTE` object is obtained using the
:meth:`.SelectBase.cte` method from any selectable.
See that method for complete examples.
.. versionadded:: 0.7.6
"""
__visit_name__ = 'cte'
def __init__(self, selectable,
name=None,
recursive=False,
_cte_alias=None,
_restates=frozenset(),
_suffixes=None):
self.recursive = recursive
self._cte_alias = _cte_alias
self._restates = _restates
if _suffixes:
self._suffixes = _suffixes
super(CTE, self).__init__(selectable, name=name)
def _copy_internals(self, clone=_clone, **kw):
super(CTE, self)._copy_internals(clone, **kw)
if self._cte_alias is not None:
self._cte_alias = self
self._restates = frozenset([
clone(elem, **kw) for elem in self._restates
])
@util.dependencies("sqlalchemy.sql.dml")
def _populate_column_collection(self, dml):
if isinstance(self.element, dml.UpdateBase):
for col in self.element._returning:
col._make_proxy(self)
else:
for col in self.element.columns._all_columns:
col._make_proxy(self)
def alias(self, name=None, flat=False):
return CTE(
self.original,
name=name,
recursive=self.recursive,
_cte_alias=self,
_suffixes=self._suffixes
)
def union(self, other):
return CTE(
self.original.union(other),
name=self.name,
recursive=self.recursive,
_restates=self._restates.union([self]),
_suffixes=self._suffixes
)
def union_all(self, other):
return CTE(
self.original.union_all(other),
name=self.name,
recursive=self.recursive,
_restates=self._restates.union([self]),
_suffixes=self._suffixes
)
class HasCTE(object):
"""Mixin that declares a class to include CTE support.
.. versionadded:: 1.1
"""
def cte(self, name=None, recursive=False):
r"""Return a new :class:`.CTE`, or Common Table Expression instance.
Common table expressions are a SQL standard whereby SELECT
statements can draw upon secondary statements specified along
with the primary statement, using a clause called "WITH".
Special semantics regarding UNION can also be employed to
allow "recursive" queries, where a SELECT statement can draw
upon the set of rows that have previously been selected.
CTEs can also be applied to DML constructs UPDATE, INSERT
and DELETE on some databases, both as a source of CTE rows
when combined with RETURNING, as well as a consumer of
CTE rows.
SQLAlchemy detects :class:`.CTE` objects, which are treated
similarly to :class:`.Alias` objects, as special elements
to be delivered to the FROM clause of the statement as well
as to a WITH clause at the top of the statement.
.. versionchanged:: 1.1 Added support for UPDATE/INSERT/DELETE as
CTE, CTEs added to UPDATE/INSERT/DELETE.
:param name: name given to the common table expression. Like
:meth:`._FromClause.alias`, the name can be left as ``None``
in which case an anonymous symbol will be used at query
compile time.
:param recursive: if ``True``, will render ``WITH RECURSIVE``.
A recursive common table expression is intended to be used in
conjunction with UNION ALL in order to derive rows
from those already selected.
The following examples include two from PostgreSQL's documentation at
http://www.postgresql.org/docs/current/static/queries-with.html,
as well as additional examples.
Example 1, non-recursive::
from sqlalchemy import (Table, Column, String, Integer,
MetaData, select, func)
metadata = MetaData()
orders = Table('orders', metadata,
Column('region', String),
Column('amount', Integer),
Column('product', String),
Column('quantity', Integer)
)
regional_sales = select([
orders.c.region,
func.sum(orders.c.amount).label('total_sales')
]).group_by(orders.c.region).cte("regional_sales")
top_regions = select([regional_sales.c.region]).\
where(
regional_sales.c.total_sales >
select([
func.sum(regional_sales.c.total_sales)/10
])
).cte("top_regions")
statement = select([
orders.c.region,
orders.c.product,
func.sum(orders.c.quantity).label("product_units"),
func.sum(orders.c.amount).label("product_sales")
]).where(orders.c.region.in_(
select([top_regions.c.region])
)).group_by(orders.c.region, orders.c.product)
result = conn.execute(statement).fetchall()
Example 2, WITH RECURSIVE::
from sqlalchemy import (Table, Column, String, Integer,
MetaData, select, func)
metadata = MetaData()
parts = Table('parts', metadata,
Column('part', String),
Column('sub_part', String),
Column('quantity', Integer),
)
included_parts = select([
parts.c.sub_part,
parts.c.part,
parts.c.quantity]).\
where(parts.c.part=='our part').\
cte(recursive=True)
incl_alias = included_parts.alias()
parts_alias = parts.alias()
included_parts = included_parts.union_all(
select([
parts_alias.c.sub_part,
parts_alias.c.part,
parts_alias.c.quantity
]).
where(parts_alias.c.part==incl_alias.c.sub_part)
)
statement = select([
included_parts.c.sub_part,
func.sum(included_parts.c.quantity).
label('total_quantity')
]).\
group_by(included_parts.c.sub_part)
result = conn.execute(statement).fetchall()
Example 3, an upsert using UPDATE and INSERT with CTEs::
from datetime import date
from sqlalchemy import (MetaData, Table, Column, Integer,
Date, select, literal, and_, exists)
metadata = MetaData()
visitors = Table('visitors', metadata,
Column('product_id', Integer, primary_key=True),
Column('date', Date, primary_key=True),
Column('count', Integer),
)
# add 5 visitors for the product_id == 1
product_id = 1
day = date.today()
count = 5
update_cte = (
visitors.update()
.where(and_(visitors.c.product_id == product_id,
visitors.c.date == day))
.values(count=visitors.c.count + count)
.returning(literal(1))
.cte('update_cte')
)
upsert = visitors.insert().from_select(
[visitors.c.product_id, visitors.c.date, visitors.c.count],
select([literal(product_id), literal(day), literal(count)])
.where(~exists(update_cte.select()))
)
connection.execute(upsert)
.. seealso::
:meth:`.orm.query.Query.cte` - ORM version of
:meth:`.HasCTE.cte`.
"""
return CTE(self, name=name, recursive=recursive)
class FromGrouping(FromClause):
"""Represent a grouping of a FROM clause"""
__visit_name__ = 'grouping'
def __init__(self, element):
self.element = element
def _init_collections(self):
pass
@property
def columns(self):
return self.element.columns
@property
def primary_key(self):
return self.element.primary_key
@property
def foreign_keys(self):
return self.element.foreign_keys
def is_derived_from(self, element):
return self.element.is_derived_from(element)
def alias(self, **kw):
return FromGrouping(self.element.alias(**kw))
@property
def _hide_froms(self):
return self.element._hide_froms
def get_children(self, **kwargs):
return self.element,
def _copy_internals(self, clone=_clone, **kw):
self.element = clone(self.element, **kw)
@property
def _from_objects(self):
return self.element._from_objects
def __getattr__(self, attr):
return getattr(self.element, attr)
def __getstate__(self):
return {'element': self.element}
def __setstate__(self, state):
self.element = state['element']
class TableClause(Immutable, FromClause):
"""Represents a minimal "table" construct.
This is a lightweight table object that has only a name and a
collection of columns, which are typically produced
by the :func:`.expression.column` function::
from sqlalchemy import table, column
user = table("user",
column("id"),
column("name"),
column("description"),
)
The :class:`.TableClause` construct serves as the base for
the more commonly used :class:`~.schema.Table` object, providing
the usual set of :class:`~.expression.FromClause` services including
the ``.c.`` collection and statement generation methods.
It does **not** provide all the additional schema-level services
of :class:`~.schema.Table`, including constraints, references to other
tables, or support for :class:`.MetaData`-level services. It's useful
on its own as an ad-hoc construct used to generate quick SQL
statements when a more fully fledged :class:`~.schema.Table`
is not on hand.
"""
__visit_name__ = 'table'
named_with_column = True
implicit_returning = False
""":class:`.TableClause` doesn't support having a primary key or column
-level defaults, so implicit returning doesn't apply."""
_autoincrement_column = None
"""No PK or default support so no autoincrement column."""
def __init__(self, name, *columns):
"""Produce a new :class:`.TableClause`.
The object returned is an instance of :class:`.TableClause`, which
represents the "syntactical" portion of the schema-level
:class:`~.schema.Table` object.
It may be used to construct lightweight table constructs.
.. versionchanged:: 1.0.0 :func:`.expression.table` can now
be imported from the plain ``sqlalchemy`` namespace like any
other SQL element.
:param name: Name of the table.
:param columns: A collection of :func:`.expression.column` constructs.
"""
super(TableClause, self).__init__()
self.name = self.fullname = name
self._columns = ColumnCollection()
self.primary_key = ColumnSet()
self.foreign_keys = set()
for c in columns:
self.append_column(c)
def _init_collections(self):
pass
@util.memoized_property
def description(self):
if util.py3k:
return self.name
else:
return self.name.encode('ascii', 'backslashreplace')
def append_column(self, c):
self._columns[c.key] = c
c.table = self
def get_children(self, column_collections=True, **kwargs):
if column_collections:
return [c for c in self.c]
else:
return []
@util.dependencies("sqlalchemy.sql.dml")
def insert(self, dml, values=None, inline=False, **kwargs):
"""Generate an :func:`.insert` construct against this
:class:`.TableClause`.
E.g.::
table.insert().values(name='foo')
See :func:`.insert` for argument and usage information.
"""
return dml.Insert(self, values=values, inline=inline, **kwargs)
@util.dependencies("sqlalchemy.sql.dml")
def update(
self, dml, whereclause=None, values=None, inline=False, **kwargs):
"""Generate an :func:`.update` construct against this
:class:`.TableClause`.
E.g.::
table.update().where(table.c.id==7).values(name='foo')
See :func:`.update` for argument and usage information.
"""
return dml.Update(self, whereclause=whereclause,
values=values, inline=inline, **kwargs)
@util.dependencies("sqlalchemy.sql.dml")
def delete(self, dml, whereclause=None, **kwargs):
"""Generate a :func:`.delete` construct against this
:class:`.TableClause`.
E.g.::
table.delete().where(table.c.id==7)
See :func:`.delete` for argument and usage information.
"""
return dml.Delete(self, whereclause, **kwargs)
@property
def _from_objects(self):
return [self]
class ForUpdateArg(ClauseElement):
@classmethod
def parse_legacy_select(cls, arg):
"""Parse the for_update argument of :func:`.select`.
:param mode: Defines the lockmode to use.
``None`` - translates to no lockmode
``'update'`` - translates to ``FOR UPDATE``
(standard SQL, supported by most dialects)
``'nowait'`` - translates to ``FOR UPDATE NOWAIT``
(supported by Oracle, PostgreSQL 8.1 upwards)
``'read'`` - translates to ``LOCK IN SHARE MODE`` (for MySQL),
and ``FOR SHARE`` (for PostgreSQL)
``'read_nowait'`` - translates to ``FOR SHARE NOWAIT``
(supported by PostgreSQL)
"""
if arg in (None, False):
return None
nowait = read = False
if arg == 'nowait':
nowait = True
elif arg == 'read':
read = True
elif arg == 'read_nowait':
read = nowait = True
elif arg is not True:
raise exc.ArgumentError("Unknown for_update argument: %r" % arg)
return ForUpdateArg(read=read, nowait=nowait)
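# Sketch of the legacy translation performed above:
#
#   ForUpdateArg.parse_legacy_select(True)       # FOR UPDATE
#   ForUpdateArg.parse_legacy_select('nowait')   # FOR UPDATE NOWAIT
#   ForUpdateArg.parse_legacy_select('read')     # FOR SHARE / LOCK IN SHARE MODE
#   ForUpdateArg.parse_legacy_select(None)       # no locking clause (None)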
@property
def legacy_for_update_value(self):
if self.read and not self.nowait:
return "read"
elif self.read and self.nowait:
return "read_nowait"
elif self.nowait:
return "nowait"
else:
return True
def _copy_internals(self, clone=_clone, **kw):
if self.of is not None:
self.of = [clone(col, **kw) for col in self.of]
def __init__(
self, nowait=False, read=False, of=None,
skip_locked=False, key_share=False):
"""Represents arguments specified to :meth:`.Select.for_update`.
.. versionadded:: 0.9.0
"""
self.nowait = nowait
self.read = read
self.skip_locked = skip_locked
self.key_share = key_share
if of is not None:
self.of = [_interpret_as_column_or_from(elem)
for elem in util.to_list(of)]
else:
self.of = None
class SelectBase(HasCTE, Executable, FromClause):
"""Base class for SELECT statements.
This includes :class:`.Select`, :class:`.CompoundSelect` and
:class:`.TextAsFrom`.
"""
def as_scalar(self):
"""return a 'scalar' representation of this selectable, which can be
used as a column expression.
Typically, a select statement which has only one column in its columns
clause is eligible to be used as a scalar expression.
The returned object is an instance of
:class:`ScalarSelect`.
"""
return ScalarSelect(self)
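# Hedged usage sketch; ``users`` and ``addresses`` are assumed Tables:
#
#   from sqlalchemy import select, func
#   subq = select([func.count(addresses.c.id)]).\
#       where(addresses.c.user_id == users.c.id).\
#       as_scalar()
#   stmt = select([users.c.name, subq.label('address_count')])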
def label(self, name):
"""return a 'scalar' representation of this selectable, embedded as a
subquery with a label.
.. seealso::
:meth:`~.SelectBase.as_scalar`.
"""
return self.as_scalar().label(name)
@_generative
@util.deprecated('0.6',
message="``autocommit()`` is deprecated. Use "
":meth:`.Executable.execution_options` with the "
"'autocommit' flag.")
def autocommit(self):
"""return a new selectable with the 'autocommit' flag set to
True.
"""
self._execution_options = \
self._execution_options.union({'autocommit': True})
def _generate(self):
"""Override the default _generate() method to also clear out
exported collections."""
s = self.__class__.__new__(self.__class__)
s.__dict__ = self.__dict__.copy()
s._reset_exported()
return s
@property
def _from_objects(self):
return [self]
class GenerativeSelect(SelectBase):
"""Base class for SELECT statements where additional elements can be
added.
This serves as the base for :class:`.Select` and :class:`.CompoundSelect`
where elements such as ORDER BY, GROUP BY can be added and column
rendering can be controlled. Compare to :class:`.TextAsFrom`, which,
while it subclasses :class:`.SelectBase` and is also a SELECT construct,
represents a fixed textual string which cannot be altered at this level,
only wrapped as a subquery.
.. versionadded:: 0.9.0 :class:`.GenerativeSelect` was added to
provide functionality specific to :class:`.Select` and
:class:`.CompoundSelect` while allowing :class:`.SelectBase` to be
used for other SELECT-like objects, e.g. :class:`.TextAsFrom`.
"""
_order_by_clause = ClauseList()
_group_by_clause = ClauseList()
_limit_clause = None
_offset_clause = None
_for_update_arg = None
def __init__(self,
use_labels=False,
for_update=False,
limit=None,
offset=None,
order_by=None,
group_by=None,
bind=None,
autocommit=None):
self.use_labels = use_labels
if for_update is not False:
self._for_update_arg = (ForUpdateArg.
parse_legacy_select(for_update))
if autocommit is not None:
util.warn_deprecated('autocommit on select() is '
'deprecated. Use .execution_options('
'autocommit=True)')
self._execution_options = \
self._execution_options.union(
{'autocommit': autocommit})
if limit is not None:
self._limit_clause = _offset_or_limit_clause(limit)
if offset is not None:
self._offset_clause = _offset_or_limit_clause(offset)
self._bind = bind
if order_by is not None:
self._order_by_clause = ClauseList(
*util.to_list(order_by),
_literal_as_text=_literal_and_labels_as_label_reference)
if group_by is not None:
self._group_by_clause = ClauseList(
*util.to_list(group_by),
_literal_as_text=_literal_as_label_reference)
@property
def for_update(self):
"""Provide legacy dialect support for the ``for_update`` attribute.
"""
if self._for_update_arg is not None:
return self._for_update_arg.legacy_for_update_value
else:
return None
@for_update.setter
def for_update(self, value):
self._for_update_arg = ForUpdateArg.parse_legacy_select(value)
@_generative
def with_for_update(self, nowait=False, read=False, of=None,
skip_locked=False, key_share=False):
"""Specify a ``FOR UPDATE`` clause for this :class:`.GenerativeSelect`.
E.g.::
stmt = select([table]).with_for_update(nowait=True)
On a database like PostgreSQL or Oracle, the above would render a
statement like::
SELECT table.a, table.b FROM table FOR UPDATE NOWAIT
On other backends, the ``nowait`` option is ignored and instead
would produce::
SELECT table.a, table.b FROM table FOR UPDATE
When called with no arguments, the statement will render with
the suffix ``FOR UPDATE``. Additional arguments can then be
provided which allow for common database-specific
variants.
:param nowait: boolean; will render ``FOR UPDATE NOWAIT`` on Oracle
and PostgreSQL dialects.
:param read: boolean; will render ``LOCK IN SHARE MODE`` on MySQL,
``FOR SHARE`` on PostgreSQL. On PostgreSQL, when combined with
``nowait``, will render ``FOR SHARE NOWAIT``.
:param of: SQL expression or list of SQL expression elements
(typically :class:`.Column` objects or a compatible expression) which
will render into a ``FOR UPDATE OF`` clause; supported by PostgreSQL
and Oracle. May render as a table or as a column depending on
backend.
:param skip_locked: boolean, will render ``FOR UPDATE SKIP LOCKED``
on Oracle and PostgreSQL dialects or ``FOR SHARE SKIP LOCKED`` if
``read=True`` is also specified.
.. versionadded:: 1.1.0
:param key_share: boolean, will render ``FOR NO KEY UPDATE``,
or if combined with ``read=True`` will render ``FOR KEY SHARE``,
on the PostgreSQL dialect.
.. versionadded:: 1.1.0
"""
self._for_update_arg = ForUpdateArg(nowait=nowait, read=read, of=of,
skip_locked=skip_locked,
key_share=key_share)
@_generative
def apply_labels(self):
"""return a new selectable with the 'use_labels' flag set to True.
This will result in column expressions being generated using labels
against their table name, such as "SELECT somecolumn AS
tablename_somecolumn". This allows selectables which contain multiple
FROM clauses to produce a unique set of column names regardless of
name conflicts among the individual FROM clauses.
"""
self.use_labels = True
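# Hedged sketch; ``users`` and ``addresses`` are assumed Tables:
#
#   stmt = select([users, addresses]).apply_labels()
#   # SELECT users.id AS users_id, users.name AS users_name,
#   #        addresses.id AS addresses_id, ...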
@property
def _limit(self):
"""Get an integer value for the limit. This should only be used
by code that cannot support a limit as a BindParameter or
other custom clause as it will throw an exception if the limit
isn't currently set to an integer.
"""
return _offset_or_limit_clause_asint(self._limit_clause, "limit")
@property
def _simple_int_limit(self):
"""True if the LIMIT clause is a simple integer, False
if it is not present or is a SQL expression.
"""
return isinstance(self._limit_clause, _OffsetLimitParam)
@property
def _simple_int_offset(self):
"""True if the OFFSET clause is a simple integer, False
if it is not present or is a SQL expression.
"""
return isinstance(self._offset_clause, _OffsetLimitParam)
@property
def _offset(self):
"""Get an integer value for the offset. This should only be used
by code that cannot support an offset as a BindParameter or
other custom clause as it will throw an exception if the
offset isn't currently set to an integer.
"""
return _offset_or_limit_clause_asint(self._offset_clause, "offset")
@_generative
def limit(self, limit):
"""return a new selectable with the given LIMIT criterion
applied.
This is a numerical value which usually renders as a ``LIMIT``
expression in the resulting select. Backends that don't
support ``LIMIT`` will attempt to provide similar
functionality.
.. versionchanged:: 1.0.0 - :meth:`.Select.limit` can now
accept arbitrary SQL expressions as well as integer values.
:param limit: an integer LIMIT parameter, or a SQL expression
that provides an integer result.
"""
self._limit_clause = _offset_or_limit_clause(limit)
@_generative
def offset(self, offset):
"""return a new selectable with the given OFFSET criterion
applied.
This is a numeric value which usually renders as an ``OFFSET``
expression in the resulting select. Backends that don't
support ``OFFSET`` will attempt to provide similar
functionality.
.. versionchanged:: 1.0.0 - :meth:`.Select.offset` can now
accept arbitrary SQL expressions as well as integer values.
:param offset: an integer OFFSET parameter, or a SQL expression
that provides an integer result.
"""
self._offset_clause = _offset_or_limit_clause(offset)
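# Hedged sketch of LIMIT/OFFSET chaining; ``users`` is an assumed Table:
#
#   stmt = select([users]).order_by(users.c.id).limit(10).offset(20)
#   # renders LIMIT/OFFSET (or an emulation) per the backend in use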
@_generative
def order_by(self, *clauses):
"""return a new selectable with the given list of ORDER BY
criterion applied.
The criterion will be appended to any pre-existing ORDER BY
criterion.
"""
self.append_order_by(*clauses)
@_generative
def group_by(self, *clauses):
"""return a new selectable with the given list of GROUP BY
criterion applied.
The criterion will be appended to any pre-existing GROUP BY
criterion.
"""
self.append_group_by(*clauses)
def append_order_by(self, *clauses):
"""Append the given ORDER BY criterion applied to this selectable.
The criterion will be appended to any pre-existing ORDER BY criterion.
This is an **in-place** mutation method; the
:meth:`~.GenerativeSelect.order_by` method is preferred, as it
provides standard :term:`method chaining`.
"""
if len(clauses) == 1 and clauses[0] is None:
self._order_by_clause = ClauseList()
else:
if getattr(self, '_order_by_clause', None) is not None:
clauses = list(self._order_by_clause) + list(clauses)
self._order_by_clause = ClauseList(
*clauses,
_literal_as_text=_literal_and_labels_as_label_reference)
def append_group_by(self, *clauses):
"""Append the given GROUP BY criterion applied to this selectable.
The criterion will be appended to any pre-existing GROUP BY criterion.
This is an **in-place** mutation method; the
:meth:`~.GenerativeSelect.group_by` method is preferred, as it
provides standard :term:`method chaining`.
"""
if len(clauses) == 1 and clauses[0] is None:
self._group_by_clause = ClauseList()
else:
if getattr(self, '_group_by_clause', None) is not None:
clauses = list(self._group_by_clause) + list(clauses)
self._group_by_clause = ClauseList(
*clauses, _literal_as_text=_literal_as_label_reference)
@property
def _label_resolve_dict(self):
raise NotImplementedError()
def _copy_internals(self, clone=_clone, **kw):
if self._limit_clause is not None:
self._limit_clause = clone(self._limit_clause, **kw)
if self._offset_clause is not None:
self._offset_clause = clone(self._offset_clause, **kw)
class CompoundSelect(GenerativeSelect):
"""Forms the basis of ``UNION``, ``UNION ALL``, and other
SELECT-based set operations.
.. seealso::
:func:`.union`
:func:`.union_all`
:func:`.intersect`
:func:`.intersect_all`
:func:`.except`
:func:`.except_all`
"""
__visit_name__ = 'compound_select'
UNION = util.symbol('UNION')
UNION_ALL = util.symbol('UNION ALL')
EXCEPT = util.symbol('EXCEPT')
EXCEPT_ALL = util.symbol('EXCEPT ALL')
INTERSECT = util.symbol('INTERSECT')
INTERSECT_ALL = util.symbol('INTERSECT ALL')
_is_from_container = True
def __init__(self, keyword, *selects, **kwargs):
self._auto_correlate = kwargs.pop('correlate', False)
self.keyword = keyword
self.selects = []
numcols = None
# some DBs do not like ORDER BY in the inner queries of a UNION, etc.
for n, s in enumerate(selects):
s = _clause_element_as_expr(s)
if not numcols:
numcols = len(s.c._all_columns)
elif len(s.c._all_columns) != numcols:
raise exc.ArgumentError(
'All selectables passed to '
'CompoundSelect must have identical numbers of '
'columns; select #%d has %d columns, select '
'#%d has %d' %
(1, len(self.selects[0].c._all_columns),
n + 1, len(s.c._all_columns))
)
self.selects.append(s.self_group(self))
GenerativeSelect.__init__(self, **kwargs)
@property
def _label_resolve_dict(self):
d = dict(
(c.key, c) for c in self.c
)
return d, d, d
@classmethod
def _create_union(cls, *selects, **kwargs):
r"""Return a ``UNION`` of multiple selectables.
The returned object is an instance of
:class:`.CompoundSelect`.
A similar :func:`union()` method is available on all
:class:`.FromClause` subclasses.
\*selects
a list of :class:`.Select` instances.
\**kwargs
available keyword arguments are the same as those of
:func:`select`.
"""
return CompoundSelect(CompoundSelect.UNION, *selects, **kwargs)
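# Hedged sketch via the public entry point; the tables are assumptions:
#
#   from sqlalchemy import select, union
#   u = union(
#       select([users.c.name]),
#       select([managers.c.name])
#   ).order_by('name')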
@classmethod
def _create_union_all(cls, *selects, **kwargs):
r"""Return a ``UNION ALL`` of multiple selectables.
The returned object is an instance of
:class:`.CompoundSelect`.
A similar :func:`union_all()` method is available on all
:class:`.FromClause` subclasses.
\*selects
a list of :class:`.Select` instances.
\**kwargs
available keyword arguments are the same as those of
:func:`select`.
"""
return CompoundSelect(CompoundSelect.UNION_ALL, *selects, **kwargs)
@classmethod
def _create_except(cls, *selects, **kwargs):
r"""Return an ``EXCEPT`` of multiple selectables.
The returned object is an instance of
:class:`.CompoundSelect`.
\*selects
a list of :class:`.Select` instances.
\**kwargs
available keyword arguments are the same as those of
:func:`select`.
"""
return CompoundSelect(CompoundSelect.EXCEPT, *selects, **kwargs)
@classmethod
def _create_except_all(cls, *selects, **kwargs):
r"""Return an ``EXCEPT ALL`` of multiple selectables.
The returned object is an instance of
:class:`.CompoundSelect`.
\*selects
a list of :class:`.Select` instances.
\**kwargs
available keyword arguments are the same as those of
:func:`select`.
"""
return CompoundSelect(CompoundSelect.EXCEPT_ALL, *selects, **kwargs)
@classmethod
def _create_intersect(cls, *selects, **kwargs):
r"""Return an ``INTERSECT`` of multiple selectables.
The returned object is an instance of
:class:`.CompoundSelect`.
\*selects
a list of :class:`.Select` instances.
\**kwargs
available keyword arguments are the same as those of
:func:`select`.
"""
return CompoundSelect(CompoundSelect.INTERSECT, *selects, **kwargs)
@classmethod
def _create_intersect_all(cls, *selects, **kwargs):
r"""Return an ``INTERSECT ALL`` of multiple selectables.
The returned object is an instance of
:class:`.CompoundSelect`.
\*selects
a list of :class:`.Select` instances.
\**kwargs
available keyword arguments are the same as those of
:func:`select`.
"""
return CompoundSelect(
CompoundSelect.INTERSECT_ALL, *selects, **kwargs)
def _scalar_type(self):
return self.selects[0]._scalar_type()
def self_group(self, against=None):
return FromGrouping(self)
def is_derived_from(self, fromclause):
for s in self.selects:
if s.is_derived_from(fromclause):
return True
return False
def _populate_column_collection(self):
for cols in zip(*[s.c._all_columns for s in self.selects]):
# this is a slightly hacky thing - the union exports a
# column that resembles just that of the *first* selectable.
# to get at a "composite" column, particularly foreign keys,
# you have to dig through the proxies collection which we
# generate below. We may want to improve upon this, such as
# perhaps _make_proxy can accept a list of other columns
# that are "shared" - schema.column can then copy all the
# ForeignKeys in. this would allow the union() to have all
# those fks too.
proxy = cols[0]._make_proxy(
self, name=cols[0]._label if self.use_labels else None,
key=cols[0]._key_label if self.use_labels else None)
# hand-construct the "_proxies" collection to include all
# derived columns place a 'weight' annotation corresponding
# to how low in the list of select()s the column occurs, so
# that the corresponding_column() operation can resolve
# conflicts
proxy._proxies = [
c._annotate({'weight': i + 1}) for (i, c) in enumerate(cols)]
def _refresh_for_new_column(self, column):
for s in self.selects:
s._refresh_for_new_column(column)
if not self._cols_populated:
return None
raise NotImplementedError("CompoundSelect constructs don't support "
"addition of columns to underlying "
"selectables")
def _copy_internals(self, clone=_clone, **kw):
super(CompoundSelect, self)._copy_internals(clone, **kw)
self._reset_exported()
self.selects = [clone(s, **kw) for s in self.selects]
if hasattr(self, '_col_map'):
del self._col_map
for attr in (
'_order_by_clause', '_group_by_clause', '_for_update_arg'):
if getattr(self, attr) is not None:
setattr(self, attr, clone(getattr(self, attr), **kw))
def get_children(self, column_collections=True, **kwargs):
return (column_collections and list(self.c) or []) \
+ [self._order_by_clause, self._group_by_clause] \
+ list(self.selects)
def bind(self):
if self._bind:
return self._bind
for s in self.selects:
e = s.bind
if e:
return e
else:
return None
def _set_bind(self, bind):
self._bind = bind
bind = property(bind, _set_bind)
class Select(HasPrefixes, HasSuffixes, GenerativeSelect):
"""Represents a ``SELECT`` statement.
"""
__visit_name__ = 'select'
_prefixes = ()
_suffixes = ()
_hints = util.immutabledict()
_statement_hints = ()
_distinct = False
_from_cloned = None
_correlate = ()
_correlate_except = None
_memoized_property = SelectBase._memoized_property
_is_select = True
def __init__(self,
columns=None,
whereclause=None,
from_obj=None,
distinct=False,
having=None,
correlate=True,
prefixes=None,
suffixes=None,
**kwargs):
"""Construct a new :class:`.Select`.
Similar functionality is also available via the
:meth:`.FromClause.select` method on any :class:`.FromClause`.
All arguments which accept :class:`.ClauseElement` arguments also
accept string arguments, which will be converted as appropriate into
either :func:`text()` or :func:`literal_column()` constructs.
.. seealso::
:ref:`coretutorial_selecting` - Core Tutorial description of
:func:`.select`.
:param columns:
A list of :class:`.ColumnElement` or :class:`.FromClause`
objects which will form the columns clause of the resulting
statement. For those objects that are instances of
:class:`.FromClause` (typically :class:`.Table` or :class:`.Alias`
objects), the :attr:`.FromClause.c` collection is extracted
to form a collection of :class:`.ColumnElement` objects.
This parameter will also accept :class:`.Text` constructs as
given, as well as ORM-mapped classes.
.. note::
The :paramref:`.select.columns` parameter is not available
in the method form of :func:`.select`, e.g.
:meth:`.FromClause.select`.
.. seealso::
:meth:`.Select.column`
:meth:`.Select.with_only_columns`
:param whereclause:
A :class:`.ClauseElement` expression which will be used to form the
``WHERE`` clause. It is typically preferable to add WHERE
criterion to an existing :class:`.Select` using method chaining
with :meth:`.Select.where`.
.. seealso::
:meth:`.Select.where`
:param from_obj:
A list of :class:`.ClauseElement` objects which will be added to the
``FROM`` clause of the resulting statement. This is equivalent
to calling :meth:`.Select.select_from` using method chaining on
an existing :class:`.Select` object.
.. seealso::
:meth:`.Select.select_from` - full description of explicit
FROM clause specification.
:param autocommit:
Deprecated. Use ``.execution_options(autocommit=<True|False>)``
to set the autocommit option.
.. seealso::
:meth:`.Executable.execution_options`
:param bind=None:
an :class:`~.Engine` or :class:`~.Connection` instance
to which the
resulting :class:`.Select` object will be bound. The
:class:`.Select` object will otherwise automatically bind to
whatever :class:`~.base.Connectable` instances can be located within
its contained :class:`.ClauseElement` members.
:param correlate=True:
indicates that this :class:`.Select` object should have its
contained :class:`.FromClause` elements "correlated" to an enclosing
:class:`.Select` object. It is typically preferable to specify
correlations on an existing :class:`.Select` construct using
:meth:`.Select.correlate`.
.. seealso::
:meth:`.Select.correlate` - full description of correlation.
:param distinct=False:
when ``True``, applies a ``DISTINCT`` qualifier to the columns
clause of the resulting statement.
The boolean argument may also be a column expression or list
of column expressions - this is a special calling form which
is understood by the PostgreSQL dialect to render the
``DISTINCT ON (<columns>)`` syntax.
``distinct`` is also available on an existing :class:`.Select`
object via the :meth:`~.Select.distinct` method.
.. seealso::
:meth:`.Select.distinct`
:param for_update=False:
when ``True``, applies ``FOR UPDATE`` to the end of the
resulting statement.
.. deprecated:: 0.9.0 - use
:meth:`.Select.with_for_update` to specify the
structure of the ``FOR UPDATE`` clause.
``for_update`` accepts various string values interpreted by
specific backends, including:
* ``"read"`` - on MySQL, translates to ``LOCK IN SHARE MODE``;
on PostgreSQL, translates to ``FOR SHARE``.
* ``"nowait"`` - on PostgreSQL and Oracle, translates to
``FOR UPDATE NOWAIT``.
* ``"read_nowait"`` - on PostgreSQL, translates to
``FOR SHARE NOWAIT``.
.. seealso::
:meth:`.Select.with_for_update` - improved API for
specifying the ``FOR UPDATE`` clause.
:param group_by:
a list of :class:`.ClauseElement` objects which will comprise the
``GROUP BY`` clause of the resulting select. This parameter
is typically specified more naturally using the
:meth:`.Select.group_by` method on an existing :class:`.Select`.
.. seealso::
:meth:`.Select.group_by`
:param having:
a :class:`.ClauseElement` that will comprise the ``HAVING`` clause
of the resulting select when ``GROUP BY`` is used. This parameter
is typically specified more naturally using the
:meth:`.Select.having` method on an existing :class:`.Select`.
.. seealso::
:meth:`.Select.having`
:param limit=None:
a numerical value which usually renders as a ``LIMIT``
expression in the resulting select. Backends that don't
support ``LIMIT`` will attempt to provide similar
functionality. This parameter is typically specified more naturally
using the :meth:`.Select.limit` method on an existing
:class:`.Select`.
.. seealso::
:meth:`.Select.limit`
:param offset=None:
a numeric value which usually renders as an ``OFFSET``
expression in the resulting select. Backends that don't
support ``OFFSET`` will attempt to provide similar
functionality. This parameter is typically specified more naturally
using the :meth:`.Select.offset` method on an existing
:class:`.Select`.
.. seealso::
:meth:`.Select.offset`
:param order_by:
a scalar or list of :class:`.ClauseElement` objects which will
comprise the ``ORDER BY`` clause of the resulting select.
This parameter is typically specified more naturally using the
:meth:`.Select.order_by` method on an existing :class:`.Select`.
.. seealso::
:meth:`.Select.order_by`
:param use_labels=False:
when ``True``, the statement will be generated using labels
for each column in the columns clause, which qualify each
column with its parent table's (or alias's) name so that name
conflicts between columns in different tables don't occur.
The format of the label is <tablename>_<column>. The "c"
collection of the resulting :class:`.Select` object will use these
names as well for targeting column members.
This parameter can also be specified on an existing
:class:`.Select` object using the :meth:`.Select.apply_labels`
method.
.. seealso::
:meth:`.Select.apply_labels`
"""
self._auto_correlate = correlate
if distinct is not False:
if distinct is True:
self._distinct = True
else:
self._distinct = [
_literal_as_text(e)
for e in util.to_list(distinct)
]
if from_obj is not None:
self._from_obj = util.OrderedSet(
_interpret_as_from(f)
for f in util.to_list(from_obj))
else:
self._from_obj = util.OrderedSet()
try:
cols_present = bool(columns)
except TypeError:
raise exc.ArgumentError("columns argument to select() must "
"be a Python list or other iterable")
if cols_present:
self._raw_columns = []
for c in columns:
c = _interpret_as_column_or_from(c)
if isinstance(c, ScalarSelect):
c = c.self_group(against=operators.comma_op)
self._raw_columns.append(c)
else:
self._raw_columns = []
if whereclause is not None:
self._whereclause = _literal_as_text(
whereclause).self_group(against=operators._asbool)
else:
self._whereclause = None
if having is not None:
self._having = _literal_as_text(
having).self_group(against=operators._asbool)
else:
self._having = None
if prefixes:
self._setup_prefixes(prefixes)
if suffixes:
self._setup_suffixes(suffixes)
GenerativeSelect.__init__(self, **kwargs)
@property
def _froms(self):
# would love to cache this,
# but there's just enough edge cases, particularly now that
# declarative encourages construction of SQL expressions
# without tables present, to just regen this each time.
froms = []
seen = set()
translate = self._from_cloned
for item in itertools.chain(
_from_objects(*self._raw_columns),
_from_objects(self._whereclause)
if self._whereclause is not None else (),
self._from_obj
):
if item is self:
raise exc.InvalidRequestError(
"select() construct refers to itself as a FROM")
if translate and item in translate:
item = translate[item]
if not seen.intersection(item._cloned_set):
froms.append(item)
seen.update(item._cloned_set)
return froms
def _get_display_froms(self, explicit_correlate_froms=None,
implicit_correlate_froms=None):
"""Return the full list of 'from' clauses to be displayed.
Takes into account a set of existing froms which may be
rendered in the FROM clause of enclosing selects; this Select
may want to leave those absent if it is automatically
correlating.
"""
froms = self._froms
toremove = set(itertools.chain(*[
_expand_cloned(f._hide_froms)
for f in froms]))
if toremove:
# if we're maintaining clones of froms,
# add the copies out to the toremove list. only include
# clones that are lexical equivalents.
if self._from_cloned:
toremove.update(
self._from_cloned[f] for f in
toremove.intersection(self._from_cloned)
if self._from_cloned[f]._is_lexical_equivalent(f)
)
# filter out to FROM clauses not in the list,
# using a list to maintain ordering
froms = [f for f in froms if f not in toremove]
if self._correlate:
froms = [
f for f in froms if f not in
_cloned_intersection(
_cloned_intersection(
froms, explicit_correlate_froms or ()),
self._correlate
)
]
if self._correlate_except is not None:
froms = [
f for f in froms if f not in
_cloned_difference(
_cloned_intersection(
froms, explicit_correlate_froms or ()),
self._correlate_except
)
]
if self._auto_correlate and \
implicit_correlate_froms and \
len(froms) > 1:
froms = [
f for f in froms if f not in
_cloned_intersection(froms, implicit_correlate_froms)
]
if not len(froms):
raise exc.InvalidRequestError("Select statement '%s"
"' returned no FROM clauses "
"due to auto-correlation; "
"specify correlate(<tables>) "
"to control correlation "
"manually." % self)
return froms
def _scalar_type(self):
elem = self._raw_columns[0]
cols = list(elem._select_iterable)
return cols[0].type
@property
def froms(self):
"""Return the displayed list of FromClause elements."""
return self._get_display_froms()
def with_statement_hint(self, text, dialect_name='*'):
"""add a statement hint to this :class:`.Select`.
This method is similar to :meth:`.Select.with_hint` except that
it does not require an individual table, and instead applies to the
statement as a whole.
Hints here are specific to the backend database and may include
directives such as isolation levels, file directives, fetch directives,
etc.
.. versionadded:: 1.0.0
.. seealso::
:meth:`.Select.with_hint`
"""
return self.with_hint(None, text, dialect_name)
@_generative
def with_hint(self, selectable, text, dialect_name='*'):
r"""Add an indexing or other executional context hint for the given
selectable to this :class:`.Select`.
The text of the hint is rendered in the appropriate
location for the database backend in use, relative
to the given :class:`.Table` or :class:`.Alias` passed as the
``selectable`` argument. The dialect implementation
typically uses Python string substitution syntax
with the token ``%(name)s`` to render the name of
the table or alias. E.g. when using Oracle, the
following::
select([mytable]).\
with_hint(mytable, "index(%(name)s ix_mytable)")
Would render SQL as::
select /*+ index(mytable ix_mytable) */ ... from mytable
The ``dialect_name`` option will limit the rendering of a particular
hint to a particular backend. Such as, to add hints for both Oracle
and Sybase simultaneously::
select([mytable]).\
with_hint(mytable, "index(%(name)s ix_mytable)", 'oracle').\
with_hint(mytable, "WITH INDEX ix_mytable", 'sybase')
.. seealso::
:meth:`.Select.with_statement_hint`
"""
if selectable is None:
self._statement_hints += ((dialect_name, text), )
else:
self._hints = self._hints.union(
{(selectable, dialect_name): text})
@property
def type(self):
raise exc.InvalidRequestError("Select objects don't have a type. "
"Call as_scalar() on this Select "
"object to return a 'scalar' version "
"of this Select.")
@_memoized_property.method
def locate_all_froms(self):
"""return a Set of all FromClause elements referenced by this Select.
This set is a superset of that returned by the ``froms`` property,
which is specifically for those FromClause elements that would
actually be rendered.
"""
froms = self._froms
return froms + list(_from_objects(*froms))
@property
def inner_columns(self):
"""an iterator of all ColumnElement expressions which would
be rendered into the columns clause of the resulting SELECT statement.
"""
return _select_iterables(self._raw_columns)
@_memoized_property
def _label_resolve_dict(self):
with_cols = dict(
(c._resolve_label or c._label or c.key, c)
for c in _select_iterables(self._raw_columns)
if c._allow_label_resolve)
only_froms = dict(
(c.key, c) for c in
_select_iterables(self.froms) if c._allow_label_resolve)
only_cols = with_cols.copy()
for key, value in only_froms.items():
with_cols.setdefault(key, value)
return with_cols, only_froms, only_cols
def is_derived_from(self, fromclause):
if self in fromclause._cloned_set:
return True
for f in self.locate_all_froms():
if f.is_derived_from(fromclause):
return True
return False
def _copy_internals(self, clone=_clone, **kw):
super(Select, self)._copy_internals(clone, **kw)
# Select() object has been cloned and probably adapted by the
# given clone function. Apply the cloning function to internal
# objects
# 1. keep a dictionary of the froms we've cloned, and what
# they've become. This is consulted later when we derive
# additional froms from "whereclause" and the columns clause,
# which may still reference the uncloned parent table.
# as of 0.7.4 we also put the current version of _froms, which
# gets cleared on each generation. previously we were "baking"
# _froms into self._from_obj.
self._from_cloned = from_cloned = dict(
(f, clone(f, **kw)) for f in self._from_obj.union(self._froms))
# 2. update persistent _from_obj with the cloned versions.
self._from_obj = util.OrderedSet(from_cloned[f] for f in
self._from_obj)
# the _correlate collection is done separately, what can happen
# here is the same item is _correlate as in _from_obj but the
# _correlate version has an annotation on it - (specifically
# RelationshipProperty.Comparator._criterion_exists() does
# this). Also keep _correlate liberally open with its previous
# contents, as this set is used for matching, not rendering.
self._correlate = set(clone(f) for f in
self._correlate).union(self._correlate)
# 3. clone other things. The difficulty here is that Column
# objects are not actually cloned, and refer to their original
# .table, resulting in the wrong "from" parent after a clone
# operation. Hence _from_cloned and _from_obj supersede what is
# present here.
self._raw_columns = [clone(c, **kw) for c in self._raw_columns]
for attr in '_whereclause', '_having', '_order_by_clause', \
'_group_by_clause', '_for_update_arg':
if getattr(self, attr) is not None:
setattr(self, attr, clone(getattr(self, attr), **kw))
# erase exported column list, _froms collection,
# etc.
self._reset_exported()
def get_children(self, column_collections=True, **kwargs):
"""return child elements as per the ClauseElement specification."""
return (column_collections and list(self.columns) or []) + \
self._raw_columns + list(self._froms) + \
[x for x in
(self._whereclause, self._having,
self._order_by_clause, self._group_by_clause)
if x is not None]
@_generative
def column(self, column):
"""return a new select() construct with the given column expression
added to its columns clause.
E.g.::
my_select = my_select.column(table.c.new_column)
See the documentation for :meth:`.Select.with_only_columns`
for guidelines on adding /replacing the columns of a
:class:`.Select` object.
"""
self.append_column(column)
@util.dependencies("sqlalchemy.sql.util")
def reduce_columns(self, sqlutil, only_synonyms=True):
"""Return a new :func`.select` construct with redundantly
named, equivalently-valued columns removed from the columns clause.
"Redundant" here means two columns where one refers to the
other either based on foreign key, or via a simple equality
comparison in the WHERE clause of the statement. The primary purpose
of this method is to automatically construct a select statement
with all uniquely-named columns, without the need to use
table-qualified labels as :meth:`.apply_labels` does.
When columns are omitted based on foreign key, the referred-to
column is the one that's kept. When columns are omitted based on
WHERE equivalence, the first column in the columns clause is the
one that's kept.
:param only_synonyms: when True, limit the removal of columns
to those which have the same name as the equivalent. Otherwise,
all columns that are equivalent to another are removed.
.. versionadded:: 0.8
"""
return self.with_only_columns(
sqlutil.reduce_columns(
self.inner_columns,
only_synonyms=only_synonyms,
*(self._whereclause, ) + tuple(self._from_obj)
)
)
@_generative
def with_only_columns(self, columns):
r"""Return a new :func:`.select` construct with its columns
clause replaced with the given columns.
The behavior is exactly as if the original
:func:`.select` had been called with the given columns
clause. I.e. a statement::
s = select([table1.c.a, table1.c.b])
s = s.with_only_columns([table1.c.b])
should be exactly equivalent to::
s = select([table1.c.b])
This means that FROM clauses which are only derived
from the column list will be discarded if the new column
list no longer contains that FROM::
>>> table1 = table('t1', column('a'), column('b'))
>>> table2 = table('t2', column('a'), column('b'))
>>> s1 = select([table1.c.a, table2.c.b])
>>> print(s1)
SELECT t1.a, t2.b FROM t1, t2
>>> s2 = s1.with_only_columns([table2.c.b])
>>> print(s2)
SELECT t2.b FROM t1
The preferred way to maintain a specific FROM clause
in the construct, assuming it won't be represented anywhere
else (i.e. not in the WHERE clause, etc.) is to set it using
:meth:`.Select.select_from`::
>>> s1 = select([table1.c.a, table2.c.b]).\
... select_from(table1.join(table2,
... table1.c.a==table2.c.a))
>>> s2 = s1.with_only_columns([table2.c.b])
>>> print(s2)
SELECT t2.b FROM t1 JOIN t2 ON t1.a=t2.a
Care should also be taken to use the correct
set of column objects passed to :meth:`.Select.with_only_columns`.
Since the method is essentially equivalent to calling the
:func:`.select` construct in the first place with the given
columns, the columns passed to :meth:`.Select.with_only_columns`
should usually be a subset of those which were passed
to the :func:`.select` construct, not those which are available
from the ``.c`` collection of that :func:`.select`. That
is::
s = select([table1.c.a, table1.c.b]).select_from(table1)
s = s.with_only_columns([table1.c.b])
and **not**::
# usually incorrect
s = s.with_only_columns([s.c.b])
The latter would produce the SQL::
SELECT b
FROM (SELECT t1.a AS a, t1.b AS b
FROM t1), t1
Since the :func:`.select` construct is essentially being
asked to select both from ``table1`` as well as itself.
"""
self._reset_exported()
rc = []
for c in columns:
c = _interpret_as_column_or_from(c)
if isinstance(c, ScalarSelect):
c = c.self_group(against=operators.comma_op)
rc.append(c)
self._raw_columns = rc
@_generative
def where(self, whereclause):
"""return a new select() construct with the given expression added to
its WHERE clause, joined to the existing clause via AND, if any.
"""
self.append_whereclause(whereclause)
@_generative
def having(self, having):
"""return a new select() construct with the given expression added to
its HAVING clause, joined to the existing clause via AND, if any.
"""
self.append_having(having)
@_generative
def distinct(self, *expr):
r"""Return a new select() construct which will apply DISTINCT to its
columns clause.
:param \*expr: optional column expressions. When present,
the PostgreSQL dialect will render a ``DISTINCT ON (<expressions>)``
construct.
"""
if expr:
expr = [_literal_as_label_reference(e) for e in expr]
if isinstance(self._distinct, list):
self._distinct = self._distinct + expr
else:
self._distinct = expr
else:
self._distinct = True
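# Hedged sketch; ``users`` is an assumed Table:
#
#   stmt = select([users.c.name]).distinct()
#   # PostgreSQL-only DISTINCT ON form:
#   stmt = select([users]).distinct(users.c.name)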
@_generative
def select_from(self, fromclause):
r"""return a new :func:`.select` construct with the
given FROM expression
merged into its list of FROM objects.
E.g.::
table1 = table('t1', column('a'))
table2 = table('t2', column('b'))
s = select([table1.c.a]).\
select_from(
table1.join(table2, table1.c.a==table2.c.b)
)
The "from" list is a unique set on the identity of each element,
so adding an already present :class:`.Table` or other selectable
will have no effect. Passing a :class:`.Join` that refers
to an already present :class:`.Table` or other selectable will have
the effect of concealing the presence of that selectable as
an individual element in the rendered FROM list, instead
rendering it into a JOIN clause.
While the typical purpose of :meth:`.Select.select_from` is to
replace the default, derived FROM clause with a join, it can
also be called with individual table elements, multiple times
if desired, in the case that the FROM clause cannot be fully
derived from the columns clause::
select([func.count('*')]).select_from(table1)
"""
self.append_from(fromclause)
@_generative
def correlate(self, *fromclauses):
r"""return a new :class:`.Select` which will correlate the given FROM
clauses to that of an enclosing :class:`.Select`.
Calling this method turns off the :class:`.Select` object's
default behavior of "auto-correlation". Normally, FROM elements
which appear in a :class:`.Select` that encloses this one via
its :term:`WHERE clause`, ORDER BY, HAVING or
:term:`columns clause` will be omitted from this :class:`.Select`
object's :term:`FROM clause`.
Setting an explicit correlation collection using the
:meth:`.Select.correlate` method provides a fixed list of FROM objects
that can potentially take place in this process.
When :meth:`.Select.correlate` is used to apply specific FROM clauses
for correlation, the FROM elements become candidates for
correlation regardless of how deeply nested this :class:`.Select`
object is, relative to an enclosing :class:`.Select` which refers to
the same FROM object. This is in contrast to the behavior of
"auto-correlation" which only correlates to an immediate enclosing
:class:`.Select`. Multi-level correlation ensures that the link
between enclosed and enclosing :class:`.Select` is always via
at least one WHERE/ORDER BY/HAVING/columns clause in order for
correlation to take place.
If ``None`` is passed, the :class:`.Select` object will correlate
none of its FROM entries, and all will render unconditionally
in the local FROM clause.
:param \*fromclauses: a list of one or more :class:`.FromClause`
constructs, or other compatible constructs (i.e. ORM-mapped
classes) to become part of the correlate collection.
.. versionchanged:: 0.8.0 ORM-mapped classes are accepted by
:meth:`.Select.correlate`.
.. versionchanged:: 0.8.0 The :meth:`.Select.correlate` method no
longer unconditionally removes entries from the FROM clause;
instead, the candidate FROM entries must also be matched by a FROM
entry located in an enclosing :class:`.Select`, which ultimately
encloses this one as present in the WHERE clause, ORDER BY clause,
HAVING clause, or columns clause of an enclosing :meth:`.Select`.
.. versionchanged:: 0.8.2 explicit correlation takes place
via any level of nesting of :class:`.Select` objects; in previous
0.8 versions, correlation would only occur relative to the
immediate enclosing :class:`.Select` construct.
.. seealso::
:meth:`.Select.correlate_except`
:ref:`correlated_subqueries`
"""
self._auto_correlate = False
if fromclauses and fromclauses[0] is None:
self._correlate = ()
else:
self._correlate = set(self._correlate).union(
_interpret_as_from(f) for f in fromclauses)
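    # Usage sketch (``users``/``addresses`` are hypothetical tables): pin the
    # subquery's correlation to ``users`` so only ``addresses`` renders in its
    # FROM list once it is embedded in an enclosing SELECT:
    #
    #   subq = select([addresses.c.id]).\
    #       where(addresses.c.user_id == users.c.id).\
    #       correlate(users)
    #   stmt = select([users.c.name]).where(exists(subq))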
@_generative
def correlate_except(self, *fromclauses):
r"""return a new :class:`.Select` which will omit the given FROM
clauses from the auto-correlation process.
Calling :meth:`.Select.correlate_except` turns off the
:class:`.Select` object's default behavior of
"auto-correlation" for the given FROM elements. An element
specified here will unconditionally appear in the FROM list, while
all other FROM elements remain subject to normal auto-correlation
behaviors.
.. versionchanged:: 0.8.2 The :meth:`.Select.correlate_except`
method was improved to fully prevent FROM clauses specified here
from being omitted from the immediate FROM clause of this
:class:`.Select`.
If ``None`` is passed, the :class:`.Select` object will correlate
all of its FROM entries.
.. versionchanged:: 0.8.2 calling ``correlate_except(None)`` will
correctly auto-correlate all FROM clauses.
:param \*fromclauses: a list of one or more :class:`.FromClause`
constructs, or other compatible constructs (i.e. ORM-mapped
classes) to become part of the correlate-exception collection.
.. seealso::
:meth:`.Select.correlate`
:ref:`correlated_subqueries`
"""
self._auto_correlate = False
if fromclauses and fromclauses[0] is None:
self._correlate_except = ()
else:
self._correlate_except = set(self._correlate_except or ()).union(
_interpret_as_from(f) for f in fromclauses)
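    # Usage sketch (hypothetical tables): auto-correlate every FROM element
    # *except* ``users``, which therefore always renders inside the subquery:
    #
    #   subq = select([func.count('*')]).\
    #       where(users.c.id == addresses.c.user_id).\
    #       correlate_except(users)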
def append_correlation(self, fromclause):
"""append the given correlation expression to this select()
construct.
This is an **in-place** mutation method; the
:meth:`~.Select.correlate` method is preferred, as it provides
standard :term:`method chaining`.
"""
self._auto_correlate = False
self._correlate = set(self._correlate).union(
_interpret_as_from(f) for f in fromclause)
def append_column(self, column):
"""append the given column expression to the columns clause of this
select() construct.
E.g.::
my_select.append_column(some_table.c.new_column)
This is an **in-place** mutation method; the
:meth:`~.Select.column` method is preferred, as it provides standard
:term:`method chaining`.
See the documentation for :meth:`.Select.with_only_columns`
        for guidelines on adding/replacing the columns of a
:class:`.Select` object.
"""
self._reset_exported()
column = _interpret_as_column_or_from(column)
if isinstance(column, ScalarSelect):
column = column.self_group(against=operators.comma_op)
self._raw_columns = self._raw_columns + [column]
def append_prefix(self, clause):
"""append the given columns clause prefix expression to this select()
construct.
This is an **in-place** mutation method; the
:meth:`~.Select.prefix_with` method is preferred, as it provides
standard :term:`method chaining`.
"""
clause = _literal_as_text(clause)
self._prefixes = self._prefixes + (clause,)
def append_whereclause(self, whereclause):
"""append the given expression to this select() construct's WHERE
criterion.
The expression will be joined to existing WHERE criterion via AND.
This is an **in-place** mutation method; the
:meth:`~.Select.where` method is preferred, as it provides standard
:term:`method chaining`.
"""
self._reset_exported()
self._whereclause = and_(
True_._ifnone(self._whereclause), whereclause)
def append_having(self, having):
"""append the given expression to this select() construct's HAVING
criterion.
The expression will be joined to existing HAVING criterion via AND.
This is an **in-place** mutation method; the
:meth:`~.Select.having` method is preferred, as it provides standard
:term:`method chaining`.
"""
self._reset_exported()
self._having = and_(True_._ifnone(self._having), having)
def append_from(self, fromclause):
"""append the given FromClause expression to this select() construct's
FROM clause.
This is an **in-place** mutation method; the
:meth:`~.Select.select_from` method is preferred, as it provides
standard :term:`method chaining`.
"""
self._reset_exported()
fromclause = _interpret_as_from(fromclause)
self._from_obj = self._from_obj.union([fromclause])
@_memoized_property
def _columns_plus_names(self):
if self.use_labels:
names = set()
def name_for_col(c):
if c._label is None or not c._render_label_in_columns_clause:
return (None, c)
name = c._label
if name in names:
name = c.anon_label
else:
names.add(name)
return name, c
return [
name_for_col(c)
for c in util.unique_list(
_select_iterables(self._raw_columns))
]
else:
return [
(None, c)
for c in util.unique_list(
_select_iterables(self._raw_columns))
]
def _populate_column_collection(self):
for name, c in self._columns_plus_names:
if not hasattr(c, '_make_proxy'):
continue
if name is None:
key = None
elif self.use_labels:
key = c._key_label
if key is not None and key in self.c:
key = c.anon_label
else:
key = None
c._make_proxy(self, key=key,
name=name,
name_is_truncatable=True)
def _refresh_for_new_column(self, column):
for fromclause in self._froms:
col = fromclause._refresh_for_new_column(column)
if col is not None:
if col in self.inner_columns and self._cols_populated:
our_label = col._key_label if self.use_labels else col.key
if our_label not in self.c:
return col._make_proxy(
self,
name=col._label if self.use_labels else None,
key=col._key_label if self.use_labels else None,
name_is_truncatable=True)
return None
return None
def _needs_parens_for_grouping(self):
return (
self._limit_clause is not None or
self._offset_clause is not None or
bool(self._order_by_clause.clauses)
)
def self_group(self, against=None):
"""return a 'grouping' construct as per the ClauseElement
specification.
This produces an element that can be embedded in an expression. Note
that this method is called automatically as needed when constructing
expressions and should not require explicit use.
"""
if isinstance(against, CompoundSelect) and \
not self._needs_parens_for_grouping():
return self
return FromGrouping(self)
def union(self, other, **kwargs):
"""return a SQL UNION of this select() construct against the given
selectable."""
return CompoundSelect._create_union(self, other, **kwargs)
def union_all(self, other, **kwargs):
"""return a SQL UNION ALL of this select() construct against the given
selectable.
"""
return CompoundSelect._create_union_all(self, other, **kwargs)
def except_(self, other, **kwargs):
"""return a SQL EXCEPT of this select() construct against the given
selectable."""
return CompoundSelect._create_except(self, other, **kwargs)
def except_all(self, other, **kwargs):
"""return a SQL EXCEPT ALL of this select() construct against the
given selectable.
"""
return CompoundSelect._create_except_all(self, other, **kwargs)
def intersect(self, other, **kwargs):
"""return a SQL INTERSECT of this select() construct against the given
selectable.
"""
return CompoundSelect._create_intersect(self, other, **kwargs)
def intersect_all(self, other, **kwargs):
"""return a SQL INTERSECT ALL of this select() construct against the
given selectable.
"""
return CompoundSelect._create_intersect_all(self, other, **kwargs)
def bind(self):
if self._bind:
return self._bind
froms = self._froms
if not froms:
for c in self._raw_columns:
e = c.bind
if e:
self._bind = e
return e
else:
e = list(froms)[0].bind
if e:
self._bind = e
return e
return None
def _set_bind(self, bind):
self._bind = bind
bind = property(bind, _set_bind)
class ScalarSelect(Generative, Grouping):
_from_objects = []
_is_from_container = True
def __init__(self, element):
self.element = element
self.type = element._scalar_type()
@property
def columns(self):
raise exc.InvalidRequestError('Scalar Select expression has no '
'columns; use this object directly '
'within a column-level expression.')
c = columns
@_generative
def where(self, crit):
"""Apply a WHERE clause to the SELECT statement referred to
by this :class:`.ScalarSelect`.
"""
self.element = self.element.where(crit)
def self_group(self, **kwargs):
return self
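# Usage sketch (hypothetical tables): a ScalarSelect is obtained via
# Select.as_scalar() and embedded directly as a column expression:
#
#   subq = select([func.count(addresses.c.id)]).\
#       where(addresses.c.user_id == users.c.id).\
#       as_scalar()
#   stmt = select([users.c.name, subq.label("address_count")])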
class Exists(UnaryExpression):
"""Represent an ``EXISTS`` clause.
"""
__visit_name__ = UnaryExpression.__visit_name__
_from_objects = []
def __init__(self, *args, **kwargs):
"""Construct a new :class:`.Exists` against an existing
:class:`.Select` object.
Calling styles are of the following forms::
# use on an existing select()
s = select([table.c.col1]).where(table.c.col2==5)
s = exists(s)
# construct a select() at once
exists(['*'], **select_arguments).where(criterion)
# columns argument is optional, generates "EXISTS (SELECT *)"
# by default.
exists().where(table.c.col2==5)
"""
if args and isinstance(args[0], (SelectBase, ScalarSelect)):
s = args[0]
else:
if not args:
args = ([literal_column('*')],)
s = Select(*args, **kwargs).as_scalar().self_group()
UnaryExpression.__init__(self, s, operator=operators.exists,
type_=type_api.BOOLEANTYPE,
wraps_column_expression=True)
def select(self, whereclause=None, **params):
return Select([self], whereclause, **params)
def correlate(self, *fromclause):
e = self._clone()
e.element = self.element.correlate(*fromclause).self_group()
return e
def correlate_except(self, *fromclause):
e = self._clone()
e.element = self.element.correlate_except(*fromclause).self_group()
return e
def select_from(self, clause):
"""return a new :class:`.Exists` construct, applying the given
expression to the :meth:`.Select.select_from` method of the select
statement contained.
"""
e = self._clone()
e.element = self.element.select_from(clause).self_group()
return e
def where(self, clause):
"""return a new exists() construct with the given expression added to
its WHERE clause, joined to the existing clause via AND, if any.
"""
e = self._clone()
e.element = self.element.where(clause).self_group()
return e
class TextAsFrom(SelectBase):
"""Wrap a :class:`.TextClause` construct within a :class:`.SelectBase`
interface.
This allows the :class:`.TextClause` object to gain a ``.c`` collection
and other FROM-like capabilities such as :meth:`.FromClause.alias`,
:meth:`.SelectBase.cte`, etc.
The :class:`.TextAsFrom` construct is produced via the
:meth:`.TextClause.columns` method - see that method for details.
.. versionadded:: 0.9.0
.. seealso::
:func:`.text`
:meth:`.TextClause.columns`
"""
__visit_name__ = "text_as_from"
_textual = True
def __init__(self, text, columns, positional=False):
self.element = text
self.column_args = columns
self.positional = positional
@property
def _bind(self):
return self.element._bind
@_generative
def bindparams(self, *binds, **bind_as_values):
self.element = self.element.bindparams(*binds, **bind_as_values)
def _populate_column_collection(self):
for c in self.column_args:
c._make_proxy(self)
def _copy_internals(self, clone=_clone, **kw):
self._reset_exported()
self.element = clone(self.element, **kw)
def _scalar_type(self):
return self.column_args[0].type
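# Usage sketch: a TextAsFrom is produced by TextClause.columns(), giving the
# textual SELECT a ``.c`` collection usable in further constructs:
#
#   t = text("SELECT id, name FROM users").columns(column('id'), column('name'))
#   stmt = select([t.c.id, t.c.name])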
class AnnotatedFromClause(Annotated):
def __init__(self, element, values):
# force FromClause to generate their internal
# collections into __dict__
element.c
Annotated.__init__(self, element, values)
| [ "[email protected]" ] | |
f9bc5ad8586c2bea44572eba30d6fa69696fb018 | eb9c3dac0dca0ecd184df14b1fda62e61cc8c7d7 | /google/cloud/talent/v4/talent-v4-py/google/cloud/talent_v4/services/completion/async_client.py | 4a61a595b6f482e3f9a5c84d598f49b9f47efb5e | [
"Apache-2.0"
] | permissive | Tryweirder/googleapis-gen | 2e5daf46574c3af3d448f1177eaebe809100c346 | 45d8e9377379f9d1d4e166e80415a8c1737f284d | refs/heads/master | 2023-04-05T06:30:04.726589 | 2021-04-13T23:35:20 | 2021-04-13T23:35:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,683 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Sequence, Tuple, Type, Union
import pkg_resources
import google.api_core.client_options as ClientOptions # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.talent_v4.types import common
from google.cloud.talent_v4.types import completion_service
from .transports.base import CompletionTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import CompletionGrpcAsyncIOTransport
from .client import CompletionClient
class CompletionAsyncClient:
"""A service handles auto completion."""
_client: CompletionClient
DEFAULT_ENDPOINT = CompletionClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = CompletionClient.DEFAULT_MTLS_ENDPOINT
company_path = staticmethod(CompletionClient.company_path)
parse_company_path = staticmethod(CompletionClient.parse_company_path)
tenant_path = staticmethod(CompletionClient.tenant_path)
parse_tenant_path = staticmethod(CompletionClient.parse_tenant_path)
common_billing_account_path = staticmethod(CompletionClient.common_billing_account_path)
parse_common_billing_account_path = staticmethod(CompletionClient.parse_common_billing_account_path)
common_folder_path = staticmethod(CompletionClient.common_folder_path)
parse_common_folder_path = staticmethod(CompletionClient.parse_common_folder_path)
common_organization_path = staticmethod(CompletionClient.common_organization_path)
parse_common_organization_path = staticmethod(CompletionClient.parse_common_organization_path)
common_project_path = staticmethod(CompletionClient.common_project_path)
parse_common_project_path = staticmethod(CompletionClient.parse_common_project_path)
common_location_path = staticmethod(CompletionClient.common_location_path)
parse_common_location_path = staticmethod(CompletionClient.parse_common_location_path)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
CompletionAsyncClient: The constructed client.
"""
return CompletionClient.from_service_account_info.__func__(CompletionAsyncClient, info, *args, **kwargs) # type: ignore
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
CompletionAsyncClient: The constructed client.
"""
return CompletionClient.from_service_account_file.__func__(CompletionAsyncClient, filename, *args, **kwargs) # type: ignore
from_service_account_json = from_service_account_file
@property
def transport(self) -> CompletionTransport:
"""Return the transport used by the client instance.
Returns:
CompletionTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(type(CompletionClient).get_transport_class, type(CompletionClient))
def __init__(self, *,
credentials: credentials.Credentials = None,
transport: Union[str, CompletionTransport] = 'grpc_asyncio',
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the completion client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.CompletionTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = CompletionClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def complete_query(self,
request: completion_service.CompleteQueryRequest = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> completion_service.CompleteQueryResponse:
r"""Completes the specified prefix with keyword
suggestions. Intended for use by a job search auto-
complete search box.
Args:
request (:class:`google.cloud.talent_v4.types.CompleteQueryRequest`):
The request object. Auto-complete parameters.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.talent_v4.types.CompleteQueryResponse:
Response of auto-complete query.
"""
# Create or coerce a protobuf request object.
request = completion_service.CompleteQueryRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.complete_query,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded,
exceptions.ServiceUnavailable,
),
deadline=30.0,
),
default_timeout=30.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((
('tenant', request.tenant),
)),
)
# Send the request.
response = await rpc(
request,
retry=retry,
timeout=timeout,
metadata=metadata,
)
# Done; return the response.
return response
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
'google-cloud-talent',
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = (
'CompletionAsyncClient',
)
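# Usage sketch (the tenant path and query below are illustrative values only;
# a real call requires valid credentials and an existing tenant):
#
#   client = CompletionAsyncClient()
#   request = completion_service.CompleteQueryRequest(
#       tenant="projects/my-project/tenants/my-tenant",
#       query="softw",
#       page_size=5,
#   )
#   response = await client.complete_query(request=request)
#   for result in response.completion_results:
#       print(result.suggestion)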
| [ "bazel-bot-development[bot]@users.noreply.github.com" ] | bazel-bot-development[bot]@users.noreply.github.com |
f4476ad1cff0d97701afa2544924788af2b900f8 | fb909b0716f62ae118afa7d505cbcbd28f62bc63 | /main/migrations/0066_auto_20200911_1141.py | 1d62edbe74183d9bf7fcc2d790cd66555eadc459 | [] | no_license | dkalola/JustAsk-Final | a5b951462cd3c88eb84320bb8fcf10c32f959090 | c2e7c2ffae4d3c2d870d5ba5348a6bae62db5319 | refs/heads/main | 2023-05-24T16:02:17.425251 | 2021-06-16T19:33:52 | 2021-06-16T19:33:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,049 | py | # Generated by Django 3.1.1 on 2020-09-11 11:41
import datetime
from django.db import migrations, models
import tinymce.models
class Migration(migrations.Migration):
dependencies = [
('main', '0065_auto_20200906_1119'),
]
operations = [
migrations.AlterField(
model_name='ebook',
name='Email',
field=models.EmailField(blank=True, default='', max_length=254, verbose_name='Email'),
),
migrations.AlterField(
model_name='ebook',
name='price',
field=models.IntegerField(blank=True, default=0, verbose_name='Rental Price (Rs 450 min)'),
),
migrations.AlterField(
model_name='ebook',
name='price2',
field=models.IntegerField(blank=True, default=0, verbose_name='Buy Price (Rs 650 min)'),
),
migrations.AlterField(
model_name='paper',
name='Date',
field=models.DateTimeField(blank=True, default=datetime.datetime(2020, 9, 11, 11, 41, 44, 41629), null=True, verbose_name='Date Of Paper'),
),
migrations.AlterField(
model_name='question',
name='qid',
field=models.CharField(default='76GSJ6O8', max_length=8, unique=True, verbose_name='Question ID'),
),
migrations.AlterField(
model_name='question',
name='question',
field=tinymce.models.HTMLField(verbose_name='Question'),
),
migrations.AlterField(
model_name='student',
name='EndDate',
field=models.DateTimeField(blank=True, default=datetime.datetime(2020, 9, 11, 11, 41, 44, 34024), null=True, verbose_name='End Date of Subscription'),
),
migrations.AlterField(
model_name='student',
name='StartDate',
field=models.DateTimeField(blank=True, default=datetime.datetime(2020, 9, 11, 11, 41, 44, 33988), null=True, verbose_name='Start Date of Subscription'),
),
]
| [ "[email protected]" ] | |
1863ed33c75a4114a986b15983e8039d257910f5 | a80943c82d8723e49f1f88cec90a41051c54b949 | /chloe/plots.py | 916fcbf5d1cd5bc6274cbbab0cf77c197c9302fa | [] | no_license | munozchris/jacc | decaebec6ed5da0305c16a252138cc79ba0aafaa | f6ea8316c69a8b30f4d77f0f14ae4a9f6613c584 | refs/heads/master | 2021-01-11T17:11:40.468185 | 2017-03-14T20:40:12 | 2017-03-14T20:40:12 | 79,737,052 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,349 | py | import sqlite3
import matplotlib.pyplot as plt
import numpy as np
# Helper functions to determine if a departmnent is in a table:
def is_dept_in_ex(c, dept):
depts_in_ex = c.execute("SELECT DISTINCT Dept FROM e_xTA;").fetchall()
depts_in_ex = [entry[0] for entry in depts_in_ex]
if dept in depts_in_ex:
return True
else:
return False
def is_dept_in_eo(c, dept):
depts_in_eo = c.execute("SELECT DISTINCT Dept FROM e_oTA;").fetchall()
depts_in_eo = [entry[0] for entry in depts_in_eo]
if dept in depts_in_eo:
return True
else:
return False
def is_dept_in_bio(c, dept):
depts_in_bio = c.execute("SELECT DISTINCT Dept FROM e_bio;").fetchall()
depts_in_bio = [entry[0] for entry in depts_in_bio]
if dept in depts_in_bio:
return True
else:
return False
def is_dept_in_lang(c, dept):
depts_in_lang = c.execute("SELECT DISTINCT Dept FROM e_lang;").fetchall()
depts_in_lang = [entry[0] for entry in depts_in_lang]
if dept in depts_in_lang:
return True
else:
return False
def assign_x_values_to_dates(date):
'''
given a date as a string, give it a numerical
value to make graphing easier
'''
first_digit = float(date[-1:]) - 2
if date[:6] == "Winter":
decimal = 0.0
elif date[:6] == "Spring":
decimal = 0.25
elif date[:6] == "Summer":
decimal = 0.5
elif date[:6] == "Autumn":
decimal = 0.75
x_value = first_digit+decimal
return x_value
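# Examples of the mapping ("<Quarter>, <year>" strings as built in
# plot_hours_over_time below):
#   assign_x_values_to_dates("Winter, 2012")  # -> 0.0
#   assign_x_values_to_dates("Spring, 2014")  # -> 2.25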
# GIVEN A DEPARTMENT, MAKE A BAR CHART OF THE AVERAGE HOURS SPENT PER WEEK
# FOR EACH CLASS IN THAT DEPARTMENT
def get_all_hours(dept):
conn = sqlite3.connect("../jae/eval.db")
c = conn.cursor()
hours = []
course_nums = []
if is_dept_in_ex(c, dept):
query = "SELECT CourseNum, AVG(MedHrs) FROM e_xTA WHERE Dept = ? GROUP BY CourseNum"
data = (dept,)
results1 = c.execute(query, data)
for row in results1:
hours.append(row[1])
course_nums.append(row[0])
if is_dept_in_eo(c, dept):
query = "SELECT CourseNum, AVG(MedHrs) FROM e_oTA WHERE Dept = ? GROUP BY CourseNum"
data = (dept,)
results2 = c.execute(query, data)
for row in results2:
hours.append(row[1])
course_nums.append(row[0])
if is_dept_in_bio(c, dept):
query = "SELECT CourseNum, AVG(MedHrs) FROM e_bio WHERE Dept = ? GROUP BY CourseNum"
data = (dept,)
results3 = c.execute(query, data)
for row in results3:
hours.append(row[1])
course_nums.append(row[0])
if is_dept_in_lang(c, dept):
query = "SELECT CourseNum, AVG(MedHrs) FROM e_xlang WHERE Dept = ? GROUP BY CourseNum"
data = (dept,)
results4 = c.execute(query, data)
for row in results4:
hours.append(row[1])
course_nums.append(row[0])
return course_nums, hours
def make_dept_plot(dept):
course_nums, hours = get_all_hours(dept)
N = len(course_nums)
ind = np.arange(N)
width = 1.0
fig, ax = plt.subplots()
rects = ax.bar(ind, hours, width, color='b')
ax.set_ylabel('Average Hours Spent Per Class')
ax.set_title('Average Hours Spent in Each Class in the '+dept+' Department')
ax.set_xticks(ind+width/2)
ax.set_xticklabels(course_nums, rotation=45)
plt.show()
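# Usage sketch (assumes eval.db holds evaluations for a department
# abbreviated "CMSC"):
#   make_dept_plot("CMSC")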
# PLOT AVERAGE HOURS SPENT FOR EACH DEPARTMENT
def plot_all_depts():
conn = sqlite3.connect("../jae/eval.db")
c = conn.cursor()
dept_hour_dict = {}
tables = ["e_xTA", "e_oTA", "e_lang", "e_bio"]
for table in tables:
query = "SELECT Dept, AVG(MedHrs), SUM(NumResponses) FROM "+table+" GROUP BY Dept"
results = c.execute(query)
for dept, hour, responses in results:
if dept not in dept_hour_dict:
dept_hour_dict[dept] = [hour, responses]
else:
total_responses = dept_hour_dict[dept][1] + responses
dept_hour_dict[dept][0] = (dept_hour_dict[dept][1] * dept_hour_dict[dept][0] +
hour * responses) / total_responses
dept_hour_dict[dept][1] = total_responses
initial_list = dept_hour_dict.items()
departments = [value[0] for value in initial_list]
hours = [value[1][0] for value in initial_list]
N = len(departments)
ind = np.arange(N)
width = 1.0
fig, ax = plt.subplots()
rects = ax.bar(ind, hours, width, color='b')
ax.set_ylabel('Average Hours Spent Per Department')
ax.set_title('Average Hours Spent in Each Department')
ax.set_xticks(ind+width/2)
ax.set_xticklabels(departments, rotation=45)
plt.show()
def plot_hours_over_time(dept, coursenum):
conn = sqlite3.connect("../jae/eval.db")
c = conn.cursor()
min_hours = []
max_hours = []
med_hours = []
dates = []
x_values = []
    if is_dept_in_ex(c, dept):
        query = "SELECT CourseSection, MinHrs, MedHrs, MaxHrs FROM e_xTA WHERE Dept = ? AND CourseNum = ?;"
    elif is_dept_in_eo(c, dept):
        query = "SELECT CourseSection, MinHrs, MedHrs, MaxHrs FROM e_oTA WHERE Dept = ? AND CourseNum = ?;"
    elif is_dept_in_bio(c, dept):
        query = "SELECT CourseSection, MinHrs, MedHrs, MaxHrs FROM e_bio WHERE Dept = ? AND CourseNum = ?;"
    elif is_dept_in_lang(c, dept):
        query = "SELECT CourseSection, MinHrs, MedHrs, MaxHrs FROM e_lang WHERE Dept = ? AND CourseNum = ?;"
data = (dept, coursenum)
results = c.execute(query, data).fetchall()
for info, min_hrs, med_hrs, max_hrs in results:
year = str(info[-4:])
quarter = info[-11:][:-5]
date = quarter+", "+year
min_hours.append(min_hrs)
max_hours.append(max_hrs)
med_hours.append(med_hrs)
dates.append(date)
x_values.append(assign_x_values_to_dates(date))
fig, ax = plt.subplots()
ax.scatter(x_values, med_hours, color='r')
ax.scatter(x_values, min_hours, color='b')
ax.scatter(x_values, max_hours, color='g')
ax.get_xaxis().set_ticks([])
ax.set_xlabel("2011 through 2016")
ax.set_ylabel("Max, Min, and Average Hours Spent Per Week")
ax.set_title("Hours Spent Per Week over Time")
plt.show()
| [ "[email protected]" ] | |
70524268a8513ee28ba8c529abac584e1da23674 | c5c3ee2ac4393e7bdbf61f32677221bef5523973 | /src/jk_asyncio_logging/AsyncioMulticastLogger.py | 268925e318a8be94b847fbb09df74c97c6f773e7 | [
"Apache-2.0"
] | permissive | jkpubsrc/python-module-jk-asyncio-logging | 249960382c61353bc0809b77e30312779d3482f2 | 97b6ca79f8f11353c4b554875e353535d1bbf6fc | refs/heads/master | 2022-10-02T03:10:18.771557 | 2020-01-20T18:38:41 | 2020-01-20T18:38:41 | 235,171,209 | 0 | 1 | null | 2022-09-03T13:55:36 | 2020-01-20T18:37:53 | Python | UTF-8 | Python | false | false | 863 | py |
import jk_logging
from .AsyncioLogWrapper import AsyncioLogWrapper
class AsyncioMulticastLogger(AsyncioLogWrapper):
@staticmethod
def create(*argv):
loggers = []
for l in argv:
if isinstance(l, AsyncioLogWrapper):
loggers.append(l._l)
else:
assert isinstance(l, jk_logging.AbstractLogger)
loggers.append(l)
return AsyncioMulticastLogger(jk_logging.MulticastLogger.create(*loggers))
#
def addLogger(self, logger):
if isinstance(logger, AsyncioLogWrapper):
logger = logger._l
assert isinstance(logger, jk_logging.AbstractLogger)
self._l.addLogger(logger)
#
def removeLogger(self, logger):
if isinstance(logger, AsyncioLogWrapper):
logger = logger._l
assert isinstance(logger, jk_logging.AbstractLogger)
self._l.removeLogger(logger)
#
def removeAllLoggers(self):
self._l.removeAllLoggers()
#
#
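# Usage sketch (assumes jk_logging provides ConsoleLogger, as in current
# releases of that package):
#   mlog = AsyncioMulticastLogger.create(jk_logging.ConsoleLogger.create())
#   mlog.removeAllLoggers()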
| [ "[email protected]" ] | |
9822128d048432a0a887770ad6757bcefc3d287d | 7b6313d1c4e0e8a5bf34fc8ac163ad446bc69354 | /python/[hackerrank]Minimum Height Triangle.py | 7f3143c59379db448635a1872d49f9dacbc86239 | [] | no_license | menuka-maharjan/competitive_programming | c6032ae3ddcbc974e0e62744989a2aefa30864b2 | 22d0cea0f96d8bd6dc4d81b146ba20ea627022dd | refs/heads/master | 2023-05-01T05:23:09.641733 | 2021-05-23T16:22:21 | 2021-05-23T16:22:21 | 332,250,476 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 272 | py | #!/bin/python3
import sys
import math
def lowestTriangle(base, area):
x=math.ceil((2*area)/base)
return x
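# Derivation: a triangle's area is base * height / 2, so the smallest integer
# height reaching the requested area is ceil(2 * area / base), as computed above.
#   e.g. lowestTriangle(17, 100) -> ceil(200 / 17) = 12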
# Complete this function
base, area = input().strip().split(' ')
base, area = [int(base), int(area)]
height = lowestTriangle(base, area)
print(height)
| [ "[email protected]" ] | |
097070139417fc16e5f2a0c6fdef23fc212ec909 | 4f906856e07f82f5d14ddabfd3c00c491d4ce8c8 | /diagonal.py | db6d178b8c7d3c44671efdabeb8c983b76e18130 | [] | no_license | SoliareofAstora/NumericMethods | d4ce4d6559b9f0fbad72082ca2f2197ea3c1d349 | 6696a07d0ae45a6f18a44c1257f5631ab78bd859 | refs/heads/master | 2021-09-05T11:10:18.370598 | 2018-01-26T20:05:56 | 2018-01-26T20:05:56 | 112,853,919 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 511 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Dec 2 21:02:09 2017
@author: SoliareOfAstora
"""
import numpy as np
class Diagonal:
x = np.array([])
row = 0
def setNumbers(self, rownumber, vector):
self.x = vector
self.row = rownumber
def getNumber(self, rownumber):
temp = rownumber + self.row
if temp >= 0:
if temp < np.size(self.x):
return self.x[temp]
else:
return 0
else:
return 0
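# Usage sketch: storing a diagonal with offset +1 (one above the main diagonal):
#   d = Diagonal()
#   d.setNumbers(1, np.array([5.0, 7.0]))
#   d.getNumber(-1)  # -> 5.0 (x[-1 + 1])
#   d.getNumber(0)   # -> 7.0 (x[0 + 1])
#   d.getNumber(1)   # -> 0, index out of range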
| [ "[email protected]" ] | |
25b74e239ea33c4f95a5a9e3af28b4ae11b3f352 | 3c00534e9554eeae59d4d0a17e1f06ef8fd3a1f6 | /bgt_payroll/wizard/slip_report.py | 27b9bf10bc9734f3f9b194ad5e74d20b320ecc10 | [] | no_license | bambangbc/HRIS-YT---ODOO-10 | 26524eed207d535aa1ae071a4bc0d1c994778bb0 | f5e3e983f244bc57ffda175d3178fc1c4c74dab0 | refs/heads/main | 2023-06-28T00:06:47.834302 | 2021-07-16T06:22:27 | 2021-07-16T06:22:27 | 386,532,297 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 93,108 | py | # -*- coding: utf-8 -*-
# @Author: xrix
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api
from openerp.exceptions import except_orm
from odoo.addons import decimal_precision as dp
from odoo.exceptions import UserError
import math
import time
import calendar
from datetime import datetime, timedelta, date
from dateutil.relativedelta import relativedelta
from odoo import models, fields, api, exceptions, _
from odoo.tools import DEFAULT_SERVER_DATETIME_FORMAT
from odoo.tools.float_utils import float_is_zero, float_compare
from odoo.exceptions import ValidationError, RedirectWarning, UserError
from collections import OrderedDict
import xlsxwriter
import base64
from cStringIO import StringIO
import pytz
from pytz import timezone
import PIL
import io
dic = {
'to_19' : ('Zero', 'One', 'Two', 'Three', 'Four', 'Five', 'Six', 'Seven', 'Eight', 'Nine', 'Ten', 'Eleven', 'Twelve', 'Thirteen', 'Fourteen', 'Fifteen', 'Sixteen', 'Seventeen', 'Eighteen', 'Nineteen'),
'tens' : ('Twenty', 'Thirty', 'Forty', 'Fifty', 'Sixty', 'Seventy', 'Eighty', 'Ninety'),
'denom' : ('', 'Thousand', 'Million', 'Billion', 'Trillion', 'Quadrillion', 'Quintillion'),
'to_19_id' : ('NOL', 'SATU', 'DUA', 'TIGA', 'EMPAT', 'LIMA', 'ENAM', 'TUJUH', 'DELAPAN', 'SEMBILAN', 'SEPULUH', 'SEBELAS', 'DUA BELAS', 'TIGA BELAS', 'EMPAT BELAS', 'LIMA BELAS', 'ENAM BELAS', 'TUJUH BELAS', 'DELAPAN BELAS', 'SEMBILAN BELAS'),
'tens_id' : ('DUA PULUH', 'TIGA PULUH', 'EMPAT PULUH', 'LIMA PULUH', 'ENAM PULUH', 'TUJUH PULUH', 'DELAPAN PULUH', 'SEMBILAN PULUH'),
'denom_id' : ('', 'RIBU', 'JUTA', 'MILIAR', 'TRILIUN', 'BILIUN')
}
def terbilang(number, currency, bhs):
number = '%.2f' % number
units_name = ' ' + cur_name(currency) + ' '
lis = str(number).split('.')
start_word = english_number(int(lis[0]), bhs)
end_word = english_number(int(lis[1]), bhs)
cents_number = int(lis[1])
cents_name = (cents_number > 1) and 'Sen' or 'sen'
final_result_sen = start_word + units_name + end_word +' '+cents_name
final_result = start_word + units_name
    if end_word.upper() not in ('NOL', 'ZERO'):
        final_result = final_result_sen
return final_result[:1].upper()+final_result[1:]
def _convert_nn(val, bhs):
tens = dic['tens_id']
to_19 = dic['to_19_id']
if bhs == 'en':
tens = dic['tens']
to_19 = dic['to_19']
if val < 20:
return to_19[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens)):
if dval + 10 > val:
if val % 10:
return dcap + ' ' + to_19[val % 10]
return dcap
def _convert_nnn(val, bhs):
word = ''; rat = ' RATUS'; to_19 = dic['to_19_id']
if bhs == 'en':
rat = ' Hundred'
to_19 = dic['to_19']
(mod, rem) = (val % 100, val // 100)
    if rem == 1:
        word = 'One' + rat if bhs == 'en' else 'SERATUS'
if mod > 0:
word = word + ' '
elif rem > 1:
word = to_19[rem] + rat
if mod > 0:
word = word + ' '
if mod > 0:
word = word + _convert_nn(mod, bhs)
return word
def english_number(val, bhs):
denom = dic['denom_id']
if bhs == 'en':
denom = dic['denom']
if val < 100:
return _convert_nn(val, bhs)
if val < 1000:
return _convert_nnn(val, bhs)
for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom))):
if dval > val:
mod = 1000 ** didx
l = val // mod
r = val - (l * mod)
ret = _convert_nnn(l, bhs) + ' ' + denom[didx]
if r > 0:
ret = ret + ' ' + english_number(r, bhs)
if bhs == 'id':
if val < 2000:
ret = ret.replace("SATU RIBU", "SERIBU")
return ret
def cur_name(cur="idr"):
cur = cur.lower()
if cur=="usd":
return "Dollars"
elif cur=="aud":
return "Dollars"
elif cur=="idr":
return "RUPIAH"
elif cur=="jpy":
return "Yen"
elif cur=="sgd":
return "Dollars"
elif cur=="usd":
return "Dollars"
elif cur=="eur":
return "Euro"
else:
return cur
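# Usage sketch of the number-to-words helpers above:
#   terbilang(1250.50, 'idr', 'id')
#   # -> 'SERIBU DUA RATUS LIMA PULUH RUPIAH LIMA PULUH Sen'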
class purchaseConfirmWizard(models.TransientModel):
_name = 'slip.payroll'
warning = fields.Char(readonly=True)
file_data = fields.Binary('File', readonly=True)
@api.model
def get_default_date_model(self):
return pytz.UTC.localize(datetime.now()).astimezone(timezone('Asia/Jakarta'))
@api.multi
def download_slip(self):
fp = StringIO()
workbook = xlsxwriter.Workbook(fp)
worksheet = workbook.add_worksheet('Report Payroll')
worksheet.set_column('A:A', 12)
worksheet.set_column('B:D', 3)
worksheet.set_column('E:E', 12)
worksheet.set_column('F:F', 3)
worksheet.set_column('G:G', 17)
worksheet.set_column('H:H', 3)
worksheet.set_column('I:I', 12)
worksheet.set_column('J:K', 3)
worksheet.set_column('M:M', 12)
worksheet.set_column('N:N', 3)
worksheet.set_column('O:O', 17)
worksheet.set_column('P:P', 3)
worksheet.set_column('Q:Q', 12)
worksheet.set_column('R:T', 3)
worksheet.set_column('U:U', 12)
worksheet.set_column('V:V', 3)
worksheet.set_column('W:W', 17)
worksheet.set_column('X:X', 3)
worksheet.set_column('Y:Y', 12)
worksheet.set_column('Z:AB', 3)
worksheet.set_column('AC:AC', 12)
worksheet.set_column('AD:AD', 3)
worksheet.set_column('AE:AE', 17)
worksheet.set_column('AF:AF', 3)
#konten di sini
active_ids = self._context.get('active_ids')
desain_ids = self.env['hr.payslip'].browse(active_ids)
brand_ids = desain_ids.mapped('name')
merge_format = workbook.add_format({
'bold': 1,
'border': 1,
'align': 'center',
'valign': 'vcenter',
'fg_color': 'white'
})
merge_format.set_font_color('red')
title_format = workbook.add_format({
'bold': 1,
'border': 0,
'valign': 'vcenter',
})
left_format = workbook.add_format({
'bold': 0,
'border' : 1,
'valign' : 'vleft',
})
left2_format = workbook.add_format({
'bold': 0,
'border' : 0,
'valign' : 'vleft',
})
center_format = workbook.add_format({
'bold': 0,
'border' : 0,
'align' : 'center',
'valign' : 'vcenter',
})
center1_format = workbook.add_format({
'bold': 1,
'border' : 0,
'align' : 'center',
'valign' : 'vcenter',
})
title_format.set_text_wrap()
right_format = workbook.add_format({
'bold' : 0,
'border': 0,
'align' : 'right',
'valign': 'vright',
})
right1_format = workbook.add_format({
'bold' : 1,
'border': 0,
'align' : 'right',
'valign': 'vright',
})
left_format_top = workbook.add_format({
'bold': 0,
'top' : 1,
'valign' : 'vleft',
})
right_format_top = workbook.add_format({
'bold': 0,
'top' : 1,
'valign' : 'vright',
})
center_format_top = workbook.add_format({
'bold': 0,
'top' : 1,
'valign' : 'vcenter',
})
row = 1
#for brand_id in brand_ids :
merge_from = row
merge_to = row+1
row += 2
worksheet.merge_range('A%s:AB%s'%(merge_from,merge_to), 'Payroll', title_format)
#desain_brand_ids = desain_ids.filtered(lambda desain: desain.brand_id.id == brand_id.id)
desain_brand_ids = desain_ids
while desain_brand_ids :
five_desain_ids = desain_brand_ids[0:4]
col = {1:['A','B','C','D','E','F','G','H'], 2:['I','J','K','L','M','N','O','P'], 3:['Q','R','S','T','U','V','W','X'], 4:['Y','Z','AA','AB','AC','AD','AE','AF']}
col_range = 1
row += 1
x = 0
jum = len(five_desain_ids)
for desain in five_desain_ids :
one_letter = col[col_range][0]
two_letter = col[col_range][1]
tree_letter = col[col_range][2]
four_letter = col[col_range][3]
five_letter = col[col_range][4]
six_letter = col[col_range][5]
seven_letter = col[col_range][6]
eeg_letter = col[col_range][7]
presences1 = 0
presences2 = 0
overtime1 = 0
overtime2 = 0
day_off1 = 0
day_off2 = 0
day_off_absen1 = 0
day_off_absen2 = 0
kuota1 = 0
kuota2 = 0
alpha1 = 0
alpha2 = 0
BM1 = 0
BM2 = 0
if desain.contract_id.type_id.name == "HARIAN" :
for line in desain.worked_days_line_ids :
if line.code == 'Presences1' :
presences1 = line.number_of_days
elif line.code == 'Presences2' :
presences2 = line.number_of_days
elif line.code == 'Overtime1' :
overtime1 = line.number_of_hours
elif line.code == 'Overtime2' :
overtime2 = line.number_of_hours
elif line.code == 'day_off1' :
day_off1 = line.number_of_days
elif line.code == 'day_off2' :
day_off2 = line.number_of_days
                        elif line.code == 'day_off_absen1' :
                            day_off_absen1 = line.number_of_days
                        elif line.code == 'day_off_absen2' :
                            day_off_absen2 = line.number_of_days
elif line.code == 'kuota1' :
kuota1 = line.number_of_days
elif line.code == 'kuota2' :
kuota2 = line.number_of_days
elif line.code == 'Alpha1' :
alpha1 = line.number_of_days
elif line.code == 'Alpha2' :
alpha2 = line.number_of_days
elif line.code == 'BM1' :
BM1 = line.number_of_days
elif line.code == 'BM2' :
BM2 = line.number_of_days
pres1 = presences1 + 1 + day_off1
pres2 = presences2 + 1 + day_off2
premi1 = presences1 + day_off1
premi2 = presences2 + day_off2
umak = presences1 + presences2 + day_off1 + day_off2
TGPH1 = 0
TGPH2 = 0
TPHH1 = 0
TPHH2 = 0
BM1 = 0
BM2 = 0
BBLH = 0
OVM1 = 0
OVM2 = 0
meals1 = 0
SP = 0
SW = 0
KAS = 0
KRSN = 0
NET = 0
for nominal in desain.line_ids :
if nominal.code == 'TGPH1' :
TGPH1 = nominal.amount
elif nominal.code == 'TGPH2' :
TGPH2 = nominal.amount
elif nominal.code == 'TPHH1' :
TPHH1 = nominal.amount
elif nominal.code == 'TPHH2' :
TPHH2 = nominal.amount
elif nominal.code == 'BM1' :
BM1 = nominal.amount
elif nominal.code == 'BM2' :
BM2 = nominal.amount
elif nominal.code == 'BBLH' :
BBLH = nominal.amount
elif nominal.code == 'OVM1' :
OVM1 = nominal.amount
elif nominal.code == 'OVM2' :
OVM2 = nominal.amount
elif nominal.code == 'MEALS1' :
meals1 = nominal.amount
elif nominal.code == 'SP' :
SP = nominal.amount
elif nominal.code == 'SW' :
SW = nominal.amount
elif nominal.code == 'KAS' :
KAS = nominal.amount
elif nominal.code == 'KRSN' :
KRSN = nominal.amount
elif nominal.code == 'NET' :
NET = nominal.amount
if desain.contract_id.wage > desain.contract_id.umk :
ovrt = int(desain.contract_id.umk/56/4*1.5)
else :
ovrt = int(desain.contract_id.wage/56/4*1.5)
bulan = ["","Januari","Februari","Maret","April","Mei","Juni","Juli","Agustus","September","Oktober","November","Desember"]
tanggal = desain.date_to[8:10] + "-" + bulan[int(desain.date_to[5:7])] + "-" + desain.date_to[:4]
worksheet.write('%s%s'%(one_letter,row), 'NAMA', workbook.add_format({'bold': 0,'top' : 1,'left': 1,'valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(two_letter,row,five_letter,row),desain.employee_id.name,left_format_top)
worksheet.write('%s%s'%(six_letter,row), "", center_format_top)
worksheet.write('%s%s'%(seven_letter,row),tanggal,workbook.add_format({'bold': 0,'align':'right','top': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row),'',workbook.add_format({'bold': 0,'top': 1,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+1), 'NIK', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(two_letter,row+1,five_letter,row+1),desain.employee_id.nik,left2_format)
worksheet.write('%s%s'%(seven_letter,row+1),desain.employee_id.bank_account_id.bank_id.name,workbook.add_format({'bold': 0,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+1),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+2), 'DIV', workbook.add_format({'bold': 0,'bottom': 1,'left': 1,'valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(two_letter,row+2,five_letter,row+2),desain.employee_id.department_id.name,workbook.add_format({'bold': 0,'bottom' : 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(four_letter,row+2), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+2), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(six_letter,row+2), "", workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+2), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+2),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+3), 'G POKOK', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+4), '', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(two_letter,row+3), '1', center_format)
worksheet.write('%s%s'%(two_letter,row+4), '2', center_format)
worksheet.write('%s%s'%(tree_letter,row+3), presences1+1, center_format)
worksheet.write('%s%s'%(tree_letter,row+4), presences2+1, center_format)
worksheet.write('%s%s'%(four_letter,row+3), 'x', center_format)
worksheet.write('%s%s'%(four_letter,row+4), 'x', center_format)
worksheet.write('%s%s'%(five_letter,row+3), round((desain.contract_id.wage*0.65)/28), workbook.add_format({'num_format': '#,##0', 'valign':'right'}))
worksheet.write('%s%s'%(five_letter,row+4), round((desain.contract_id.wage*0.65)/28), workbook.add_format({'num_format': '#,##0', 'valign':'right'}))
worksheet.write('%s%s'%(six_letter,row+3), "'=", center_format)
worksheet.write('%s%s'%(six_letter,row+4), "'=", center_format)
worksheet.write('%s%s'%(seven_letter,row+3), round(TGPH1), workbook.add_format({'bold': 0,'valign' : 'vright','num_format': '#,##0'}))
worksheet.write('%s%s'%(seven_letter,row+4), round(TGPH2), workbook.add_format({'bold': 0,'valign' : 'vright','num_format': '#,##0'}))
worksheet.write('%s%s'%(eeg_letter,row+3),'',workbook.add_format({'bold': 0,'right': 1,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+4),'',workbook.add_format({'bold': 0,'right': 1,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+5), 'PREMI', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+6), '', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(two_letter,row+5), '1', center_format)
worksheet.write('%s%s'%(two_letter,row+6), '2', center_format)
worksheet.write('%s%s'%(tree_letter,row+5), premi1, center_format)
worksheet.write('%s%s'%(tree_letter,row+6), premi2, center_format)
worksheet.write('%s%s'%(four_letter,row+5), 'x', center_format)
worksheet.write('%s%s'%(four_letter,row+6), 'x', center_format)
worksheet.write('%s%s'%(five_letter,row+5), round(desain.contract_id.wage*0.2/24), workbook.add_format({'num_format': '#,##0', 'valign':'right'}))
worksheet.write('%s%s'%(five_letter,row+6), round(desain.contract_id.wage*0.2/24), workbook.add_format({'num_format': '#,##0', 'valign':'right'}))
worksheet.write('%s%s'%(six_letter,row+5), "'=", center_format)
worksheet.write('%s%s'%(six_letter,row+6), "'=", center_format)
worksheet.write('%s%s'%(seven_letter,row+5), round(TPHH1), workbook.add_format({'bold': 0,'valign' : 'vright','num_format': '#,##0'}))
worksheet.write('%s%s'%(seven_letter,row+6), round(TPHH2), workbook.add_format({'bold': 0,'valign' : 'vright','num_format': '#,##0'}))
worksheet.write('%s%s'%(eeg_letter,row+5),'',workbook.add_format({'bold': 0,'right': 1,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+6),'',workbook.add_format({'bold': 0,'right': 1,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+7), 'BNS MG', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+8), '', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(two_letter,row+7), '1', center_format)
worksheet.write('%s%s'%(two_letter,row+8), '2', center_format)
worksheet.write('%s%s'%(seven_letter,row+7), round(BM1), workbook.add_format({'num_format': '#,##0','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(seven_letter,row+8), round(BM2), workbook.add_format({'num_format': '#,##0','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+7),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+8),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+9),'BNS BLN',workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+9),round(BBLH), workbook.add_format({'num_format': '#,##0','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+9),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+10), 'U LMBR', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+11), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(two_letter,row+10), '1', center_format)
worksheet.write('%s%s'%(two_letter,row+11), '2', workbook.add_format({'bold': 0,'bottom': 1,'align' : 'center','valign' : 'vcenter'}))
worksheet.write('%s%s'%(tree_letter,row+10), overtime1, center_format)
worksheet.write('%s%s'%(tree_letter,row+11), overtime2, workbook.add_format({'bold': 0,'bottom': 1,'align' : 'center','valign' : 'vcenter'}))
worksheet.write('%s%s'%(four_letter,row+10), 'x', center_format)
worksheet.write('%s%s'%(four_letter,row+11), 'x', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'center'}))
worksheet.write('%s%s'%(five_letter,row+10), ovrt, workbook.add_format({'num_format': '#,##0', 'valign':'right'}))
worksheet.write('%s%s'%(five_letter,row+11), ovrt, workbook.add_format({'num_format': '#,##0','bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(six_letter,row+10), "'=", center_format)
worksheet.write('%s%s'%(six_letter,row+11), "'=", workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(seven_letter,row+10), round(OVM1), workbook.add_format({'num_format': '#,##0','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(seven_letter,row+11), round(OVM2), workbook.add_format({'num_format': '#,##0','bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+10),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+11),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+12), round(TGPH1 + TGPH2 + TPHH1 + TPHH2 + BM1 + BM2 + BBLH + OVM1 + OVM2) ,workbook.add_format({'num_format': '#,##0', 'valign':'right'}))
worksheet.write('%s%s'%(one_letter,row+12),'',workbook.add_format({'bold': 0,'left': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+12),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+13),'U MKN',workbook.add_format({'bold': 0,'bottom': 1,'left': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+13), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(tree_letter,row+13),umak,workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(four_letter,row+13),'x',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(five_letter,row+13),desain.contract_id.meals,workbook.add_format({'bold': 0,'bottom': 1,'align' : 'center','valign' : 'vright'}))
worksheet.write('%s%s'%(six_letter,row+13),"'=",workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(seven_letter,row+13),round(meals1),workbook.add_format({'num_format': '#,##0','bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+13),'+',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
col_range += 1
worksheet.write('%s%s'%(seven_letter,row+14),round(TGPH1 + TGPH2 + TPHH1 + TPHH2 + BM1 + BM2 + BBLH + OVM1 + OVM2 + meals1),right_format)
worksheet.write('%s%s'%(one_letter,row+14),'',workbook.add_format({'bold': 0,'left': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+14),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+15),'S POKOK',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+15),SP,workbook.add_format({'num_format': '#,##0', 'valign':'right'}))
worksheet.write('%s%s'%(eeg_letter,row+15),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+16),'S WAJIB',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+16),SW,workbook.add_format({'num_format': '#,##0', 'valign':'right'}))
worksheet.write('%s%s'%(eeg_letter,row+16),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+17),'KB KAS',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+17),KAS,workbook.add_format({'num_format': '#,##0', 'valign':'right'}))
worksheet.write('%s%s'%(eeg_letter,row+17),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+18),'KOREKSI',workbook.add_format({'bold': 0,'left': 1,'align': 'left','bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(two_letter,row+18), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(tree_letter,row+18),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(four_letter,row+18),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(five_letter,row+18),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(six_letter,row+18),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(seven_letter,row+18),KRSN,workbook.add_format({'num_format': '#,##0', 'bottom':1, 'valign':'right'}))
worksheet.write('%s%s'%(eeg_letter,row+18),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+19),'',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+19),'TOTAL',center1_format)
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+19,seven_letter,row+19),round(TGPH1 + TGPH2 + TPHH1 + TPHH2 + BM1 + BM2 + BBLH + OVM1 + OVM2 + meals1 + SP + SW + KAS + KRSN),workbook.add_format({'num_format': '#,##0', 'valign':'right'}))
worksheet.write('%s%s'%(eeg_letter,row+19),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+20),'',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+20),'REAL',workbook.add_format({'bold': 1,'bottom': 1,'font_size': 15,'align': 'center','valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+20,seven_letter,row+20),NET,workbook.add_format({'num_format': '#,##0','bold': 1,'bottom': 1,'font_size': 15,'align': 'right','valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+20),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+21),'',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+21),desain.employee_id.bank_account_id.bank_id.name,workbook.add_format({'bold': 1,'font_size': 15,'align': 'center','valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+21,seven_letter,row+21),NET,workbook.add_format({'num_format': '#,##0','font_size':15,'bold':1,'align': 'right','valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+21),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+22),'',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+22),'CASH',workbook.add_format({'bold': 1,'font_size': 15,'align': 'center','valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+22,seven_letter,row+22),'-',workbook.add_format({'num_format': '#,##0','bold': 1,'font_size': 15,'align': 'right','valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+22),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+23),'',workbook.add_format({'bold': 0,'left': 1,'bottom': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+23),'TOTAL',workbook.add_format({'bold': 1,'align': 'center','valign' : 'vright'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+23,seven_letter,row+23),NET,workbook.add_format({'num_format': '#,##0','bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+23),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+23), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(tree_letter,row+23),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(four_letter,row+23),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
# disabled: these writes overwrote the merged TOTAL range created above;
# the sibling contract-type branches below already keep them commented out
#worksheet.write('%s%s'%(six_letter,row+23),NET,workbook.add_format({'num_format': '#,##0','bold': 0,'bottom': 1,'valign' : 'vcenter'}))
#worksheet.write('%s%s'%(seven_letter,row+23),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
NETS = terbilang(NET, 'idr', 'idr')
worksheet.merge_range('%s%s:%s%s'%(one_letter,row+24,eeg_letter,row+25),NETS,workbook.add_format({'bold': 1,'border': 1,'align':'center','valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+26),'Payroll',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+26), '', workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.merge_range('%s%s:%s%s'%(tree_letter,row+26,four_letter,row+26),'Finance',workbook.add_format({'bold': 0,'align': 'left','valign' : 'vright'}))
worksheet.write('%s%s'%(five_letter,row+26),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vcenter'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+26,seven_letter,row+26),'Checker',workbook.add_format({'bold': 0,'align':'left','valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+26),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+27),'',workbook.add_format({'bold': 0,'bottom': 1,'left': 1,'align': 'left','valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+27), '', workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
worksheet.merge_range('%s%s:%s%s'%(tree_letter,row+27,four_letter,row+27),'',workbook.add_format({'bold': 0,'align': 'left','bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(five_letter,row+27),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vcenter'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+27,seven_letter,row+27),'',workbook.add_format({'bold': 0,'bottom': 1,'align':'left','valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+27),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
x += 1
if x == jum:
row += 28
desain_brand_ids -= five_desain_ids
elif desain.contract_id.type_id.name == "TRAINING" :
presences1 = 0
presences2 = 0
overtime1 = 0
overtime2 = 0
umak = 0
ov1 = 0
ov2 = 0
SP = 0
SW = 0
KAS = 0
KRSN = 0
NET = 0
for line in desain.worked_days_line_ids :
if line.code == 'Presences1' :
presences1 = line.number_of_days
elif line.code == 'Presences2' :
presences2 = line.number_of_days
elif line.code == 'Overtime1' :
overtime1 = line.number_of_hours
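# overtime pay: hours * (monthly wage / 56 / 4 * 1.5), i.e. 1.5x an hourly
# rate with the wage capped at the UMK (regional minimum wage); the /56/4
# divisor presumably assumes a 56-hour week over 4 weeks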
if desain.contract_id.wage > desain.contract_id.umk :
ov1 = int(overtime1 * (desain.contract_id.umk/56/4*1.5))
else :
ov1 = int(overtime1 * (desain.contract_id.wage/56/4*1.5))
elif line.code == 'Overtime2' :
overtime2 = line.number_of_hours
if desain.contract_id.wage > desain.contract_id.umk :
ov2 = int(overtime2 * (desain.contract_id.umk/56/4*1.5))
else :
ov2 = int(overtime2 * (desain.contract_id.wage/56/4*1.5))
for nominal in desain.line_ids :
if nominal.code == 'SP' :
SP = nominal.amount
elif nominal.code == 'SW' :
SW = nominal.amount
elif nominal.code == 'KAS' :
KAS = nominal.amount
elif nominal.code == 'KRSN' :
KRSN = nominal.amount
elif nominal.code == 'NET' :
NET = nominal.amount
bulan = ["","Januari","Februari","Maret","April","Mei","Juni","Juli","Agustus","September","Oktober","November","Desember"]
tanggal = desain.date_to[8:10] + "-" + bulan[int(desain.date_to[5:7])] + "-" + desain.date_to[:4]
worksheet.write('%s%s'%(one_letter,row), 'NAMA', workbook.add_format({'bold': 0,'top' : 1,'left': 1,'valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(two_letter,row,five_letter,row),desain.employee_id.name,left_format_top)
worksheet.write('%s%s'%(six_letter,row), "", center_format_top)
worksheet.write('%s%s'%(seven_letter,row),tanggal,workbook.add_format({'bold': 0,'align':'right','top': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row),'',workbook.add_format({'bold': 0,'top': 1,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+1), 'NIK', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(two_letter,row+1,five_letter,row+1),desain.employee_id.nik,left2_format)
worksheet.write('%s%s'%(seven_letter,row+1),desain.employee_id.bank_account_id.bank_id.name,workbook.add_format({'bold': 0,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+1),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+2), 'DIV', workbook.add_format({'bold': 0,'bottom': 1,'left': 1,'valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(two_letter,row+2,five_letter,row+2),desain.employee_id.department_id.name,workbook.add_format({'bold': 0,'bottom' : 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(four_letter,row+2), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+2), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(six_letter,row+2), "", workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+2), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+2),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+3), 'U SAKU', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+4), '', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(two_letter,row+3), '1', center_format)
worksheet.write('%s%s'%(two_letter,row+4), '2', center_format)
worksheet.write('%s%s'%(tree_letter,row+3), presences1+1, center_format)
worksheet.write('%s%s'%(tree_letter,row+4), presences2+1, center_format)
worksheet.write('%s%s'%(four_letter,row+3), 'x', center_format)
worksheet.write('%s%s'%(four_letter,row+4), 'x', center_format)
worksheet.write('%s%s'%(five_letter,row+3), round(desain.contract_id.wage/24), workbook.add_format({'num_format': '#,##0','right':1}))
worksheet.write('%s%s'%(five_letter,row+4), round(desain.contract_id.wage/24), workbook.add_format({'num_format': '#,##0','right':1}))
worksheet.write('%s%s'%(six_letter,row+3), "'=", center_format)
worksheet.write('%s%s'%(six_letter,row+4), "'=", center_format)
worksheet.write('%s%s'%(seven_letter,row+3), round((desain.contract_id.wage/24)*presences1), workbook.add_format({'num_format': '#,##0','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(seven_letter,row+4), round((desain.contract_id.wage/24)*presences2), workbook.add_format({'num_format': '#,##0','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+3),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+4),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+5), 'U LEMBUR', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+6), '', workbook.add_format({'bold': 0,'left': 1,'bottom':1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(two_letter,row+5), '1', center_format)
worksheet.write('%s%s'%(two_letter,row+6), '2', workbook.add_format({'bold': 0,'bottom':1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(tree_letter,row+5), overtime1, workbook.add_format({'num_format': '#,##0','right':1}))
worksheet.write('%s%s'%(tree_letter,row+6), overtime2, workbook.add_format({'num_format': '#,##0','bold': 0,'align' : 'center','bottom':1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(four_letter,row+5), 'x', center_format)
worksheet.write('%s%s'%(four_letter,row+6), 'x', workbook.add_format({'bold': 0,'align' : 'center','bottom':1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+5), round(desain.contract_id.wage*0.65/24), workbook.add_format({'num_format': '#,##0','right':1}))
worksheet.write('%s%s'%(five_letter,row+6), round(desain.contract_id.wage*0.65/24), workbook.add_format({'num_format': '#,##0','bold': 0,'bottom':1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(six_letter,row+5), "'=", center_format)
worksheet.write('%s%s'%(six_letter,row+6), "'=", workbook.add_format({'bold': 0,'align' : 'center','bottom':1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+5), ov1, workbook.add_format({'num_format': '#,##0','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(seven_letter,row+6), ov2, workbook.add_format({'num_format': '#,##0','bold': 0,'bottom':1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+5),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+6),'',workbook.add_format({'bold': 0,'right': 1,'bottom':1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+7), round(((desain.contract_id.wage/24)*presences1) + ((desain.contract_id.wage/24)*presences2) + ov1 + ov2), workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(one_letter,row+7),'',workbook.add_format({'bold': 0,'left': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+7),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+8),'U MKN',workbook.add_format({'bold': 0,'bottom': 1,'left': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+8), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(tree_letter,row+8),"",workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(four_letter,row+8),'x',workbook.add_format({'bold': 0,'align' : 'center','bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(five_letter,row+8),desain.contract_id.meals,workbook.add_format({'bold': 0,'bottom': 1,'align' : 'center','valign' : 'vright'}))
worksheet.write('%s%s'%(six_letter,row+8),"'=",workbook.add_format({'bold': 0,'align' : 'center','bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(seven_letter,row+8),desain.contract_id.meals*(presences1+presences2),workbook.add_format({'num_format': '#,##0','bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+8),'+',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
col_range += 1
worksheet.write('%s%s'%(seven_letter,row+9),round(((desain.contract_id.wage/24)*presences1) + ((desain.contract_id.wage/24)*presences2) + ov1 + ov2 - (desain.contract_id.meals*(presences1+presences2))), workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(one_letter,row+9),'',workbook.add_format({'bold': 0,'left': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+9),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+10),'S POKOK',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+10),SP,workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+10),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+11),'S WAJIB',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+11),SW,workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+11),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+12),'KB KAS',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+12),KAS,workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+12),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+13),'KOREKSI',workbook.add_format({'bold': 0,'left': 1,'align': 'left','bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+13),KRSN,workbook.add_format({'num_format': '#,##0','bottom':1,'valign':'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+13),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+13), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(tree_letter,row+13),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(four_letter,row+13),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(five_letter,row+13),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(six_letter,row+13),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
#worksheet.write('%s%s'%(seven_letter,row+13),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+14),'',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+14),'TOTAL',center1_format)
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+14,seven_letter,row+14),round(((desain.contract_id.wage/24)*presences1) + ((desain.contract_id.wage/24)*presences2) + ov1 + ov2 - (desain.contract_id.meals*(presences1+presences2)) + SP + SW + KAS + KRSN),workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+14),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+15),'',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+15),'REAL',workbook.add_format({'bold': 1,'bottom': 1,'font_size': 15,'align': 'center','valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+15,seven_letter,row+15),NET,workbook.add_format({'num_format': '#,##0','bold': 1,'bottom': 1,'font_size': 15,'align': 'right','valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+15),'',workbook.add_format({'bold': 0,'right':1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+16),'',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+16),desain.employee_id.bank_account_id.bank_id.name,workbook.add_format({'bold': 1,'font_size': 15,'align': 'center','valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+16,seven_letter,row+16),NET,workbook.add_format({'num_format': '#,##0','bold': 1,'font_size': 15,'align': 'right','valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+16),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+17),'',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+17),'CASH',center1_format)
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+17,seven_letter,row+17),'-',workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+17),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+18),'',workbook.add_format({'bold': 0,'left': 1,'bottom': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+18),'TOTAL',workbook.add_format({'bold': 1,'bottom': 1,'align': 'center','valign' : 'vright'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+18,seven_letter,row+18),NET,workbook.add_format({'num_format': '#,##0','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+18),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+18), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(tree_letter,row+18),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(four_letter,row+18),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
#worksheet.write('%s%s'%(six_letter,row+18),NET,workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
#worksheet.write('%s%s'%(seven_letter,row+18),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
NETS = terbilang(NET, 'idr', 'idr')
worksheet.merge_range('%s%s:%s%s'%(one_letter,row+19,eeg_letter,row+20),NETS,workbook.add_format({'bold': 1,'border': 1,'align':'center','valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+21),'Payroll',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+21), '', workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.merge_range('%s%s:%s%s'%(tree_letter,row+21,four_letter,row+21),'Finance',workbook.add_format({'bold': 0,'align': 'left','valign' : 'vright'}))
worksheet.write('%s%s'%(five_letter,row+21),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vcenter'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+21,seven_letter,row+21),'Checker',workbook.add_format({'bold': 0,'align':'left','valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+21),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+22),'',workbook.add_format({'bold': 0,'bottom': 1,'left': 1,'align': 'left','valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+22), '', workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
worksheet.merge_range('%s%s:%s%s'%(tree_letter,row+22,four_letter,row+22),'',workbook.add_format({'bold': 0,'align': 'left','bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(five_letter,row+22),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vcenter'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+22,seven_letter,row+22),'',workbook.add_format({'bold': 0,'bottom': 1,'align':'left','valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+22),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
x += 1
if x == jum:
row += 23
desain_brand_ids -= five_desain_ids
elif desain.contract_id.type_id.name == "BULANAN" or desain.contract_id.type_id.name == "STAFF" :
worksheet.set_column('A:A', 12)
worksheet.set_column('B:D', 3)
worksheet.set_column('E:E', 12)
worksheet.set_column('F:F', 3)
worksheet.set_column('G:G', 17)
worksheet.set_column('H:H', 3)
worksheet.set_column('I:I', 12)
worksheet.set_column('J:K', 3)
worksheet.set_column('M:M', 12)
worksheet.set_column('N:N', 3)
worksheet.set_column('O:O', 17)
worksheet.set_column('P:P', 3)
worksheet.set_column('Q:Q', 12)
worksheet.set_column('R:T', 3)
worksheet.set_column('U:U', 12)
worksheet.set_column('V:V', 3)
worksheet.set_column('W:W', 17)
worksheet.set_column('X:X', 3)
worksheet.set_column('Y:Y', 12)
worksheet.set_column('Z:AB', 3)
worksheet.set_column('AC:AC', 12)
worksheet.set_column('AD:AD', 3)
worksheet.set_column('AE:AE', 17)
worksheet.set_column('AF:AF', 3)
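# the widths above repeat one 8-column payslip layout four times
# (roughly A-H, I-P, Q-X, Y-AF), so four slips fit side by side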
presences = 0
overtime = 0
SP = 0
SW = 0
KAS = 0
KRSN = 0
NET = 0
TGP = 0
TPH = 0
BBL = 0
OVTB = 0
MEALS = 0
for line in desain.worked_days_line_ids :
if line.code == 'Presences' :
presences = line.number_of_days
elif line.code == 'Overtime' :
overtime = line.number_of_hours
if desain.contract_id.wage > desain.contract_id.umk :
ov = overtime * (desain.contract_id.umk/56/4*1.5)
else :
ov = overtime * (desain.contract_id.wage/56/4*1.5)
for nominal in desain.line_ids :
if nominal.code == 'SP' :
SP = nominal.amount
elif nominal.code == 'SW' :
SW = nominal.amount
elif nominal.code == 'KAS' :
KAS = nominal.amount
elif nominal.code == 'KRSN' :
KRSN = nominal.amount
elif nominal.code == 'NET' :
NET = nominal.amount
elif nominal.code == 'TGP' :
TGP = nominal.amount
elif nominal.code == 'TPH' :
TPH = nominal.amount
elif nominal.code == 'BBL' :
BBL = nominal.amount
elif nominal.code == 'OVTB' :
OVTB = nominal.amount
elif nominal.code == 'MEALS' :
MEALS = nominal.amount
bulan = ["","Januari","Februari","Maret","April","Mei","Juni","Juli","Agustus","September","Oktober","November","Desember"]
tanggal = desain.date_to[8:10] + "-" + bulan[int(desain.date_to[5:7])] + "-" + desain.date_to[:4]
worksheet.write('%s%s'%(one_letter,row), 'NAMA', workbook.add_format({'bold': 0,'top' : 1,'left': 1,'valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(two_letter,row,five_letter,row),desain.employee_id.name,left_format_top)
worksheet.write('%s%s'%(six_letter,row), "", center_format_top)
worksheet.write('%s%s'%(seven_letter,row),tanggal,workbook.add_format({'bold': 0,'align':'right','top': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row),'',workbook.add_format({'bold': 0,'top': 1,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+1), 'NIK', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(two_letter,row+1,five_letter,row+1),desain.employee_id.nik,left2_format)
worksheet.write('%s%s'%(seven_letter,row+1),desain.employee_id.bank_account_id.bank_id.name,workbook.add_format({'bold': 0,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+1),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+2), 'DIV', workbook.add_format({'bold': 0,'bottom': 1,'left': 1,'valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(two_letter,row+2,five_letter,row+2),desain.employee_id.department_id.name,workbook.add_format({'bold': 0,'bottom' : 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(four_letter,row+2), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+2), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(six_letter,row+2), "", workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+2), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+2),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+3), 'G POKOK', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+4), 'PREMI', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(two_letter,row+3), presences, center_format)
worksheet.write('%s%s'%(two_letter,row+4), presences, center_format)
worksheet.write('%s%s'%(tree_letter,row+3), 'x', center_format)
worksheet.write('%s%s'%(tree_letter,row+4), 'x', center_format)
worksheet.write('%s%s'%(four_letter,row+3), '', center_format)
worksheet.write('%s%s'%(four_letter,row+4), '', center_format)
worksheet.write('%s%s'%(five_letter,row+3), (desain.contract_id.wage*0.7)/25, workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(five_letter,row+4), (desain.contract_id.wage*0.2)/25, workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(six_letter,row+3), "'=", center_format)
worksheet.write('%s%s'%(six_letter,row+4), "'=", center_format)
worksheet.write('%s%s'%(seven_letter,row+3), TGP, workbook.add_format({'num_format': '#,##0','bold': 0,'align':'right','valign' : 'vright'}))
worksheet.write('%s%s'%(seven_letter,row+4), TPH, workbook.add_format({'num_format': '#,##0','bold': 0,'align':'right','valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+3),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+4),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+5), 'BNS BLN', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+6), 'U LEMBUR', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+7), '', workbook.add_format({'bold': 0,'left': 1,'bottom':1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(two_letter,row+5), '', center_format)
worksheet.write('%s%s'%(two_letter,row+6), overtime, workbook.add_format({'bold': 0,'align':'right','valign' : 'vleft'}))
worksheet.write('%s%s'%(two_letter,row+7), '0', workbook.add_format({'bold': 0,'align':'right','bottom':1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(tree_letter,row+5), '', center_format)
worksheet.write('%s%s'%(tree_letter,row+6), 'x', workbook.add_format({'bold': 0,'align':'center','valign' : 'vleft'}))
worksheet.write('%s%s'%(tree_letter,row+7), 'x', workbook.add_format({'bold': 0,'align':'center','bottom':1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(four_letter,row+5), '', center_format)
worksheet.write('%s%s'%(four_letter,row+6), '', workbook.add_format({'bold': 0,'valign' : 'vleft'}))
worksheet.write('%s%s'%(four_letter,row+7), '', workbook.add_format({'bold': 0,'bottom':1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+5), '', right_format)
worksheet.write('%s%s'%(five_letter,row+6), int(ov), workbook.add_format({'num_format': '#,##0','bold': 0,'align':'right' ,'valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+7), '', workbook.add_format({'bold': 0,'bottom':1,'align':'right' ,'valign' : 'vleft'}))
worksheet.write('%s%s'%(six_letter,row+5), "'", center_format)
worksheet.write('%s%s'%(six_letter,row+6), "'=", workbook.add_format({'bold': 0,'align':'center','valign' : 'vleft'}))
worksheet.write('%s%s'%(six_letter,row+7), "'=", workbook.add_format({'bold': 0,'align':'center','bottom':1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+5), int(BBL), workbook.add_format({'num_format': '#,##0','bold': 0,'align':'right','valign' : 'vright'}))
worksheet.write('%s%s'%(seven_letter,row+6), int(OVTB), workbook.add_format({'num_format': '#,##0','bold': 0,'align':'right','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+7), '0', workbook.add_format({'bold': 0,'align':'right','bottom':1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+5),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+6),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+7),'',workbook.add_format({'bold': 0,'right': 1,'bottom':1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+8), TGP+TPH+BBL+OVTB,workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(one_letter,row+8),'',workbook.add_format({'bold': 0,'left': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+8),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+9),'U MKN',workbook.add_format({'bold': 0,'bottom': 1,'left': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+9), presences, workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(tree_letter,row+9),"x",workbook.add_format({'bold': 0,'align':'center','bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(four_letter,row+9),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(five_letter,row+9),desain.contract_id.meals,workbook.add_format({'bold': 0,'align':'right','bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(six_letter,row+9),"'=",workbook.add_format({'bold': 0,'bottom': 1,'align':'center','valign' : 'vcenter'}))
worksheet.write('%s%s'%(seven_letter,row+9),MEALS,workbook.add_format({'num_format': '#,##0','bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+9),'+',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
col_range += 1
worksheet.write('%s%s'%(seven_letter,row+10),TGP+TPH+BBL+OVTB+MEALS,workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(one_letter,row+10),'',workbook.add_format({'bold': 0,'left': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+10),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+11),'S POKOK',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+11),SP,workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+11),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+12),'S WAJIB',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+12),SW,workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+12),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+13),'KB KAS',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+13),KAS,workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+13),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+14),'KOREKSI',workbook.add_format({'bold': 0,'left': 1,'align': 'left','bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+14),KRSN,workbook.add_format({'num_format': '#,##0','bold': 0,'bottom': 1,'align': 'right','valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+14),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+14), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(tree_letter,row+14),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(four_letter,row+14),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(five_letter,row+14),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(six_letter,row+14),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(one_letter,row+15),'',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+15),'TOTAL',center1_format)
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+15,seven_letter,row+15),TGP+TPH+BBL+OVTB+MEALS+SP+SW+KAS+KRSN,workbook.add_format({'num_format': '#,##0','valign':'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+15),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+16),'',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+16),'REAL',workbook.add_format({'bold': 1,'bottom': 1,'font_size': 15,'align': 'center','valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+16,seven_letter,row+16),NET,workbook.add_format({'num_format': '#,##0','bold': 1,'bottom': 1,'font_size': 15,'align': 'right','valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+16),'',workbook.add_format({'bold': 0,'right':1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+17),'',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+17),desain.employee_id.bank_account_id.bank_id.name,workbook.add_format({'bold': 1,'font_size': 15,'align': 'center','valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+17,seven_letter,row+17),NET,workbook.add_format({'num_format': '#,##0','bold': 1,'font_size': 15,'align': 'right','valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+17),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+18),'',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+18),'CASH',center1_format)
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+18,seven_letter,row+18),'-',right1_format)
worksheet.write('%s%s'%(eeg_letter,row+18),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+19),'',workbook.add_format({'bold': 0,'left': 1,'bottom': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+19),'TOTAL',workbook.add_format({'bold': 1,'bottom': 1,'align': 'center','valign' : 'vright'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+19,seven_letter,row+19),NET,workbook.add_format({'num_format': '#,##0','bold': 0,'align':'right','valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+19),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+19), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(tree_letter,row+19),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(four_letter,row+19),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
#worksheet.write('%s%s'%(six_letter,row+18),NET,workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
#worksheet.write('%s%s'%(seven_letter,row+18),'',workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
NETS = terbilang(NET, 'idr', 'idr')
worksheet.merge_range('%s%s:%s%s'%(one_letter,row+20,eeg_letter,row+21),NETS,workbook.add_format({'bold': 1,'border': 1,'align':'center','valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+22),'Payroll',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+22), '', workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.merge_range('%s%s:%s%s'%(tree_letter,row+22,four_letter,row+22),'Finance',workbook.add_format({'bold': 0,'align': 'left','valign' : 'vright'}))
worksheet.write('%s%s'%(five_letter,row+22),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vcenter'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+22,seven_letter,row+22),'Checker',workbook.add_format({'bold': 0,'align':'left','valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+22),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+23),'',workbook.add_format({'bold': 0,'bottom': 1,'left': 1,'align': 'left','valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+23), '', workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
worksheet.merge_range('%s%s:%s%s'%(tree_letter,row+23,four_letter,row+23),'',workbook.add_format({'bold': 0,'align': 'left','bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(five_letter,row+23),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vcenter'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+23,seven_letter,row+23),'',workbook.add_format({'bold': 0,'bottom': 1,'align':'left','valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+23),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
x += 1
if x == jum:
row += 24
desain_brand_ids -= five_desain_ids
elif desain.contract_id.type_id.name == "BORONGAN" :
presences1 = 0
presences2 = 0
SP = 0
SW = 0
UBR1 = 0
UBR2 = 0
KRJN1 = 0
KRJN2 = 0
BNPT1 = 0
BNPT2 = 0
for line in desain.worked_days_line_ids :
if line.code == 'Presences1' :
presences1 = line.number_of_days
elif line.code == 'Presences2' :
presences2 = line.number_of_days
for nominal in desain.line_ids :
if nominal.code == 'SP' :
SP = nominal.amount
elif nominal.code == 'SW' :
SW = nominal.amount
elif nominal.code == 'UBR1' :
UBR1 = nominal.amount
elif nominal.code == 'UBR2' :
UBR2 = nominal.amount
elif nominal.code == 'KRJN1' :
KRJN1 = nominal.amount
elif nominal.code == 'KRJN2' :
KRJN2 = nominal.amount
elif nominal.code == 'BNPT1' :
BNPT1 = nominal.amount
elif nominal.code == 'BNPT2' :
BNPT2 = nominal.amount
elif nominal.code == 'NET' :
NET = nominal.amount
bulan = ["","Januari","Februari","Maret","April","Mei","Juni","Juli","Agustus","September","Oktober","November","Desember"]
tanggal = desain.date_to[8:10] + "-" + bulan[int(desain.date_to[5:7])] + "-" + desain.date_to[:4]
worksheet.write('%s%s'%(one_letter,row), 'NAMA', workbook.add_format({'bold': 0,'top' : 1,'left': 1,'valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(two_letter,row,five_letter,row),desain.employee_id.name,left_format_top)
worksheet.write('%s%s'%(six_letter,row), "", center_format_top)
worksheet.write('%s%s'%(seven_letter,row),tanggal,workbook.add_format({'bold': 0,'align':'right','top': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row),'',workbook.add_format({'bold': 0,'top': 1,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+1), 'DIV', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(two_letter,row+1,five_letter,row+1),desain.employee_id.department_id.name,workbook.add_format({'bold': 0,'valign' : 'vleft'}))
worksheet.write('%s%s'%(four_letter,row+1), '', workbook.add_format({'bold': 0,'valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+1), '', workbook.add_format({'bold': 0,'valign' : 'vleft'}))
worksheet.write('%s%s'%(six_letter,row+1), "", workbook.add_format({'bold': 0,'valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+1), '', workbook.add_format({'bold': 0,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+1),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+2), 'UT', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+3), '', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(two_letter,row+2), '1', center_format)
worksheet.write('%s%s'%(two_letter,row+3), '2', center_format)
worksheet.write('%s%s'%(tree_letter,row+2), '6', center_format)
worksheet.write('%s%s'%(tree_letter,row+3), '6', center_format)
worksheet.write('%s%s'%(four_letter,row+2), 'x', center_format)
worksheet.write('%s%s'%(four_letter,row+3), 'x', center_format)
worksheet.write('%s%s'%(five_letter,row+2), desain.contract_id.uang_transport, right_format)
worksheet.write('%s%s'%(five_letter,row+3), desain.contract_id.uang_transport, right_format)
worksheet.write('%s%s'%(six_letter,row+2), "'=", center_format)
worksheet.write('%s%s'%(six_letter,row+3), "'=", center_format)
worksheet.write('%s%s'%(seven_letter,row+2), desain.contract_id.uang_transport * presences1, workbook.add_format({'align':'right','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(seven_letter,row+3), desain.contract_id.uang_transport * presences2, workbook.add_format({'align':'right','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+2),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+3),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+4), 'UPAH', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+5), '', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(two_letter,row+4), '1', center_format)
worksheet.write('%s%s'%(two_letter,row+5), '2', center_format)
worksheet.write('%s%s'%(tree_letter,row+4), '', center_format)
worksheet.write('%s%s'%(tree_letter,row+5), '', center_format)
worksheet.write('%s%s'%(four_letter,row+4), '', center_format)
worksheet.write('%s%s'%(four_letter,row+5), '', center_format)
worksheet.write('%s%s'%(five_letter,row+4), '', right_format)
worksheet.write('%s%s'%(five_letter,row+5), '', right_format)
worksheet.write('%s%s'%(six_letter,row+4), "", center_format)
worksheet.write('%s%s'%(six_letter,row+5), "", center_format)
worksheet.write('%s%s'%(seven_letter,row+4), UBR1, workbook.add_format({'align':'right','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(seven_letter,row+5), UBR2, workbook.add_format({'align':'right','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+4),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+5),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+6), 'KRJN', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+7), '', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(two_letter,row+6), '1', center_format)
worksheet.write('%s%s'%(two_letter,row+7), '2', center_format)
worksheet.write('%s%s'%(seven_letter,row+6), KRJN1, workbook.add_format({'align':'right','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(seven_letter,row+7), KRJN2, workbook.add_format({'align':'right','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+6),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+7),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+8), 'BNS/POT', workbook.add_format({'bold': 0,'left': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(one_letter,row+9), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(two_letter,row+8), '1', center_format)
worksheet.write('%s%s'%(two_letter,row+9), '2', workbook.add_format({'bold': 0,'bottom': 1,'align' : 'center','valign' : 'vcenter'}))
worksheet.write('%s%s'%(tree_letter,row+8), '', center_format)
worksheet.write('%s%s'%(tree_letter,row+9), '', workbook.add_format({'bold': 0,'bottom': 1,'align' : 'center','valign' : 'vcenter'}))
worksheet.write('%s%s'%(four_letter,row+8), '', center_format)
worksheet.write('%s%s'%(four_letter,row+9), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(five_letter,row+8), '', right_format)
worksheet.write('%s%s'%(five_letter,row+9), '', workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(six_letter,row+8), "", center_format)
worksheet.write('%s%s'%(six_letter,row+9), "", workbook.add_format({'bold': 0,'bottom': 1,'valign' : 'vcenter'}))
worksheet.write('%s%s'%(seven_letter,row+8),BNPT1, workbook.add_format({'align':'right','bold': 0,'valign' : 'vright'}))
worksheet.write('%s%s'%(seven_letter,row+9),BNPT2, workbook.add_format({'align':'right','bold': 0,'bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+8),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+9),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+10), (desain.contract_id.uang_transport * presences1) + (desain.contract_id.uang_transport * presences2) + UBR1 + UBR2 + KRJN1 + KRJN2 + BNPT1 + BNPT2 ,right_format)
worksheet.write('%s%s'%(one_letter,row+10),'',workbook.add_format({'bold': 0,'left': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+10),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+11),'S POKOK',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+11),SP,right_format)
worksheet.write('%s%s'%(eeg_letter,row+11),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+12),'S WAJIB',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+12),SW,right_format)
worksheet.write('%s%s'%(eeg_letter,row+12),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+13),'UPAH HARIAN',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+13),'-',right_format)
worksheet.write('%s%s'%(eeg_letter,row+13),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+14),'KB MKN',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+14),'-',right_format)
worksheet.write('%s%s'%(eeg_letter,row+14),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+15),'SRGM',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+15),'-',right_format)
worksheet.write('%s%s'%(eeg_letter,row+15),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+16),'KRK/DLL',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(seven_letter,row+16),'-',right_format)
worksheet.write('%s%s'%(eeg_letter,row+16),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+17),'',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+17),'TOTAL',center1_format)
worksheet.write('%s%s'%(seven_letter,row+17),NET,right_format)
worksheet.write('%s%s'%(eeg_letter,row+17),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+18),'',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vleft'}))
worksheet.write('%s%s'%(five_letter,row+18),'REAL',workbook.add_format({'bold': 1,'bottom': 1,'font_size': 15,'align': 'center','valign' : 'vleft'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+18,seven_letter,row+18),NET,workbook.add_format({'bold': 1,'bottom': 1,'font_size': 15,'align': 'right','valign' : 'vleft'}))
worksheet.write('%s%s'%(eeg_letter,row+18),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
NETS = terbilang(NET, 'idr', 'idr')
worksheet.merge_range('%s%s:%s%s'%(one_letter,row+19,eeg_letter,row+19),NETS,workbook.add_format({'bold': 1,'border': 1,'align':'center','valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+20),'Payroll',workbook.add_format({'bold': 0,'left': 1,'align': 'left','valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+20), '', workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.merge_range('%s%s:%s%s'%(tree_letter,row+20,four_letter,row+20),'Finance',workbook.add_format({'bold': 0,'align': 'left','valign' : 'vright'}))
worksheet.write('%s%s'%(five_letter,row+20),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vcenter'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+20,seven_letter,row+20),'Checker',workbook.add_format({'bold': 0,'align':'left','valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+20),'',workbook.add_format({'bold': 0,'right': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(one_letter,row+21),'',workbook.add_format({'bold': 0,'bottom': 1,'left': 1,'align': 'left','valign' : 'vright'}))
worksheet.write('%s%s'%(two_letter,row+21), '', workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
worksheet.merge_range('%s%s:%s%s'%(tree_letter,row+21,four_letter,row+21),'',workbook.add_format({'bold': 0,'align': 'left','bottom': 1,'valign' : 'vright'}))
worksheet.write('%s%s'%(five_letter,row+21),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vcenter'}))
worksheet.merge_range('%s%s:%s%s'%(six_letter,row+21,seven_letter,row+21),'',workbook.add_format({'bold': 0,'bottom': 1,'align':'left','valign' : 'vright'}))
worksheet.write('%s%s'%(eeg_letter,row+21),'',workbook.add_format({'bold': 0,'bottom': 1,'right': 1,'valign' : 'vright'}))
x += 1
if x == jum:
row += 22
desain_brand_ids -= five_desain_ids
# up to here: end of the per-contract-type payslip rendering
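# finalize the workbook, base64-encode the in-memory buffer into the
# wizard's file_data field, and return an act_url so the browser downloads it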
workbook.close()
result = base64.b64encode(fp.getvalue())  # b64encode: encodestring is deprecated
date_string = self.get_default_date_model().strftime("%Y-%m-%d")
filename = 'Slip Gaji %s'%(date_string)
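# '%2E' is a URL-encoded '.', presumably kept encoded so the extension
# survives the /web/content query string built below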
filename += '%2Exlsx'
self.write({'file_data': result})
url = "web/content/?model="+self._name+"&id="+str(self.id)+"&field=file_data&download=true&filename="+filename
return {
'name': 'Slip Gaji',
'type': 'ir.actions.act_url',
'url': url,
'target': 'new',
}
@api.model
def default_get(self, fields_list):
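# collect the names of the payslips selected via context active_ids and
# surface them through the wizard's 'warning' field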
warning = ''
skipped = []
ctx = self.env.context.copy()
obj = self.env['hr.payslip']
for po in ctx.get('active_ids',[]):
po_id = obj.browse(po)
skipped.append(po_id.name)
if skipped:
warning = 'Slip Gaji ' + ', '.join(skipped)
return {'warning' : warning}
| [
"[email protected]"
] | |
b3ad9085d450b0be9e16f6a891885dfb9b831d08 | d9be34d92ec5bfec5756d5310c2e34226d726cb4 | /nn/mnist_loader.py | 611bbf70f967542ec27724a4dfa39e3107c7a9a6 | [] | no_license | Oldpan/manim | 43119e4cf0b2d7c17affd66d1f64ce7a6c3bce81 | ac079f182a977bf0d830ab7647971b67cf9e5160 | refs/heads/master | 2021-07-20T11:23:45.752896 | 2017-10-27T22:12:29 | 2017-10-27T22:12:29 | 108,652,804 | 1 | 0 | null | 2017-10-28T13:47:39 | 2017-10-28T13:47:39 | null | UTF-8 | Python | false | false | 3,532 | py |
"""
mnist_loader
~~~~~~~~~~~~
A library to load the MNIST image data. For details of the data
structures that are returned, see the doc strings for ``load_data``
and ``load_data_wrapper``. In practice, ``load_data_wrapper`` is the
function usually called by our neural network code.
"""
#### Libraries
# Standard library
import cPickle
import gzip
# Third-party libraries
import numpy as np
def load_data():
"""Return the MNIST data as a tuple containing the training data,
the validation data, and the test data.
The ``training_data`` is returned as a tuple with two entries.
The first entry contains the actual training images. This is a
numpy ndarray with 50,000 entries. Each entry is, in turn, a
numpy ndarray with 784 values, representing the 28 * 28 = 784
pixels in a single MNIST image.
The second entry in the ``training_data`` tuple is a numpy ndarray
containing 50,000 entries. Those entries are just the digit
values (0...9) for the corresponding images contained in the first
entry of the tuple.
The ``validation_data`` and ``test_data`` are similar, except
each contains only 10,000 images.
This is a nice data format, but for use in neural networks it's
helpful to modify the format of the ``training_data`` a little.
That's done in the wrapper function ``load_data_wrapper()``, see
below.
"""
f = gzip.open('/Users/grant/cs/neural-networks-and-deep-learning/data/mnist.pkl.gz', 'rb')
training_data, validation_data, test_data = cPickle.load(f)
f.close()
return (training_data, validation_data, test_data)
def load_data_wrapper():
"""Return a tuple containing ``(training_data, validation_data,
test_data)``. Based on ``load_data``, but the format is more
convenient for use in our implementation of neural networks.
In particular, ``training_data`` is a list containing 50,000
2-tuples ``(x, y)``. ``x`` is a 784-dimensional numpy.ndarray
containing the input image. ``y`` is a 10-dimensional
numpy.ndarray representing the unit vector corresponding to the
correct digit for ``x``.
``validation_data`` and ``test_data`` are lists containing 10,000
2-tuples ``(x, y)``. In each case, ``x`` is a 784-dimensional
numpy.ndarry containing the input image, and ``y`` is the
corresponding classification, i.e., the digit values (integers)
corresponding to ``x``.
Obviously, this means we're using slightly different formats for
the training data and the validation / test data. These formats
turn out to be the most convenient for use in our neural network
code."""
tr_d, va_d, te_d = load_data()
training_inputs = [np.reshape(x, (784, 1)) for x in tr_d[0]]
training_results = [vectorized_result(y) for y in tr_d[1]]
training_data = zip(training_inputs, training_results)
validation_inputs = [np.reshape(x, (784, 1)) for x in va_d[0]]
validation_data = zip(validation_inputs, va_d[1])
test_inputs = [np.reshape(x, (784, 1)) for x in te_d[0]]
test_data = zip(test_inputs, te_d[1])
return (training_data, validation_data, test_data)
def vectorized_result(j):
"""Return a 10-dimensional unit vector with a 1.0 in the jth
position and zeroes elsewhere. This is used to convert a digit
(0...9) into a corresponding desired output from the neural
network."""
e = np.zeros((10, 1))
e[j] = 1.0
return e
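
# Minimal usage sketch (Python 2; assumes mnist.pkl.gz exists at the
# hard-coded path in load_data above):
#
#     import mnist_loader
#     training_data, validation_data, test_data = mnist_loader.load_data_wrapper()
#     x, y = training_data[0]
#     print x.shape  # (784, 1) column vector of pixel intensities
#     print y.shape  # (10, 1) one-hot label from vectorized_result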
| [
"[email protected]"
] | |
229bde7a28f796c2b05d06510b721e757b2aa411 | 1fbf09a5127a87434c8cfe7131f4b5879966cf04 | /web/dashboard/controller/project.py | d96b19e04ea25e26e2855f477d372df8b086c182 | [
"MIT"
] | permissive | pombredanne/Kunlun-M | cb0495c583e6ae39e168e96341f4fd2cb01d30ae | ab3b3cc843edee6a558a485d89daf944d8fd7f8e | refs/heads/master | 2023-08-23T05:58:12.433991 | 2021-09-30T10:04:04 | 2021-09-30T10:04:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,965 | py |
#!/usr/bin/env python
# encoding: utf-8
'''
@author: LoRexxar
@contact: [email protected]
@file: project.py
@time: 2021/7/20 15:50
@desc:
'''
import re
import ast
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse, HttpResponseNotFound
from django.views.generic import TemplateView
from django.views import View
from django.shortcuts import render, redirect
from Kunlun_M.settings import SUPER_ADMIN
from Kunlun_M.const import VUL_LEVEL, VENDOR_VUL_LEVEL
from web.index.controller import login_or_token_required
from utils.utils import del_sensitive_for_config
from web.index.models import ScanTask, ScanResultTask, Rules, Tampers, NewEvilFunc, Project, ProjectVendors, VendorVulns
from web.index.models import get_and_check_scanresult, get_and_check_evil_func
class ProjectListView(TemplateView):
"""展示当前用户的项目"""
template_name = "dashboard/projects/projects_list.html"
def get_context_data(self, **kwargs):
context = super(ProjectListView, self).get_context_data(**kwargs)
rows = Project.objects.all().order_by('-id')
project_count = Project.objects.all().count()
context['projects'] = rows
for project in context['projects']:
tasks = ScanTask.objects.filter(project_id=project.id).order_by('-id')
tasks_count = len(tasks)
vendors_count = ProjectVendors.objects.filter(project_id=project.id).count()
results_count = ScanResultTask.objects.filter(scan_project_id=project.id, is_active=1).count()
last_scan_time = 0
if tasks:
last_scan_time = tasks.first().last_scan_time
project.tasks_count = tasks_count
project.results_count = results_count
project.last_scan_time = last_scan_time
project.vendors_count = vendors_count
context['projects'] = sorted(context['projects'], key=lambda x:x.last_scan_time)[::-1]
if 'p' in self.request.GET:
page = int(self.request.GET['p'])
else:
page = 1
# check page
if page*50 > project_count:
page = 1
context['projects'] = context['projects'][(page-1)*50: page*50]
context['page'] = page
max_page = project_count // 50 if project_count % 50 == 0 else (project_count // 50)+1
max_page = max_page+1 if max_page == 1 else max_page
context['max_page'] = max_page
context['page_range'] = range(int(max_page))[1:]
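# Editor's note (illustrative): max_page above is a ceiling division by the
# page size of 50, e.g. project_count = 101 gives 101 // 50 + 1 = 3 pages.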
return context
class ProjectDetailView(View):
"""展示当前项目细节"""
@staticmethod
@login_or_token_required
def get(request, project_id):
project = Project.objects.filter(id=project_id).first()
tasks = ScanTask.objects.filter(project_id=project.id).order_by('-id')[:20]
taskresults = ScanResultTask.objects.filter(scan_project_id=project.id, is_active=1).all()
newevilfuncs = NewEvilFunc.objects.filter(project_id=project.id).all()
pvs = ProjectVendors.objects.filter(project_id=project.id)
for task in tasks:
task.is_finished = int(task.is_finished)
task.parameter_config = del_sensitive_for_config(task.parameter_config)
for taskresult in taskresults:
taskresult.is_unconfirm = int(taskresult.is_unconfirm)
taskresult.level = 0
taskresult.vid = 0
if taskresult.cvi_id == '9999':
vender_vul_id = taskresult.vulfile_path.split(":")[-1]
if vender_vul_id:
vv = VendorVulns.objects.filter(id=vender_vul_id).first()
if vv:
taskresult.vulfile_path = "[{}]{}".format(vv.vendor_name, vv.title)
taskresult.level = VENDOR_VUL_LEVEL[vv.severity]
taskresult.vid = vv.id
# Handle displaying multiple references
references = []
if re.search(r'"http[^"]+"', taskresult.source_code, re.I):
rs = re.findall(r'"http[^"]+"', taskresult.source_code, re.I)
for r in rs:
references.append(r)
else:
references = [taskresult.source_code]
taskresult.source_code = references
else:
r = Rules.objects.filter(svid=taskresult.cvi_id).first()
taskresult.level = VUL_LEVEL[r.level]
if not project:
return HttpResponseNotFound('Project Not Found.')
else:
data = {
'tasks': tasks,
'taskresults': taskresults,
'newevilfuncs': newevilfuncs,
'project': project,
'project_vendors': pvs,
}
return render(request, 'dashboard/projects/project_detail.html', data)
| [
"[email protected]"
] | |
179efb6dc7e69f13add2b48bd48138775e9b3571 | 79e45a6e4846927da432087aba845036b11c5622 | /PROD/bin/MarketData/Daily/DUSTdailyOHLC_withvol.py | ffea6ec37568125022acaf737d97cd3ed1ed8540 | [] | no_license | mjserpico/Scarlett-Trading | cba2bcfaacf886b9d851d978683b4ce641c8f6ad | 9778717393dbb0818ee026356996d1806345a6c2 | refs/heads/master | 2020-03-21T21:39:51.108503 | 2019-05-09T02:06:26 | 2019-05-09T02:06:26 | 139,076,454 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,169 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Jan 08 09:16:43 2017
@author: Michael
"""
import mysql.connector
from ib.opt import Connection
from ib.ext.Contract import Contract
import time
import logging
import datetime
import datalink #universal logins for environment
import math
Flag = 0
CCY1 = "DU"
CCY2 = "ST"
Table = 'DUST'
yClose = 0
logging.basicConfig(filename='DailyOHLC' + str(datetime.date.today()) + '.txt', level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
fh = logging.FileHandler('DailyOHLC' + str(datetime.date.today()) + '.txt')
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter)
logger.addHandler(fh)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(formatter)
logger.addHandler(ch)
logger.debug('Starting DailyOHLC')
def truncate(f, n):
'''Truncates/pads a float f to n decimal places without rounding'''
s = '{}'.format(f)
if 'e' in s or 'E' in s:
return '{0:.{1}f}'.format(f, n)
i, p, d = s.partition('.')
return '.'.join([i, (d+'0'*n)[:n]])
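# Editor's note (illustrative): truncate() returns a string, e.g.
# truncate(1.23789, 2) -> '1.23' and truncate(1.2, 4) -> '1.2000'.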
def reply_handler(msg):
#print(msg.value)
logger.debug('In beginning of Reply Handler')
print("Reply:", msg)
test = msg.open
test2 = msg.high
test3 = msg.low
test4 = msg.close
test5 = msg.volume
logger.debug('test %s', test)
logger.debug('test5 %s', test5)
global Flag
logger.debug('Flag %s', Flag)
logger.debug('In Reply Handler')
if float(test) != -1:
import time
logger.debug('Valid Price Found (OPEN NOT -1)')
#cnx = mysql.connector.connect(user='mjserpico', password='UrzE8B66',host="scar01.cqxmc7cib5oh.us-east-1.rds.amazonaws.com", database='SCAR01')
#cnx = mysql.connector.connect(user='Scarlett01', password='scar01lett',host="serpdb01.cqxmc7cib5oh.us-east-1.rds.amazonaws.com", database='SERPDB01')
cnx = mysql.connector.connect(user=datalink.DB_User, password=datalink.DB_Pass,host=datalink.DB_Host, database=datalink.DB_Path)
logger.debug('Connected to Database')
cur = cnx.cursor()
cur.execute("Insert Into "+ Table + """(Date, Open, High, Low, Close) values(%s,%s,%s,%s,%s)""",(time.strftime("%m/%d/%Y"),float(test),float(test2),float(test3),float(test4)))
cnx.commit()
logger.debug('Ran Insert Script')
today = datetime.date.today( )
dayofweek = datetime.datetime.today().weekday()
if dayofweek == 0: #if Today is Monday
yesterday = today - datetime.timedelta(days=3) #Get Friday
month = (str(0) + str(yesterday.month))
day = (str(0)+ str(yesterday.day))
yesterday2 = (month[-2:] +"/"+ day[-2:] +"/"+str(yesterday.year))
logger.debug('Yesterday2 was %s', str(yesterday2))
else:
yesterday = today - datetime.timedelta(days=1) #Take 1 Day back
month = (str(0) + str(yesterday.month))
day = (str(0)+ str(yesterday.day))
yesterday2 = (month[-2:] +"/"+ day[-2:] +"/"+str(yesterday.year))
logger.debug('Yesterday2 was %s', str(yesterday2))
#MovingAverage Calculation
#Step 1 Get earliest Date to calculate avg from
#reformat date to DB convention first
logger.debug('Today is still %s', today)
backdate = today - datetime.timedelta(days=13)
logger.debug('Date shifted back 10 is %s', backdate)
dayofweek = backdate.weekday()
month = (str(0) + str(backdate.month))
day = (str(0)+ str(backdate.day))
backdate2 = (month[-2:] +"/"+ day[-2:] +"/"+str(backdate.year))
logger.debug('First Date of Moving Average is %s', backdate2)
query = ("SELECT max(ID) from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID1 = ID
logger.debug('ID1 is %s', ID1)
query = ("SELECT (max(ID)-20) from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID2 = ID
logger.debug('ID1 is %s', ID1)
logger.debug('ID2 is %s', ID2)
query = ("SELECT (max(ID)-1) from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID3 = ID
logger.debug('ID3 is %s', ID3)
#Pull ATR Length From RiskParameter Table
query = ("Select RiskParametersValue from RiskParameters where RiskParametersName = 'ATRlength';")
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
atrlength = ID
logger.debug('ID4 is %s', atrlength)
#ID for ATR length start point
query = ("SELECT (max(ID)-" + str(atrlength[0]) + ") from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID4 = ID
logger.debug('ID4 is %s', ID4)
#Pull MovingAvg Length RiskParameter Table
query = ("Select RiskParametersValue from RiskParameters where RiskParametersName = 'MovAvgLength';")
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
movavglength = ID
logger.debug('ID is %s', atrlength)
#ID for MovAvg length start point
query = ("SELECT (max(ID)-" + str(movavglength[0]) + ") from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID5 = ID
logger.debug('ID5 is %s', ID5)
query = ("SELECT (max(ID)-30) from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID30 = ID
logger.debug('ID30 is %s', ID30)
query = ("SELECT (max(ID)-60) from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID60 = ID
logger.debug('ID60 is %s', ID60)
query = ("SELECT (max(ID)-90) from " + CCY1 + CCY2)
logger.debug('Query is %s', query)
cur.execute(query)
for (ID) in cur:
ID90 = ID
logger.debug('ID90 is %s', ID90)
query = ("SELECT Close from " + CCY1 + CCY2 + " where ID = " + str(ID3[0]) + ";")
cur.execute(query)
for (Close) in cur:
yClose = Close
logger.debug('yClose is %s', yClose[0])
query = ("SELECT Close from " + CCY1 + CCY2 + " where ID = " + str(ID1[0]) + ";")
cur.execute(query)
for (Close) in cur:
tClose = Close
logger.debug('tClose is %s', tClose[0])
#Interday Return
CloseReturn = float(tClose[0])
yCloseReturn = float(yClose[0])
logger.debug('yClose is %s', yClose[0])
logger.debug('Close is %s', tClose[0])
returns = round(((CloseReturn / yCloseReturn) - 1) * 100,2)
logger.debug('Return is %s', returns)
query = ("UPDATE " + CCY1 + CCY2 + " SET PercentReturn = " + str(returns) + " where ID = " + str(ID1[0]) +";")
logger.debug('Query is %s', query)
cur.execute(query)
cnx.commit()
# Moving average over the configured period
query = ("SELECT round(Avg(Close),2) as Avg from " + CCY1 + CCY2 + " where ID BETWEEN " + str(ID5[0]) + " AND " + str(ID1[0]) + ";")
logger.debug('Query is %s', query)
cur.execute(query)
for (Avg) in cur:
BBMovAvg = Avg #Final Moving Average Value
logger.debug('MovAvg is %s', BBMovAvg)
##Puts Moving Average Value in hasPosition Table for Reference with intraday strategies
query = ("UPDATE hasPosition SET MovingAvgValue = " + str(BBMovAvg[0]) + " where CCY =\'" + CCY1 + CCY2 +"\';")
logger.debug('Query is %s', query)
cur.execute(query)
cnx.commit()
#True Range
TR1 = (test2-test3)
TR2 = abs(test2-float(yClose[0]))
TR3 = abs(test3-float(yClose[0]))
TR = truncate(max(TR1,TR2,TR3),4)
print(TR)
print(TR1)
print(TR2)
print(TR3)
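# Editor's worked example (illustrative): with high=10.0, low=9.0 and a
# prior close of 8.5: TR1=1.0, TR2=1.5, TR3=0.5, so the true range is 1.5.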
query = ("UPDATE "+ Table +" SET TrueRange = " + str(TR) + " where ID =\'" + str(ID1[0]) +"\';")
logger.debug('Query is %s', query)
print(query)
cur.execute(query)
cnx.commit()
#ATR Daily
query = ("SELECT round(Avg(TrueRange),2) as Avg from " + CCY1 + CCY2 + " where ID BETWEEN " + str(ID4[0]) + " AND " + str(ID1[0]) + ";")
logger.debug('Query is %s', query)
print(query)
cur.execute(query)
for (Avg) in cur:
ATRAvg = Avg #Final Moving Average Value
logger.debug('ATR is %s', ATRAvg)
##Puts ATR in hasPosition Table for Reference with intraday strategies
query = ("UPDATE hasPosition SET ATRValue = " + str(ATRAvg[0]) + " where CCY =\'" + CCY1 + CCY2 +"\';")
logger.debug('Query is %s', query)
cur.execute(query)
print(query)
cnx.commit()
#Calculate 30D Vol
query = ("SELECT round(stddev(PercentReturn),2) as vol30 from " + CCY1 + CCY2 + " where ID BETWEEN " + str(ID30[0]) + " AND " + str(ID1[0]) + ";")
logger.debug('Query is %s', query)
cur.execute(query)
for (vol30) in cur:
thirtyd = truncate((vol30[0] * math.sqrt(252)),2) #Final Moving Average Value
logger.debug('30d is %s', thirtyd)
query = ("UPDATE "+ Table +" SET thirtyvol = " + str(thirtyd) + " where ID =\'" + str(ID1[0]) +"\';")
logger.debug('Query is %s', query)
print(query)
cur.execute(query)
cnx.commit()
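# Editor's note (illustrative): the stddev of daily percent returns is
# annualized by sqrt(252) trading days, e.g. 1.5 daily -> about 23.81.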
#Calculate 60D Vol
query = ("SELECT round(stddev(PercentReturn),2) as vol60 from " + CCY1 + CCY2 + " where ID BETWEEN " + str(ID60[0]) + " AND " + str(ID1[0]) + ";")
logger.debug('Query is %s', query)
cur.execute(query)
for (vol60) in cur:
sixtyd = truncate((vol60[0] * math.sqrt(252)),2) #Final Moving Average Value
logger.debug('sixtyd is %s', sixtyd)
query = ("UPDATE "+ Table +" SET sixtyvol = " + str(sixtyd) + " where ID =\'" + str(ID1[0]) +"\';")
logger.debug('Query is %s', query)
print(query)
cur.execute(query)
cnx.commit()
#Calculate 90D Vol
query = ("SELECT round(stddev(PercentReturn),2) as vol90 from " + CCY1 + CCY2 + " where ID BETWEEN " + str(ID90[0]) + " AND " + str(ID1[0]) + ";")
logger.debug('Query is %s', query)
cur.execute(query)
for (vol90) in cur:
ninetyd = truncate((vol90[0] * math.sqrt(252)),2) #Final Moving Average Value
logger.debug('ninetyd is %s', ninetyd)
query = ("UPDATE "+ Table +" SET ninetyvol = " + str(ninetyd) + " where ID =\'" + str(ID1[0]) +"\';")
logger.debug('Query is %s', query)
print(query)
cur.execute(query)
cnx.commit()
Flag = 1
logger.debug('Flag set to %s', Flag)
print(Flag)
return(Flag)
while Flag == 0:
logger.debug('Flag set to %s', Flag)
conn = Connection.create(port=4002, clientId=999)
conn.connect()
logger.debug('Connecting to Server')
time.sleep(1)
conn.register(reply_handler,'HistoricalData') #By registering "HistoricalData" --the Method name only --we can eliminate all the open order garbage
logger.debug('Registered HistoricalData Reply Handler')
time.sleep(1)
qqq = Contract()
qqq.m_symbol = Table
qqq.m_secType = 'STK'
qqq.m_exchange = 'SMART:ISLAND'
qqq.m_currency = 'USD'
logger.debug('Requesting historical data')
conn.reqHistoricalData(1, qqq, '', '1 D', '1 day', 'TRADES', 0, 1)
logger.debug('Returned from Reply Handler')
time.sleep(1) #give IB time to send us messages
logger.debug('Disconnecting from Server')
conn.disconnect()
logger.debug('Finished DUST Daily OHLC') | [
"[email protected]"
] | |
b7bb6f433a614f530de7f24c83596e5d1d083e36 | 84f1fea102aeb2d324e8ad3908e1765d04a0a730 | /emails/migrations/0002_sentemail_transaction_event.py | 367b922c18a634054c774c56a9edd85a29f73e26 | [
"Apache-2.0"
] | permissive | Natsoye/explorer | c205f8eb8d08705c2c4ee4ee45c28f7d0a534b10 | 638c70204d6001d9c5c56701917a6273a02c90cf | refs/heads/master | 2021-08-30T10:42:56.371192 | 2021-08-17T15:43:04 | 2021-08-17T15:43:04 | 181,131,891 | 2 | 0 | Apache-2.0 | 2021-08-17T15:43:05 | 2019-04-13T06:43:15 | Python | UTF-8 | Python | false | false | 526 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('emails', '0001_initial'),
('transactions', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='sentemail',
name='transaction_event',
field=models.ForeignKey(blank=True, to='transactions.OnChainTransaction', null=True),
preserve_default=True,
),
]
| [
"[email protected]"
] | |
d6cfbd76a3fdbf59869fa3c44935823a5b1d04e7 | 37e87b3d5e1ee9009f0ea0671bc0c6edf0e233b7 | /162_3.py | a5d822d8c80608bdbd14448d50c484cc6169f703 | [] | no_license | Jane11111/Leetcode2021 | d9f4987792938597bf89ff72ba6bbcb4a3f9d081 | a95b871578aae0103066962c33b8c0f4ec22d0f2 | refs/heads/master | 2023-07-14T21:29:41.196752 | 2021-08-23T03:28:02 | 2021-08-23T03:28:02 | 344,804,297 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 560 | py | # -*- coding: utf-8 -*-
# @Time : 2021-03-12 10:16
# @Author : zxl
# @FileName: 162_3.py
class Solution(object):
def findPeakElement(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
i = 0
j = len(nums)-1
while i<=j:
if i==j:
return i
m = (i+j)//2
if nums[m]>nums[m+1]:
j = m
else:
i=m+1
return -1
obj = Solution()
nums = [1,2,1,3,5,6,4]
ans= obj.findPeakElement(nums)
print(ans)
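# Editor's note (illustrative): this prints 5, since nums[5] == 6 is a
# peak; index 1 (nums[1] == 2) would also be a valid answer here.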
| [
"[email protected]"
] | |
abf1af7ecf2a32b6a43a8befe1c0c7f05827072e | 2f3960422d08af95bf67c9057b83244540be51e9 | /transformer_tf/Transformer.py | 3770aeca1afbf23d880663bb7f1c7c30fc57fffe | [] | no_license | FreeFlyXiaoMa/text_classify_base_model3 | 2d4eb8f392a4e38bd62f15f45cf29f7bf4546e96 | a6b91b557fa307b685d077dd32609f5d83db487a | refs/heads/master | 2020-11-28T16:18:40.007655 | 2019-12-24T03:50:54 | 2019-12-24T03:50:54 | 229,865,838 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,770 | py | import tensorflow as tf
import math
import numpy as np
from tensorflow.python.ops import array_ops
class Muti_head_Attention(object):
def __init__(self, max_length, num_classes, vocab_size, embedding_size, hidden_num, num_blocks, num_heads):
# placeholders for input output and dropout
self.input_x = tf.placeholder(tf.int32, shape=[None, max_length], name='input_x')
self.input_y = tf.placeholder(tf.int32, shape=[None, num_classes], name='input_y')
self.drop_out_prob = tf.placeholder(tf.float32, name='drop_out_keep')
# embedding layer
with tf.device('/cpu:0'), tf.name_scope("embedding"):
self.embedding = tf.Variable(tf.truncated_normal([vocab_size, embedding_size],
stddev=1.0 / math.sqrt(embedding_size)),
name="embedding", trainable=True)
self.embedding_chars = tf.nn.embedding_lookup(self.embedding, self.input_x)
# Positional Encoding
N = array_ops.shape(self.embedding_chars)[0]
T = max_length
self.embedding_chars += self.positional_encoding(N, T,
num_units=hidden_num,
zero_pad=False,
scale=False,
scope="enc_pe")
# Dropout
self.enc = tf.layers.dropout(self.embedding_chars, rate=self.drop_out_prob)
# Blocks
for i in range(num_blocks):
with tf.variable_scope("num_blocks_{}".format(i)):
# Multihead Attention
self.enc = self.multihead_attention(queries=self.enc,
keys=self.enc,
num_units=hidden_num,
num_heads=num_heads,
dropout_rate=self.drop_out_prob,
causality=False)
# Feed Forward
self.enc = self.feedforward(self.enc, num_units=[4 * hidden_num, hidden_num])
# Concatenate the per-position features into one vector
self.enc = tf.reshape(self.enc, [-1, max_length * hidden_num, 1])
self.enc = tf.squeeze(self.enc, -1)
fc_w = tf.Variable(tf.truncated_normal([max_length * hidden_num, num_classes], stddev=0.1), name='fc_w')
fc_b = tf.Variable(tf.zeros([num_classes]), name='fc_b')
# Define the loss function
l2_loss = 0
l2_loss += tf.nn.l2_loss(fc_w)
l2_loss += tf.nn.l2_loss(fc_b)
self.logits = tf.matmul(self.enc, fc_w) + fc_b
self.score = tf.nn.softmax(self.logits, name='score')
self.predictions = tf.argmax(self.score, 1, name="predictions")
self.cost = tf.losses.softmax_cross_entropy(self.input_y, self.logits)
# l2_reg_lambda = 0.01
# self.cost = self.cost + l2_reg_lambda * l2_loss
tvars = tf.trainable_variables()
grads, _ = tf.clip_by_global_norm(tf.gradients(self.cost, tvars), 5)
optimizer = tf.train.AdamOptimizer(0.001)
self.train_op = optimizer.apply_gradients(zip(grads, tvars))
self.accuracy = tf.reduce_mean(
tf.cast(tf.equal(tf.argmax(self.input_y, axis=1), tf.argmax(self.score, axis=1)), tf.float32))
def positional_encoding(self, N, T,
num_units,
zero_pad=True,
scale=True,
scope="positional_encoding",
reuse=None):
'''Sinusoidal Positional_Encoding.
Args:
N: batch size.
T: sequence length.
num_units: Output dimensionality
zero_pad: Boolean. If True, all the values of the first row (id = 0) should be constant zero
scale: Boolean. If True, the output will be multiplied by sqrt num_units(check details from paper)
scope: Optional scope for `variable_scope`.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
A 'Tensor' of shape (N, T, num_units) holding the positional encodings.
'''
with tf.variable_scope(scope, reuse=reuse):
position_ind = tf.tile(tf.expand_dims(tf.range(T), 0), [N, 1])
# First part of the PE function: sin and cos argument
position_enc = np.array([
[pos / np.power(10000, 2. * i / num_units) for i in range(num_units)]
for pos in range(T)])
# Second part, apply the cosine to even columns and sin to odds.
position_enc[:, 0::2] = np.sin(position_enc[:, 0::2]) # dim 2i
position_enc[:, 1::2] = np.cos(position_enc[:, 1::2]) # dim 2i+1
# Convert to a tensor
lookup_table = tf.convert_to_tensor(position_enc)
if zero_pad:
lookup_table = tf.concat((tf.zeros(shape=[1, num_units]),
lookup_table[1:, :]), 0)
outputs = tf.nn.embedding_lookup(lookup_table, position_ind)
if scale:
outputs = outputs * num_units ** 0.5
outputs = tf.cast(outputs, dtype=tf.float32)
return outputs
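# --- Editor's note (illustrative): the table built above is
#   PE[pos, i] = pos / 10000**(2*i / num_units),
# with sin() then applied to even columns and cos() to odd ones; e.g. for
# num_units=4, PE[1] = [sin(1), cos(1e-2), sin(1e-4), cos(1e-6)].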
def multihead_attention(self, queries,
keys,
num_units=None,
num_heads=8,
dropout_rate=0,
causality=False,
scope="multihead_attention",
reuse=None):
'''Applies multihead attention.
Args:
queries: A 3d tensor with shape of [N, T_q, C_q].
keys: A 3d tensor with shape of [N, T_k, C_k].
num_units: A scalar. Attention size.
dropout_rate: A floating point number.
is_training: Boolean. Controller of mechanism for dropout.
causality: Boolean. If true, units that reference the future are masked.
num_heads: An int. Number of heads.
scope: Optional scope for `variable_scope`.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
A 3d tensor with shape of (N, T_q, C)
'''
with tf.variable_scope(scope, reuse=reuse):
# Set the fall back option for num_units
if num_units is None:
num_units = queries.get_shape().as_list[-1]
# Linear projections
Q = tf.layers.dense(queries, num_units, activation=tf.nn.relu) # (N, T_q, C)
K = tf.layers.dense(keys, num_units, activation=tf.nn.relu) # (N, T_k, C)
V = tf.layers.dense(keys, num_units, activation=tf.nn.relu) # (N, T_k, C)
# Split and concat
Q_ = tf.concat(tf.split(Q, num_heads, axis=2), axis=0) # (h*N, T_q, C/h)
K_ = tf.concat(tf.split(K, num_heads, axis=2), axis=0) # (h*N, T_k, C/h)
V_ = tf.concat(tf.split(V, num_heads, axis=2), axis=0) # (h*N, T_k, C/h)
# Multiplication
outputs = tf.matmul(Q_, tf.transpose(K_, [0, 2, 1])) # (h*N, T_q, T_k)
# Scale
outputs = outputs / (K_.get_shape().as_list()[-1] ** 0.5)
# Key Masking
key_masks = tf.sign(tf.abs(tf.reduce_sum(keys, axis=-1))) # (N, T_k)
key_masks = tf.tile(key_masks, [num_heads, 1]) # (h*N, T_k)
key_masks = tf.tile(tf.expand_dims(key_masks, 1), [1, tf.shape(queries)[1], 1]) # (h*N, T_q, T_k)
paddings = tf.ones_like(outputs) * (-2 ** 32 + 1)
outputs = tf.where(tf.equal(key_masks, 0), paddings, outputs) # (h*N, T_q, T_k)
# Causality = Future blinding
if causality:
diag_vals = tf.ones_like(outputs[0, :, :]) # (T_q, T_k)
tril = tf.linalg.LinearOperatorLowerTriangular(diag_vals).to_dense() # (T_q, T_k)
masks = tf.tile(tf.expand_dims(tril, 0), [tf.shape(outputs)[0], 1, 1]) # (h*N, T_q, T_k)
paddings = tf.ones_like(masks) * (-2 ** 32 + 1)
outputs = tf.where(tf.equal(masks, 0), paddings, outputs) # (h*N, T_q, T_k)
# Activation
outputs = tf.nn.softmax(outputs) # (h*N, T_q, T_k)
# Query Masking
query_masks = tf.sign(tf.abs(tf.reduce_sum(queries, axis=-1))) # (N, T_q)
query_masks = tf.tile(query_masks, [num_heads, 1]) # (h*N, T_q)
query_masks = tf.tile(tf.expand_dims(query_masks, -1), [1, 1, tf.shape(keys)[1]]) # (h*N, T_q, T_k)
outputs *= query_masks # broadcasting. (N, T_q, C)
# Dropouts
outputs = tf.layers.dropout(outputs, rate=dropout_rate)
# Weighted sum
outputs = tf.matmul(outputs, V_) # ( h*N, T_q, C/h)
# Restore shape
outputs = tf.concat(tf.split(outputs, num_heads, axis=0), axis=2) # (N, T_q, C)
# Residual connection
outputs += queries
# Normalize
outputs = self.normalize(outputs) # (N, T_q, C)
return outputs
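# --- Editor's note (illustrative): shape bookkeeping for the method above.
# With batch size N, num_units C=512 and num_heads h=8: Q is (N, T_q, 512),
# the split/concat gives Q_ of shape (8N, T_q, 64), the attention matrix is
# (8N, T_q, T_k), and the final concat restores (N, T_q, 512).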
def feedforward(self, inputs,
num_units=[2048, 512],
scope="multihead_attention",
reuse=None):
'''Point-wise feed forward net.
Args:
inputs: A 3d tensor with shape of [N, T, C].
num_units: A list of two integers.
scope: Optional scope for `variable_scope`.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
A 3d tensor with the same shape and dtype as inputs
'''
with tf.variable_scope(scope, reuse=reuse):
# Inner layer
params = {"inputs": inputs, "filters": num_units[0], "kernel_size": 1,
"activation": tf.nn.relu, "use_bias": True}
outputs = tf.layers.conv1d(**params)
# Readout layer
params = {"inputs": outputs, "filters": num_units[1], "kernel_size": 1,
"activation": None, "use_bias": True}
outputs = tf.layers.conv1d(**params)
# Residual connection
outputs += inputs
# Normalize
outputs = self.normalize(outputs)
return outputs
def normalize(self, inputs,
epsilon=1e-8,
scope="ln",
reuse=None):
'''Applies layer normalization.
Args:
inputs: A tensor with 2 or more dimensions, where the first dimension has
`batch_size`.
epsilon: A floating number. A very small number for preventing ZeroDivision Error.
scope: Optional scope for `variable_scope`.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
A tensor with the same shape and data dtype as `inputs`.
'''
with tf.variable_scope(scope, reuse=reuse):
inputs_shape = inputs.get_shape()
params_shape = inputs_shape[-1:]
mean, variance = tf.nn.moments(inputs, [-1], keep_dims=True)
beta = tf.Variable(tf.zeros(params_shape))
gamma = tf.Variable(tf.ones(params_shape))
normalized = (inputs - mean) / ((variance + epsilon) ** (.5))
outputs = gamma * normalized + beta
return outputs
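# --- Editor's usage sketch (illustrative; hyperparameter values are
# assumptions, and batch_x/batch_y are hypothetical numpy batches). Note that
# the positional encoding of width hidden_num is added directly to the
# embeddings, so embedding_size should equal hidden_num.
# model = Muti_head_Attention(max_length=100, num_classes=2, vocab_size=5000,
#                             embedding_size=128, hidden_num=128,
#                             num_blocks=2, num_heads=8)
# feed = {model.input_x: batch_x, model.input_y: batch_y,
#         model.drop_out_prob: 0.1}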
| [
"[email protected]"
] | |
8737cd2b0d8f72b063a39e23b4cb76f20b2b2e95 | 64bf39b96a014b5d3f69b3311430185c64a7ff0e | /intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/arista/eos/tests/unit/modules/network/eos/test_eos_eapi.py | a01bf1eb370a2fdae9d8734baa3d9a7804e49bea | [
"GPL-3.0-or-later",
"GPL-3.0-only",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] | permissive | SimonFangCisco/dne-dna-code | 7072eba7da0389e37507b7a2aa5f7d0c0735a220 | 2ea7d4f00212f502bc684ac257371ada73da1ca9 | refs/heads/master | 2023-03-10T23:10:31.392558 | 2021-02-25T15:04:36 | 2021-02-25T15:04:36 | 342,274,373 | 0 | 0 | MIT | 2021-02-25T14:39:22 | 2021-02-25T14:39:22 | null | UTF-8 | Python | false | false | 6,923 | py | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible_collections.arista.eos.tests.unit.compat.mock import patch
from ansible_collections.arista.eos.plugins.modules import eos_eapi
from ansible_collections.arista.eos.tests.unit.modules.utils import (
set_module_args,
)
from .eos_module import TestEosModule, load_fixture
class TestEosEapiModule(TestEosModule):
module = eos_eapi
def setUp(self):
super(TestEosEapiModule, self).setUp()
self.mock_run_commands = patch(
"ansible_collections.arista.eos.plugins.modules.eos_eapi.run_commands"
)
self.run_commands = self.mock_run_commands.start()
self.mock_load_config = patch(
"ansible_collections.arista.eos.plugins.modules.eos_eapi.load_config"
)
self.load_config = self.mock_load_config.start()
self.mock_verify_state = patch(
"ansible_collections.arista.eos.plugins.modules.eos_eapi.verify_state"
)
self.verify_state = self.mock_verify_state.start()
self.command_fixtures = {}
def tearDown(self):
super(TestEosEapiModule, self).tearDown()
self.mock_run_commands.stop()
self.mock_load_config.stop()
# hack for older version of mock
# should be using patch.stopall() but CI is still failing
try:
self.mock_verify_state.stop()
except RuntimeError:
pass
def load_fixtures(self, commands=None, transport="eapi"):
def run_commands(module, commands, **kwargs):
output = list()
for cmd in commands:
output.append(load_fixture(self.command_fixtures[cmd]))
return output
self.run_commands.side_effect = run_commands
self.load_config.return_value = dict(diff=None, session="session")
def start_configured(self, *args, **kwargs):
self.command_fixtures = {
"show vrf": "eos_eapi_show_vrf.text",
"show management api http-commands | json": "eos_eapi_show_mgmt.json",
}
return self.execute_module(*args, **kwargs)
def start_unconfigured(self, *args, **kwargs):
self.command_fixtures = {
"show vrf": "eos_eapi_show_vrf.text",
"show management api http-commands | json": "eos_eapi_show_mgmt_unconfigured.json",
}
return self.execute_module(*args, **kwargs)
def test_eos_eapi_http_enable(self):
set_module_args(dict(http=True))
commands = [
"management api http-commands",
"protocol http port 80",
"no shutdown",
]
self.start_unconfigured(changed=True, commands=commands)
def test_eos_eapi_http_disable(self):
set_module_args(dict(http=False))
commands = ["management api http-commands", "no protocol http"]
self.start_configured(changed=True, commands=commands)
def test_eos_eapi_http_port(self):
set_module_args(dict(http_port=81))
commands = ["management api http-commands", "protocol http port 81"]
self.start_configured(changed=True, commands=commands)
def test_eos_eapi_http_invalid(self):
set_module_args(dict(http_port=80000))
self.start_unconfigured(failed=True)
def test_eos_eapi_https_enable(self):
set_module_args(dict(https=True))
commands = [
"management api http-commands",
"protocol https port 443",
"no shutdown",
]
self.start_unconfigured(changed=True, commands=commands)
def test_eos_eapi_https_disable(self):
set_module_args(dict(https=False))
commands = ["management api http-commands", "no protocol https"]
self.start_configured(changed=True, commands=commands)
def test_eos_eapi_https_port(self):
set_module_args(dict(https_port=8443))
commands = ["management api http-commands", "protocol https port 8443"]
self.start_configured(changed=True, commands=commands)
def test_eos_eapi_local_http_enable(self):
set_module_args(dict(local_http=True))
commands = [
"management api http-commands",
"protocol http localhost port 8080",
"no shutdown",
]
self.start_unconfigured(changed=True, commands=commands)
def test_eos_eapi_local_http_disable(self):
set_module_args(dict(local_http=False))
commands = [
"management api http-commands",
"no protocol http localhost",
]
self.start_configured(changed=True, commands=commands)
def test_eos_eapi_local_http_port(self):
set_module_args(dict(local_http_port=81))
commands = [
"management api http-commands",
"protocol http localhost port 81",
]
self.start_configured(changed=True, commands=commands)
def test_eos_eapi_vrf(self):
set_module_args(dict(vrf="test"))
commands = [
"management api http-commands",
"no shutdown",
"vrf test",
"no shutdown",
]
self.start_unconfigured(changed=True, commands=commands)
def test_eos_eapi_change_from_default_vrf(self):
set_module_args(dict(vrf="test"))
commands = ["management api http-commands", "vrf test", "no shutdown"]
self.start_configured(changed=True, commands=commands)
def test_eos_eapi_default(self):
set_module_args(dict())
self.start_configured(changed=False, commands=[])
def test_eos_eapi_vrf_missing(self):
set_module_args(dict(vrf="missing"))
self.start_unconfigured(failed=True)
def test_eos_eapi_state_absent(self):
set_module_args(dict(state="stopped"))
commands = ["management api http-commands", "shutdown"]
self.start_configured(changed=True, commands=commands)
def test_eos_eapi_state_failed(self):
self.mock_verify_state.stop()
set_module_args(dict(state="stopped", timeout=1))
result = self.start_configured(failed=True)
"timeout expired before eapi running state changed" in result["msg"]
| [
"[email protected]"
] | |
3786cda539c2758029469435453faa4ddf75d5b5 | 9724c8cd81ad39f7f9a2419e2873d7d74cb10c72 | /pyabc/external/__init__.py | 2cb45567dc2210c4965c2f9d98839dbe36b6260c | [
"BSD-3-Clause"
] | permissive | ICB-DCM/pyABC | 36b7fc431fe4ba4b34d80d268603ec410aeaf918 | d1542fb201edca86369082e1fc7934995e3d03a4 | refs/heads/main | 2023-09-01T13:42:52.880878 | 2023-08-18T16:55:04 | 2023-08-18T16:55:04 | 96,995,608 | 187 | 49 | BSD-3-Clause | 2023-08-18T16:55:05 | 2017-07-12T10:30:10 | Python | UTF-8 | Python | false | false | 871 | py | """
.. _api_external:
External simulators
===================
This module can be used to easily interface pyABC with model simulations,
summary statistics calculators and distance functions written in programming
languages other than Python.
The class :class:`pyabc.external.ExternalHandler`, as well as derived
Model, SumStat, and Distance classes, allow the use of arbitrary languages,
with communication via file i/o.
It has been successfully used with models written in e.g. R, Java, or C++.
Further, pyABC provides efficient interfaces to R via the class
:class:`pyabc.external.r.R` via the rpy2 package, and to Julia via the class
:class:`pyabc.external.julia.Julia` via the pyjulia package.
"""
from .base import (
LOC,
RETURNCODE,
TIMEOUT,
ExternalDistance,
ExternalHandler,
ExternalModel,
ExternalSumStat,
create_sum_stat,
)
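# --- Editor's usage sketch (illustrative; the script names and keyword
# arguments are assumptions, shown for the file-i/o pattern the docstring
# describes):
# model = ExternalModel(executable="Rscript", file="model.r")
# sum_stat = ExternalSumStat(executable="Rscript", file="sumstat.r")
# distance = ExternalDistance(executable="Rscript", file="distance.r")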
| [
"[email protected]"
] | |
0a792293e4a77a07abaf9f4e415181af5f8c98e6 | 0a2f79a4338615113abf34dbd027d6bec762c981 | /tries.py | 066636c52b3e15a0e99ff4763be6533413bc7248 | [] | no_license | SmithChen-364/learngit | d56220cb5902b30b7832f3cdeb4d429c58583d1c | b5202a269d06d745477c146bf0379a5f16a6b585 | refs/heads/master | 2020-06-20T12:11:19.678310 | 2019-08-06T07:47:29 | 2019-08-06T07:47:29 | 197,118,166 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,807 | py | #tries for search words
# tries class
class tries:
# root node
root=None
# add method
def add(self,word):
# If there is no root node, add the first character of the first word as the root;
# if a root already exists, start adding nodes from the first character.
if(not self.root):
self.root=self.add_element(word,0,self.root)
self.root.parentNode=self.triesNode("root")
else:
self.add_element(word,0,self.root)
def addList(self,wordlist):
if(len(wordlist)==0):
print("invalid list you input")
else:
for item in wordlist:
self.add(item)
def add_element(self,word,index,current):
if(current==None):
current=self.triesNode(word[index])
if(word[index]<current.getChar()):
current.leftNode=self.add_element(word,index,current.leftNode)
current.leftNode.parentNode=current
elif(word[index]>current.getChar()):
current.rightNode=self.add_element(word,index,current.rightNode)
current.rightNode.parentNode=current
else:
if(index==len(word)-1):
current.setFreq(current.getFreq()+1)
return current
current.middleNode=self.add_element(word,index+1,current.middleNode)
current.middleNode.parentNode=current
return current
def delete(self,word):
isExist=self.search(word)
if(isExist):
isExist.setFreq(0)
def printAll(self):
self.printAllNode(self.root,[])
def printPrefix(self,word):
self.add(word)
prefix=self.search(word)
if(prefix.getFreq()>1):
print(word)
self.printAllNode(prefix.middleNode,list(word[:-1]))
prefix.setFreq(prefix.getFreq()-1)
def printAllNode(self,_currentNode,storage):
if(_currentNode==None):
return
if(_currentNode.parentNode.middleNode is _currentNode):
storage.append(_currentNode.parentNode.getChar())
if(_currentNode.getFreq()!=0):
storage.append(_currentNode.getChar())
print("".join(storage))
storage.pop()
self.printAllNode(_currentNode.leftNode,storage)
self.printAllNode(_currentNode.middleNode,storage)
self.printAllNode(_currentNode.rightNode,storage)
if(_currentNode.parentNode.middleNode is _currentNode):
storage.pop()
def search(self,word):
i=0
currentNode=self.root
parentNode=None
Freq=0
while(i<len(word)):
if(currentNode==None):
break
if(word[i]>currentNode.getChar()):
currentNode=currentNode.rightNode
elif(word[i]<currentNode.getChar()):
currentNode=currentNode.leftNode
else:
Freq=currentNode.getFreq()
parentNode=currentNode
currentNode=currentNode.middleNode
i=i+1
if(i==len(word) and (not Freq==0)):
print(word+" is in it ")
print("%d times recorded!"%Freq)
return parentNode
else:
print(word+" is not in it")
return None
class triesNode:
leftNode=rightNode=middleNode=None
char=None
parentNode=None
freq=0
def __init__(self,char):
self.char=char
def setFreq(self,freq):
self.freq=freq
def getFreq(self):
return self.freq
def getChar(self):
return self.char
person=tries()
person.addList(["shell","she","shelter","salad","sho","sh"])
print("printALL")
person.printAll()
print("printPrefix")
person.printPrefix("she")
| [
"[email protected]"
] | |
6497c191d192cb0725b81862d4fff0d3b8e6c824 | 5c1e5935129b4d0f5626f64bd62e3370b0553262 | /openpype/settings/entities/list_entity.py | c6155b78f8d9f09056fe7454d0edca180480bd03 | [
"MIT"
] | permissive | dangerstudios/OpenPype | c72f9d6d3a4a964697fa77712c116cfd2949b696 | f82ed5e127340e852a7517ff1e13e2524ee6e704 | refs/heads/main | 2023-04-21T00:25:02.364398 | 2021-04-13T22:10:36 | 2021-04-13T22:10:36 | 362,811,887 | 0 | 0 | null | 2021-04-29T12:35:54 | 2021-04-29T12:35:53 | null | UTF-8 | Python | false | false | 14,725 | py | import copy
from . import (
BaseEntity,
EndpointEntity
)
from .lib import (
NOT_SET,
OverrideState
)
from .exceptions import (
DefaultsNotDefined,
StudioDefaultsNotDefined,
EntitySchemaError
)
class ListEntity(EndpointEntity):
schema_types = ["list"]
_default_label_wrap = {
"use_label_wrap": False,
"collapsible": True,
"collapsed": False
}
def __iter__(self):
for item in self.children:
yield item
def __bool__(self):
"""Returns true because len may return 0."""
return True
def __len__(self):
return len(self.children)
def __contains__(self, item):
if isinstance(item, BaseEntity):
for child_entity in self.children:
if child_entity.id == item.id:
return True
return False
for _item in self.value:
if item == _item:
return True
return False
def index(self, item):
if isinstance(item, BaseEntity):
for idx, child_entity in enumerate(self.children):
if child_entity.id == item.id:
return idx
else:
for idx, _item in enumerate(self.value):
if item == _item:
return idx
raise ValueError(
"{} is not in {}".format(item, self.__class__.__name__)
)
def append(self, item):
child_obj = self._add_new_item()
child_obj.set_override_state(self._override_state)
child_obj.set(item)
self.on_change()
def extend(self, items):
for item in items:
self.append(item)
def clear(self):
self.children.clear()
self.on_change()
def pop(self, idx):
item = self.children.pop(idx)
self.on_change()
return item
def remove(self, item):
for idx, child_obj in enumerate(self.children):
found = False
if isinstance(item, BaseEntity):
if child_obj is item:
found = True
elif child_obj.value == item:
found = True
if found:
self.pop(idx)
return
raise ValueError("ListEntity.remove(x): x not in ListEntity")
def insert(self, idx, item):
child_obj = self._add_new_item(idx)
child_obj.set_override_state(self._override_state)
child_obj.set(item)
self.on_change()
def _add_new_item(self, idx=None):
child_obj = self.create_schema_object(self.item_schema, self, True)
if idx is None:
self.children.append(child_obj)
else:
self.children.insert(idx, child_obj)
return child_obj
def add_new_item(self, idx=None):
child_obj = self._add_new_item(idx)
child_obj.set_override_state(self._override_state)
self.on_change()
return child_obj
def swap_items(self, item_1, item_2):
index_1 = self.index(item_1)
index_2 = self.index(item_2)
self.swap_indexes(index_1, index_2)
def swap_indexes(self, index_1, index_2):
children_len = len(self.children)
if index_1 > children_len or index_2 > children_len:
raise IndexError(
"{} index out of range".format(self.__class__.__name__)
)
self.children[index_1], self.children[index_2] = (
self.children[index_2], self.children[index_1]
)
self.on_change()
def _convert_to_valid_type(self, value):
if isinstance(value, (set, tuple)):
return list(value)
return NOT_SET
def _item_initalization(self):
self.valid_value_types = (list, )
self.children = []
self.value_on_not_set = []
self._ignore_child_changes = False
item_schema = self.schema_data["object_type"]
if not isinstance(item_schema, dict):
item_schema = {"type": item_schema}
self.item_schema = item_schema
if not self.group_item:
self.is_group = True
# Value that was set on set_override_state
self.initial_value = []
def schema_validations(self):
super(ListEntity, self).schema_validations()
if self.is_dynamic_item and self.use_label_wrap:
reason = (
"`ListWidget` can't have set `use_label_wrap` to True and"
" be used as widget at the same time."
)
raise EntitySchemaError(self, reason)
if self.use_label_wrap and not self.label:
reason = (
"`ListWidget` can't have set `use_label_wrap` to True and"
" not have set \"label\" key at the same time."
)
raise EntitySchemaError(self, reason)
for child_obj in self.children:
child_obj.schema_validations()
def get_child_path(self, child_obj):
result_idx = None
for idx, _child_obj in enumerate(self.children):
if _child_obj is child_obj:
result_idx = idx
break
if result_idx is None:
raise ValueError("Didn't found child {}".format(child_obj))
return "/".join([self.path, str(result_idx)])
def set(self, value):
new_value = self.convert_to_valid_type(value)
self.clear()
for item in new_value:
self.append(item)
def on_child_change(self, _child_entity):
if self._ignore_child_changes:
return
if self._override_state is OverrideState.STUDIO:
self._has_studio_override = True
elif self._override_state is OverrideState.PROJECT:
self._has_project_override = True
self.on_change()
def set_override_state(self, state):
# Trigger override state change of root if is not same
if self.root_item.override_state is not state:
self.root_item.set_override_state(state)
return
self._override_state = state
while self.children:
self.children.pop(0)
# Skip these checks for dynamic items; they fall back to defaults
if not self.is_dynamic_item and not self.is_in_dynamic_item:
if state > OverrideState.DEFAULTS:
if not self.has_default_value:
raise DefaultsNotDefined(self)
elif state > OverrideState.STUDIO:
if not self.had_studio_override:
raise StudioDefaultsNotDefined(self)
value = NOT_SET
if self._override_state is OverrideState.PROJECT:
if self.had_project_override:
value = self._project_override_value
self._has_project_override = self.had_project_override
if value is NOT_SET or self._override_state is OverrideState.STUDIO:
if self.had_studio_override:
value = self._studio_override_value
self._has_studio_override = self.had_studio_override
if value is NOT_SET or self._override_state is OverrideState.DEFAULTS:
if self.has_default_value:
value = self._default_value
else:
value = self.value_on_not_set
for item in value:
child_obj = self._add_new_item()
child_obj.update_default_value(item)
if self._override_state is OverrideState.PROJECT:
if self.had_project_override:
child_obj.update_project_value(item)
elif self.had_studio_override:
child_obj.update_studio_value(item)
elif self._override_state is OverrideState.STUDIO:
if self.had_studio_override:
child_obj.update_studio_value(item)
for child_obj in self.children:
child_obj.set_override_state(self._override_state)
self.initial_value = self.settings_value()
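# Editor's note (illustrative): set_override_state above resolves children
# from the highest defined layer -- project override, then studio override,
# then defaults, then value_on_not_set -- before pushing the state down.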
@property
def value(self):
output = []
for child_obj in self.children:
output.append(child_obj.value)
return output
@property
def has_unsaved_changes(self):
if self._override_state is OverrideState.NOT_DEFINED:
return False
if self._override_state is OverrideState.DEFAULTS:
if not self.has_default_value:
return True
elif self._override_state is OverrideState.STUDIO:
if self.had_studio_override != self._has_studio_override:
return True
if not self._has_studio_override and not self.has_default_value:
return True
elif self._override_state is OverrideState.PROJECT:
if self.had_project_override != self._has_project_override:
return True
if (
not self._has_project_override
and not self._has_studio_override
and not self.has_default_value
):
return True
if self._child_has_unsaved_changes:
return True
if self.settings_value() != self.initial_value:
return True
return False
@property
def has_studio_override(self):
if self._override_state >= OverrideState.STUDIO:
return (
self._has_studio_override
or self._child_has_studio_override
)
return False
@property
def has_project_override(self):
if self._override_state >= OverrideState.PROJECT:
return (
self._has_project_override
or self._child_has_project_override
)
return False
@property
def _child_has_unsaved_changes(self):
for child_obj in self.children:
if child_obj.has_unsaved_changes:
return True
return False
@property
def _child_has_studio_override(self):
if self._override_state >= OverrideState.STUDIO:
for child_obj in self.children:
if child_obj.has_studio_override:
return True
return False
@property
def _child_has_project_override(self):
if self._override_state is OverrideState.PROJECT:
for child_obj in self.children:
if child_obj.has_project_override:
return True
return False
def _settings_value(self):
output = []
for child_obj in self.children:
output.append(child_obj.settings_value())
return output
def _discard_changes(self, on_change_trigger):
if self._override_state is OverrideState.NOT_DEFINED:
return
not_set = object()
value = not_set
if (
self._override_state >= OverrideState.PROJECT
and self.had_project_override
):
value = copy.deepcopy(self._project_override_value)
if (
value is not_set
and self._override_state >= OverrideState.STUDIO
and self.had_studio_override
):
value = copy.deepcopy(self._studio_override_value)
if value is not_set and self._override_state >= OverrideState.DEFAULTS:
if self.has_default_value:
value = copy.deepcopy(self._default_value)
else:
value = copy.deepcopy(self.value_on_not_set)
if value is not_set:
raise NotImplementedError("BUG: Unexcpected part of code.")
self._ignore_child_changes = True
while self.children:
self.children.pop(0)
for item in value:
child_obj = self._add_new_item()
child_obj.update_default_value(item)
if self._override_state is OverrideState.PROJECT:
if self.had_project_override:
child_obj.update_project_value(item)
elif self.had_studio_override:
child_obj.update_studio_value(item)
elif self._override_state is OverrideState.STUDIO:
if self.had_studio_override:
child_obj.update_studio_value(item)
child_obj.set_override_state(self._override_state)
if self._override_state >= OverrideState.PROJECT:
self._has_project_override = self.had_project_override
if self._override_state >= OverrideState.STUDIO:
self._has_studio_override = self.had_studio_override
self._ignore_child_changes = False
on_change_trigger.append(self.on_change)
def _add_to_studio_default(self, _on_change_trigger):
self._has_studio_override = True
self.on_change()
def _remove_from_studio_default(self, on_change_trigger):
if self._override_state is not OverrideState.STUDIO:
return
value = self._default_value
if value is NOT_SET:
value = self.value_on_not_set
self._ignore_child_changes = True
while self.children:
self.children.pop(0)
for item in value:
child_obj = self._add_new_item()
child_obj.update_default_value(item)
child_obj.set_override_state(self._override_state)
self._ignore_child_changes = False
self._has_studio_override = False
on_change_trigger.append(self.on_change)
def _add_to_project_override(self, _on_change_trigger):
self._has_project_override = True
self.on_change()
def _remove_from_project_override(self, on_change_trigger):
if self._override_state is not OverrideState.PROJECT:
return
if not self.has_project_override:
return
if self._has_studio_override:
value = self._studio_override_value
elif self.has_default_value:
value = self._default_value
else:
value = self.value_on_not_set
self._ignore_child_changes = True
while self.children:
self.children.pop(0)
for item in value:
child_obj = self._add_new_item()
child_obj.update_default_value(item)
if self._has_studio_override:
child_obj.update_studio_value(item)
child_obj.set_override_state(self._override_state)
self._ignore_child_changes = False
self._has_project_override = False
on_change_trigger.append(self.on_change)
def reset_callbacks(self):
super(ListEntity, self).reset_callbacks()
for child_entity in self.children:
child_entity.reset_callbacks()
| [
"[email protected]"
] | |
50efae8cf3c6ae5e6053a7c24c353564133793bc | 8b7334be253552c4a2982e3022d211ad8970abf6 | /data/migrations/0053_merge_20180615_1859.py | 77b069dd59b066015e97cf4ed41046967c32178f | [
"MIT"
] | permissive | Duke-GCB/bespin-api | e5442bf66471bd9469f8094575a1efc00eafc700 | cea5c20fb2ff592adabe6ebb7ca934939aa11a34 | refs/heads/master | 2021-08-16T05:36:25.323416 | 2019-10-18T15:30:16 | 2019-10-18T15:30:16 | 69,596,994 | 0 | 3 | MIT | 2021-06-10T18:55:46 | 2016-09-29T18:41:56 | Python | UTF-8 | Python | false | false | 333 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2018-06-15 18:59
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('data', '0051_merge_20180615_1758'),
('data', '0052_auto_20180615_1537'),
]
operations = [
]
| [
"[email protected]"
] | |
8f2a8b1a47778e9fafd036fb9c90ce4733f8b4a4 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_2_1/branyine/2016_round1B_A.py | 3a2e46214d97bb27396b00187e8aa2574f9e2376 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 1,790 | py | # link: https://code.google.com/codejam/contest/dashboard?c= #s=p0
import string
import time
testIndex=2
problemRoot="d:/prog/versenyek/googlejam"
problemDir="2016/round1B"
problemName="A"
inputFiles= ["-example.in", "-small.in", "-large.in"]
outputFiles=["-example.out", "-small.out", "-large.out"]
time1=time.time()
fileName=string.strip(problemRoot)+"/"+problemDir+"/"+problemName+inputFiles[testIndex]
inputData=[map(str, line.split()) for line in open(fileName,'r') if line.strip()]
fileName=string.strip(problemRoot)+"/"+problemDir+"/"+problemName+outputFiles[testIndex]
fileToWrite=open(fileName,'wb')
time2=time.time()
iLineNum=1
for iCase in xrange(int(inputData[0][0])):
numStr=inputData[iLineNum][0]
# Z num of zeros
# W num of twos
# U num of fours
# X num of sixes
# G num of eights
# F num minus fours is num fives
# V num minus fives is num sevens
# R num minus zeros minus fours is num threes
# I num minus fives minus sixes minus eights is num nines
# N num minus sevens minus nines*2 is num ones
numChs={}
for ch in 'ZWUXGFVRIN':
numChs[ch]=0
for ch in numStr:
if ch in 'ZWUXGFVRIN':
numChs[ch]+=1
nums=[0]*10
nums[0]=numChs['Z']
nums[2]=numChs['W']
nums[4]=numChs['U']
nums[6]=numChs['X']
nums[8]=numChs['G']
nums[5]=numChs['F']-nums[4]
nums[7]=numChs['V']-nums[5]
nums[3]=numChs['R']-nums[0]-nums[4]
nums[9]=numChs['I']-nums[5]-nums[6]-nums[8]
nums[1]=numChs['N']-nums[7]-2*nums[9]
toCall=''
for i in xrange(10):
toCall+=str(i)*nums[i]
print toCall
fileToWrite.write("Case #"+str(iCase+1)+": "+toCall+"\n")
iLineNum+=1
fileToWrite.close()
print 'Total time: ', time.time() - time1
print 'Solving time: ', time.time() - time2
| [
"[[email protected]]"
] | |
2d6afda630fcaf11b046b7bc8f1aea4823f7bb47 | 697fb11686110f569e7f4284045049d008688221 | /XDG_CACHE_HOME/Microsoft/Python Language Server/stubs.v1/Sm6gN7BhS-h48BYanqqR8e9hKpcKaYmhkeYrr4_nJd4=/_csv.cpython-37dm-x86_64-linux-gnu.pyi | ec5834e00fd39a7fb05a5e346445653229172585 | [
"Apache-2.0"
] | permissive | andredoumad/p3env | 433c9174899f0909b149f51c3243b6fe04e076bf | a8850d06755d53eb6fedd9995091dad34f1f9ccd | refs/heads/master | 2023-02-03T20:50:07.357255 | 2020-12-23T09:15:55 | 2020-12-23T09:15:55 | 317,041,015 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,032 | pyi | import builtins as _mod_builtins
class Dialect(_mod_builtins.object):
'CSV dialect\n\nThe Dialect type records CSV parsing and generation options.\n'
__class__ = Dialect
def __init__(self, *args, **kwargs):
'CSV dialect\n\nThe Dialect type records CSV parsing and generation options.\n'
pass
@classmethod
def __init_subclass__(cls):
'This method is called when a class is subclassed.\n\nThe default implementation does nothing. It may be\noverridden to extend subclasses.\n'
return None
@classmethod
def __subclasshook__(cls, subclass):
'Abstract classes can override this to customize issubclass().\n\nThis is invoked early on by abc.ABCMeta.__subclasscheck__().\nIt should return True, False or NotImplemented. If it returns\nNotImplemented, the normal algorithm is used. Otherwise, it\noverrides the normal algorithm (and the outcome is cached).\n'
return False
@property
def delimiter(self):
pass
@property
def doublequote(self):
pass
@property
def escapechar(self):
pass
@property
def lineterminator(self):
pass
@property
def quotechar(self):
pass
@property
def quoting(self):
pass
@property
def skipinitialspace(self):
pass
@property
def strict(self):
pass
class Error(_mod_builtins.Exception):
__class__ = Error
__dict__ = {}
def __init__(self, *args, **kwargs):
pass
@classmethod
def __init_subclass__(cls):
'This method is called when a class is subclassed.\n\nThe default implementation does nothing. It may be\noverridden to extend subclasses.\n'
return None
__module__ = '_csv'
@classmethod
def __subclasshook__(cls, subclass):
'Abstract classes can override this to customize issubclass().\n\nThis is invoked early on by abc.ABCMeta.__subclasscheck__().\nIt should return True, False or NotImplemented. If it returns\nNotImplemented, the normal algorithm is used. Otherwise, it\noverrides the normal algorithm (and the outcome is cached).\n'
return False
@property
def __weakref__(self):
'list of weak references to the object (if defined)'
pass
QUOTE_ALL = 1
QUOTE_MINIMAL = 0
QUOTE_NONE = 3
QUOTE_NONNUMERIC = 2
__doc__ = 'CSV parsing and writing.\n\nThis module provides classes that assist in the reading and writing\nof Comma Separated Value (CSV) files, and implements the interface\ndescribed by PEP 305. Although many CSV files are simple to parse,\nthe format is not formally defined by a stable specification and\nis subtle enough that parsing lines of a CSV file with something\nlike line.split(",") is bound to fail. The module supports three\nbasic APIs: reading, writing, and registration of dialects.\n\n\nDIALECT REGISTRATION:\n\nReaders and writers support a dialect argument, which is a convenient\nhandle on a group of settings. When the dialect argument is a string,\nit identifies one of the dialects previously registered with the module.\nIf it is a class or instance, the attributes of the argument are used as\nthe settings for the reader or writer:\n\n class excel:\n delimiter = \',\'\n quotechar = \'"\'\n escapechar = None\n doublequote = True\n skipinitialspace = False\n lineterminator = \'\\r\\n\'\n quoting = QUOTE_MINIMAL\n\nSETTINGS:\n\n * quotechar - specifies a one-character string to use as the \n quoting character. It defaults to \'"\'.\n * delimiter - specifies a one-character string to use as the \n field separator. It defaults to \',\'.\n * skipinitialspace - specifies how to interpret whitespace which\n immediately follows a delimiter. It defaults to False, which\n means that whitespace immediately following a delimiter is part\n of the following field.\n * lineterminator - specifies the character sequence which should \n terminate rows.\n * quoting - controls when quotes should be generated by the writer.\n It can take on any of the following module constants:\n\n csv.QUOTE_MINIMAL means only when required, for example, when a\n field contains either the quotechar or the delimiter\n csv.QUOTE_ALL means that quotes are always placed around fields.\n csv.QUOTE_NONNUMERIC means that quotes are always placed around\n fields which do not parse as integers or floating point\n numbers.\n csv.QUOTE_NONE means that quotes are never placed around fields.\n * escapechar - specifies a one-character string used to escape \n the delimiter when quoting is set to QUOTE_NONE.\n * doublequote - controls the handling of quotes inside fields. When\n True, two consecutive quotes are interpreted as one during read,\n and when writing, each quote character embedded in the data is\n written as two quotes\n'
__file__ = '/home/gordon/p3env/lib/python3.7/lib-dynload/_csv.cpython-37m-x86_64-linux-gnu.so'
__name__ = '_csv'
__package__ = ''
__version__ = '1.0'
_dialects = _mod_builtins.dict()
def field_size_limit():
'Sets an upper limit on parsed fields.\n csv.field_size_limit([limit])\n\nReturns old limit. If limit is not given, no new limit is set and\nthe old limit is returned'
pass
def get_dialect():
'Return the dialect instance associated with name.\n dialect = csv.get_dialect(name)'
pass
def list_dialects():
'Return a list of all know dialect names.\n names = csv.list_dialects()'
pass
def reader():
' csv_reader = reader(iterable [, dialect=\'excel\']\n [optional keyword args])\n for row in csv_reader:\n process(row)\n\nThe "iterable" argument can be any object that returns a line\nof input for each iteration, such as a file object or a list. The\noptional "dialect" parameter is discussed below. The function\nalso accepts optional keyword arguments which override settings\nprovided by the dialect.\n\nThe returned object is an iterator. Each iteration returns a row\nof the CSV file (which can span multiple input lines).\n'
pass
def register_dialect():
'Create a mapping from a string name to a dialect class.\n dialect = csv.register_dialect(name[, dialect[, **fmtparams]])'
pass
def unregister_dialect():
'Delete the name/dialect mapping associated with a string name.\n csv.unregister_dialect(name)'
pass
def writer():
' csv_writer = csv.writer(fileobj [, dialect=\'excel\']\n [optional keyword args])\n for row in sequence:\n csv_writer.writerow(row)\n\n [or]\n\n csv_writer = csv.writer(fileobj [, dialect=\'excel\']\n [optional keyword args])\n csv_writer.writerows(rows)\n\nThe "fileobj" argument can be any object that supports the file API.\n'
pass
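# Illustrative usage of the APIs stubbed above (a sketch, not part of the
# stub itself; the file name "people.csv" is an assumption):
#
#     import csv
#     csv.register_dialect("pipes", delimiter="|", quoting=csv.QUOTE_MINIMAL)
#     with open("people.csv", "w", newline="") as f:
#         csv.writer(f, dialect="pipes").writerows([["name", "age"], ["Ada", 36]])
#     with open("people.csv", newline="") as f:
#         for row in csv.reader(f, dialect="pipes"):
#             print(row)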
# ==== /pycryptics/utils/ngrams.py (repo: kod3r/cryptics, license: CC-BY-3.0/MIT) ====
import cPickle as pickle
import os.path
INITIAL_NGRAMS = dict()
NGRAMS = dict()
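# Merge every consecutively numbered pickle shard under data/ (ngrams.00.pck,
# ngrams.01.pck, ...) until the next file in the sequence is missing.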
i = 0
while True:
if os.path.exists('data/ngrams.%02d.pck' % i):
with open('data/initial_ngrams.%02d.pck' % i, 'rb') as f:
d = pickle.load(f)
INITIAL_NGRAMS.update(d)
with open('data/ngrams.%02d.pck' % i, 'rb') as f:
d = pickle.load(f)
NGRAMS.update(d)
i += 1
else:
break
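# Downstream lookups are plain dict access (illustrative; the key format
# depends entirely on how the pickle shards in data/ were generated):
#   NGRAMS.get(some_key, 0)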
# ==== /lib/database/mongodb/build/lib.linux-x86_64-2.7/gridfs/grid_file.py (repo: nadirhamid/oneline, license: Apache-2.0) ====
# Copyright 2009-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tools for representing files stored in GridFS."""
import datetime
import math
import os
from bson.binary import Binary
from bson.objectid import ObjectId
from bson.py3compat import (b, binary_type, next_item,
string_types, text_type, StringIO)
from gridfs.errors import (CorruptGridFile,
FileExists,
NoFile,
UnsupportedAPI)
from pymongo import ASCENDING
from pymongo.collection import Collection
from pymongo.cursor import Cursor
from pymongo.errors import DuplicateKeyError
from pymongo.read_preferences import ReadPreference
try:
_SEEK_SET = os.SEEK_SET
_SEEK_CUR = os.SEEK_CUR
_SEEK_END = os.SEEK_END
# before 2.5
except AttributeError:
_SEEK_SET = 0
_SEEK_CUR = 1
_SEEK_END = 2
EMPTY = b("")
NEWLN = b("\n")
"""Default chunk size, in bytes."""
# Slightly under a power of 2, to work well with server's record allocations.
DEFAULT_CHUNK_SIZE = 255 * 1024
def _create_property(field_name, docstring,
read_only=False, closed_only=False):
"""Helper for creating properties to read/write to files.
"""
def getter(self):
if closed_only and not self._closed:
raise AttributeError("can only get %r on a closed file" %
field_name)
# Protect against PHP-237
if field_name == 'length':
return self._file.get(field_name, 0)
return self._file.get(field_name, None)
def setter(self, value):
if self._closed:
self._coll.files.update({"_id": self._file["_id"]},
{"$set": {field_name: value}},
**self._coll._get_wc_override())
self._file[field_name] = value
if read_only:
docstring = docstring + "\n\nThis attribute is read-only."
elif closed_only:
docstring = "%s\n\n%s" % (docstring, "This attribute is read-only and "
"can only be read after :meth:`close` "
"has been called.")
if not read_only and not closed_only:
return property(getter, setter, doc=docstring)
return property(getter, doc=docstring)
class GridIn(object):
"""Class to write data to GridFS.
"""
def __init__(self, root_collection, **kwargs):
"""Write a file to GridFS
Application developers should generally not need to
instantiate this class directly - instead see the methods
provided by :class:`~gridfs.GridFS`.
Raises :class:`TypeError` if `root_collection` is not an
instance of :class:`~pymongo.collection.Collection`.
Any of the file level options specified in the `GridFS Spec
<http://dochub.mongodb.org/core/gridfsspec>`_ may be passed as
keyword arguments. Any additional keyword arguments will be
set as additional fields on the file document. Valid keyword
arguments include:
- ``"_id"``: unique ID for this file (default:
:class:`~bson.objectid.ObjectId`) - this ``"_id"`` must
not have already been used for another file
- ``"filename"``: human name for the file
- ``"contentType"`` or ``"content_type"``: valid mime-type
for the file
- ``"chunkSize"`` or ``"chunk_size"``: size of each of the
chunks, in bytes (default: 256 kb)
- ``"encoding"``: encoding used for this file. In Python 2,
any :class:`unicode` that is written to the file will be
converted to a :class:`str`. In Python 3, any :class:`str`
that is written to the file will be converted to
:class:`bytes`.
If you turn off write-acknowledgment for performance reasons, it is
critical to wrap calls to :meth:`write` and :meth:`close` within a
single request:
>>> from pymongo import MongoClient
>>> from gridfs import GridFS
>>> client = MongoClient(w=0) # turn off write acknowledgment
>>> fs = GridFS(client.database)
>>> gridin = fs.new_file()
>>> request = client.start_request()
>>> try:
... for i in range(10):
... gridin.write('foo')
... gridin.close()
... finally:
... request.end()
In Python 2.5 and later this code can be simplified with a
with-statement, see :doc:`/examples/requests` for more information.
:Parameters:
- `root_collection`: root collection to write to
- `**kwargs` (optional): file level options (see above)
"""
if not isinstance(root_collection, Collection):
raise TypeError("root_collection must be an "
"instance of Collection")
# Handle alternative naming
if "content_type" in kwargs:
kwargs["contentType"] = kwargs.pop("content_type")
if "chunk_size" in kwargs:
kwargs["chunkSize"] = kwargs.pop("chunk_size")
# Defaults
kwargs["_id"] = kwargs.get("_id", ObjectId())
kwargs["chunkSize"] = kwargs.get("chunkSize", DEFAULT_CHUNK_SIZE)
object.__setattr__(self, "_coll", root_collection)
object.__setattr__(self, "_chunks", root_collection.chunks)
object.__setattr__(self, "_file", kwargs)
object.__setattr__(self, "_buffer", StringIO())
object.__setattr__(self, "_position", 0)
object.__setattr__(self, "_chunk_number", 0)
object.__setattr__(self, "_closed", False)
object.__setattr__(self, "_ensured_index", False)
def _ensure_index(self):
if not object.__getattribute__(self, "_ensured_index"):
self._coll.chunks.ensure_index(
[("files_id", ASCENDING), ("n", ASCENDING)],
unique=True)
object.__setattr__(self, "_ensured_index", True)
@property
def closed(self):
"""Is this file closed?
"""
return self._closed
_id = _create_property("_id", "The ``'_id'`` value for this file.",
read_only=True)
filename = _create_property("filename", "Name of this file.")
name = _create_property("filename", "Alias for `filename`.")
content_type = _create_property("contentType", "Mime-type for this file.")
length = _create_property("length", "Length (in bytes) of this file.",
closed_only=True)
chunk_size = _create_property("chunkSize", "Chunk size for this file.",
read_only=True)
upload_date = _create_property("uploadDate",
"Date that this file was uploaded.",
closed_only=True)
md5 = _create_property("md5", "MD5 of the contents of this file "
"(generated on the server).",
closed_only=True)
def __getattr__(self, name):
if name in self._file:
return self._file[name]
raise AttributeError("GridIn object has no attribute '%s'" % name)
def __setattr__(self, name, value):
# For properties of this instance like _buffer, or descriptors set on
# the class like filename, use regular __setattr__
if name in self.__dict__ or name in self.__class__.__dict__:
object.__setattr__(self, name, value)
else:
# All other attributes are part of the document in db.fs.files.
# Store them to be sent to server on close() or if closed, send
# them now.
self._file[name] = value
if self._closed:
self._coll.files.update({"_id": self._file["_id"]},
{"$set": {name: value}},
**self._coll._get_wc_override())
def __flush_data(self, data):
"""Flush `data` to a chunk.
"""
# Ensure the index, even if there's nothing to write, so
# the filemd5 command always succeeds.
self._ensure_index()
if not data:
return
assert(len(data) <= self.chunk_size)
chunk = {"files_id": self._file["_id"],
"n": self._chunk_number,
"data": Binary(data)}
try:
self._chunks.insert(chunk)
except DuplicateKeyError:
self._raise_file_exists(self._file['_id'])
self._chunk_number += 1
self._position += len(data)
def __flush_buffer(self):
"""Flush the buffer contents out to a chunk.
"""
self.__flush_data(self._buffer.getvalue())
self._buffer.close()
self._buffer = StringIO()
def __flush(self):
"""Flush the file to the database.
"""
try:
self.__flush_buffer()
db = self._coll.database
# See PYTHON-417, "Sharded GridFS fails with exception: chunks out
# of order." Inserts via mongos, even if they use a single
# connection, can succeed out-of-order due to the writebackListener.
# We mustn't call "filemd5" until all inserts are complete, which
# we ensure by calling getLastError (and ignoring the result).
db.error()
md5 = db.command(
"filemd5", self._id, root=self._coll.name,
read_preference=ReadPreference.PRIMARY)["md5"]
self._file["md5"] = md5
self._file["length"] = self._position
self._file["uploadDate"] = datetime.datetime.utcnow()
return self._coll.files.insert(self._file,
**self._coll._get_wc_override())
except DuplicateKeyError:
self._raise_file_exists(self._id)
def _raise_file_exists(self, file_id):
"""Raise a FileExists exception for the given file_id."""
raise FileExists("file with _id %r already exists" % file_id)
def close(self):
"""Flush the file and close it.
A closed file cannot be written any more. Calling
:meth:`close` more than once is allowed.
"""
if not self._closed:
self.__flush()
object.__setattr__(self, "_closed", True)
def write(self, data):
"""Write data to the file. There is no return value.
`data` can be either a string of bytes or a file-like object
(implementing :meth:`read`). If the file has an
:attr:`encoding` attribute, `data` can also be a
:class:`unicode` (:class:`str` in python 3) instance, which
will be encoded as :attr:`encoding` before being written.
Due to buffering, the data may not actually be written to the
database until the :meth:`close` method is called. Raises
:class:`ValueError` if this file is already closed. Raises
:class:`TypeError` if `data` is not an instance of
:class:`str` (:class:`bytes` in python 3), a file-like object,
or an instance of :class:`unicode` (:class:`str` in python 3).
Unicode data is only allowed if the file has an :attr:`encoding`
attribute.
:Parameters:
- `data`: string of bytes or file-like object to be written
to the file
.. versionadded:: 1.9
The ability to write :class:`unicode`, if the file has an
:attr:`encoding` attribute.
"""
if self._closed:
raise ValueError("cannot write to a closed file")
try:
# file-like
read = data.read
except AttributeError:
# string
if not isinstance(data, string_types):
raise TypeError("can only write strings or file-like objects")
            if isinstance(data, text_type):
try:
data = data.encode(self.encoding)
except AttributeError:
raise TypeError("must specify an encoding for file in "
"order to write %s" % (text_type.__name__,))
read = StringIO(data).read
if self._buffer.tell() > 0:
# Make sure to flush only when _buffer is complete
space = self.chunk_size - self._buffer.tell()
if space:
to_write = read(space)
self._buffer.write(to_write)
if len(to_write) < space:
return # EOF or incomplete
self.__flush_buffer()
to_write = read(self.chunk_size)
while to_write and len(to_write) == self.chunk_size:
self.__flush_data(to_write)
to_write = read(self.chunk_size)
self._buffer.write(to_write)
def writelines(self, sequence):
"""Write a sequence of strings to the file.
        Does not add separators.
"""
for line in sequence:
self.write(line)
def __enter__(self):
"""Support for the context manager protocol.
"""
return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""Support for the context manager protocol.
Close the file and allow exceptions to propagate.
"""
self.close()
# propagate exceptions
return False
class GridOut(object):
"""Class to read data out of GridFS.
"""
def __init__(self, root_collection, file_id=None, file_document=None,
_connect=True):
"""Read a file from GridFS
Application developers should generally not need to
instantiate this class directly - instead see the methods
provided by :class:`~gridfs.GridFS`.
Either `file_id` or `file_document` must be specified,
`file_document` will be given priority if present. Raises
:class:`TypeError` if `root_collection` is not an instance of
:class:`~pymongo.collection.Collection`.
:Parameters:
- `root_collection`: root collection to read from
- `file_id`: value of ``"_id"`` for the file to read
- `file_document`: file document from `root_collection.files`
.. versionadded:: 1.9
The `file_document` parameter.
"""
if not isinstance(root_collection, Collection):
raise TypeError("root_collection must be an "
"instance of Collection")
self.__chunks = root_collection.chunks
self.__files = root_collection.files
self.__file_id = file_id
self.__buffer = EMPTY
self.__position = 0
self._file = file_document
if _connect:
self._ensure_file()
_id = _create_property("_id", "The ``'_id'`` value for this file.", True)
filename = _create_property("filename", "Name of this file.", True)
name = _create_property("filename", "Alias for `filename`.", True)
content_type = _create_property("contentType", "Mime-type for this file.",
True)
length = _create_property("length", "Length (in bytes) of this file.",
True)
chunk_size = _create_property("chunkSize", "Chunk size for this file.",
True)
upload_date = _create_property("uploadDate",
"Date that this file was first uploaded.",
True)
aliases = _create_property("aliases", "List of aliases for this file.",
True)
metadata = _create_property("metadata", "Metadata attached to this file.",
True)
md5 = _create_property("md5", "MD5 of the contents of this file "
"(generated on the server).", True)
def _ensure_file(self):
if not self._file:
self._file = self.__files.find_one({"_id": self.__file_id})
if not self._file:
raise NoFile("no file in gridfs collection %r with _id %r" %
(self.__files, self.__file_id))
def __getattr__(self, name):
self._ensure_file()
if name in self._file:
return self._file[name]
raise AttributeError("GridOut object has no attribute '%s'" % name)
def readchunk(self):
"""Reads a chunk at a time. If the current position is within a
chunk the remainder of the chunk is returned.
"""
received = len(self.__buffer)
chunk_data = EMPTY
if received > 0:
chunk_data = self.__buffer
elif self.__position < int(self.length):
chunk_number = int((received + self.__position) / self.chunk_size)
chunk = self.__chunks.find_one({"files_id": self._id,
"n": chunk_number})
if not chunk:
raise CorruptGridFile("no chunk #%d" % chunk_number)
chunk_data = chunk["data"][self.__position % self.chunk_size:]
self.__position += len(chunk_data)
self.__buffer = EMPTY
return chunk_data
def read(self, size=-1):
"""Read at most `size` bytes from the file (less if there
isn't enough data).
The bytes are returned as an instance of :class:`str` (:class:`bytes`
in python 3). If `size` is negative or omitted all data is read.
:Parameters:
- `size` (optional): the number of bytes to read
"""
self._ensure_file()
if size == 0:
return EMPTY
remainder = int(self.length) - self.__position
if size < 0 or size > remainder:
size = remainder
received = 0
data = StringIO()
while received < size:
chunk_data = self.readchunk()
received += len(chunk_data)
data.write(chunk_data)
self.__position -= received - size
# Return 'size' bytes and store the rest.
data.seek(size)
self.__buffer = data.read()
data.seek(0)
return data.read(size)
def readline(self, size=-1):
"""Read one line or up to `size` bytes from the file.
:Parameters:
- `size` (optional): the maximum number of bytes to read
.. versionadded:: 1.9
"""
if size == 0:
return b('')
remainder = int(self.length) - self.__position
if size < 0 or size > remainder:
size = remainder
received = 0
data = StringIO()
while received < size:
chunk_data = self.readchunk()
pos = chunk_data.find(NEWLN, 0, size)
if pos != -1:
size = received + pos + 1
received += len(chunk_data)
data.write(chunk_data)
if pos != -1:
break
self.__position -= received - size
# Return 'size' bytes and store the rest.
data.seek(size)
self.__buffer = data.read()
data.seek(0)
return data.read(size)
def tell(self):
"""Return the current position of this file.
"""
return self.__position
def seek(self, pos, whence=_SEEK_SET):
"""Set the current position of this file.
:Parameters:
- `pos`: the position (or offset if using relative
positioning) to seek to
- `whence` (optional): where to seek
from. :attr:`os.SEEK_SET` (``0``) for absolute file
positioning, :attr:`os.SEEK_CUR` (``1``) to seek relative
to the current position, :attr:`os.SEEK_END` (``2``) to
seek relative to the file's end.
"""
if whence == _SEEK_SET:
new_pos = pos
elif whence == _SEEK_CUR:
new_pos = self.__position + pos
elif whence == _SEEK_END:
new_pos = int(self.length) + pos
else:
raise IOError(22, "Invalid value for `whence`")
if new_pos < 0:
raise IOError(22, "Invalid value for `pos` - must be positive")
self.__position = new_pos
self.__buffer = EMPTY
def __iter__(self):
"""Return an iterator over all of this file's data.
The iterator will return chunk-sized instances of
:class:`str` (:class:`bytes` in python 3). This can be
useful when serving files using a webserver that handles
such an iterator efficiently.
"""
return GridOutIterator(self, self.__chunks)
def close(self):
"""Make GridOut more generically file-like."""
pass
def __enter__(self):
"""Makes it possible to use :class:`GridOut` files
with the context manager protocol.
"""
return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""Makes it possible to use :class:`GridOut` files
with the context manager protocol.
"""
return False
class GridOutIterator(object):
def __init__(self, grid_out, chunks):
self.__id = grid_out._id
self.__chunks = chunks
self.__current_chunk = 0
self.__max_chunk = math.ceil(float(grid_out.length) /
grid_out.chunk_size)
def __iter__(self):
return self
def next(self):
if self.__current_chunk >= self.__max_chunk:
raise StopIteration
chunk = self.__chunks.find_one({"files_id": self.__id,
"n": self.__current_chunk})
if not chunk:
raise CorruptGridFile("no chunk #%d" % self.__current_chunk)
self.__current_chunk += 1
return binary_type(chunk["data"])
class GridFile(object):
"""No longer supported.
.. versionchanged:: 1.6
The GridFile class is no longer supported.
"""
def __init__(self, *args, **kwargs):
raise UnsupportedAPI("The GridFile class is no longer supported. "
"Please use GridIn or GridOut instead.")
class GridOutCursor(Cursor):
"""A cursor / iterator for returning GridOut objects as the result
of an arbitrary query against the GridFS files collection.
"""
def __init__(self, collection, spec=None, skip=0, limit=0,
timeout=True, sort=None, max_scan=None,
read_preference=None, tag_sets=None,
secondary_acceptable_latency_ms=None, compile_re=True):
"""Create a new cursor, similar to the normal
:class:`~pymongo.cursor.Cursor`.
Should not be called directly by application developers - see
the :class:`~gridfs.GridFS` method :meth:`~gridfs.GridFS.find` instead.
.. versionadded 2.7
.. mongodoc:: cursors
"""
# Hold on to the base "fs" collection to create GridOut objects later.
self.__root_collection = collection
# Copy these settings from collection if they are not set by caller.
read_preference = read_preference or collection.files.read_preference
tag_sets = tag_sets or collection.files.tag_sets
latency = (secondary_acceptable_latency_ms
or collection.files.secondary_acceptable_latency_ms)
super(GridOutCursor, self).__init__(
collection.files, spec, skip=skip, limit=limit, timeout=timeout,
sort=sort, max_scan=max_scan, read_preference=read_preference,
secondary_acceptable_latency_ms=latency, compile_re=compile_re,
tag_sets=tag_sets)
def next(self):
"""Get next GridOut object from cursor.
"""
# Work around "super is not iterable" issue in Python 3.x
next_file = getattr(super(GridOutCursor, self), next_item)()
return GridOut(self.__root_collection, file_document=next_file)
def add_option(self, *args, **kwargs):
raise NotImplementedError("Method does not exist for GridOutCursor")
def remove_option(self, *args, **kwargs):
raise NotImplementedError("Method does not exist for GridOutCursor")
def _clone_base(self):
"""Creates an empty GridOutCursor for information to be copied into.
"""
return GridOutCursor(self.__root_collection)
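# Illustrative round trip (a sketch, not part of this module; assumes a local
# mongod and a throwaway database name "gridfs_demo"):
#
#     from pymongo import MongoClient
#     import gridfs
#
#     fs = gridfs.GridFS(MongoClient().gridfs_demo)  # wraps GridIn/GridOut
#     file_id = fs.put(b"hello gridfs", filename="hello.txt")
#     assert fs.get(file_id).read() == b"hello gridfs"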
# ==== /code/Day_04/01_xml.py (repo: archeranimesh/python_devu_in) ====
# Read from XML.
import xml.etree.ElementTree as ET
file_path = r"code/Day_04/data_files/data.xml"
f = ET.parse(file_path)
print("type(f): ", type(f))
# get the root.
root = f.getroot()
print("type(root): ", type(root))
print("---" * 30)
for i in root.iter():
print(i.tag)
print(i.text)
print("---" * 5)
print("---" * 30)
# iterate over a column.
print("---" * 30)
for i in root.iter("Name"):
print(i.text)
print("---" * 30)
# ==== /articulos/templatetags/dict_key.py (repo: arm98sub/djangoHospital) ====
from django.template.defaultfilters import register
@register.filter(name='dict_key')
def dict_key(d, k):
    # Look up str(k) in d and return that entry's 'cantidad' value; return
    # None instead of raising AttributeError when the key is missing.
    entry = d.get(str(k))
    return entry.get('cantidad') if entry else None
"[email protected]"
] | |
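# Template usage sketch (variable names are illustrative, not taken from this
# project); because the filter is registered on Django's default filter
# library, it is available to templates directly:
#   {{ carrito|dict_key:producto.id }}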
# ==== /v2/api/Models.py (repo: SirYaro/kdm-manager) ====
#!/usr/bin/python2.7
from bson.objectid import ObjectId
from collections import OrderedDict
from copy import copy, deepcopy
from datetime import datetime, timedelta
import json
from bson import json_util
import inspect
import operator
import os
import random
import socket
from user_agents import parse as ua_parse
from flask import request, Response
import utils
import models
import settings
#
# Base classes for game assets are here. Also, special exceptions for those
# classes live here as well.
#
class AssetMigrationError(Exception):
""" Handler for asset migration/conversion errors. """
def __init__(self, message="An error occurred while migrating this asset!"):
self.logger = utils.get_logger()
self.logger.exception(message)
Exception.__init__(self, message)
class AssetInitError(Exception):
""" Handler for asset-based errors. """
def __init__(self, message="An error occurred while initializing this asset!"):
self.logger = utils.get_logger()
self.logger.exception(message)
Exception.__init__(self, message)
class AssetLoadError(Exception):
""" Handler for asset-based errors. """
def __init__(self, message="Asset could not be retrieved from mdb!"):
self.logger = utils.get_logger()
self.logger.exception(message)
Exception.__init__(self, message)
class AssetCollection(object):
""" The base class for game asset objects, i.e. working with the dict assets
in the assets/ folder.
Each model in the models/ folder should have a method that subclasses this
base class.
Most Asset() objects that use this as their base class will define their own
self.assets dict, e.g. in their __init__() method. But is not mandatory:
review the __init__ method of this class carefully before writing custom
__init__ code in in an individual Asset() object module. """
def __repr__(self):
if not hasattr(self, 'type'):
self.logger.warn("AssetCollection does not have a 'type' attribute!")
return 'AssetCollection object (no type; %s assets)' % (len(self.assets))
return "AssetCollection object '%s' (%s assets)" % (self.type, len(self.assets))
def __init__(self):
""" All Assets() models must base-class this guy to get access to the
full range of AssetCollection methods, i.e. all of the common/ubiquitous
ones.
Base-classing also does a little user-friendliness/auto-magic when you
invoke it:
- you get self.logger for free.
- if you set 'self.root_module' to be an actual module from the
assets folder, that module will be scraped for dictionaries: those
dictionaries will then be used as your self.assets dict, i.e. so
that you DO NOT have to define your self.assets in your
models.Assets() sub class.
- if you have assets in your model.Assets.assets dict that DO NOT
set their own type, you can set self.type on your model.Assets when
you initialize it (but before you base-class this module) to force
a default 'type' attribute on all of your self.assets dict items.
- finally, all self.assets items get a 'handle' attribute that is
the same as their actual dictionary key value. Individual assets
SHOULD NEVER have a 'handle' attribute.
"""
self.logger = utils.get_logger()
if hasattr(self, "root_module"):
self.type = os.path.splitext(self.root_module.__name__)[-1][1:]
self.set_assets_from_root_module()
# preserve 'raw' types as sub types
# for a in self.assets.keys():
# a_dict = self.assets[a]
# if 'type' in a_dict.keys() and not 'sub_type' in a_dict.keys():
# self.assets[a]['sub_type'] = self.assets[a]['type']
# type override - be careful!
if hasattr(self, "type_override"):
self.type = self.type_override
for a in self.assets.keys():
self.assets[a]["type"] = self.type_override
# set the default 'type_pretty' value
self.set_pretty_types()
for a in self.assets.keys():
self.assets[a]["handle"] = a
#
# get / set / filter methods here
#
def set_assets_from_root_module(self):
""" This is the vanilla AssetCollection initialization method. You feed
it a 'module' value (which should be something from the assets/ folder)
and it creates a self.assets dictionary by iterating through the module.
If you need to do custom asset initialization, that is a fine and a good
thing to do, but do it in the actual models/whatever.py file.
Important! Adjusting the self.assets dict before calling this method
will overwrite any adjustments because this method starts self.assets as
a blank dict!
"""
# the 'type' of all assets is the name of their root module. Full stop.
# 'sub_type' is where we want to put any kind of 'type' info that we get
# from the asset itself.
all_assets = {}
for module_dict, v in self.root_module.__dict__.iteritems():
if isinstance(v, dict) and not module_dict.startswith('_'): # get all dicts in the module
for dict_key in sorted(v.keys()): # get all keys in each dict
if 'sub_type' in v[dict_key].keys():
raise Exception("%s already has a sub type!!!!" % v[dict_key])
# do NOT modify the original/raw asset dictionary
a_dict = v[dict_key].copy()
# set sub_type from raw asset 'type', then set the base type
a_dict['sub_type'] = v[dict_key].get("type", module_dict)
a_dict["type"] = self.type
# add it back to self.assets
all_assets[dict_key] = a_dict
self.assets = OrderedDict()
for k in sorted(all_assets.keys()):
self.assets[k] = all_assets[k]
def set_pretty_types(self, capitalize=True):
""" Iterates over self.assets; adds the "type_pretty" and 'sub_type_pretty'
to all assets in the AssetCollection.assets dict """
for h in self.assets.keys():
a_dict = self.get_asset(handle=h)
# flip a shit if we don't have a type
if a_dict.get('type', None) is None:
raise Exception("%s asset has no 'type' attribute! %s" % (self, a_dict))
# set the pretty types here
for type_attr in ['type', 'sub_type']:
type_value = a_dict.get(type_attr, None)
if type_value is None:
pretty_type = None
else:
pretty_type = type_value.replace("_"," ")
if capitalize:
pretty_type = pretty_type.title()
self.assets[h]["%s_pretty" % type_attr] = pretty_type
#
# common get and lookup methods
#
def get_asset(self, handle=None, backoff_to_name=False, raise_exception_if_not_found=True):
""" Return an asset dict based on a handle. Return None if the handle
cannot be retrieved. """
asset = copy(self.assets.get(handle, None)) # return a copy, so we don't modify the actual def
# implement backoff logic
if asset is None and backoff_to_name:
asset = copy(self.get_asset_from_name(handle))
        # if the asset is still None, see if we want to raise an exception
if asset is None and raise_exception_if_not_found:
if not backoff_to_name:
msg = "The handle '%s' is not in %s and could not be retrieved! " % (handle, self.get_handles())
self.logger.error(msg)
elif backoff_to_name:
msg = "After backoff to name lookup, asset handle '%s' is not in %s and could not be retrieved." % (handle, self.get_names())
self.logger.error(msg)
raise utils.InvalidUsage(msg)
# finally, return the asset (or the NoneType)
return asset
def get_assets_by_sub_type(self, sub_type=None):
""" Returns a list of asset handles whose 'sub_type' attribute matches
the 'sub_type' kwarg value."""
handles = []
for a_dict in self.get_dicts():
sub = a_dict.get('sub_type', None)
if sub == sub_type:
handles.append(a_dict['handle'])
return handles
def get_assets_by_type(self, asset_type=None):
""" Returns a list of asset handles whose 'type' attribute matches
the 'asset_type' kwarg value."""
handles = []
for a_dict in self.get_dicts():
a_type = a_dict.get('type', None)
if a_type == asset_type:
handles.append(a_dict['handle'])
return handles
def get_handles(self):
""" Dumps all asset handles, i.e. the list of self.assets keys. """
try:
return sorted(self.assets, key=lambda x: self.assets[x]['name'])
except:
return sorted(self.assets.keys())
def get_names(self):
""" Dumps all asset 'name' attributes, i.e. a list of name values. """
return sorted([self.assets[k]["name"] for k in self.get_handles()])
def get_sorted_assets(self):
""" Returns the asset collections 'assets' dict as an OrderedDict. """
output = OrderedDict()
for n in self.get_names():
asset_dict = self.get_asset_from_name(n)
output[asset_dict['handle']] = asset_dict
return output
def get_sub_types(self):
""" Dumps a list of all asset 'sub_type' attributes. """
subtypes = set()
for a in self.get_handles():
a_dict = self.get_asset(a)
subtypes.add(a_dict.get('sub_type', None))
return sorted(subtypes)
def get_types(self):
""" Dumps a list of all asset 'type' attributes. """
subtypes = set()
for a in self.get_handles():
a_dict = self.get_asset(a)
subtypes.add(a_dict.get('type', None))
return subtypes
def get_dicts(self):
""" Dumps a list of dicts where each dict is an asset dict. """
output = []
for h in sorted(self.get_handles()):
output.append(self.get_asset(h))
return output
def get_asset_from_name(self, name, case_sensitive=False, raise_exception_if_not_found=True):
""" Tries to return an asset dict by looking up "name" attributes within
        the self.assets dict. Returns None if it fails.
        By default, the matching here is NOT case-sensitive: everything is
forced to upper() to allow for more permissive matching/laziness. """
if type(name) not in [str,unicode]:
self.logger.error("get_asset_from_name() cannot proceed! '%s' is not a str or unicode object!" % name)
if raise_exception_if_not_found:
raise AssetInitError("The get_asset_from_name() method requires a str or unicode type name!")
else:
return None
name = name.strip()
# special backoff for this dumbass pseudo-expansion
if name == 'White Box':
name = 'White Box & Promo'
if not case_sensitive:
name = name.upper()
name_lookup = {}
for a in self.assets.keys():
if "name" in self.assets[a]:
if case_sensitive:
name_lookup[self.assets[a]["name"]] = a
elif not case_sensitive:
asset_name_upper = self.assets[a]["name"].upper()
name_lookup[asset_name_upper] = a
if name in name_lookup.keys():
return self.get_asset(name_lookup[name])
else:
return None
def filter(self, filter_attrib=None, filtered_attrib_values=[], reverse=False):
""" Drops assets from the collection if their 'filter_attrib' value is
in the 'attrib_values' list.
Set 'reverse' kwarg to True to have the filter work in reverse, i.e. to
drop all assets that DO NOT have 'filter_attrib' values in the
'filtered_attrib_values' list.
"""
if filter_attrib is None or filtered_attrib_values == []:
self.logger.error("AssetCollection.filter() method does not accept None or empty list values!")
return False
for asset_key in self.assets.keys():
if self.get_asset(asset_key).get(filter_attrib, None) is None:
pass
elif reverse:
if self.get_asset(asset_key)[filter_attrib] not in filtered_attrib_values:
del self.assets[asset_key]
else:
if self.get_asset(asset_key)[filter_attrib] in filtered_attrib_values:
del self.assets[asset_key]
#
# no set/get/filter methods below this point!
#
def request_response(self, a_name=None, a_handle=None):
""" Processes a JSON request for a specific asset from the collection,
initializes the asset (if it can) and then calls the asset's serialize()
method to create an HTTP response. """
# first, if the request is a GET, just dump everything and bail
if request and request.method == "GET":
return Response(response=json.dumps(self.assets, default=json_util.default), status=200, mimetype="application/json")
# next, if the request has JSON, check for params
if request and hasattr(request, 'json'):
a_name = request.json.get("name", None)
a_handle = request.json.get("handle", None)
# if there are no lookups requested, dump everything and bail
if a_name is None and a_handle is None:
return Response(response=json.dumps(self.assets, default=json_util.default), status=200, mimetype="application/json")
# finally, do lookups and create a response based on the outcome
if a_handle is not None:
A = self.get_asset(a_handle)
elif a_name is not None:
A = self.get_asset_from_name(a_name)
if A is None:
return utils.http_404
return Response(response=json.dumps(A, default=json_util.default), status=200, mimetype="application/json")
class GameAsset(object):
""" The base class for initializing individual game asset objects. All of
the specific models in the models/ folder will sub-class this model for
their generally available methods, etc.
"""
def __init__(self, handle=None, name=None):
# initialize basic vars
self.logger = utils.get_logger()
self.name = name
self.handle = handle
def __repr__(self):
return "%s object '%s' (assets.%s['%s'])" % (self.type.title(), self.name, self.type, self.handle)
def initialize(self):
""" Call this method to initialize the object. """
if self.handle is not None:
self.initialize_from_handle()
elif self.name is not None:
self.initialize_from_name()
elif self.handle is None and self.name is None:
raise AssetInitError("Asset objects must be initialized with 'handle' or 'name' kwargs.")
else:
raise AssetInitError()
def initialize_asset(self, asset_dict):
""" Pass this a valid asset dictionary to set the object's attributes
with a bunch of exec calls. """
if type(asset_dict) != dict:
raise AssetInitError("Asset objects may not be initialized with a '%s' type object!" % type(asset_dict))
for k, v in asset_dict.iteritems():
if type(v) == str:
exec """self.%s = '%s' """ % (k,v.replace('"','\\"').replace("'","\\'"))
elif type(v) == datetime:
exec """self.%s = '%s' """ % (k,v.strftime(utils.ymd))
else:
exec "self.%s = %s" % (k,v)
def initialize_from_handle(self):
""" If we've got a not-None handle, we can initiailze the asset object
by checking self.assets to see if our handle is a valid key.
If we can't find a valid key, throw an exception. """
# sanity warning
if " " in self.handle:
self.logger.warn("Asset handle '%s' contains whitespaces. Handles should use underscores." % self.handle)
self.asset_dict = self.assets.get_asset(self.handle)
self.initialize_asset(self.asset_dict)
if self.name is None:
raise AssetInitError("Asset handle '%s' could not be retrieved!" % self.handle)
def initialize_from_name(self):
""" If we've got a not-None name, we can initiailze the asset object
by checking self.assets to see if we can find an asset whose "name"
value matches our self.name. """
# sanity warning
if "_" in self.name:
self.logger.warn("Asset name '%s' contains underscores. Names should use whitespaces." % self.name)
lookup_dict = {}
for asset_handle in self.assets.get_handles():
asset_dict = self.assets.get_asset(asset_handle)
lookup_dict[asset_dict["name"]] = asset_handle
if self.name in lookup_dict.keys():
self.handle = lookup_dict[self.name]
self.initialize_from_handle()
if self.handle is None:
raise AssetInitError("Asset handle '%s' could not be retrieved!" % self.handle)
def serialize(self, return_type=None):
""" Allows the object to represent itself as JSON by transforming itself
into a JSON-safe dict. """
shadow_self = copy(self)
for banned_attrib in ["logger", "assets"]:
if hasattr(shadow_self, banned_attrib):
delattr(shadow_self, banned_attrib)
if return_type == dict:
return shadow_self.__dict__
return json.dumps(shadow_self.__dict__, default=json_util.default)
#
# look-up and manipulation methods below
#
def get(self, attrib):
""" Wrapper method for trying to retrieve asset object attributes.
Returns a None type value if the requested attrib doesn't exist. """
try:
return getattr(self, attrib)
except:
return None
class UserAsset(object):
""" The base class for all user asset objects, such as survivors, sessions,
settlements and users. All user asset controllers in the 'models' module
use this as their base class. """
def __repr__(self):
""" Default __repr__ method for all user assets. Note that you should
PROBABLY define a __repr__ for your individual assets, if for no other
reason than to make the logs look cleaner. """
try:
exec 'repr_name = self.%s["name"]' % (self.collection[:-1])
except:
self.logger.warn("UserAsset object has no 'name' attribute!")
repr_name = "UNKNOWN"
return "%s object '%s' [%s]" % (self.collection, repr_name, self._id)
def __init__(self, collection=None, _id=None, normalize_on_init=True, new_asset_attribs={}, Settlement=None):
# initialize basic vars
self.logger = utils.get_logger()
self.normalize_on_init = normalize_on_init
self.new_asset_attribs = new_asset_attribs
if collection is not None:
self.collection = collection
elif hasattr(self,"collection"):
pass
else:
err_msg = "User assets (settlements, users, etc.) may not be initialized without specifying a collection!"
self.logger.error(err_msg)
raise AssetInitError(err_msg)
# use attribs to determine whether the object has been loaded
self.loaded = False
if _id is None:
self.get_request_params()
self.new()
_id = self._id
# if we're initializing with a settlement object already in memory, use it
# if this object IS a Settlement, the load() call below will overwrite this
self.Settlement = Settlement
# now do load() stuff
try:
try:
self._id = ObjectId(_id)
except Exception as e:
self.logger.error(e)
raise utils.InvalidUsage("The asset OID '%s' does not appear to be a valid object ID! %s" % (_id,e), status_code=422)
self.load()
self.loaded = True
except Exception as e:
self.logger.error("Could not load _id '%s' from %s!" % (_id, self.collection))
self.logger.exception(e)
raise
def save(self, verbose=True):
""" Saves the user asset back to either the 'survivors' or 'settlements'
collection in mdb, depending on self.collection. """
if self.collection == "settlements":
utils.mdb.settlements.save(self.settlement)
elif self.collection == "survivors":
utils.mdb.survivors.save(self.survivor)
elif self.collection == "users":
utils.mdb.users.save(self.user)
else:
raise AssetLoadError("Invalid MDB collection for this asset!")
if verbose:
self.logger.info("Saved %s to mdb.%s successfully!" % (self, self.collection))
def load(self):
""" Retrieves an mdb doc using self.collection and makes the document an
attribute of the object. """
mdb_doc = self.get_mdb_doc()
if self.collection == "settlements":
self.settlement = mdb_doc
self._id = self.settlement["_id"]
self.settlement_id = self._id
self.get_campaign('initialize') # sets an object
self.get_survivors('initialize') # sets a list of objects
self.init_asset_collections()
elif self.collection == "survivors":
self.survivor = mdb_doc
self._id = self.survivor["_id"]
self.settlement_id = self.survivor["settlement"]
elif self.collection == "users":
self.user = mdb_doc
self._id = self.user["_id"]
self.login = self.user["login"]
else:
raise AssetLoadError("Invalid MDB collection for this asset!")
def return_json(self):
""" Calls the asset's serialize() method and creates a simple HTTP
response. """
return Response(response=self.serialize(), status=200, mimetype="application/json")
#
# request helpers
#
def get_request_params(self, verbose=False):
""" Checks the incoming request (from Flask) for JSON and tries to add
it to self. """
params = {}
if verbose:
self.logger.debug("%s request info: %s" % (request.method, request.url))
self.logger.debug("%s request user: %s" % (request.method, request.User))
if request.method == "GET" and verbose:
self.logger.warn("%s:%s get_request_params() call is being ignored!" % (request.method, request.url))
return False
# self.logger.debug(request.get_json())
if request.get_json() is not None:
try:
params = dict(request.get_json())
except ValueError:
self.logger.warn("%s request JSON could not be converted to dict!" % request.method)
params = request.get_json()
else:
if verbose:
self.logger.warn("%s request did not contain JSON data!" % request.method)
self.logger.warn("Request URL: %s" % request.url)
self.params = params
def check_request_params(self, keys=[], verbose=True, raise_exception=True):
""" Checks self.params for the presence of all keys specified in 'keys'
list. Returns True if they're present and False if they're not.
Set 'verbose' to True if you want to log validation failures as errors.
"""
for k in keys:
if k not in self.params.keys():
if verbose:
self.logger.error("Request JSON is missing required parameter '%s'!" % k)
if raise_exception:
curframe = inspect.currentframe()
calframe = inspect.getouterframes(curframe, 2)
caller_function = calframe[1][3]
msg = "Insufficient request parameters for this route! The %s() method requires values for the following keys: %s." % (caller_function, utils.list_to_pretty_string(keys))
self.logger.exception(msg)
self.logger.error("Bad request params were: %s" % self.params)
raise utils.InvalidUsage(msg, status_code=400)
else:
return False
return True
#
# get/set methods for User Assets below here
#
def get_campaign(self, return_type=None):
""" Returns the campaign handle of the settlement as a string, if
nothing is specified for kwarg 'return_type'.
Use 'name' to return the campaign's name (from its definition).
'return_type' can also be dict. Specifying dict gets the
raw campaign definition from assets/campaigns.py. """
# first, get the handle; die if we can't
if self.collection == "survivors":
c_handle = self.Settlement.settlement["campaign"]
elif self.collection == "settlements":
# 2017-11-13 - bug fix - missing campaign attrib
if not "campaign" in self.settlement.keys():
self.settlement["campaign"] = 'people_of_the_lantern'
self.logger.warn("%s is a legacy settlement! Adding missing 'campaign' attribute!" % self)
self.save()
c_handle = self.settlement["campaign"]
else:
msg = "Objects whose collection is '%s' may not call the get_campaign() method!" % (self.collection)
raise AssetInitError(msg)
# now try to get the dict
C = models.campaigns.Assets()
c_dict = C.get_asset(c_handle, backoff_to_name=True)
# handle return_type requests
if return_type == 'name':
return c_dict["name"]
elif return_type == dict:
return c_dict
elif return_type == 'initialize':
self.campaign = models.campaigns.Campaign(c_dict['handle'])
return True
return c_handle
def get_serialize_meta(self):
""" Sets the 'meta' dictionary for the object when it is serialized. """
output = deepcopy(utils.api_meta)
if output['meta'].keys() != ['webapp','admins','api','object']:
stack = inspect.stack()
the_class = stack[1][0].f_locals["self"].__class__
the_method = stack[1][0].f_code.co_name
msg = "Models.UserAsset.get_serialize_meta() got modified 'meta' (%s) dict during call by %s.%s()!" % (output['meta'].keys(), the_class, the_method)
self.logger.error(msg)
try:
output["meta"]["object"]["version"] = self.object_version
except Exception as e:
self.logger.error("Could not create 'meta' dictionary when serializing object!")
self.logger.exception(e)
self.logger.warn(output["meta"])
return output
def get_current_ly(self):
""" Convenience/legibility function to help code readbility and reduce
typos, etc. """
if self.collection == "survivors":
return int(self.Settlement.settlement["lantern_year"])
return int(self.settlement["lantern_year"])
def get_mdb_doc(self):
""" Retrieves the asset's MDB document. Raises a special exception if it
cannot for some reason. """
mdb_doc = utils.mdb[self.collection].find_one({"_id": self._id})
if mdb_doc is None:
raise AssetLoadError("Asset _id '%s' could not be found in '%s'!" % (self._id, self.collection))
return mdb_doc
def list_assets(self, attrib=None, log_failures=True):
""" Laziness method that returns a list of dictionaries where dictionary
in the list is an asset in the object's list of those assets.
Basically, if your object is a survivor, and you set 'attrib' to
'abilities_and_impairments', you get back a list of dictionaries where
dictionary is an A&I asset dictionary.
Same goes for settlements: if you set 'attrib' to 'locations', you get
a list where each item is a location asset dict.
Important! This ignores unregistered/unknown/bogus items! Anything that
cannot be looked up by its handle or name is ignored!
"""
if attrib is None:
msg = "The list_assets() method cannot process 'None' type values!"
self.logger.error(msg)
raise Exception(msg)
output = []
if attrib == "principles":
A = models.innovations.Assets()
else:
exec "A = models.%s.Assets()" % attrib
exec "asset_list = self.%s['%s']" % (self.collection[:-1], attrib)
for a in asset_list:
a_dict = A.get_asset(a, backoff_to_name=True, raise_exception_if_not_found=False)
if a_dict is not None:
output.append(a_dict)
elif a_dict is None and log_failures:
self.logger.error("%s Unknown '%s' asset '%s' cannot be listed!" % (self, attrib, a))
else:
pass # just ignore failures and silently fail
return output
#
# asset update methods below
#
def log_event(self, msg=None, event_type=None, action=None, key=None, value=None, agent=None):
""" This is the primary user-facing logging interface, so there' s a bit
of a high bar for using it.
The basic idea of creating a log entry is that we're doing a bit of the
semantic logging (i.e. strongly typed) thing, so, depending on which
kwargs you use when calling this method, your final outcome/message is
going to vary somewhat.
That said, none of the kwargs here are mandatory, because context.
"""
#
# baseline attributes
#
# determine caller method
curframe = inspect.currentframe()
calframe = inspect.getouterframes(curframe, 2)
method = calframe[1][3]
# figure out the action
if action is None:
action = method.split("_")[0]
# determine event type
if event_type is None:
event_type = method
# set 'created_by'
created_by = None
created_by_email = None
if request:
if hasattr(request, 'User'):
created_by = request.User.user['_id']
created_by_email = request.User.user['login']
if agent is None:
agent = "user"
# set 'attribute_modified'
attribute_modified = {
'key': key,
'value': value,
}
if attribute_modified['key'] is not None:
attribute_modified['key_pretty'] = key.replace("_"," ").replace("and","&").title()
if attribute_modified['value'] is not None:
attribute_modified['value_pretty'] = value.replace("_"," ")
d = {
'version': 1.1,
'agent': agent,
"created_on": datetime.now(),
'created_by': created_by,
'created_by_email': created_by_email,
"settlement_id": self.settlement_id,
"ly": self.get_current_ly(),
'event_type': event_type,
'event': msg,
'modified': {'attribute': attribute_modified},
}
# survivor, if it's a survivor
if self.collection == 'survivors':
d['survivor_id'] = self.survivor['_id']
# target is the settlement, unless a survivor object calls this method
action_target = "settlement"
if 'survivor_id' in d.keys():
d['modified']['asset'] = {
'type': 'survivor',
'_id': d['survivor_id'],
'name': self.survivor['name'],
'sex': self.get_sex(),
}
action_target = "survivor"
else:
d['modified']['asset'] = {"type": "settlement", "name": self.settlement['name'], '_id': self.settlement_id}
# create the 'action'
action_word, action_preposition = utils.action_keyword(action)
d['action'] = {'word': action_word, 'preposition': action_preposition}
if key is None and value is None:
d['action']['repr'] = " ".join(['modified', action_target])
elif key is not None and value is None:
d['action']['repr'] = " ".join(['modified', action_target, key])
        elif key is None and value is not None:
            # value-only events fall back to the generic representation; the
            # original condition duplicated the first branch and was unreachable
            d['action']['repr'] = " ".join(['modified', action_target])
else:
if action_target == "survivor":
d['action']['repr'] = " ".join([action_word, "'%s'" % value, action_preposition, str(key)])
else:
d['action']['repr'] = " ".join([action_word, "'%s'" % value, action_preposition, action_target, str(key)])
# default a message, if incoming message is none
if msg is None:
if d['modified']['asset']['type'] == 'survivor':
if d['agent'] == 'user':
d['event'] = " ".join([
d['created_by_email'],
d['action']['word'],
d['modified']['attribute']['value_pretty'],
d['action']['preposition'],
"%s [%s]" % (self.survivor['name'], self.get_sex()),
d['modified']['attribute']['key_pretty'],
])
else:
d['event'] = " ".join([
"%s [%s]" % (self.survivor['name'], self.get_sex()),
d['action']['word'],
d['modified']['attribute']['value_pretty'],
d['action']['preposition'],
d['modified']['attribute']['key_pretty'],
])
else:
d['event'] = " ".join([d['created_by_email'], d['action']['repr'], ])
d['event'] += "."
# finally, if we had a requester, now that we've settled on a message
# text, update the requester's latest action with it
        if created_by is not None:
if request:
ua_string = str(ua_parse(request.user_agent.string))
request.User.set_latest_action(d['event'], ua_string)
# finally, insert the event (i.e. save)
# self.logger.debug(d)
utils.mdb.settlement_events.insert(d)
self.logger.info("%s event: %s" % (self, d['event']))
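# Illustrative sketch (not from this codebase) of how a model in models/
# typically consumes AssetCollection; the module name 'assets.example_gear'
# is hypothetical:
#
#     from assets import example_gear
#
#     class Assets(AssetCollection):
#         def __init__(self, *args, **kwargs):
#             self.root_module = example_gear
#             AssetCollection.__init__(self, *args, **kwargs)
#
# Base-classing this way auto-populates self.assets from the module's dicts
# and sets the 'handle', 'type'/'sub_type' and pretty-name keys described in
# AssetCollection.__init__() above.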
# ==== /tensorflow_federated/python/core/impl/compiler/local_computation_factory_base.py (repo: ali-yaz/federated, license: Apache-2.0) ====
# Copyright 2021, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines the interface for factories of framework-specific computations."""
import abc
from tensorflow_federated.proto.v0 import computation_pb2 as pb
from tensorflow_federated.python.core.api import computation_types
class LocalComputationFactory(metaclass=abc.ABCMeta):
"""Interface for factories of backend framework-specific local computations.
Implementations of this interface encapsulate the logic for constructing local
computations that are executable on a particular type of backends (such as
TensorFlow or XLA).
"""
@abc.abstractmethod
def create_constant_from_scalar(
self, value, type_spec: computation_types.Type) -> pb.Computation:
"""Creates a TFF computation returning a constant based on a scalar value.
The returned computation has the type signature `( -> T)`, where `T` may be
either a scalar, or a nested structure made up of scalars.
Args:
value: A numpy scalar representing the value to return from the
constructed computation (or to broadcast to all parts of a nested
structure if `type_spec` is a structured type).
type_spec: A `computation_types.Type` of the constructed constant. Must be
either a tensor, or a nested structure of tensors.
Returns:
An instance of `pb.Computation` with semantics as described above.
Raises:
TypeError: if types don't match.
ValueError: if the arguments are invalid.
"""
raise NotImplementedError
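# Illustrative subclass outline (not part of TFF): a concrete, backend-specific
# factory fills in the abstract method above, e.g.
#
#     class ExampleComputationFactory(LocalComputationFactory):
#
#         def create_constant_from_scalar(self, value, type_spec):
#             # serialize `value` into a backend-specific pb.Computation here
#             raise NotImplementedError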
# ==== /pytext/main.py (repo: Smerity/pytext, license: BSD-3-Clause) ====
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import json
import pprint
import sys
import tempfile
from importlib import import_module
from pydoc import locate
from typing import Dict, List, Optional, Union
import click
import torch
from pytext import create_predictor
from pytext.builtin_task import add_include
from pytext.config import LATEST_VERSION, PyTextConfig
from pytext.config.component import register_tasks
from pytext.config.serialize import (
config_to_json,
parse_config,
pytext_config_from_json,
)
from pytext.data.data_handler import CommonMetadata
from pytext.metric_reporters.channel import Channel, TensorBoardChannel
from pytext.task import load
from pytext.utils.documentation import (
ROOT_CONFIG,
eprint,
find_config_class,
get_subclasses,
pretty_print_config_class,
replace_components,
)
from pytext.workflow import (
export_saved_model_to_caffe2,
export_saved_model_to_torchscript,
get_logits as workflow_get_logits,
prepare_task_metadata,
preprocess_task,
test_model_from_snapshot_path,
train_model,
)
from torch.multiprocessing.spawn import spawn
class Attrs:
def __repr__(self):
return f"Attrs({', '.join(f'{k}={v}' for k, v in vars(self).items())})"
def train_model_distributed(config, metric_channels: Optional[List[Channel]]):
assert (
config.use_cuda_if_available and torch.cuda.is_available()
) or config.distributed_world_size == 1, (
"distributed training is only available for GPU training"
)
assert (
config.distributed_world_size == 1
or config.distributed_world_size <= torch.cuda.device_count()
), (
f"Only {torch.cuda.device_count()} GPUs are available, "
"{config.distributed_world_size} GPUs were requested"
)
print(f"\n=== Starting training, World size is {config.distributed_world_size}")
if not config.use_cuda_if_available or not torch.cuda.is_available():
run_single(
rank=0,
config_json=config_to_json(PyTextConfig, config),
world_size=1,
dist_init_method=None,
metadata=None,
metric_channels=metric_channels,
)
else:
with tempfile.NamedTemporaryFile(
delete=False, suffix=".dist_sync"
) as sync_file:
dist_init_method = "file://" + sync_file.name
metadata = prepare_task_metadata(config)
spawn(
run_single,
(
config_to_json(PyTextConfig, config),
config.distributed_world_size,
dist_init_method,
metadata,
[],
),
config.distributed_world_size,
)
def run_single(
rank: int,
config_json: str,
world_size: int,
dist_init_method: Optional[str],
metadata: Optional[Union[Dict[str, CommonMetadata], CommonMetadata]],
metric_channels: Optional[List[Channel]],
):
config = pytext_config_from_json(config_json)
if rank != 0:
metric_channels = []
train_model(
config=config,
dist_init_url=dist_init_method,
device_id=rank,
rank=rank,
world_size=world_size,
metric_channels=metric_channels,
metadata=metadata,
)
def gen_config_impl(task_name, options):
# import the classes required by parameters
requested_classes = [locate(opt) for opt in options] + [locate(task_name)]
register_tasks(requested_classes)
task_class_set = find_config_class(task_name)
if not task_class_set:
raise Exception(
f"Unknown task class: {task_name} " "(try fully qualified class name?)"
)
elif len(task_class_set) > 1:
raise Exception(f"Multiple tasks named {task_name}: {task_class_set}")
task_class = next(iter(task_class_set))
task_config = getattr(task_class, "example_config", task_class.Config)
root = PyTextConfig(task=task_config(), version=LATEST_VERSION)
# Use components listed in options instead of defaults
for opt in options:
replace_class_set = find_config_class(opt)
if not replace_class_set:
raise Exception(f"Not a component class: {opt}")
elif len(replace_class_set) > 1:
raise Exception(f"Multiple component named {opt}: {replace_class_set}")
replace_class = next(iter(replace_class_set))
found = replace_components(root, opt, get_subclasses(replace_class))
if found:
eprint("INFO - Applying option:", "->".join(reversed(found)), "=", opt)
obj = root
for k in reversed(found[1:]):
obj = getattr(obj, k)
if hasattr(replace_class, "Config"):
setattr(obj, found[0], replace_class.Config())
else:
setattr(obj, found[0], replace_class())
else:
raise Exception(f"Unknown option: {opt}")
return config_to_json(PyTextConfig, root)
@click.group()
@click.option("--include", multiple=True)
@click.option("--config-file", default="")
@click.option("--config-json", default="")
@click.option(
"--config-module", default="", help="python module that contains the config object"
)
@click.pass_context
def main(context, config_file, config_json, config_module, include):
"""Configs can be passed by file or directly from json.
If neither --config-file or --config-json is passed,
attempts to read the file from stdin.
Example:
pytext train < demos/docnn.json
"""
for path in include or []:
add_include(path)
context.obj = Attrs()
def load_config():
# Cache the config object so it can be accessed multiple times
if not hasattr(context.obj, "config"):
if config_module:
context.obj.config = import_module(config_module).config
else:
if config_file:
with open(config_file) as file:
config = json.load(file)
elif config_json:
config = json.loads(config_json)
else:
click.echo("No config file specified, reading from stdin")
config = json.load(sys.stdin)
context.obj.config = parse_config(config)
return context.obj.config
context.obj.load_config = load_config
@main.command(help="Print help information on a config parameter")
@click.argument("class_name", default=ROOT_CONFIG)
@click.pass_context
def help_config(context, class_name):
"""
Find all the classes matching `class_name`, and
pretty-print each matching class field members (non-recursively).
"""
found_classes = find_config_class(class_name)
if found_classes:
for obj in found_classes:
pretty_print_config_class(obj)
print()
else:
raise Exception(f"Unknown component name: {class_name}")
@main.command(help="Generate a config JSON file with default values.")
@click.argument("task_name")
@click.argument("options", nargs=-1)
@click.pass_context
def gen_default_config(context, task_name, options):
"""
Generate a config for `task_name` with default values.
Optionally, override the defaults by passing your desired
components as `options`.
"""
try:
cfg = gen_config_impl(task_name, options)
except TypeError as ex:
eprint(
"ERROR - Cannot create this config",
"because some fields don't have a default value:",
ex,
)
sys.exit(-1)
print(json.dumps(cfg, sort_keys=True, indent=2))
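# Illustrative invocation (the task and component names below are examples and
# may not exist in every PyText install; any registered Task subclass works):
#   pytext gen-default-config DocClassificationTask > my_config.json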
@main.command()
@click.option(
"--model-snapshot",
default="",
help="load model snapshot and test configuration from this file",
)
@click.option("--test-path", default="", help="path to test data")
@click.option(
"--use-cuda/--no-cuda",
default=None,
help="Run supported parts of the model on GPU if available.",
)
@click.option(
"--use-tensorboard/--no-tensorboard",
default=True,
help="Whether to visualize test metrics using TensorBoard.",
)
@click.option(
"--field_names",
default=None,
help="""Field names for the test-path. If this is not set, the first line of
each file will be assumed to be a header containing the field names.""",
)
@click.pass_context
def test(context, model_snapshot, test_path, use_cuda, use_tensorboard, field_names):
"""Test a trained model snapshot.
If model-snapshot is provided, the models and configuration will then be
loaded from the snapshot rather than any passed config file.
Otherwise, a config file will be loaded.
"""
model_snapshot, use_cuda, use_tensorboard = _get_model_snapshot(
context, model_snapshot, use_cuda, use_tensorboard
)
print("\n=== Starting testing...")
metric_channels = []
if use_tensorboard:
metric_channels.append(TensorBoardChannel())
try:
test_model_from_snapshot_path(
model_snapshot,
use_cuda,
test_path,
metric_channels,
field_names=field_names,
)
finally:
for mc in metric_channels:
mc.close()
def _get_model_snapshot(context, model_snapshot, use_cuda, use_tensorboard):
if model_snapshot:
print(f"Loading model snapshot and config from {model_snapshot}")
if use_cuda is None:
raise Exception(
"if --model-snapshot is set --use-cuda/--no-cuda must be set"
)
else:
print(f"No model snapshot provided, loading from config")
config = context.obj.load_config()
model_snapshot = config.save_snapshot_path
use_cuda = config.use_cuda_if_available
use_tensorboard = config.use_tensorboard
print(f"Configured model snapshot {model_snapshot}")
return model_snapshot, use_cuda, use_tensorboard
@main.command()
@click.pass_context
def train(context):
"""Train a model and save the best snapshot."""
config = context.obj.load_config()
print("\n===Starting training...")
metric_channels = []
if config.use_tensorboard:
metric_channels.append(TensorBoardChannel())
try:
preprocess_task(config)
if config.distributed_world_size == 1:
train_model(config, metric_channels=metric_channels)
else:
train_model_distributed(config, metric_channels)
print("\n=== Starting testing...")
test_model_from_snapshot_path(
config.save_snapshot_path,
config.use_cuda_if_available,
test_path=None,
metric_channels=metric_channels,
)
finally:
for mc in metric_channels:
mc.close()
@main.command()
@click.option("--model", help="the pytext snapshot model file to load")
@click.option("--output-path", help="where to save the exported caffe2 model")
@click.option("--output-onnx-path", help="where to save the exported onnx model")
@click.pass_context
def export(context, model, output_path, output_onnx_path):
"""Convert a pytext model snapshot to a caffe2 model."""
if not model:
config = context.obj.load_config()
model = config.save_snapshot_path
output_path = config.export_caffe2_path
output_onnx_path = config.export_onnx_path
print(
f"Exporting {model} to caffe2 file: {output_path} and onnx file: {output_onnx_path}"
)
export_saved_model_to_caffe2(model, output_path, output_onnx_path)
@main.command()
@click.option("--model", help="the pytext snapshot model file to load")
@click.option("--output-path", help="where to save the exported torchscript model")
@click.pass_context
def torchscript_export(context, model, output_path):
"""Convert a pytext model snapshot to a caffe2 model."""
config = context.obj.load_config()
model = model or config.save_snapshot_path
output_path = output_path or f"{config.save_snapshot_path}.torchscript"
print(f"Exporting {model} to torchscript file: {output_path}")
export_saved_model_to_torchscript(model, output_path)
@main.command()
@click.option("--exported-model", help="where to load the exported model")
@click.pass_context
def predict(context, exported_model):
"""Start a repl executing examples against a caffe2 model."""
config = context.obj.load_config()
print(f"Loading model from {exported_model or config.export_caffe2_path}")
predictor = create_predictor(config, exported_model)
print(f"Model loaded, reading example JSON from stdin")
for line in sys.stdin.readlines():
input = json.loads(line)
predictions = predictor(input)
pprint.pprint(predictions)
@main.command()
@click.option("--model-file", help="where to load the pytorch model")
@click.pass_context
def predict_py(context, model_file):
"""
Start a repl executing examples against a PyTorch model.
Example is in json format with names being the same with column_to_read
in model training config
"""
task, train_config = load(model_file)
while True:
try:
line = input(
"please input a json example, the names should be the same with "
+ "column_to_read in model training config: \n"
)
if line:
pprint.pprint(task.predict([json.loads(line)])[0])
except EOFError:
break
@main.command()
@click.option(
"--model-snapshot",
default="",
help="load model snapshot and test configuration from this file",
)
@click.option("--test-path", default="", help="path to test data")
@click.option("--output-path", default="", help="path to save logits")
@click.option(
"--use-cuda/--no-cuda",
default=None,
help="Run supported parts of the model on GPU if available.",
)
@click.option(
"--field_names",
default=None,
help="""Field names for the test-path. If this is not set, the first line of
each file will be assumed to be a header containing the field names.""",
)
@click.pass_context
def get_logits(context, model_snapshot, test_path, use_cuda, output_path, field_names):
"""print logits from a trained model snapshot to output_path
"""
model_snapshot, use_cuda, _ = _get_model_snapshot(
context, model_snapshot, use_cuda, False
)
print("\n=== Starting get_logits...")
workflow_get_logits(model_snapshot, use_cuda, output_path, test_path, field_names)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
fbb2860ca4221244c0a63770d79e6e96dac90529 | 3603f8f76ff81ea75bfc916888bdcfa55b7f12e4 | /alds/alds1_6_c.py | ac675e72489195bfa0b40dd0da0ab01f62841d94 | [] | no_license | kimotot/aizu | 4de0319959a3b166b8c2c4940ab7b701b6ee3395 | 315be1240cff733e1c6a7cd98942a95b3bd7ec96 | refs/heads/master | 2021-07-24T12:37:41.935302 | 2021-03-10T09:05:05 | 2021-03-10T09:05:05 | 91,927,321 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,047 | py | def decode():
n = int(input())
cards = []
for i in range(n):
[m, v] = input().split()
cards.append((m, int(v), i))
return n, cards
def partition(a, p, r):
x = a[r][1]
i = p - 1
for j in range(p, r):
if a[j][1] <= x:
i += 1
t = a[j]
a[j] = a[i]
a[i] = t
t = a[i+1]
a[i+1] = a[r]
a[r] = t
return i+1
def disp(cards):
for (m, n, _) in cards:
print("{0} {1}".format(m, n))
def quicksort(a, p, r):
if p < r:
q = partition(a, p, r)
quicksort(a, p, q-1)
quicksort(a, q+1, r)
def isstable(cards):
for i in range(len(cards) - 1):
if cards[i][1] == cards[i+1][1]:
if cards[i][2] < cards[i+1][2]:
pass
else:
return False
return True
if __name__ == '__main__':
n, cards = decode()
quicksort(cards, 0, n-1)
if isstable(cards):
print("Stable")
else:
print("Not stable")
disp(cards)
| [
"[email protected]"
] | |
5e3fbd7e68a9f9a546b5ab547039decf8d759b24 | c50cf19707ecf44c8e15acf0e994d288fe4f01a7 | /credit/admin.py | d27528df722953df4b3f2fcffcb8d2b79e4637f8 | [
"MIT"
] | permissive | JeremyParker/idlecars-backend | ee5981356c60161dee05c22e01e5c913e73083c0 | 819cce48e4679d61164b238b81dab0e4d51b8afa | refs/heads/master | 2021-03-16T04:29:43.287760 | 2018-03-03T23:16:02 | 2018-03-03T23:16:02 | 31,734,223 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,074 | py | # -*- encoding:utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from credit import models
class CustomerAdmin(admin.ModelAdmin):
readonly_fields = [
'user',
'invite_code',
'invitor_code',
'invitor_credited',
]
list_display = [
'user',
'invitor_code',
'invitor_credited',
'invite_code',
'app_credit',
]
search_fields = [
'user__username',
]
def username(self, instance):
return instance.user.username
class CreditCodeAdmin(admin.ModelAdmin):
list_display = [
'created_time',
'description',
'credit_code',
'credit_amount',
'invitor_credit_amount',
'redeem_count',
'expiry_time',
]
search_fields = [
'customer__user__username',
'description',
'credit_code',
]
readonly_fields = [
'redeem_count',
]
# admin.site.register(models.CreditCode, CreditCodeAdmin)
# admin.site.register(models.Customer, CustomerAdmin)
| [
"[email protected]"
] | |
ec81c5347f900f5b8390d51c5ec6dc1d24fd7dd3 | b29349323954d7a7036f56cef4139ed2c8fcb4f0 | /scripts/color_sample.py | cbecd9d3b9924273ebb76deaaba6cd36e6b5d975 | [
"MIT"
] | permissive | JDevlieghere/dotfiles | e23098fb0241367764243531804c8b9f0ef41ac5 | 09fbb4369c2e54dd38c2566a283eb05864499068 | refs/heads/main | 2023-09-01T02:23:14.303513 | 2023-08-23T16:36:05 | 2023-08-23T16:36:05 | 39,687,300 | 336 | 123 | MIT | 2020-02-24T05:32:45 | 2015-07-25T13:52:36 | Perl | UTF-8 | Python | false | false | 2,707 | py | #!/usr/bin/python3
"""
usage: color_sample.py [-h] [file]
Color a call tree file generated by sample
positional arguments:
file sample file
optional arguments:
-h, --help show this help message and exit
"""
import re
import argparse
import fileinput
import bisect
END_MARKER = "Total number in stack"
BEGIN_MARKER = "Call graph:"
REGEX = re.compile(r"^\D*(\d+)")
def fg(r, g, b):
"""Change foreground color."""
return "\033[38;2;{:d};{:d};{:d}m".format(r, g, b)
def reset():
"""Reset foreground color."""
return "\033[0m"
def rgb(minimum, maximum, value):
"""Convert value within range to RGB."""
assert value <= maximum
assert value >= minimum
minimum, maximum = float(minimum), float(maximum)
r = 2 * (value - minimum) / (maximum - minimum)
b = int(max(0, 255 * (1 - r)))
r = int(max(0, 255 * (r - 1)))
g = 255 - b - r
return r, g, b
def binary_find(a, x):
"""Find value in sorted list."""
i = bisect.bisect_left(a, x)
if i != len(a) and a[i] == x:
return i
return -1
def get_all_samples(lines):
"""Compute a list of all samples."""
parsing = False
samples = []
for line in lines:
if BEGIN_MARKER in line:
parsing = True
continue
if END_MARKER in line:
break
if not parsing:
continue
match = re.match(REGEX, line)
if not match:
continue
samples.append(int(match.group(1)))
return sorted(set(samples))
def color(lines, all_samples):
"""Color the call tree based on the amount of samples for each branch."""
minimum = 0
maximum = len(all_samples)
coloring = False
for line in lines:
if BEGIN_MARKER in line:
coloring = True
if END_MARKER in line:
coloring = False
if not coloring:
print(line)
continue
match = re.match(REGEX, line)
if not match:
print(line)
continue
samples = int(match.group(1))
value = binary_find(all_samples, samples)
r, g, b = rgb(minimum, maximum, value)
print(fg(r, g, b) + line + reset())
def main():
"""Color a call tree file generated by sample."""
parser = argparse.ArgumentParser(
description="Color a call tree file generated by sample"
)
parser.add_argument("file", nargs="?", help="sample file")
args = parser.parse_args()
with fileinput.input(args.file) as file:
lines = []
for line in file:
lines.append(line.rstrip())
color(lines, get_all_samples(lines))
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
ce580dc80f9d1e254df39bbd9c803c0514315380 | ffaba5d94ea820281fee39be0841e3bf08ed157c | /setup.py | c3e69ea047bbcb8f389a65050e82f6fedcd67a85 | [
"MIT"
] | permissive | arruda/Twitter-Get-Old-Tweets-Scraper | d532e316ce013f94426bb9c999112c554b0f585f | 6999a2c72df3866cb4a196370172c50b90b57e8d | refs/heads/master | 2020-03-19T17:29:46.926370 | 2018-06-10T06:09:32 | 2018-06-10T06:09:32 | 136,762,748 | 1 | 1 | MIT | 2020-03-19T06:35:05 | 2018-06-09T22:35:46 | Python | UTF-8 | Python | false | false | 1,202 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The setup script."""
from setuptools import setup, find_packages
with open('README.md') as readme_file:
readme = readme_file.read()
requirements = [
"pyquery>=1.2.17",
"requests>=2.13.0",
]
setup_requirements = [ ]
test_requirements = [ ]
setup(
author="Felipe Arruda",
author_email='[email protected]',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
description="Python Boilerplate contains all the boilerplate you need to create a Python package.",
install_requires=requirements,
include_package_data=True,
name='old_tweets_crawler',
packages=find_packages(include=['old_tweets_crawler*']),
url='https://github.com/arruda/Twitter-Get-Old-Tweets-Scraper',
version='0.1.0',
zip_safe=False,
)
| [
"[email protected]"
] | |
09a6fa7f3033f4fb40be0af0bda8c23201095e4a | 4d097d0e8c571874761463f698f5d34e38e549a0 | /python/b_1002 터렛.py | dbb3151cd939c0f0d6da6eb38b86c8cf3d2fd6e3 | [] | no_license | sondongmin0419/study | 533832151958fe8ae178d8aee183edf91ffa7e12 | 0c863e2c9111b35a15ccfaec3cc64828c584beb1 | refs/heads/master | 2023-03-24T11:54:04.139399 | 2021-03-15T13:40:21 | 2021-03-15T13:40:21 | 281,695,258 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 500 | py | import sys
input = sys.stdin.readline
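# Baekjoon 1002 "Turret": count the intersection points of two circles.
# Output per test case: -1 for identical circles (infinitely many points),
# 0 when separate or strictly nested, 1 when tangent, 2 when crossing.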
n = int(input())
for TC in range(n):
x1, y1, r1, x2, y2, r2 = map(int, input().rstrip().split())
distance = ((x2 - x1) ** 2 + (y2 - y1) ** 2) ** 0.5
if x1 == x2 and y1 == y2:
if r1 == r2:
print(-1)
else:
print(0)
elif r1 + r2 == distance or distance+min(r1,r2) == max(r1,r2):
print(1)
elif r1 + r2 < distance or distance+min(r1,r2) < max(r1,r2):
print(0)
else:
print(2)
| [
"[email protected]"
] | |
eb6166eec43fe151d8738787a51a433ff70d1972 | 5b9f7edaf22297d0d6d0239135f1b2484fd24b34 | /module07.mysql.with.python/exercise08.py | 9aad91a512ff126d042535a8a26bd58f3cb5c995 | [
"MIT"
] | permissive | deepcloudlabs/dcl162-2020-sep-02 | 925791b5c7adae8263e82a3c9a6a406d0b68eb0e | abd21c59d89985e9f5922df65fd1a5ccab019de4 | refs/heads/master | 2022-12-12T16:10:58.497846 | 2020-09-04T18:16:06 | 2020-09-04T18:16:06 | 291,666,203 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | import mysql.connector
my_connection = mysql.connector.connect(host="localhost", user="root", password="Secret_123", database="world")
my_cursor = my_connection.cursor()
sql = "select ctry.name, city.name from country as ctry inner join city as city on ctry.capital = city.id limit 10"
my_cursor.execute(sql)
result_set = my_cursor.fetchall()
for row in result_set:
print(row)
| [
"[email protected]"
] | |
8439d19170cf896f5580caa519a737b1e0e12471 | 9e988c0dfbea15cd23a3de860cb0c88c3dcdbd97 | /sdBs/AllRun/o11_j200739+544516/sdB_O11_J200739+544516_lc.py | c712c7ed5a3afda832ff3ba22080131102af8479 | [] | no_license | tboudreaux/SummerSTScICode | 73b2e5839b10c0bf733808f4316d34be91c5a3bd | 4dd1ffbb09e0a599257d21872f9d62b5420028b0 | refs/heads/master | 2021-01-20T18:07:44.723496 | 2016-08-08T16:49:53 | 2016-08-08T16:49:53 | 65,221,159 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 358 | py | from gPhoton.gAperture import gAperture
def main():
gAperture(band="NUV", skypos=[301.9125,54.754444], stepsz=30., csvfile="/data2/fleming/GPHOTON_OUTPU/LIGHTCURVES/sdBs/sdB_O11_J200739+544516 /sdB_O11_J200739+544516_lc.csv", maxgap=1000., overwrite=True, radius=0.00555556, annulus=[0.005972227,0.0103888972], verbose=3)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
44a88ef6c47660f70db2f79b8fdd1d06f0611b66 | 875b9c5093efd688f79fada905bee80c42380ec1 | /tests/test_pygame_player.py | 20237226009ceb731a7dd5490eed46043acf0029 | [
"MIT"
] | permissive | chaheeee/PyGamePlayer | 945e64819b3e862757e1bf9dbf622ac844f6dbed | 430ced572eddcd638a2db5eb906e5f2016ac7250 | refs/heads/master | 2023-02-21T19:42:18.229341 | 2021-01-22T02:22:52 | 2021-01-22T02:22:52 | 330,974,347 | 0 | 0 | MIT | 2021-01-21T15:05:27 | 2021-01-19T12:35:22 | Python | UTF-8 | Python | false | false | 3,064 | py | import time
import pygame
from unittest import TestCase
from pygame_player import PyGamePlayer
class DummyPyGamePlayer(PyGamePlayer):
def __init__(self, force_game_fps=10, run_real_time=False):
super(DummyPyGamePlayer, self).__init__(force_game_fps=force_game_fps, run_real_time=run_real_time)
def get_keys_pressed(self, screen_array, feedback, terminal):
pass
def get_feedback(self):
return 0.0, False
class TestPyGamePlayer(TestCase):
DISPLAY_X = 1
DISPLAY_Y = 1
def setUp(self):
pygame.init()
pygame.display.set_mode((self.DISPLAY_X, self.DISPLAY_Y), 0, 32)
def tearDown(self):
pygame.quit()
def test_restores_pygame_methods_after_exit(self):
pygame_flip, pygame_update, pygame_event = pygame.display.flip, pygame.display.update, pygame.event.get
with PyGamePlayer():
# methods should be replaced
self.assertNotEqual(pygame_flip, pygame.display.flip)
self.assertNotEqual(pygame_update, pygame.display.update)
self.assertNotEqual(pygame_event, pygame.event.get)
# original methods should be restored
self.assertEqual(pygame_flip, pygame.display.flip)
self.assertEqual(pygame_update, pygame.display.update)
self.assertEqual(pygame_event, pygame.event.get)
def test_fixing_frames_per_second(self):
fix_fps_to = 3
with DummyPyGamePlayer(force_game_fps=fix_fps_to):
clock = pygame.time.Clock()
start_time_ms = clock.get_time()
for _ in range(fix_fps_to):
pygame.display.update()
end_time_ms = clock.get_time()
self.assertAlmostEqual(end_time_ms - start_time_ms, 1000.0,
msg='Expected only 1000 milliseconds to have passed on the clock after screen updates')
def test_get_keys_pressed_method_sets_event_get(self):
fixed_key_pressed = 24
class FixedKeysReturned(DummyPyGamePlayer):
def get_keys_pressed(self, screen_array, feedback, terminal):
return [fixed_key_pressed]
with FixedKeysReturned():
pygame.display.update()
key_pressed = pygame.event.get()
self.assertEqual(key_pressed[0].key, fixed_key_pressed)
def test_get_screen_buffer(self):
class TestScreenArray(DummyPyGamePlayer):
def get_keys_pressed(inner_self, screen_array, feedback, terminal):
self.assertEqual(screen_array.shape[0], self.DISPLAY_X)
self.assertEqual(screen_array.shape[1], self.DISPLAY_Y)
with TestScreenArray():
pygame.display.update()
def test_run_real_time(self):
fix_fps_to = 3
with PyGamePlayer(force_game_fps=fix_fps_to, run_real_time=True):
start = time.time()
clock = pygame.time.Clock()
for _ in range(fix_fps_to):
clock.tick(42343)
end = time.time()
self.assertAlmostEqual(end-start, 1.0, delta=0.1)
| [
"[email protected]"
] | |
5171ac07d2be35016805b504c4184d500421005d | 045cb1a5638c3575296f83471758dc09a8065725 | /addons/mrp_subcontracting/models/stock_picking.py | 34f266a39dd33ebb2f957f12845bc85be08de33c | [] | no_license | marionumza/saas | 7236842b0db98d1a0d0c3c88df32d268509629cb | 148dd95d991a348ebbaff9396759a7dd1fe6e101 | refs/heads/main | 2023-03-27T14:08:57.121601 | 2021-03-20T07:59:08 | 2021-03-20T07:59:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,173 | py | # -*- coding: utf-8 -*-
# Part of Harpiya. See LICENSE file for full copyright and licensing details.
from datetime import timedelta
from harpiya import api, fields, models
class StockPicking(models.Model):
_inherit = 'stock.picking'
display_action_record_components = fields.Boolean(compute='_compute_display_action_record_components')
@api.depends('state')
def _compute_display_action_record_components(self):
for picking in self:
# Hide if not encoding state
if picking.state in ('draft', 'cancel', 'done'):
picking.display_action_record_components = False
continue
if not picking._is_subcontract():
picking.display_action_record_components = False
continue
# Hide if no components are track
subcontracted_productions = picking._get_subcontracted_productions()
subcontracted_moves = subcontracted_productions.mapped('move_raw_ids')
if all(subcontracted_move.has_tracking == 'none' for subcontracted_move in subcontracted_moves):
picking.display_action_record_components = False
continue
# Hide if the production is to close
if not subcontracted_productions.filtered(lambda mo: mo.state not in ('to_close', 'done')):
picking.display_action_record_components = False
continue
picking.display_action_record_components = True
# -------------------------------------------------------------------------
# Action methods
# -------------------------------------------------------------------------
def action_done(self):
res = super(StockPicking, self).action_done()
productions = self.env['mrp.production']
for picking in self:
for move in picking.move_lines:
if not move.is_subcontract:
continue
production = move.move_orig_ids.production_id
if move._has_tracked_subcontract_components():
move.move_orig_ids.filtered(lambda m: m.state not in ('done', 'cancel')).move_line_ids.unlink()
move_finished_ids = move.move_orig_ids.filtered(lambda m: m.state not in ('done', 'cancel'))
for ml in move.move_line_ids:
ml.copy({
'picking_id': False,
'production_id': move_finished_ids.production_id.id,
'move_id': move_finished_ids.id,
'qty_done': ml.qty_done,
'result_package_id': False,
'location_id': move_finished_ids.location_id.id,
'location_dest_id': move_finished_ids.location_dest_id.id,
})
else:
for move_line in move.move_line_ids:
produce = self.env['mrp.product.produce'].with_context(default_production_id=production.id).create({
'production_id': production.id,
'qty_producing': move_line.qty_done,
'product_uom_id': move_line.product_uom_id.id,
'finished_lot_id': move_line.lot_id.id,
'consumption': 'strict',
})
produce._generate_produce_lines()
produce._record_production()
productions |= production
for subcontracted_production in productions:
if subcontracted_production.state == 'progress':
subcontracted_production.post_inventory()
else:
subcontracted_production.button_mark_done()
            # For consistency, set the date on production moves before the date
            # on the picking. (Traceability report + Product Moves menu item)
minimum_date = min(picking.move_line_ids.mapped('date'))
production_moves = subcontracted_production.move_raw_ids | subcontracted_production.move_finished_ids
production_moves.write({'date': minimum_date - timedelta(seconds=1)})
production_moves.move_line_ids.write({'date': minimum_date - timedelta(seconds=1)})
return res
def action_record_components(self):
self.ensure_one()
for move in self.move_lines:
if not move._has_tracked_subcontract_components():
continue
production = move.move_orig_ids.production_id
if not production or production.state in ('done', 'to_close'):
continue
return move._action_record_components()
# -------------------------------------------------------------------------
# Subcontract helpers
# -------------------------------------------------------------------------
def _is_subcontract(self):
self.ensure_one()
return self.picking_type_id.code == 'incoming' and any(m.is_subcontract for m in self.move_lines)
def _get_subcontracted_productions(self):
self.ensure_one()
return self.move_lines.mapped('move_orig_ids.production_id')
def _get_warehouse(self, subcontract_move):
return subcontract_move.warehouse_id or self.picking_type_id.warehouse_id
def _prepare_subcontract_mo_vals(self, subcontract_move, bom):
subcontract_move.ensure_one()
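        # The subcontracting MO consumes and produces at the subcontractor's
        # virtual stock location, grouped under a procurement group named
        # after the receipt picking.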
group = self.env['procurement.group'].create({
'name': self.name,
'partner_id': self.partner_id.id,
})
product = subcontract_move.product_id
warehouse = self._get_warehouse(subcontract_move)
vals = {
'company_id': subcontract_move.company_id.id,
'procurement_group_id': group.id,
'product_id': product.id,
'product_uom_id': subcontract_move.product_uom.id,
'bom_id': bom.id,
'location_src_id': subcontract_move.picking_id.partner_id.with_context(force_company=subcontract_move.company_id.id).property_stock_subcontractor.id,
'location_dest_id': subcontract_move.picking_id.partner_id.with_context(force_company=subcontract_move.company_id.id).property_stock_subcontractor.id,
'product_qty': subcontract_move.product_uom_qty,
'picking_type_id': warehouse.subcontracting_type_id.id
}
return vals
def _subcontracted_produce(self, subcontract_details):
self.ensure_one()
for move, bom in subcontract_details:
mo = self.env['mrp.production'].with_context(force_company=move.company_id.id).create(self._prepare_subcontract_mo_vals(move, bom))
self.env['stock.move'].create(mo._get_moves_raw_values())
mo.action_confirm()
# Link the finished to the receipt move.
finished_move = mo.move_finished_ids.filtered(lambda m: m.product_id == move.product_id)
finished_move.write({'move_dest_ids': [(4, move.id, False)]})
mo.action_assign()
| [
"[email protected]"
] | |
b3b434b287cc143cda89ffc6336037d41c32a53d | e00bfd4ef4bc3bfd97cc26e0b6fa08eae90090a4 | /mnist_tf_nn.py | 67e01351550ff086596638d6ae855b0c8471c259 | [] | no_license | benjaminhuanghuang/dl-study | 4995f99ed3776e7e01de8eef8e635ec9295a2e51 | 180f315da5c679f10b101ad0731f26bd21aa5772 | refs/heads/master | 2021-01-25T13:05:08.687512 | 2018-04-19T03:01:06 | 2018-04-19T03:01:06 | 123,523,798 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,069 | py | '''
TensorFlow Tutorials (1): Setting up the environment on Windows
https://www.youtube.com/watch?v=gItz_fBTeLM
TensorFlow Tutorial Series (2): Handwritten digit recognition
https://www.youtube.com/watch?v=gx7iEa9Q-Vs
TensorFlow Tutorials (3): Predicting your own handwritten images with the FC network
https://www.youtube.com/watch?v=WKHP6QBlb8Q
'''
import os
import tensorflow as tf
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
from PIL import Image
import cv2
# Read data
# one-hot vector is a vector which is 0 in most dimensions, and 1 in a single dimension
# For example, 3 would be [0,0,0,1,0,0,0,0,0,0]
mnist = input_data.read_data_sets('./data/MNIST', one_hot=True)
x = tf.placeholder(dtype=tf.float32, shape=[None, 784], name='x')
# mnist.train.labels is a [55000, 10] array of floats
y = tf.placeholder(dtype=tf.float32, shape=[None, 10], name='y')
batch_size = 1000
def add_layer(input_data, input_num, output_num, activation_fun=None):
w = tf.Variable(initial_value=tf.random_normal(shape=[input_num, output_num]))
b = tf.Variable(initial_value=tf.random_normal(shape=[1, output_num]))
# output = input_data * weight + bias
output = tf.add(tf.matmul(input_data, w), b)
if activation_fun:
output = activation_fun(output)
return output
def build_nn(data):
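    # Fully-connected stack: 784 input pixels -> 100 -> 50 -> 10 class logits;
    # sigmoid on the hidden layers, raw logits out (softmax is applied later
    # inside the cross-entropy loss).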
hidden_layer1 = add_layer(data, 784, 100, activation_fun=tf.nn.sigmoid)
hidden_layer2 = add_layer(hidden_layer1, 100, 50, activation_fun=tf.nn.sigmoid)
output_layer = add_layer(hidden_layer2, 50, 10)
return output_layer
def train_nn(data):
# output of NN
output = build_nn(data)
# softmax used for vector compairation
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(labels=y, logits=output))
#
optimizer = tf.train.GradientDescentOptimizer(learning_rate=1).minimize(loss)
saver = tf.train.Saver()
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
if not os.path.exists('checkpoint'):
for i in range(50):
epoch_cost = 0
for _ in range(int(mnist.train.num_examples / batch_size)):
x_data, y_data = mnist.train.next_batch(batch_size)
cost, _ = sess.run([loss, optimizer], feed_dict={x: x_data, y: y_data})
epoch_cost += cost
print('Epoch', i, ": ", epoch_cost)
accuracy = tf.reduce_mean(tf.cast(tf.equal(tf.argmax(y, 1), tf.argmax(output, 1)), tf.float32))
acc = sess.run(accuracy, feed_dict={x: mnist.test.images, y: mnist.test.labels})
print("accuracy: ", acc)
saver.save(sess, './tmp/mnist.skpt')
else:
saver.restore(sess, './tmp/mnist.skpt')
predict('./input.png', sess, output)
def reconstruct_image():
for i in range(10):
path = './imgs/{}'.format(i)
if not os.path.exists(path):
os.makedirs(path)
batch_size = 1
for i in range(int(mnist.train.num_examples / batch_size)):
x_data, y_data = mnist.train.next_batch(batch_size)
img = Image.fromarray(np.reshape(np.array(x_data[0]*255, dtype='uint8'), newshape=(28, 28)))
dir = np.argmax(y_data[0])
img.save('./imgs/{}/{}.bmp'.format(dir, i))
def read_data(path):
image = cv2.imread(path, cv2.IMREAD_GRAYSCALE)
# is square
w, h = image.shape
max_ = max(w, h)
processed_img = cv2.resize(image, dsize=(max_, max_))
processed_img = np.resize(processed_img, new_shape=(1, 784))
return image, processed_img
def predict(image_path, sess, output):
image, processed_image = read_data(image_path)
result = sess.run(output, feed_dict={x: processed_image})
result = np.argmax(result, 1)
print('The prediciton is', result)
cv2.putText(image, 'The prediction is {}'.format(result), (20, 20),
cv2.FONT_HERSHEY_COMPLEX, 1, color=(255, 255, 255))
cv2.imshow('image', image)
cv2.waitKey(0)
    cv2.destroyAllWindows()
train_nn(x)
# reconstruct_image()
| [
"[email protected]"
] | |
60a7352ea9fa28baf709bc3938aeeb9ae85a08f7 | 3b84c4b7b16ccfd0154f8dcb75ddbbb6636373be | /google-cloud-sdk/lib/googlecloudsdk/third_party/logging_v2/gapic/config_service_v2_client_config.py | 918b29fd0f8ebf1f3e73a5596dbf58efcd6af258 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | twistedpair/google-cloud-sdk | 37f04872cf1ab9c9ce5ec692d2201a93679827e3 | 1f9b424c40a87b46656fc9f5e2e9c81895c7e614 | refs/heads/master | 2023-08-18T18:42:59.622485 | 2023-08-15T00:00:00 | 2023-08-15T12:14:05 | 116,506,777 | 58 | 24 | null | 2022-02-14T22:01:53 | 2018-01-06T18:40:35 | Python | UTF-8 | Python | false | false | 3,721 | py | # Copyright 2020 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
config = {
"interfaces": {
"google.logging.v2.ConfigServiceV2": {
"retry_codes": {
"idempotent": [
"DEADLINE_EXCEEDED",
"INTERNAL",
"UNAVAILABLE"
],
"non_idempotent": [],
"idempotent2": [
"DEADLINE_EXCEEDED",
"UNAVAILABLE"
]
},
"retry_params": {
"default": {
"initial_retry_delay_millis": 100,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
"initial_rpc_timeout_millis": 20000,
"rpc_timeout_multiplier": 1.0,
"max_rpc_timeout_millis": 20000,
"total_timeout_millis": 600000
}
},
"methods": {
"DeleteSink": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
},
"UpdateSink": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
},
"DeleteExclusion": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
},
"ListBuckets": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent2",
"retry_params_name": "default"
},
"GetBucket": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent2",
"retry_params_name": "default"
},
"UpdateBucket": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
},
"ListSinks": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent2",
"retry_params_name": "default"
},
"GetSink": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent2",
"retry_params_name": "default"
},
"CreateSink": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
},
"ListExclusions": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent2",
"retry_params_name": "default"
},
"GetExclusion": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent2",
"retry_params_name": "default"
},
"CreateExclusion": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
},
"UpdateExclusion": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
},
"GetCmekSettings": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent2",
"retry_params_name": "default"
},
"UpdateCmekSettings": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default"
}
}
}
}
}
| [
"[email protected]"
] | |
8eb00965d7fe2515246764e9e28869e4defe30b5 | 35fe9e62ab96038705c3bd09147f17ca1225a84e | /a10_ansible/library/a10_gslb_zone_service_dns_srv_record.py | 6af19cd9ec408f7a280cb3b28bd71ada0d1abc4f | [] | no_license | bmeidell/a10-ansible | 6f55fb4bcc6ab683ebe1aabf5d0d1080bf848668 | 25fdde8d83946dadf1d5b9cebd28bc49b75be94d | refs/heads/master | 2020-03-19T08:40:57.863038 | 2018-03-27T18:25:40 | 2018-03-27T18:25:40 | 136,226,910 | 0 | 0 | null | 2018-06-05T19:45:36 | 2018-06-05T19:45:36 | null | UTF-8 | Python | false | false | 6,346 | py | #!/usr/bin/python
REQUIRED_NOT_SET = (False, "One of ({}) must be set.")
REQUIRED_MUTEX = (False, "Only one of ({}) can be set.")
REQUIRED_VALID = (True, "")
DOCUMENTATION = """
module: a10_dns-srv-record
description:
-
author: A10 Networks 2018
version_added: 1.8
options:
    srv_name:
description:
- Specify Domain Name
port:
description:
- Specify Port (Port Number)
priority:
description:
- Specify Priority
weight:
description:
- Specify Weight, default is 10
ttl:
description:
- Specify TTL
uuid:
description:
- uuid of the object
    sampling_enable:
"""
EXAMPLES = """
"""
ANSIBLE_METADATA = """
"""
# Hacky way of having access to object properties for evaluation
AVAILABLE_PROPERTIES = {"port","priority","sampling_enable","srv_name","ttl","uuid","weight",}
# our imports go at the top so we fail fast.
from a10_ansible.axapi_http import client_factory
from a10_ansible import errors as a10_ex
def get_default_argspec():
return dict(
a10_host=dict(type='str', required=True),
a10_username=dict(type='str', required=True),
a10_password=dict(type='str', required=True, no_log=True),
state=dict(type='str', default="present", choices=["present", "absent"])
)
def get_argspec():
rv = get_default_argspec()
rv.update(dict(
port=dict(
type='str' , required=True
),
priority=dict(
type='str'
),
sampling_enable=dict(
type='str'
),
srv_name=dict(
type='str' , required=True
),
ttl=dict(
type='str'
),
uuid=dict(
type='str'
),
weight=dict(
type='str'
),
))
return rv
def new_url(module):
"""Return the URL for creating a resource"""
# To create the URL, we need to take the format string and return it with no params
url_base = "/axapi/v3/gslb/zone/{name}/service/{service-port}+{service-name}/dns-srv-record/{srv-name}+{port}"
f_dict = {}
f_dict["srv-name"] = ""
f_dict["port"] = ""
return url_base.format(**f_dict)
def existing_url(module):
"""Return the URL for an existing resource"""
# Build the format dictionary
url_base = "/axapi/v3/gslb/zone/{name}/service/{service-port}+{service-name}/dns-srv-record/{srv-name}+{port}"
f_dict = {}
f_dict["srv-name"] = module.params["srv-name"]
f_dict["port"] = module.params["port"]
return url_base.format(**f_dict)
def build_envelope(title, data):
return {
title: data
}
def build_json(title, module):
rv = {}
for x in AVAILABLE_PROPERTIES:
v = module.params.get(x)
if v:
rx = x.replace("_", "-")
rv[rx] = module.params[x]
return build_envelope(title, rv)
def validate(params):
# Ensure that params contains all the keys.
requires_one_of = sorted([])
present_keys = sorted([x for x in requires_one_of if params.get(x)])
errors = []
marg = []
if not len(requires_one_of):
return REQUIRED_VALID
if len(present_keys) == 0:
rc,msg = REQUIRED_NOT_SET
marg = requires_one_of
elif requires_one_of == present_keys:
rc,msg = REQUIRED_MUTEX
marg = present_keys
else:
rc,msg = REQUIRED_VALID
if not rc:
errors.append(msg.format(", ".join(marg)))
return rc,errors
def exists(module):
try:
module.client.get(existing_url(module))
return True
except a10_ex.NotFound:
return False
def create(module, result):
payload = build_json("dns-srv-record", module)
try:
post_result = module.client.post(new_url(module), payload)
result.update(**post_result)
result["changed"] = True
except a10_ex.Exists:
result["changed"] = False
except a10_ex.ACOSException as ex:
module.fail_json(msg=ex.msg, **result)
except Exception as gex:
raise gex
return result
def delete(module, result):
try:
module.client.delete(existing_url(module))
result["changed"] = True
except a10_ex.NotFound:
result["changed"] = False
except a10_ex.ACOSException as ex:
module.fail_json(msg=ex.msg, **result)
except Exception as gex:
raise gex
return result
def update(module, result):
payload = build_json("dns-srv-record", module)
try:
post_result = module.client.put(existing_url(module), payload)
result.update(**post_result)
result["changed"] = True
except a10_ex.ACOSException as ex:
module.fail_json(msg=ex.msg, **result)
except Exception as gex:
raise gex
return result
def present(module, result):
if not exists(module):
return create(module, result)
else:
return update(module, result)
def absent(module, result):
return delete(module, result)
def run_command(module):
run_errors = []
result = dict(
changed=False,
original_message="",
message=""
)
state = module.params["state"]
a10_host = module.params["a10_host"]
a10_username = module.params["a10_username"]
a10_password = module.params["a10_password"]
# TODO(remove hardcoded port #)
a10_port = 443
a10_protocol = "https"
valid, validation_errors = validate(module.params)
map(run_errors.append, validation_errors)
if not valid:
result["messages"] = "Validation failure"
err_msg = "\n".join(run_errors)
module.fail_json(msg=err_msg, **result)
module.client = client_factory(a10_host, a10_port, a10_protocol, a10_username, a10_password)
if state == 'present':
result = present(module, result)
elif state == 'absent':
result = absent(module, result)
return result
def main():
module = AnsibleModule(argument_spec=get_argspec())
result = run_command(module)
module.exit_json(**result)
# standard ansible module imports
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
0a41178fc20f8607e3279660130613dca66b6169 | 4a22da169b96b9d8d4cf30c5ea7e167cdbd1e802 | /cluster_uva/simulation35xu_rivanna/simulation35xu_rivanna_8.py | b78fb63ba51f2061a43dfc5f5f6aae48c9fbb2a9 | [] | no_license | LiYan1988/simulationsYuxin | 543fbeaac362f71513c71ceb1d1c300cabf04173 | 5997cecb5d772194900feddd07d7c5001c39b037 | refs/heads/master | 2021-01-19T13:50:15.968470 | 2017-03-17T21:52:33 | 2017-03-17T21:52:33 | 82,420,737 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,033 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Feb 25 22:06:02 2017
@author: misWin
This is a template of python files for simulations 3 and 5 with Xu's algorithm on hebbe
"""
from milp2_xu import *
np.random.seed(0)
batch_id = 8
network_cost = pd.read_csv('nsf-24nodes.csv', header=None, index_col=None)
network_cost = network_cost.as_matrix()
sn = Network(network_cost, modulation='bpsk')
demands_file = 'simulation35xu_rivanna_8.csv'
demands = pd.read_csv(demands_file)
iteration_history_tr, iteration_history_gn = \
sn.iterate(demands, random_state=0, mipstart=True, mipfocus=1,
method=-1, mipgap=0.001)
# gurobi model instances cannot be saved by pickle
#models_gn = {}
#models_tr = {}
#for i in iteration_history_gn.keys():
# models_gn[i] = iteration_history_gn[i].pop('model', None)
# models_tr[i] = iteration_history_tr[i].pop('model', None)
iteration_history = (iteration_history_tr, iteration_history_gn)
output_file = 'simulation35xu_rivanna_8.pkl'
save_data(output_file, iteration_history) | [
"[email protected]"
] | |
22da5ff4314cfc620cb8d225b70570619873fe70 | efb7180c05964aee07756dbd4f9982f81559d7e3 | /TradeBot/tradebotapp/migrations/0002_auto_20191005_1543.py | 49018395ae8bfb3525d48159fd3e3be4939ec2eb | [] | no_license | ShunnoSaiful/Trade-Bot | 920ba75225d921f54530fc9f0d10a8eb9eabdaaf | d07489dea5fcf1d1d51a918a3127f620682107f2 | refs/heads/master | 2022-11-24T08:22:00.946773 | 2019-10-29T05:20:08 | 2019-10-29T05:20:08 | 218,207,062 | 0 | 0 | null | 2022-11-22T04:18:04 | 2019-10-29T04:54:41 | JavaScript | UTF-8 | Python | false | false | 673 | py | # Generated by Django 2.1 on 2019-10-05 15:43
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('tradebotapp', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='question',
name='section',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='tradebotapp.Section'),
preserve_default=False,
),
migrations.AlterField(
model_name='question',
name='question',
field=models.CharField(max_length=500),
),
]
| [
"[email protected]"
] | |
298c589f469654840e1fabc25a5a868de853909a | d2304891c5757e0cdb393d95b0c3d3495f6fbf76 | /fourier.py | 3ae0c47f18598a85df9bd0a14022cc42226f6cb0 | [] | no_license | phisyche/Python | 8b555e540f5aeb243434a6f3eceec3ee3835d288 | f8897f008723821fdc8a1d9a3bdf462d2c42e49c | refs/heads/master | 2023-03-15T04:49:51.586572 | 2021-03-26T13:45:08 | 2021-03-26T13:45:08 | 52,379,347 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,062 | py | import scipy
import matplotlib.pyplot as plt
pi = scipy.pi
signal_length = 0.5  # [seconds]
sample_rate = 500  # sampling rate [Hz]
dt = 1. / sample_rate  # time between two samples [s]
df = 1 / signal_length  # frequency between points in
# the frequency domain [Hz]
t = scipy.arange(0, signal_length, dt)  # the time vector
# create signal
y= scipy .sin (2* pi *50* t)+ scipy .sin (2* pi *70* t+pi /4)
# compute fourier transform
f= scipy .fft(y)
# work out meaningful frequencies in fourier transform
freqs =df* scipy . arange (0 ,( n_t -1)/2. , dtype ='d') #d= double precision float
n_freq = len ( freqs )
# plot input data y against time
plt. subplot (2 ,1 ,1)
plt. plot (t,y, label ='input data ')
plt. xlabel ('time [s]')
plt. ylabel ('signal ')
# plot frequency spectrum
plt. subplot (2 ,1 ,2)
plt. plot (freqs ,abs(f[0: n_freq ]),
label ='abs( fourier transform )')
plt. xlabel ('frequency [Hz]')
plt. ylabel ('abs(DFT( signal )) ')
# save plot to disk
plt. savefig ('fft1 .pdf ')
plt. show () #and display plot on screen
| [
"[email protected]"
] | |
de73271a124412f46be37471ac8f3093ebe85e83 | eeb593a5351c8a17b5ab35434bead398498d3b7e | /server/echo.py | 58675d43535c2cb401d50a4b7f9b88c26f4ee2c6 | [] | no_license | smit1998/slacker-backend | fe16b4e0fe3e8b67a46508d238d2353535078b01 | 1217864bcb7f396106947f87460a2efc3cf8ffac | refs/heads/master | 2023-02-09T14:47:04.556172 | 2019-11-17T08:54:28 | 2019-11-17T08:54:28 | 288,636,916 | 0 | 0 | null | 2021-01-06T09:04:54 | 2020-08-19T04:55:20 | JavaScript | UTF-8 | Python | false | false | 36 | py | def echo(value):
return value
| [
"[email protected]"
] | |
3872da51193af30d3efc08de4509cb2da1008df4 | 498474967e1480acf5cc0f25756e1d748c677195 | /mmdetection3d/mmdet3d/core/evaluation/kitti_utils/rotate_iou.py | 2f0c9c8e50667a99b84e6537c4b7ab8922a1d7a2 | [
"MIT",
"Apache-2.0"
] | permissive | hustvl/MapTR | adc37f78cbae9d8c909dd8648088a4930bf55377 | feb0664e64684d3207859279f047fa54a1a806f6 | refs/heads/main | 2023-08-25T17:44:47.672149 | 2023-08-14T13:31:17 | 2023-08-14T13:31:17 | 518,672,305 | 643 | 95 | MIT | 2023-09-14T03:30:23 | 2022-07-28T02:20:43 | Python | UTF-8 | Python | false | false | 13,315 | py | # Copyright (c) OpenMMLab. All rights reserved.
#####################
# Based on https://github.com/hongzhenwang/RRPN-revise
# Licensed under The MIT License
# Author: yanyan, [email protected]
#####################
import math
import numba
import numpy as np
from numba import cuda
@numba.jit(nopython=True)
def div_up(m, n):
return m // n + (m % n > 0)
@cuda.jit('(float32[:], float32[:], float32[:])', device=True, inline=True)
def trangle_area(a, b, c):
return ((a[0] - c[0]) * (b[1] - c[1]) - (a[1] - c[1]) *
(b[0] - c[0])) / 2.0
@cuda.jit('(float32[:], int32)', device=True, inline=True)
def area(int_pts, num_of_inter):
area_val = 0.0
for i in range(num_of_inter - 2):
area_val += abs(
trangle_area(int_pts[:2], int_pts[2 * i + 2:2 * i + 4],
int_pts[2 * i + 4:2 * i + 6]))
return area_val
@cuda.jit('(float32[:], int32)', device=True, inline=True)
def sort_vertex_in_convex_polygon(int_pts, num_of_inter):
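    # Orders the intersection points by angle around their centroid using a
    # monotonic pseudo-angle built from the cosine (no atan2 on device), then
    # an insertion sort, so area() can fan-triangulate the convex polygon.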
if num_of_inter > 0:
center = cuda.local.array((2, ), dtype=numba.float32)
center[:] = 0.0
for i in range(num_of_inter):
center[0] += int_pts[2 * i]
center[1] += int_pts[2 * i + 1]
center[0] /= num_of_inter
center[1] /= num_of_inter
v = cuda.local.array((2, ), dtype=numba.float32)
vs = cuda.local.array((16, ), dtype=numba.float32)
for i in range(num_of_inter):
v[0] = int_pts[2 * i] - center[0]
v[1] = int_pts[2 * i + 1] - center[1]
d = math.sqrt(v[0] * v[0] + v[1] * v[1])
v[0] = v[0] / d
v[1] = v[1] / d
if v[1] < 0:
v[0] = -2 - v[0]
vs[i] = v[0]
j = 0
temp = 0
for i in range(1, num_of_inter):
if vs[i - 1] > vs[i]:
temp = vs[i]
tx = int_pts[2 * i]
ty = int_pts[2 * i + 1]
j = i
while j > 0 and vs[j - 1] > temp:
vs[j] = vs[j - 1]
int_pts[j * 2] = int_pts[j * 2 - 2]
int_pts[j * 2 + 1] = int_pts[j * 2 - 1]
j -= 1
vs[j] = temp
int_pts[j * 2] = tx
int_pts[j * 2 + 1] = ty
@cuda.jit(
'(float32[:], float32[:], int32, int32, float32[:])',
device=True,
inline=True)
def line_segment_intersection(pts1, pts2, i, j, temp_pts):
A = cuda.local.array((2, ), dtype=numba.float32)
B = cuda.local.array((2, ), dtype=numba.float32)
C = cuda.local.array((2, ), dtype=numba.float32)
D = cuda.local.array((2, ), dtype=numba.float32)
A[0] = pts1[2 * i]
A[1] = pts1[2 * i + 1]
B[0] = pts1[2 * ((i + 1) % 4)]
B[1] = pts1[2 * ((i + 1) % 4) + 1]
C[0] = pts2[2 * j]
C[1] = pts2[2 * j + 1]
D[0] = pts2[2 * ((j + 1) % 4)]
D[1] = pts2[2 * ((j + 1) % 4) + 1]
BA0 = B[0] - A[0]
BA1 = B[1] - A[1]
DA0 = D[0] - A[0]
CA0 = C[0] - A[0]
DA1 = D[1] - A[1]
CA1 = C[1] - A[1]
acd = DA1 * CA0 > CA1 * DA0
bcd = (D[1] - B[1]) * (C[0] - B[0]) > (C[1] - B[1]) * (D[0] - B[0])
if acd != bcd:
abc = CA1 * BA0 > BA1 * CA0
abd = DA1 * BA0 > BA1 * DA0
if abc != abd:
DC0 = D[0] - C[0]
DC1 = D[1] - C[1]
ABBA = A[0] * B[1] - B[0] * A[1]
CDDC = C[0] * D[1] - D[0] * C[1]
DH = BA1 * DC0 - BA0 * DC1
Dx = ABBA * DC0 - BA0 * CDDC
Dy = ABBA * DC1 - BA1 * CDDC
temp_pts[0] = Dx / DH
temp_pts[1] = Dy / DH
return True
return False
@cuda.jit(
'(float32[:], float32[:], int32, int32, float32[:])',
device=True,
inline=True)
def line_segment_intersection_v1(pts1, pts2, i, j, temp_pts):
a = cuda.local.array((2, ), dtype=numba.float32)
b = cuda.local.array((2, ), dtype=numba.float32)
c = cuda.local.array((2, ), dtype=numba.float32)
d = cuda.local.array((2, ), dtype=numba.float32)
a[0] = pts1[2 * i]
a[1] = pts1[2 * i + 1]
b[0] = pts1[2 * ((i + 1) % 4)]
b[1] = pts1[2 * ((i + 1) % 4) + 1]
c[0] = pts2[2 * j]
c[1] = pts2[2 * j + 1]
d[0] = pts2[2 * ((j + 1) % 4)]
d[1] = pts2[2 * ((j + 1) % 4) + 1]
area_abc = trangle_area(a, b, c)
area_abd = trangle_area(a, b, d)
if area_abc * area_abd >= 0:
return False
area_cda = trangle_area(c, d, a)
area_cdb = area_cda + area_abc - area_abd
if area_cda * area_cdb >= 0:
return False
t = area_cda / (area_abd - area_abc)
dx = t * (b[0] - a[0])
dy = t * (b[1] - a[1])
temp_pts[0] = a[0] + dx
temp_pts[1] = a[1] + dy
return True
@cuda.jit('(float32, float32, float32[:])', device=True, inline=True)
def point_in_quadrilateral(pt_x, pt_y, corners):
ab0 = corners[2] - corners[0]
ab1 = corners[3] - corners[1]
ad0 = corners[6] - corners[0]
ad1 = corners[7] - corners[1]
ap0 = pt_x - corners[0]
ap1 = pt_y - corners[1]
abab = ab0 * ab0 + ab1 * ab1
abap = ab0 * ap0 + ab1 * ap1
adad = ad0 * ad0 + ad1 * ad1
adap = ad0 * ap0 + ad1 * ap1
return abab >= abap and abap >= 0 and adad >= adap and adap >= 0
@cuda.jit('(float32[:], float32[:], float32[:])', device=True, inline=True)
def quadrilateral_intersection(pts1, pts2, int_pts):
num_of_inter = 0
for i in range(4):
if point_in_quadrilateral(pts1[2 * i], pts1[2 * i + 1], pts2):
int_pts[num_of_inter * 2] = pts1[2 * i]
int_pts[num_of_inter * 2 + 1] = pts1[2 * i + 1]
num_of_inter += 1
if point_in_quadrilateral(pts2[2 * i], pts2[2 * i + 1], pts1):
int_pts[num_of_inter * 2] = pts2[2 * i]
int_pts[num_of_inter * 2 + 1] = pts2[2 * i + 1]
num_of_inter += 1
temp_pts = cuda.local.array((2, ), dtype=numba.float32)
for i in range(4):
for j in range(4):
has_pts = line_segment_intersection(pts1, pts2, i, j, temp_pts)
if has_pts:
int_pts[num_of_inter * 2] = temp_pts[0]
int_pts[num_of_inter * 2 + 1] = temp_pts[1]
num_of_inter += 1
return num_of_inter
@cuda.jit('(float32[:], float32[:])', device=True, inline=True)
def rbbox_to_corners(corners, rbbox):
# generate clockwise corners and rotate it clockwise
angle = rbbox[4]
a_cos = math.cos(angle)
a_sin = math.sin(angle)
center_x = rbbox[0]
center_y = rbbox[1]
x_d = rbbox[2]
y_d = rbbox[3]
corners_x = cuda.local.array((4, ), dtype=numba.float32)
corners_y = cuda.local.array((4, ), dtype=numba.float32)
corners_x[0] = -x_d / 2
corners_x[1] = -x_d / 2
corners_x[2] = x_d / 2
corners_x[3] = x_d / 2
corners_y[0] = -y_d / 2
corners_y[1] = y_d / 2
corners_y[2] = y_d / 2
corners_y[3] = -y_d / 2
for i in range(4):
corners[2 * i] = a_cos * corners_x[i] + a_sin * corners_y[i] + center_x
corners[2 * i +
1] = -a_sin * corners_x[i] + a_cos * corners_y[i] + center_y
@cuda.jit('(float32[:], float32[:])', device=True, inline=True)
def inter(rbbox1, rbbox2):
"""Compute intersection of two rotated boxes.
Args:
        rbbox1 (np.ndarray, shape=[5]): Rotated 2d box.
        rbbox2 (np.ndarray, shape=[5]): Rotated 2d box.
    Returns:
        float: Intersection area of the two rotated boxes.
"""
corners1 = cuda.local.array((8, ), dtype=numba.float32)
corners2 = cuda.local.array((8, ), dtype=numba.float32)
intersection_corners = cuda.local.array((16, ), dtype=numba.float32)
rbbox_to_corners(corners1, rbbox1)
rbbox_to_corners(corners2, rbbox2)
num_intersection = quadrilateral_intersection(corners1, corners2,
intersection_corners)
sort_vertex_in_convex_polygon(intersection_corners, num_intersection)
# print(intersection_corners.reshape([-1, 2])[:num_intersection])
return area(intersection_corners, num_intersection)
@cuda.jit('(float32[:], float32[:], int32)', device=True, inline=True)
def devRotateIoUEval(rbox1, rbox2, criterion=-1):
"""Compute rotated iou on device.
Args:
rbox1 (np.ndarray, shape=[5]): Rotated 2d box.
rbox2 (np.ndarray, shape=[5]): Rotated 2d box.
criterion (int, optional): Indicate different type of iou.
-1 indicate `area_inter / (area1 + area2 - area_inter)`,
0 indicate `area_inter / area1`,
1 indicate `area_inter / area2`.
Returns:
float: iou between two input boxes.
"""
area1 = rbox1[2] * rbox1[3]
area2 = rbox2[2] * rbox2[3]
area_inter = inter(rbox1, rbox2)
if criterion == -1:
return area_inter / (area1 + area2 - area_inter)
elif criterion == 0:
return area_inter / area1
elif criterion == 1:
return area_inter / area2
else:
return area_inter
@cuda.jit(
'(int64, int64, float32[:], float32[:], float32[:], int32)',
fastmath=False)
def rotate_iou_kernel_eval(N,
K,
dev_boxes,
dev_query_boxes,
dev_iou,
criterion=-1):
"""Kernel of computing rotated iou.
Args:
N (int): The number of boxes.
K (int): The number of query boxes.
dev_boxes (np.ndarray): Boxes on device.
dev_query_boxes (np.ndarray): Query boxes on device.
dev_iou (np.ndarray): Computed iou to return.
criterion (int, optional): Indicate different type of iou.
-1 indicate `area_inter / (area1 + area2 - area_inter)`,
0 indicate `area_inter / area1`,
1 indicate `area_inter / area2`.
"""
threadsPerBlock = 8 * 8
row_start = cuda.blockIdx.x
col_start = cuda.blockIdx.y
tx = cuda.threadIdx.x
row_size = min(N - row_start * threadsPerBlock, threadsPerBlock)
col_size = min(K - col_start * threadsPerBlock, threadsPerBlock)
block_boxes = cuda.shared.array(shape=(64 * 5, ), dtype=numba.float32)
block_qboxes = cuda.shared.array(shape=(64 * 5, ), dtype=numba.float32)
dev_query_box_idx = threadsPerBlock * col_start + tx
dev_box_idx = threadsPerBlock * row_start + tx
if (tx < col_size):
block_qboxes[tx * 5 + 0] = dev_query_boxes[dev_query_box_idx * 5 + 0]
block_qboxes[tx * 5 + 1] = dev_query_boxes[dev_query_box_idx * 5 + 1]
block_qboxes[tx * 5 + 2] = dev_query_boxes[dev_query_box_idx * 5 + 2]
block_qboxes[tx * 5 + 3] = dev_query_boxes[dev_query_box_idx * 5 + 3]
block_qboxes[tx * 5 + 4] = dev_query_boxes[dev_query_box_idx * 5 + 4]
if (tx < row_size):
block_boxes[tx * 5 + 0] = dev_boxes[dev_box_idx * 5 + 0]
block_boxes[tx * 5 + 1] = dev_boxes[dev_box_idx * 5 + 1]
block_boxes[tx * 5 + 2] = dev_boxes[dev_box_idx * 5 + 2]
block_boxes[tx * 5 + 3] = dev_boxes[dev_box_idx * 5 + 3]
block_boxes[tx * 5 + 4] = dev_boxes[dev_box_idx * 5 + 4]
cuda.syncthreads()
if tx < row_size:
for i in range(col_size):
offset = (
row_start * threadsPerBlock * K + col_start * threadsPerBlock +
tx * K + i)
dev_iou[offset] = devRotateIoUEval(block_qboxes[i * 5:i * 5 + 5],
block_boxes[tx * 5:tx * 5 + 5],
criterion)
def rotate_iou_gpu_eval(boxes, query_boxes, criterion=-1, device_id=0):
"""Rotated box iou running in gpu. 500x faster than cpu version (take 5ms
in one example with numba.cuda code). convert from [this project](
https://github.com/hongzhenwang/RRPN-revise/tree/master/lib/rotation).
Args:
boxes (torch.Tensor): rbboxes. format: centers, dims,
angles(clockwise when positive) with the shape of [N, 5].
query_boxes (float tensor: [K, 5]): rbboxes to compute iou with boxes.
device_id (int, optional): Defaults to 0. Device to use.
criterion (int, optional): Indicate different type of iou.
-1 indicate `area_inter / (area1 + area2 - area_inter)`,
0 indicate `area_inter / area1`,
1 indicate `area_inter / area2`.
Returns:
np.ndarray: IoU results.
"""
boxes = boxes.astype(np.float32)
query_boxes = query_boxes.astype(np.float32)
N = boxes.shape[0]
K = query_boxes.shape[0]
iou = np.zeros((N, K), dtype=np.float32)
if N == 0 or K == 0:
return iou
threadsPerBlock = 8 * 8
cuda.select_device(device_id)
blockspergrid = (div_up(N, threadsPerBlock), div_up(K, threadsPerBlock))
stream = cuda.stream()
with stream.auto_synchronize():
boxes_dev = cuda.to_device(boxes.reshape([-1]), stream)
query_boxes_dev = cuda.to_device(query_boxes.reshape([-1]), stream)
iou_dev = cuda.to_device(iou.reshape([-1]), stream)
rotate_iou_kernel_eval[blockspergrid, threadsPerBlock,
stream](N, K, boxes_dev, query_boxes_dev,
iou_dev, criterion)
iou_dev.copy_to_host(iou.reshape([-1]), stream=stream)
return iou.astype(boxes.dtype)
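# Usage sketch (illustrative only; box rows are [x_center, y_center, w, h,
# angle], the helpers div_up/inter are defined earlier in this file, and a
# CUDA-capable device is assumed):
#
#   import numpy as np
#   boxes = np.array([[0.0, 0.0, 2.0, 2.0, 0.0]], dtype=np.float32)
#   queries = np.array([[0.5, 0.5, 2.0, 2.0, 0.3]], dtype=np.float32)
#   ious = rotate_iou_gpu_eval(boxes, queries, criterion=-1)  # shape (1, 1)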

# ============================================================================
# File: src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/
#       azure_storage_blob/v2019_12_12/_blob_service_client.py
# Repo: VSChina/azure-cli-extensions (MIT / LicenseRef-scancode-generic-cla)
# ============================================================================
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import functools
from typing import ( # pylint: disable=unused-import
Union, Optional, Any, Iterable, Dict, List,
TYPE_CHECKING
)
try:
from urllib.parse import urlparse
except ImportError:
from urlparse import urlparse # type: ignore
from azure.core.paging import ItemPaged
from azure.core.pipeline import Pipeline
from azure.core.tracing.decorator import distributed_trace
from ._shared.models import LocationMode
from ._shared.base_client import StorageAccountHostsMixin, TransportWrapper, parse_connection_str, parse_query
from ._shared.parser import _to_utc_datetime
from ._shared.response_handlers import return_response_headers, process_storage_error, \
parse_to_internal_user_delegation_key
from ._generated import AzureBlobStorage, VERSION
from ._generated.models import StorageErrorException, StorageServiceProperties, KeyInfo
from ._container_client import ContainerClient
from ._blob_client import BlobClient
from ._models import ContainerPropertiesPaged, FilteredBlobPaged
from ._serialize import get_api_version
from ._deserialize import service_stats_deserialize, service_properties_deserialize
if TYPE_CHECKING:
from datetime import datetime
from azure.core.pipeline.transport import HttpTransport
from azure.core.pipeline.policies import HTTPPolicy
from ._shared.models import UserDelegationKey
from ._lease import BlobLeaseClient
from ._models import (
ContainerProperties,
BlobProperties,
PublicAccess,
BlobAnalyticsLogging,
Metrics,
CorsRule,
RetentionPolicy,
StaticWebsite,
)
class BlobServiceClient(StorageAccountHostsMixin):
"""A client to interact with the Blob Service at the account level.
This client provides operations to retrieve and configure the account properties
as well as list, create and delete containers within the account.
For operations relating to a specific container or blob, clients for those entities
can also be retrieved using the `get_client` functions.
:param str account_url:
The URL to the blob storage account. Any other entities included
in the URL path (e.g. container or blob) will be discarded. This URL can be optionally
authenticated with a SAS token.
:param credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token. The value can be a SAS token string, an account
shared access key, or an instance of a TokenCredentials class from azure.identity.
If the URL already has a SAS token, specifying an explicit credential will take priority.
:keyword str api_version:
The Storage API version to use for requests. Default value is '2019-07-07'.
Setting to an older version may result in reduced feature compatibility.
.. versionadded:: 12.2.0
:keyword str secondary_hostname:
The hostname of the secondary endpoint.
:keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks.
Defaults to 4*1024*1024, or 4MB.
:keyword int max_single_put_size: If the blob size is less than or equal max_single_put_size, then the blob will be
uploaded with only one http PUT request. If the blob size is larger than max_single_put_size,
the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB.
:keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient
algorithm when uploading a block blob. Defaults to 4*1024*1024+1.
:keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False.
:keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB.
:keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call,
the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB.
:keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024,
or 4MB.
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_authentication.py
:start-after: [START create_blob_service_client]
:end-before: [END create_blob_service_client]
:language: python
:dedent: 8
:caption: Creating the BlobServiceClient with account url and credential.
.. literalinclude:: ../samples/blob_samples_authentication.py
:start-after: [START create_blob_service_client_oauth]
:end-before: [END create_blob_service_client_oauth]
:language: python
:dedent: 8
:caption: Creating the BlobServiceClient with Azure Identity credentials.
"""
def __init__(
self, account_url, # type: str
credential=None, # type: Optional[Any]
**kwargs # type: Any
):
# type: (...) -> None
try:
if not account_url.lower().startswith('http'):
account_url = "https://" + account_url
except AttributeError:
raise ValueError("Account URL must be a string.")
parsed_url = urlparse(account_url.rstrip('/'))
if not parsed_url.netloc:
raise ValueError("Invalid URL: {}".format(account_url))
_, sas_token = parse_query(parsed_url.query)
self._query_str, credential = self._format_query_string(sas_token, credential)
super(BlobServiceClient, self).__init__(parsed_url, service='blob', credential=credential, **kwargs)
self._client = AzureBlobStorage(self.url, pipeline=self._pipeline)
self._client._config.version = get_api_version(kwargs, VERSION) # pylint: disable=protected-access
def _format_url(self, hostname):
"""Format the endpoint URL according to the current location
mode hostname.
"""
return "{}://{}/{}".format(self.scheme, hostname, self._query_str)
@classmethod
def from_connection_string(
cls, conn_str, # type: str
credential=None, # type: Optional[Any]
**kwargs # type: Any
): # type: (...) -> BlobServiceClient
"""Create BlobServiceClient from a Connection String.
:param str conn_str:
A connection string to an Azure Storage account.
:param credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token, or the connection string already has shared
access key values. The value can be a SAS token string, an account shared access
key, or an instance of a TokenCredentials class from azure.identity.
Credentials provided here will take precedence over those in the connection string.
:returns: A Blob service client.
:rtype: ~azure.storage.blob.BlobServiceClient
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_authentication.py
:start-after: [START auth_from_connection_string]
:end-before: [END auth_from_connection_string]
:language: python
:dedent: 8
:caption: Creating the BlobServiceClient from a connection string.
"""
account_url, secondary, credential = parse_connection_str(conn_str, credential, 'blob')
if 'secondary_hostname' not in kwargs:
kwargs['secondary_hostname'] = secondary
return cls(account_url, credential=credential, **kwargs)
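    # Minimal usage sketch (the connection string below is a placeholder):
    #
    #   conn_str = "DefaultEndpointsProtocol=https;AccountName=...;AccountKey=...;EndpointSuffix=core.windows.net"
    #   service = BlobServiceClient.from_connection_string(conn_str)
    #   container_names = [c.name for c in service.list_containers()]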
@distributed_trace
def get_user_delegation_key(self, key_start_time, # type: datetime
key_expiry_time, # type: datetime
**kwargs # type: Any
):
# type: (...) -> UserDelegationKey
"""
Obtain a user delegation key for the purpose of signing SAS tokens.
A token credential must be present on the service object for this request to succeed.
:param ~datetime.datetime key_start_time:
A DateTime value. Indicates when the key becomes valid.
:param ~datetime.datetime key_expiry_time:
A DateTime value. Indicates when the key stops being valid.
:keyword int timeout:
The timeout parameter is expressed in seconds.
:return: The user delegation key.
:rtype: ~azure.storage.blob.UserDelegationKey
"""
key_info = KeyInfo(start=_to_utc_datetime(key_start_time), expiry=_to_utc_datetime(key_expiry_time))
timeout = kwargs.pop('timeout', None)
try:
user_delegation_key = self._client.service.get_user_delegation_key(key_info=key_info,
timeout=timeout,
**kwargs) # type: ignore
except StorageErrorException as error:
process_storage_error(error)
return parse_to_internal_user_delegation_key(user_delegation_key) # type: ignore
@distributed_trace
def get_account_information(self, **kwargs):
# type: (Any) -> Dict[str, str]
"""Gets information related to the storage account.
The information can also be retrieved if the user has a SAS to a container or blob.
The keys in the returned dictionary include 'sku_name' and 'account_kind'.
:returns: A dict of account information (SKU and account type).
:rtype: dict(str, str)
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service.py
:start-after: [START get_blob_service_account_info]
:end-before: [END get_blob_service_account_info]
:language: python
:dedent: 8
:caption: Getting account information for the blob service.
"""
try:
return self._client.service.get_account_info(cls=return_response_headers, **kwargs) # type: ignore
except StorageErrorException as error:
process_storage_error(error)
@distributed_trace
def get_service_stats(self, **kwargs):
# type: (**Any) -> Dict[str, Any]
"""Retrieves statistics related to replication for the Blob service.
It is only available when read-access geo-redundant replication is enabled for
the storage account.
With geo-redundant replication, Azure Storage maintains your data durable
in two locations. In both locations, Azure Storage constantly maintains
multiple healthy replicas of your data. The location where you read,
create, update, or delete data is the primary storage account location.
The primary location exists in the region you choose at the time you
create an account via the Azure Management Azure classic portal, for
example, North Central US. The location to which your data is replicated
is the secondary location. The secondary location is automatically
determined based on the location of the primary; it is in a second data
center that resides in the same region as the primary location. Read-only
access is available from the secondary location, if read-access geo-redundant
replication is enabled for your storage account.
:keyword int timeout:
The timeout parameter is expressed in seconds.
:return: The blob service stats.
:rtype: Dict[str, Any]
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service.py
:start-after: [START get_blob_service_stats]
:end-before: [END get_blob_service_stats]
:language: python
:dedent: 8
:caption: Getting service stats for the blob service.
"""
timeout = kwargs.pop('timeout', None)
try:
stats = self._client.service.get_statistics( # type: ignore
timeout=timeout, use_location=LocationMode.SECONDARY, **kwargs)
return service_stats_deserialize(stats)
except StorageErrorException as error:
process_storage_error(error)
@distributed_trace
def get_service_properties(self, **kwargs):
# type: (Any) -> Dict[str, Any]
"""Gets the properties of a storage account's Blob service, including
Azure Storage Analytics.
:keyword int timeout:
The timeout parameter is expressed in seconds.
:returns: An object containing blob service properties such as
analytics logging, hour/minute metrics, cors rules, etc.
:rtype: Dict[str, Any]
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service.py
:start-after: [START get_blob_service_properties]
:end-before: [END get_blob_service_properties]
:language: python
:dedent: 8
:caption: Getting service properties for the blob service.
"""
timeout = kwargs.pop('timeout', None)
try:
service_props = self._client.service.get_properties(timeout=timeout, **kwargs)
return service_properties_deserialize(service_props)
except StorageErrorException as error:
process_storage_error(error)
@distributed_trace
def set_service_properties(
self, analytics_logging=None, # type: Optional[BlobAnalyticsLogging]
hour_metrics=None, # type: Optional[Metrics]
minute_metrics=None, # type: Optional[Metrics]
cors=None, # type: Optional[List[CorsRule]]
target_version=None, # type: Optional[str]
delete_retention_policy=None, # type: Optional[RetentionPolicy]
static_website=None, # type: Optional[StaticWebsite]
**kwargs
):
# type: (...) -> None
"""Sets the properties of a storage account's Blob service, including
Azure Storage Analytics.
If an element (e.g. analytics_logging) is left as None, the
existing settings on the service for that functionality are preserved.
:param analytics_logging:
Groups the Azure Analytics Logging settings.
:type analytics_logging: ~azure.storage.blob.BlobAnalyticsLogging
:param hour_metrics:
The hour metrics settings provide a summary of request
statistics grouped by API in hourly aggregates for blobs.
:type hour_metrics: ~azure.storage.blob.Metrics
:param minute_metrics:
The minute metrics settings provide request statistics
for each minute for blobs.
:type minute_metrics: ~azure.storage.blob.Metrics
:param cors:
You can include up to five CorsRule elements in the
list. If an empty list is specified, all CORS rules will be deleted,
and CORS will be disabled for the service.
:type cors: list[~azure.storage.blob.CorsRule]
:param str target_version:
Indicates the default version to use for requests if an incoming
request's version is not specified.
:param delete_retention_policy:
The delete retention policy specifies whether to retain deleted blobs.
It also specifies the number of days and versions of blob to keep.
:type delete_retention_policy: ~azure.storage.blob.RetentionPolicy
:param static_website:
Specifies whether the static website feature is enabled,
and if yes, indicates the index document and 404 error document to use.
:type static_website: ~azure.storage.blob.StaticWebsite
:keyword int timeout:
The timeout parameter is expressed in seconds.
:rtype: None
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service.py
:start-after: [START set_blob_service_properties]
:end-before: [END set_blob_service_properties]
:language: python
:dedent: 8
:caption: Setting service properties for the blob service.
"""
props = StorageServiceProperties(
logging=analytics_logging,
hour_metrics=hour_metrics,
minute_metrics=minute_metrics,
cors=cors,
default_service_version=target_version,
delete_retention_policy=delete_retention_policy,
static_website=static_website
)
timeout = kwargs.pop('timeout', None)
try:
self._client.service.set_properties(props, timeout=timeout, **kwargs)
except StorageErrorException as error:
process_storage_error(error)
@distributed_trace
def list_containers(
self, name_starts_with=None, # type: Optional[str]
include_metadata=False, # type: Optional[bool]
**kwargs
):
# type: (...) -> ItemPaged[ContainerProperties]
"""Returns a generator to list the containers under the specified account.
The generator will lazily follow the continuation tokens returned by
the service and stop when all containers have been returned.
:param str name_starts_with:
Filters the results to return only containers whose names
begin with the specified prefix.
        :param bool include_metadata:
            Specifies that container metadata be returned in the response.
            The default value is `False`.
        :keyword bool include_deleted:
            Specifies that deleted containers be returned in the response. This is
            for container-restore-enabled accounts. The default value is `False`.
.. versionadded:: 12.4.0
:keyword int results_per_page:
The maximum number of container names to retrieve per API
call. If the request does not specify the server will return up to 5,000 items.
:keyword int timeout:
The timeout parameter is expressed in seconds.
:returns: An iterable (auto-paging) of ContainerProperties.
:rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.ContainerProperties]
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service.py
:start-after: [START bsc_list_containers]
:end-before: [END bsc_list_containers]
:language: python
:dedent: 12
:caption: Listing the containers in the blob service.
"""
include = ['metadata'] if include_metadata else []
include_deleted = kwargs.pop('include_deleted', None)
if include_deleted:
include.append("deleted")
timeout = kwargs.pop('timeout', None)
results_per_page = kwargs.pop('results_per_page', None)
command = functools.partial(
self._client.service.list_containers_segment,
prefix=name_starts_with,
include=include,
timeout=timeout,
**kwargs)
return ItemPaged(
command,
prefix=name_starts_with,
results_per_page=results_per_page,
page_iterator_class=ContainerPropertiesPaged
)
@distributed_trace
def find_blobs_by_tags(self, filter_expression, **kwargs):
# type: (str, **Any) -> ItemPaged[FilteredBlob]
"""The Filter Blobs operation enables callers to list blobs across all
containers whose tags match a given search expression. Filter blobs
searches across all containers within a storage account but can be
scoped within the expression to a single container.
:param str filter_expression:
The expression to find blobs whose tags matches the specified condition.
eg. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'"
To specify a container, eg. "@container='containerName' and \"Name\"='C'"
:keyword int results_per_page:
The max result per page when paginating.
:keyword int timeout:
The timeout parameter is expressed in seconds.
        :returns: An iterable (auto-paging) response of FilteredBlob.
:rtype: ~azure.core.paging.ItemPaged[~azure.storage.blob.FilteredBlob]
"""
results_per_page = kwargs.pop('results_per_page', None)
timeout = kwargs.pop('timeout', None)
command = functools.partial(
self._client.service.filter_blobs,
where=filter_expression,
timeout=timeout,
**kwargs)
return ItemPaged(
command, results_per_page=results_per_page,
page_iterator_class=FilteredBlobPaged)
@distributed_trace
def create_container(
self, name, # type: str
metadata=None, # type: Optional[Dict[str, str]]
public_access=None, # type: Optional[Union[PublicAccess, str]]
**kwargs
):
# type: (...) -> ContainerClient
"""Creates a new container under the specified account.
If the container with the same name already exists, a ResourceExistsError will
be raised. This method returns a client with which to interact with the newly
created container.
:param str name: The name of the container to create.
:param metadata:
A dict with name-value pairs to associate with the
container as metadata. Example: `{'Category':'test'}`
:type metadata: dict(str, str)
:param public_access:
Possible values include: 'container', 'blob'.
:type public_access: str or ~azure.storage.blob.PublicAccess
:keyword container_encryption_scope:
Specifies the default encryption scope to set on the container and use for
all future writes.
.. versionadded:: 12.2.0
:paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope
:keyword int timeout:
The timeout parameter is expressed in seconds.
:rtype: ~azure.storage.blob.ContainerClient
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service.py
:start-after: [START bsc_create_container]
:end-before: [END bsc_create_container]
:language: python
:dedent: 12
:caption: Creating a container in the blob service.
"""
container = self.get_container_client(name)
kwargs.setdefault('merge_span', True)
timeout = kwargs.pop('timeout', None)
container.create_container(
metadata=metadata, public_access=public_access, timeout=timeout, **kwargs)
return container
@distributed_trace
def delete_container(
self, container, # type: Union[ContainerProperties, str]
lease=None, # type: Optional[Union[BlobLeaseClient, str]]
**kwargs
):
# type: (...) -> None
"""Marks the specified container for deletion.
The container and any blobs contained within it are later deleted during garbage collection.
If the container is not found, a ResourceNotFoundError will be raised.
:param container:
The container to delete. This can either be the name of the container,
or an instance of ContainerProperties.
:type container: str or ~azure.storage.blob.ContainerProperties
:param lease:
If specified, delete_container only succeeds if the
container's lease is active and matches this ID.
Required if the container has an active lease.
:paramtype lease: ~azure.storage.blob.BlobLeaseClient or str
:keyword ~datetime.datetime if_modified_since:
A DateTime value. Azure expects the date value passed in to be UTC.
If timezone is included, any non-UTC datetimes will be converted to UTC.
If a date is passed in without timezone info, it is assumed to be UTC.
Specify this header to perform the operation only
if the resource has been modified since the specified time.
:keyword ~datetime.datetime if_unmodified_since:
A DateTime value. Azure expects the date value passed in to be UTC.
If timezone is included, any non-UTC datetimes will be converted to UTC.
If a date is passed in without timezone info, it is assumed to be UTC.
Specify this header to perform the operation only if
the resource has not been modified since the specified date/time.
:keyword str etag:
An ETag value, or the wildcard character (*). Used to check if the resource has changed,
and act according to the condition specified by the `match_condition` parameter.
:keyword ~azure.core.MatchConditions match_condition:
The match condition to use upon the etag.
:keyword int timeout:
The timeout parameter is expressed in seconds.
:rtype: None
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service.py
:start-after: [START bsc_delete_container]
:end-before: [END bsc_delete_container]
:language: python
:dedent: 12
:caption: Deleting a container in the blob service.
"""
container = self.get_container_client(container) # type: ignore
kwargs.setdefault('merge_span', True)
timeout = kwargs.pop('timeout', None)
container.delete_container( # type: ignore
lease=lease,
timeout=timeout,
**kwargs)
@distributed_trace
def undelete_container(self, deleted_container_name, deleted_container_version, **kwargs):
        # type: (str, str, **Any) -> ContainerClient
"""Restores soft-deleted container.
Operation will only be successful if used within the specified number of days
set in the delete retention policy.
.. versionadded:: 12.4.0
This operation was introduced in API version '2019-12-12'.
:param str deleted_container_name:
Specifies the name of the deleted container to restore.
:param str deleted_container_version:
Specifies the version of the deleted container to restore.
:keyword str new_name:
The new name for the deleted container to be restored to.
If not specified deleted_container_name will be used as the restored container name.
:keyword int timeout:
The timeout parameter is expressed in seconds.
:rtype: ~azure.storage.blob.ContainerClient
"""
new_name = kwargs.pop('new_name', None)
container = self.get_container_client(new_name or deleted_container_name)
try:
container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable = protected-access
deleted_container_version=deleted_container_version,
timeout=kwargs.pop('timeout', None), **kwargs)
return container
except StorageErrorException as error:
process_storage_error(error)
def get_container_client(self, container):
# type: (Union[ContainerProperties, str]) -> ContainerClient
"""Get a client to interact with the specified container.
The container need not already exist.
:param container:
The container. This can either be the name of the container,
or an instance of ContainerProperties.
:type container: str or ~azure.storage.blob.ContainerProperties
:returns: A ContainerClient.
:rtype: ~azure.storage.blob.ContainerClient
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service.py
:start-after: [START bsc_get_container_client]
:end-before: [END bsc_get_container_client]
:language: python
:dedent: 8
:caption: Getting the container client to interact with a specific container.
"""
try:
container_name = container.name
except AttributeError:
container_name = container
_pipeline = Pipeline(
transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access
policies=self._pipeline._impl_policies # pylint: disable = protected-access
)
return ContainerClient(
self.url, container_name=container_name,
credential=self.credential, api_version=self.api_version, _configuration=self._config,
_pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts,
require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key,
key_resolver_function=self.key_resolver_function)
def get_blob_client(
self, container, # type: Union[ContainerProperties, str]
blob, # type: Union[BlobProperties, str]
snapshot=None # type: Optional[Union[Dict[str, Any], str]]
):
# type: (...) -> BlobClient
"""Get a client to interact with the specified blob.
The blob need not already exist.
:param container:
The container that the blob is in. This can either be the name of the container,
or an instance of ContainerProperties.
:type container: str or ~azure.storage.blob.ContainerProperties
:param blob:
The blob with which to interact. This can either be the name of the blob,
or an instance of BlobProperties.
:type blob: str or ~azure.storage.blob.BlobProperties
:param snapshot:
The optional blob snapshot on which to operate. This can either be the ID of the snapshot,
or a dictionary output returned by :func:`~azure.storage.blob.BlobClient.create_snapshot()`.
:type snapshot: str or dict(str, Any)
:returns: A BlobClient.
:rtype: ~azure.storage.blob.BlobClient
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service.py
:start-after: [START bsc_get_blob_client]
:end-before: [END bsc_get_blob_client]
:language: python
:dedent: 12
:caption: Getting the blob client to interact with a specific blob.
"""
try:
container_name = container.name
except AttributeError:
container_name = container
try:
blob_name = blob.name
except AttributeError:
blob_name = blob
_pipeline = Pipeline(
transport=TransportWrapper(self._pipeline._transport), # pylint: disable = protected-access
policies=self._pipeline._impl_policies # pylint: disable = protected-access
)
return BlobClient( # type: ignore
self.url, container_name=container_name, blob_name=blob_name, snapshot=snapshot,
credential=self.credential, api_version=self.api_version, _configuration=self._config,
_pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts,
require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key,
key_resolver_function=self.key_resolver_function)
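# End-to-end sketch of the client hierarchy (illustrative; the account URL,
# credential and names below are placeholders, not values from this module):
#
#   service = BlobServiceClient("https://myaccount.blob.core.windows.net", credential=credential)
#   container = service.get_container_client("my-container")
#   blob = service.get_blob_client("my-container", "my-blob.txt")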

# ============================================================================
# File: external/webkit/Tools/Scripts/webkitpy/layout_tests/port/test_files.py
# Repo: ghsecuritylab/android_platform_sony_nicki (multiple permissive/copyleft
#       licenses: BSD, LGPL, GPL, Apache-2.0)
# ============================================================================
#!/usr/bin/env python
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This module is used to find all of the layout test files used by
run-webkit-tests. It exposes one public function - find() -
which takes an optional list of paths. If a list is passed in, the returned
list of test files is constrained to those found under the paths passed in,
i.e. calling find(["LayoutTests/fast"]) will only return files
under that directory."""
import time
from webkitpy.common.system import logutils
_log = logutils.get_logger(__file__)
# When collecting test cases, we include any file with these extensions.
_supported_file_extensions = set(['.html', '.shtml', '.xml', '.xhtml', '.xhtmlmp', '.pl',
'.php', '.svg'])
# When collecting test cases, skip these directories
_skipped_directories = set(['.svn', '_svn', 'resources', 'script-tests'])
def find(port, paths=None):
"""Finds the set of tests under a given list of sub-paths.
Args:
paths: a list of path expressions relative to port.layout_tests_dir()
to search. Glob patterns are ok, as are path expressions with
forward slashes on Windows. If paths is empty, we look at
everything under the layout_tests_dir().
"""
paths = paths or ['*']
filesystem = port._filesystem
return normalized_find(filesystem, normalize(filesystem, port.layout_tests_dir(), paths))
def normalize(filesystem, base_dir, paths):
return [filesystem.normpath(filesystem.join(base_dir, path)) for path in paths]
def normalized_find(filesystem, paths):
"""Finds the set of tests under the list of paths.
Args:
paths: a list of absolute path expressions to search.
Glob patterns are ok.
"""
gather_start_time = time.time()
paths_to_walk = set()
for path in paths:
# If there's an * in the name, assume it's a glob pattern.
if path.find('*') > -1:
filenames = filesystem.glob(path)
paths_to_walk.update(filenames)
else:
paths_to_walk.add(path)
# FIXME: I'm not sure there's much point in this being a set. A list would
# probably be faster.
test_files = set()
for path in paths_to_walk:
files = filesystem.files_under(path, _skipped_directories, _is_test_file)
test_files.update(set(files))
gather_time = time.time() - gather_start_time
_log.debug("Test gathering took %f seconds" % gather_time)
return test_files
def _has_supported_extension(filesystem, filename):
"""Return true if filename is one of the file extensions we want to run a
test on."""
extension = filesystem.splitext(filename)[1]
return extension in _supported_file_extensions
def is_reference_html_file(filename):
"""Return true if the filename points to a reference HTML file."""
if (filename.endswith('-expected.html') or
filename.endswith('-expected-mismatch.html')):
return True
return False
def _is_test_file(filesystem, dirname, filename):
"""Return true if the filename points to a test file."""
return (_has_supported_extension(filesystem, filename) and
not is_reference_html_file(filename))
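# Usage sketch (hypothetical `port` object; any implementation exposing
# layout_tests_dir() and a _filesystem attribute would do):
#
#   tests = find(port, paths=['fast/css'])  # tests under fast/css only
#   tests = find(port)                      # every test in the tree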

# ============================================================================
# File: problem/2003/00_200316/3143_가장빠른문자열타이핑-1.py
# Repo: do-park/swexpertacademy (no license)
# ============================================================================
for tc in range(1, int(input()) + 1):
A, B = input().split()
t = A.count(B)
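    # each occurrence of B costs one keystroke instead of len(B) characters,
    # so the minimum number of keypresses is len(A) - t*len(B) + t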
    print(f'#{tc} {len(A) - t * len(B) + t}')

# ============================================================================
# File: torch_server.py
# Repo: ShenDezhou/EXLNet (no license)
# ============================================================================
import argparse
import itertools
import logging
import os
import time
from types import SimpleNamespace
import falcon
import pandas
import torch
from falcon_cors import CORS
import waitress
import numpy as np
import json
import re
from torch.utils.data import DataLoader
from data import Data
from evaluate import evaluate, handy_tool, calculate_accuracy_f1
from model import RnnForSentencePairClassification, BertYForClassification, NERNet,NERWNet
from utils import load_torch_model
MODEL_MAP = {
'bert': BertYForClassification,
'rnn': NERNet,
'rnnkv': NERWNet
}
logging.basicConfig(level=logging.INFO, format='%(asctime)-18s %(message)s')
logger = logging.getLogger()
cors_allow_all = CORS(allow_all_origins=True,
allow_origins_list=['*'],
allow_all_headers=True,
allow_all_methods=True,
allow_credentials_all_origins=True
)
parser = argparse.ArgumentParser()
parser.add_argument(
'-p', '--port', default=58081,
help='falcon server port')
parser.add_argument(
'-c', '--config_file', default='config/rnn_config.json',
help='model config file')
args = parser.parse_args()
model_config=args.config_file
def result_to_json(string, tags):
item = {"string": string, "entities": []}
entity_name = ""
entity_start = 0
idx = 0
i = -1
zipped = zip(string, tags)
listzip = list(zipped)
last = len(listzip)
for char, tag in listzip:
i += 1
if tag == 3:
item["entities"].append({"word": char, "start": idx, "end": idx+1, "type":'s'})
elif tag == 0:
entity_name += char
entity_start = idx
elif tag == 1:
if (entity_name != "") and (i == last):
entity_name += char
item["entities"].append({"word": entity_name, "start": entity_start, "end": idx + 1, "type": 'bms'})
entity_name = ""
else:
entity_name += char
elif tag == 2: # or i == len(zipped)
entity_name += char
item["entities"].append({"word": entity_name, "start": entity_start, "end": idx + 1, "type": 'bms'})
entity_name = ""
else:
entity_name = ""
entity_start = idx
idx += 1
return item
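# Illustration of result_to_json (tags follow a BMES-style scheme here:
# 0 = begin, 1 = middle, 2 = end, 3 = single; the input values are made up):
#
#   result_to_json("abcde", [0, 1, 1, 1, 2])
#   # -> {"string": "abcde",
#   #     "entities": [{"word": "abcde", "start": 0, "end": 5, "type": "bms"}]}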
class TorchResource:
def __init__(self):
logger.info("...")
# 0. Load config
with open(model_config) as fin:
self.config = json.load(fin, object_hook=lambda d: SimpleNamespace(**d))
if torch.cuda.is_available():
self.device = torch.device('cuda')
else:
self.device = torch.device('cpu')
# 1. Load data
self.data = Data(vocab_file=os.path.join(self.config.model_path, 'vocab.txt'),
max_seq_len=self.config.max_seq_len,
model_type=self.config.model_type, config=self.config)
# 2. Load model
self.model = MODEL_MAP[self.config.model_type](self.config)
self.model = load_torch_model(
self.model, model_path=os.path.join(self.config.model_path, 'model.bin'))
self.model.to(self.device)
logger.info("###")
def flatten(self, ll):
return list(itertools.chain(*ll))
def cleanall(self, content):
return content.replace(" ", "", 10**10)
def split(self, content):
line = re.findall('(.*?(?:[\n ]|.$))', content)
sublines = []
for l in line:
if len(l) > self.config.max_seq_len:
ll = re.findall('(.*?(?:[。,]|.$))', l)
sublines.extend(ll)
else:
sublines.append(l)
sublines = [l for l in sublines if len(l.strip())> 0]
return sublines
def bert_classification(self, content):
logger.info('1:{}'.format( content))
lines = self.split(content)
rows = []
for line in lines:
rows.append( {'content': line})
df = pandas.DataFrame(rows)
filename = "data/{}.csv".format(time.time())
df.to_csv(filename, index=False, columns=['content'])
test_set, sc_list, label_list, row_list = self.data.load_file(filename, train=False)
# token_list = []
# for line in sc_list:
# tokens = self.data.tokenizer.convert_ids_to_tokens(line)
# token_list.append(tokens)
data_loader_test = DataLoader(
test_set, batch_size=self.config.batch_size, shuffle=False)
# Evaluate
answer_list, length_list = evaluate(self.model, data_loader_test, self.device, isTest=True)
mod_tokens_list = handy_tool(row_list, length_list)
result = [result_to_json(t, s) for t, s in zip(mod_tokens_list, answer_list)]
entities = [item['entities'] for item in result]
entities = self.flatten(entities)
return {"data": entities}
def on_get(self, req, resp):
logger.info("...")
resp.set_header('Access-Control-Allow-Origin', '*')
resp.set_header('Access-Control-Allow-Methods', '*')
resp.set_header('Access-Control-Allow-Headers', '*')
resp.set_header('Access-Control-Allow-Credentials','true')
content = req.get_param('text', True)
# clean_content =
#clean_content = self.cleanall(content)
resp.media = self.bert_classification(content)
logger.info("###")
def on_post(self, req, resp):
"""Handles POST requests"""
resp.set_header('Access-Control-Allow-Origin', '*')
resp.set_header('Access-Control-Allow-Methods', '*')
resp.set_header('Access-Control-Allow-Headers', '*')
resp.set_header('Access-Control-Allow-Credentials', 'true')
resp.set_header("Cache-Control", "no-cache")
data = req.stream.read(req.content_length)
data = data.decode('utf-8')
# regex = re.compile(r'\\(?![/u"])')
# data = regex.sub(r"\\", data)
jsondata = json.loads(data)
# clean_title = shortenlines(jsondata['1'])
# clean_content = cleanall(jsondata['2'])
content = jsondata['text']
# clean_content = self.cleanall(content)
resp.media = self.bert_classification(content)
logger.info("###")
if __name__=="__main__":
api = falcon.API(middleware=[cors_allow_all.middleware])
api.req_options.auto_parse_form_urlencoded = True
api.add_route('/z', TorchResource())
waitress.serve(api, port=args.port, threads=48, url_scheme='http')
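# Usage sketch (assuming the default port and the /z route defined above):
#   curl "http://localhost:58081/z?text=..."
#   curl -X POST http://localhost:58081/z -d '{"text": "..."}'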

# ============================================================================
# File: apps/metrics/v1/urls.py
# Repo: faierbol/syncano-platform (no license)
# ============================================================================
# coding=UTF8
from django.urls import path
from rest_framework.routers import SimpleRouter
from apps.metrics import views
router = SimpleRouter()
router.register('hourly', views.HourlyStatsViewSet, base_name='hour-aggregate')
router.register('daily', views.DailyStatsViewSet, base_name='day-aggregate')
urlpatterns = [
path('', views.StatsLinksView.as_view(), name='stats'),
] + router.urls
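# The router registrations above expose the standard DRF list/detail routes
# under hourly/ and daily/ (e.g. GET /hourly/ resolves to hour-aggregate-list),
# alongside the bare stats index view.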

# ============================================================================
# File: dev/_import.py
# Repo: ossdev07/asn1crypto (MIT)
# ============================================================================
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import imp
import sys
import os
from . import build_root, package_name, package_root
if sys.version_info < (3,):
getcwd = os.getcwdu
else:
getcwd = os.getcwd
def _import_from(mod, path, mod_dir=None, allow_error=False):
"""
Imports a module from a specific path
:param mod:
A unicode string of the module name
:param path:
A unicode string to the directory containing the module
:param mod_dir:
If the sub directory of "path" is different than the "mod" name,
pass the sub directory as a unicode string
:param allow_error:
If an ImportError should be raised when the module can't be imported
:return:
None if not loaded, otherwise the module
"""
if mod_dir is None:
mod_dir = mod.replace('.', os.sep)
if not os.path.exists(path):
return None
if not os.path.exists(os.path.join(path, mod_dir)) \
and not os.path.exists(os.path.join(path, mod_dir + '.py')):
return None
if os.sep in mod_dir:
append, mod_dir = mod_dir.rsplit(os.sep, 1)
path = os.path.join(path, append)
try:
mod_info = imp.find_module(mod_dir, [path])
return imp.load_module(mod, *mod_info)
except ImportError:
if allow_error:
raise
return None
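# Usage sketch (hypothetical paths, shown for illustration only):
#
#   mod = _import_from('oscrypto', '/path/to/checkout')        # None if absent
#   tests = _import_from('oscrypto_tests', '/path', 'tests')   # custom mod_dir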
def _preload(require_oscrypto, print_info):
"""
Preloads asn1crypto and optionally oscrypto from a local source checkout,
or from a normal install
:param require_oscrypto:
A bool if oscrypto needs to be preloaded
:param print_info:
A bool if info about asn1crypto and oscrypto should be printed
"""
if print_info:
print('Working dir: ' + getcwd())
print('Python ' + sys.version.replace('\n', ''))
asn1crypto = None
oscrypto = None
if require_oscrypto:
# Some CI services don't use the package name for the dir
if package_name == 'oscrypto':
oscrypto_dir = package_root
else:
oscrypto_dir = os.path.join(build_root, 'oscrypto')
oscrypto_tests = None
if os.path.exists(oscrypto_dir):
oscrypto_tests = _import_from('oscrypto_tests', oscrypto_dir, 'tests')
if oscrypto_tests is None:
import oscrypto_tests
asn1crypto, oscrypto = oscrypto_tests.local_oscrypto()
else:
if package_name == 'asn1crypto':
asn1crypto_dir = package_root
else:
asn1crypto_dir = os.path.join(build_root, 'asn1crypto')
if os.path.exists(asn1crypto_dir):
asn1crypto = _import_from('asn1crypto', asn1crypto_dir)
if asn1crypto is None:
import asn1crypto
if print_info:
print(
'\nasn1crypto: %s, %s' % (
asn1crypto.__version__,
os.path.dirname(asn1crypto.__file__)
)
)
if require_oscrypto:
print(
'oscrypto: %s backend, %s, %s' % (
oscrypto.backend(),
oscrypto.__version__,
os.path.dirname(oscrypto.__file__)
)
)

# ============================================================================
# File: Binary Tree/binary_tree_to_doubly_link_list.py
# Repo: Akasurde/Algorithms (no license)
# ============================================================================
# -*- coding: UTF-8 -*-
# Convert a given Binary Tree to Doubly Linked List
import binary_tree
def convert_to_doubly_linked_list(root):
if not root:
return
if root.left:
left = convert_to_doubly_linked_list(root.left)
while left.right:
left = left.right
left.right = root
root.left = left
if root.right:
right = convert_to_doubly_linked_list(root.right)
while right.left:
right = right.left
root.right = right
right.left = root
return root
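# Worked example (tree shape assumed for illustration): for the tree
#        10
#       /  \
#      5    15
# the in-order linking yields the doubly linked list 5 <-> 10 <-> 15, with
# each node's `left` pointing to its predecessor and `right` to its successor.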
def print_list(root):
if not root:
return
while root.left:
root = root.left
head = root
while head:
print head.data
head = head.right
tree = binary_tree.construct_binary_tree()
root = convert_to_doubly_linked_list(tree.root)
print_list(root)

# ============================================================================
# File: backend/todoapp/migrations/0001_initial.py
# Repo: mahidulmoon/djreact-ExpenceTracker (no license)
# ============================================================================
# Generated by Django 3.0.6 on 2020-06-02 05:59
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Todoapp',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.TextField()),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
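# The operations above create a table roughly equivalent to this model sketch
# (for illustration only; the real model lives in the app's models.py):
#
#   class Todoapp(models.Model):
#       title = models.TextField()
#       user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)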

# ============================================================================
# File: test/tw-1630.t
# Repo: bjornreppen/task (MIT)
# ============================================================================
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright 2006 - 2015, Paul Beckingham, Federico Hernandez.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# http://www.opensource.org/licenses/mit-license.php
#
###############################################################################
import sys
import os
import unittest
# Ensure python finds the local simpletap module
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from basetest import Task, TestCase
class TestBug1630(TestCase):
def setUp(self):
"""Executed before each test in the class"""
self.t = Task()
self.t("add zero")
self.t("add one due:7d")
self.t("add two due:10d")
def test_attribute_modifier_with_duration(self):
"""Verify that 'due.before:9d' is correctly interpreted"""
code, out, err = self.t("due.before:9d list rc.verbose:nothing")
self.assertNotIn("zero", out)
self.assertIn("one", out)
self.assertNotIn("two", out)
def test_attribute_no_modifier_with_duration(self):
"""Verify that 'due:7d' is correctly interpreted"""
code, out, err = self.t("due:7d list rc.verbose:nothing")
self.assertNotIn("zero", out)
self.assertIn("one", out)
self.assertNotIn("two", out)
if __name__ == "__main__":
from simpletap import TAPTestRunner
unittest.main(testRunner=TAPTestRunner())
# vim: ai sts=4 et sw=4 ft=python

# ============================================================================
# File: predictions_lstm.py
# Repo: codedecde/BiLSTM-CCM (MIT)
# ============================================================================
import lstm_model as lm
import keras
from keras.layers import Input, GRU, Embedding, Dense, Dropout, concatenate
from keras.models import Model
from keras.layers.wrappers import Bidirectional
from keras.callbacks import *
import keras.backend as K
from keras.optimizers import Adam
import numpy as np
import sys
from keras.utils import Progbar
import os
from keras.preprocessing.sequence import pad_sequences
from keras.utils import to_categorical
import cPickle as cp
import pdb
import subprocess
def get_weights(model, idx, options):
BASE_DIR = '/home/bass/DataDir/BTPData/Keras_Models/'
MODEL_DIR = BASE_DIR + 'MODEL_' + str(idx) + '/'
MAX_VAL_ACC = -1
best_model = ''
for filename in os.listdir(MODEL_DIR):
if not filename.startswith('weights'):
continue
val_acc = int(filename.split('.')[2])
if val_acc >= MAX_VAL_ACC:
MAX_VAL_ACC = val_acc
best_model = filename
assert best_model != ''
# print 'LOADING FOR IDX', idx, ' FROM FILE', MODEL_DIR + best_model
sys.stdout.flush()
model.load_weights(MODEL_DIR + best_model)
return model
def get_ccm_seq(filename, len_of_seq, dummy_set):
f = open(filename)
problem_line = f.readline()
num_rows = int(f.readline().strip().split()[1])
num_cols = int(f.readline().strip().split()[1])
non_zeros = int(f.readline().strip().split()[1])
status = f.readline()
Objective = f.readline().strip().split()[1]
blank_line = f.readline()
column_line = f.readline()
dash_line = f.readline()
temp = ''
ii = 0
# ---- Reach the table with the variable and values --- #
while(1):
temp = f.readline()
ii = int(filter(lambda x: x != '',temp.strip().split())[0])
if(ii == num_rows):
break
blank_line = f.readline()
column_line = f.readline()
dash_line = f.readline()
all_data = [None for idx in xrange(num_cols)] # Each col is a variable. This stores (var_name, var_value)
curr_col = 0
ii = 0
counter = 1
var_name = ''
index = 0
var_value = 0
# ---- Parse the table with the variable and values
while(1):
line = f.readline().strip().split('*')
if(len(line) == 2):
if(line[0] == ''):
var_value = int(filter(lambda x: x != '',line[1].split())[0])
all_data[index - 1] = (var_name,var_value)
else:
x = line[0].split()
index = int(x[0])
var_name = x[1]
var_value = int(filter(lambda x: x != '',line[1].split())[0])
all_data[index - 1] = (var_name,var_value)
elif(len(line) == 1):
x = line[0].split()
index = int(x[0])
var_name = x[1]
if(all_data[num_cols-1] is None):
continue
else:
break
# ---- Generate the label sequence ---------- #
pos_value = filter(lambda x: x[1] == 1, all_data) # All the variables with value 1
tag_seq = [None for ix in xrange(len_of_seq)]
for (var_name, _) in pos_value:
if var_name in dummy_set:
# this is a dummy
continue
var_name = var_name.split('_')
ix = int(var_name[-1])
assert tag_seq[ix] is None
if ix == 0:
tag_seq[ix] = var_name[1]
else:
assert tag_seq[ix-1] is not None and tag_seq[ix-1] == var_name[0]
tag_seq[ix] = var_name[1]
assert all([w is not None for w in tag_seq])
return tag_seq
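# Note: the parser above appears to assume the plain-text report layout that
# `glpsol -o <file>` writes: a problem header, a row table, then a column
# table in which binary variables carry a `*` marker between name and value.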
def ccm_inference(predictions, rho, options):
# 1.1 Generate the CCM
START_TAG = "START"
weights = []
for ix, pred in enumerate(predictions):
weights.append({})
if ix == 0:
src = START_TAG
for dst in options['CLASSES_2_IX']:
weights[ix]["{}_{}".format(src,dst)] = pred[options['CLASSES_2_IX'][dst]]
else:
for src in options['CLASSES_2_IX']:
for dst in options['CLASSES_2_IX']:
weights[ix]["{}_{}".format(src,dst)] = pred[options['CLASSES_2_IX'][dst]]
ccm_writebuf = 'maximize\n'
# Objective function
objective_function = ''
for ix in xrange(len(weights)):
if ix == 0:
src = START_TAG
for dest in options['CLASSES_2_IX']:
weight = weights[ix]["{}_{}".format(src, dest)]
token = str(abs(weight)) + src + '_' + dest + '_' + str(ix)
if weight >= 0.:
objective_function = token if objective_function == '' else objective_function + ' + ' + token
else:
objective_function += ' - ' + token
else:
for src in options['CLASSES_2_IX']:
for dest in options['CLASSES_2_IX']:
weight = weights[ix]["{}_{}".format(src, dest)]
token = str(abs(weight)) + src + '_' + dest + '_' + str(ix)
if weight >= 0.:
objective_function += token if objective_function == '' else ' + ' + token
else:
objective_function += ' - ' + token
# ---- The Attribute constraint (soft) --- #
if rho is not None:
objective_function += ' - ' + str(rho) + 'D1'
dummy_set = set(['D1']) # A set of Dummy variables used to implement soft constraints
else:
dummy_set = set([])
ccm_writebuf += objective_function
#----- Now the constraints --- #
ccm_writebuf += '\nsubject to\n'
# ---- consistency for y_0 --- #
constraints = ''
for tag in options['CLASSES_2_IX']:
token = START_TAG + '_' + tag + '_' + str(0)
constraints += token if constraints == '' else ' + ' + token
constraints += ' = 1\n'
ccm_writebuf += constraints
# ---- consistency between y_0 and y_1 -- #
for src in options['CLASSES_2_IX']:
constraints = START_TAG + '_' + src + '_' + str(0)
for dest in options['CLASSES_2_IX']:
token = src + '_' + dest + '_' + str(1)
constraints += ' - ' + token
constraints += ' = 0\n'
ccm_writebuf += constraints
# ---- consistency between y_i and y_(i+1) -#
for ix in xrange(1,len(weights)-1):
for common_tag in options['CLASSES_2_IX']:
constraints = ''
for src in options['CLASSES_2_IX']:
token = src + '_' + common_tag + '_' + str(ix)
constraints += token if constraints == '' else ' + ' + token
for dest in options['CLASSES_2_IX']:
token = common_tag + '_' + dest + '_' + str(ix + 1)
constraints += ' - ' + token
constraints += ' = 0\n'
ccm_writebuf += constraints
# ---- TYPE Constraint : There has to be at least one type -------- #
constraints = START_TAG + '_' + 'type' + '_' + str(0)
for ii in xrange(1,len(weights)):
for src in options['CLASSES_2_IX']:
token = src + '_' + 'type' + '_' + str(ii)
constraints += ' + ' + token
constraints += ' > 1\n'
ccm_writebuf += constraints
    # --- ATTR Constraint : There has to be at least one attr (soft) -- #
    # The dummy variable D1 only exists in the objective when rho is set,
    # so the soft constraint is emitted only in that case.
    if rho is not None:
        constraints = START_TAG + '_' + 'attr' + '_' + str(0)
        for ii in xrange(1, len(weights)):
            for src in options['CLASSES_2_IX']:
                token = src + '_' + 'attr' + '_' + str(ii)
                constraints += ' + ' + token
        constraints += ' + D1'
        constraints += ' > 1\n'
        ccm_writebuf += constraints
# --- Declare all variables as binary ------- #
ccm_writebuf += 'binary\n'
for ix in xrange(len(weights)):
for tags in weights[ix]:
variable = tags + '_' + str(ix)
ccm_writebuf += variable + '\n'
for dummy_vars in dummy_set:
ccm_writebuf += dummy_vars + '\n'
ccm_writebuf += 'end\n'
# 1.2 Run the solver
FILENAME = "ilp_problem.lp"
GLPK_LOCATION = "/usr/bin/glpsol"
TEMP_FILENAME = "temp.out"
open(FILENAME,'wb').write(ccm_writebuf)
proc = subprocess.Popen([GLPK_LOCATION, '--cpxlp', FILENAME, '-o', TEMP_FILENAME], stdout = subprocess.PIPE)
(out, err) = proc.communicate()
if not err is None:
print err
seq_len = predictions.shape[0]
# 1.3 Process the output and cleanup
tag_seq = get_ccm_seq(TEMP_FILENAME, seq_len, dummy_set)
proc = subprocess.Popen(['rm', FILENAME, TEMP_FILENAME], stdout = subprocess.PIPE)
(out, err) = proc.communicate()
return tag_seq
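# Shape of the generated CPLEX-LP problem for a 2-token input with classes
# {type, attr} (coefficients elided; illustration only):
#
#   maximize
#   0.7START_type_0 + 0.3START_attr_0 + ... - <rho>D1
#   subject to
#   START_type_0 + START_attr_0 = 1
#   ...
#   binary
#   START_type_0
#   ...
#   end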
def get_prediction(model, post, idx, options, rho = None):
# Preprocess the input
sentence = [feat.split(' ')[0] for feat in post.split('\n')]
sentence_vect = [options['VOCAB'][elem.lower()] + 1 if elem.lower() in options['VOCAB'] else len(options['VOCAB']) + 1 for elem in sentence]
sentence_vect = pad_sequences([sentence_vect], maxlen=options['MAX_LEN'], padding='post')
model = get_weights(model, idx, options)
predictions = model.predict(sentence_vect)
predictions = predictions[:,:len(sentence),:] # 1 x len(sent) x num_classes
# Sanity check
if rho is None:
predictions = np.argmax(predictions, axis=-1).flatten()
predictions_labels = [options['IX_2_CLASSES'][w] for w in predictions]
else:
predictions_labels = ccm_inference(predictions[0], rho, options)
return '\n'.join(predictions_labels)
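# Note on get_rho below: it estimates a leave-one-out prior for the soft ATTR
# constraint as a log-odds ratio, log(#posts containing an 'attr' tag) minus
# log(#posts without one), with add-one smoothing; a larger rho makes it
# costlier for the ILP to violate the "at least one attr" constraint.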
def get_rho(posts, ix):
num_satisfied = 0.
for jx in xrange(len(posts)):
if jx==ix:
continue
post = posts[jx]
num_satisfied += 1. if (len(filter(lambda x: x == 'attr', [f.split(' ')[-1] for f in post.split('\n')]) ) > 0 ) else 0
num_unsatisfied = len(posts) - 1 - num_satisfied
# smoothing
num_satisfied += 1.
num_unsatisfied += 1.
rho_attr = np.log(num_satisfied) - np.log(num_unsatisfied)
return rho_attr
if __name__ == "__main__":
options = lm.get_options()
TRAIN_FILE = options['DATA_PATH']
posts = open(TRAIN_FILE).read().split('\n\n')
# rhos = [get_rho(posts, ix) for ix in xrange(len(posts))]
rhos = [None for ix in xrange(len(posts))]
RANGE = 136
# RANGE = len(posts)
model = lm.create_model(options)
predictions = []
bar = Progbar(RANGE)
for idx in xrange(RANGE):
        prediction = get_prediction(model, posts[idx], idx, options, rhos[idx])
predictions.append(prediction)
bar.update(idx+1)
PREDICTION_FILE = '/home/bass/DataDir/BTPData/Predictions_New/prediction_keras.txt'
open(PREDICTION_FILE,'wb').write('\n\n'.join(predictions))
| [
"[email protected]"
] | |
3a51822baafc23044111a837cfa2333102d2ba8b | a869f208770692f65c265563a11333d6577b1d94 | /callback_python.py | 6a4dbd97f0cd0194b3e8584e457ad764ef6c765d | [] | no_license | malep2007/python-scripts | 8ea1c7fb78b5111348f5c4f54e084612f82647e4 | e57c01f79447be3dcf193a427db6bbabe4a06665 | refs/heads/master | 2020-08-03T14:53:57.424520 | 2019-09-29T12:16:01 | 2019-09-29T12:16:01 | 211,792,498 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 297 | py | def copy_and_manipulate(array, callback):
output = []
for i in array:
output.append(callback(i))
return output
def add_one(num):
    num += 1  # ints are immutable, so this rebinds the local name before it is returned
return num
print(copy_and_manipulate([1,2,3], add_one))
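# Expected output: [2, 3, 4]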
| [
"[email protected]"
] | |
18febcc8fb69368992e5125b1013d83ecad3f43e | 9ffa2c1d9472c0d686433a353764d03da2159205 | /supertools/survey_reader.py | 665bdabfa542b11185fd826035fba2fe1fdb172e | [
"MIT"
] | permissive | zagaran/instant-census | 7c1a0ab0ff282ebc56dd3a35d18a3ab444da1bfb | 62dd5bbc62939f43776a10708ef663722ead98af | refs/heads/master | 2023-05-08T00:21:24.426828 | 2021-05-31T18:19:02 | 2021-05-31T18:19:02 | 372,590,104 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,388 | py | from mongolia import ID_KEY
from backend.admin_portal.survey_builder_helpers import choices_append
from utils.time import datetime
from constants.database import ANSWER_VALUE
from database.backbone.schedules import (Schedule, Question, Conditional)
from database.backbone.cohorts import Cohort
from database.tracking.users import Users
def compile_survey(survey):
# split survey into set of lines
lines = survey.split("\n")
# separate the header to create the schedule and isolate the cohort
header = lines[0]
schedule, cohort = parse_header(header)
# create global user attributes
if "attributes:" in lines[1]:
#make line lower case in order to process attributes correctly
line = lines[1].lower()
attributes = line.split(": ")
attributes = attributes[1].split("; ")
        attributes = parameter_parser(attributes)
for key in attributes:
Cohort(_id=cohort).add_custom_attribute(key, attributes[key])
for user in Users(cohort_id=cohort):
user.add_custom_attribute(key, attributes[key])
lines = lines[1:]
    else:
        attributes = None
# parse remaining lines of survey
remaining = lines[1:]
while remaining:
remaining = parse(schedule, remaining, 0, cohort, attributes)
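# Sketch of the survey text this module expects (inferred from the parsers
# below; the exact parameter names depend on the Schedule/Question models):
#
#   survey: cohort = MyCohort; subtype = daily
#   attributes: mood = unknown
#   question: auto_append = true {Did you sleep well?}
#       if yes:
#           message{Great!}
#       if no:
#           set attribute mood to *answer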
def parse_header(line):
# this function parses the header and creates the schedule object
# returns both the schedule object and the cohort id
print("You have called the parse header function.")
sections = line.split(": ")
params = sections[1].split("; ")
parameters = parameter_parser(params)
sched = Schedule.create(parameters, random_id=True)
return sched, parameters["cohort_id"]
def parameter_parser(line):
# this function parses parameters divided by the equals sign and returns a dictionary
# for lists, must conform to json (raise invalid json exception)
print("You have called the parameter parser function.")
param_dict = {}
for x in line:
parameter = x.split(" = ")
for a, b in enumerate(parameter):
if "[" and "]" in b:
b = b.strip("[]")
                b = b.split(', ')  # TODO: splitting on ', ' breaks values containing commas; integers and unquoted items are not handled
for i, x in enumerate(b):
x = x.strip('\'\"')
#print(x)
b[i] = x
parameter[a] = b
elif parameter[a] in ("true", "True"):
parameter[a] = True
elif parameter[a] in ("false", "False"):
parameter[a] = False
elif "20" and "-" in b:
parameter[a] = convert_to_datetime(b)
elif b.isdigit():
parameter[a] = int(b)
param_dict[parameter[0]] = parameter[1]
# ensure that the cohort key in the dictionary is attached to the correct value
if "cohort" in param_dict:
param_dict['cohort'] = Cohort(cohort_name=param_dict['cohort'])[ID_KEY]
param_dict['cohort_id'] = param_dict.pop('cohort')
return param_dict
def convert_to_datetime(date_string):
    date_formatted = datetime.strptime(date_string, "%Y-%m-%d %H:%M:%S")
return date_formatted
def parse_line(line, node, cohort, attributes):
# parses lines in the survey in order to create questions, conditionals, and messages
print("You have called the parse line function.")
# first, formats text and divides the head (before the colon) and the rest of the text
# information contained in the head indicates what sort of database object is to be created
text = line.strip()
if "{" in text:
head, text = text.split("{")
text = text.strip("}")
    else:
        head = text
# create dictionary of kwargs found in the head (to be fed into database objects)
if ": " in head:
head = head.strip()
parameters = head.split(": ")
if "=" in head:
parameters = parameters[1].split("; ")
parameters = parameter_parser(parameters)
else: parameters = {}
if head.startswith("set attribute"):
change_attribute(line, node, attributes)
head = ""
if "if" in head:
if " attribute " in head:
#separate string
head = head.split("if attribute ")
head = head[1].split(" is ")
attribute, comparison = head[0], head[1]
comparison = comparison.strip(":")
#check if attribute in attributes
if attributes is None:
raise Exception ("You have an attribute in your survey syntax, but no attributes declared!")
elif attribute not in attributes:
raise Exception ("An attribute you are using is not declared in your survey!")
conditional = Conditional.create({"cohort_id": cohort,
"attribute": attribute,
"comparison": comparison},
random_id=True)
else:
if "yes" in head:
comparison = "yes"
elif "no" in head:
comparison = "no"
else:
comparison = head.strip("if ")
comparison = comparison.strip(":")
conditional = Conditional.create({"cohort_id": cohort,
"attribute": ANSWER_VALUE,
"comparison": comparison},
random_id=True)
node.add_action("conditional", {"database_id": conditional[ID_KEY]})
node = conditional
else:
if "question" in head:
node = create_question(text, node, cohort, attributes, **parameters)
elif "message" in head:
node = create_message(text, node, attributes)
return node
def create_question(text, node, cohort, attributes, **kwargs):
# creates a question with parameters passed in via kwargs
kwargs["cohort_id"] = cohort
kwargs["text"] = text
question = Question.create(kwargs,
random_id=True)
node.add_action("send_question", {"database_id": question[ID_KEY]})
if "auto_append" in kwargs and kwargs["auto_append"] == True:
question.update(choices_append=choices_append(question))
# returns the created question as the new node
node = question
return node
def change_attribute(text, node, attributes):
# changes an existing attribute
text = text.split("set attribute ")
text = text[1].split(" to ")
attribute_key, attribute_value = text[0], text[1]
if attributes is None:
raise Exception ("You have an attribute in your survey syntax, but no attributes declared!")
elif attribute_key not in attributes:
raise Exception ("An attribute you are using is not declared in your survey!")
if attribute_value == "*answer":
attribute_value = ANSWER_VALUE
node.add_action("set_attribute", {"attribute_name": attribute_key, "attribute_value": attribute_value})
def create_message(text, node, attributes):
# creates a message
node.add_action("send_message", {"text": text})
node = "message"
return node
def parse(node, remaining, indent, cohort, attributes):
# recursive function that parses each line of the survey
# call parse_line on first line
node = parse_line(remaining[0], node, cohort, attributes)
remaining = remaining[1:]
# parse according to whether the next line has the same indentation level
while remaining:
next_indent = get_indentation_level(remaining[0], indent)
if next_indent <= indent:
return remaining
else:
remaining = parse(node, remaining, indent + 1, cohort, attributes)
def get_indentation_level(line, current_indent):
# determines indentation level of a given line in the survey text, assuming first line indent is 0
n = len(line) - len(line.lstrip(" "))
    if n % 4 != 0:
        raise Exception("Bad indentation: indents must be multiples of four spaces.")
    elif n > (current_indent + 1) * 4:
        raise Exception("Bad indentation: a line may only be one level deeper than its parent.")
    elif "\t" in line:
        raise Exception("Bad indentation: tabs are not allowed, use spaces only.")
return n / 4 | [
"[email protected]"
] | |
65ed4e161dfa15a766e3541d97ca7beeebb6d745 | 1d9595555d45c9f31edc164275c392f9d4fc001a | /xmlutils/renderer/base.py | 39d84cc6c8f5ddd5e1bce3a6a9d138dc13bd919a | [
"BSD-2-Clause"
] | permissive | tokibito/python-xmlutils | 9a0f364df3c32b15d4ae58e3ccf07fb3c9e004e0 | f173b8ef01fb740d097d1875f205f714982b5829 | refs/heads/master | 2020-07-25T07:52:37.899389 | 2010-03-30T11:55:52 | 2010-03-30T11:55:52 | 208,221,406 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 145 | py | class BaseRenderer(object):
def __init__(self, *args, **kwargs):
pass
def render(self, node):
raise NotImplementedError
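# Subclasses are expected to implement render(node), e.g. (hypothetical):
#   class TextRenderer(BaseRenderer):
#       def render(self, node):
#           return node.text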
| [
"[email protected]"
] | |
552877857c701781809eee505c222d299fae243a 3b9bf497cd29cea9c24462e0411fa8adbfa6ba60 /leetcode/Problems/1032--Stream-of-Characters-Hard.py 9ce827464f36efa080a24e21215caae9b4642bae | [] | no_license | niteesh2268/coding-prepation | 918823cb7f4965bec096ec476c639a06a9dd9692 | 19be0766f6b9c298fb32754f66416f79567843c1 | refs/heads/master | 2023-01-02T05:30:59.662890 | 2020-10-17T13:12:34 | 2020-10-17T13:12:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,023 | py | from collections import defaultdict
from typing import List
class TrieNode:
def __init__(self):
self.children = defaultdict()
self.isWordEnd = False
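# Example (LeetCode 1032): with words = ["cd", "f", "kl"], queries arrive one
# character at a time; query('c') returns False, and a following query('d')
# returns True because the stream suffix "cd" matches a word.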
class StreamChecker:
def __init__(self, words: List[str]):
self.root = TrieNode()
self.maxLen = 0
for word in words:
self.maxLen = max(self.maxLen ,len(word))
self.insert(word[::-1])
self.q = ''
def insert(self, word):
temp = self.root
for ch in word:
if ch not in temp.children:
temp.children[ch] = TrieNode()
temp = temp.children[ch]
temp.isWordEnd = True
def query(self, letter: str) -> bool:
self.q = (letter + self.q)[:self.maxLen]
temp = self.root
for ch in self.q:
if not temp:
return False
if ch not in temp.children:
return False
if temp.children[ch] and temp.children[ch].isWordEnd:
return True
temp = temp.children[ch] | [
"akualajayaprakash@gmailcom"
] | akualajayaprakash@gmailcom |
a883655d8a3bb0994ede721e1eb19c5f49814972 | 17856275ae788e15d3b089dd2f9f291488a0af78 | /modules/post_details.py | 30721d6d9358938a23693f952d8171cc9c022371 | [] | no_license | Bawya1098/OfficeCafe-Projects | 71a603cb1e307b039ed414ebc8421e25d46811f6 | 346be83bcdee9e410e4ba6980bed49b24f70ca2c | refs/heads/master | 2020-04-22T03:17:33.667193 | 2019-03-11T06:40:31 | 2019-03-11T06:40:31 | 170,081,664 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | py | def post_data(connection, user_data):
cursor = connection.cursor()
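    # %s placeholders are bound by the DB driver, which escapes the values
    # (parameterized queries guard against SQL injection)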
cursor.execute("""insert into orders(employee_id,cart_items) values(%s,%s);""",
(user_data['employee_id'], user_data['cart_items']))
connection.commit()
cursor.close()
| [
"[email protected]"
] | |
c6ddb80fc7ad55e16f7e31c3afd45024cca8d9a0 | ea9e9756f6b5b7022015049d92c399ee03cfde67 | /知吾煮/AutoTest_DL/interface/test_upload.py | e094f84e214ffe6d284efa82d6cc51de6ce3045b | [] | no_license | Godlovesli/jiaojiewendang | 724e0f01028cc5cbcc9ce5807bd057e15172eb91 | 579551eabfc83226804ccfbf8b868192614d7166 | refs/heads/master | 2020-04-05T03:38:41.212627 | 2018-11-07T09:31:51 | 2018-11-07T09:31:51 | 156,521,937 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,890 | py | #!/usr/bin/env python
#_*_ coding:utf-8 _*_
# -*- __author__ = 'feng' -*-
from base.base import MyTest
from base.login import Login
import unittest
import json
from HTMLTestRunner import HTMLTestRunner
import urllib, urllib2
from poster.encode import multipart_encode
from poster.streaminghttp import register_openers
from cryptutil import generateNonce, generateSignature,getSessionSecurity,encryptAES,decryptAES,md5
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
class uploadTest(MyTest):
    '''Tests for the image upload API'''
url_path = '/file/upload'
@classmethod
def setUpClass(cls):
pass
def test_upload_success(self):
        '''Uploading an image succeeds'''
r = self.publish('POST',
self.url_path,
{'filename': open(r'D:\test.jpg', 'rb')},
)
print r
js = json.loads(r)
self.assertEqual(js['state'], 1)
self.assertEqual(js['message'], u'上传成功')
def test_upload_null(self):
        '''Required parameter has an empty value'''
r = self.publish('POST',
self.url_path,
{'filename': ''},
)
print r
js = json.loads(r)
self.assertEqual(js['state'],-4)
self.assertIn("'filename' is not present",js['message'])
def test_upload_panull(self):
        '''Required parameter name is missing'''
r = self.publish('POST',
self.url_path,
{'': open(r'D:\test.jpg', 'rb')},
)
print r
js = json.loads(r)
self.assertEqual(js['state'], -4)
self.assertIn("'filename' is not present", js['message'])
def test_upload_signerror(self):
        '''Incorrect sign'''
self.url = self.base_url + self.url_path
self.signature = generateSignature(self.nonce, "POST", self.url)
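        # poster's register_openers() installs streaming handlers so that
        # urllib2 can POST multipart/form-data bodies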
register_openers()
data, headers = multipart_encode({'filename': open(r'D:\test.jpg', 'rb')})
request = urllib2.Request(self.url, data=data, headers=headers)
request.add_header('nonce', self.nonce)
request.add_header('signature', self.signature+'e')
request.add_header('User-Agent', 'chunmiapp')
response = urllib2.urlopen(request)
result = response.read()
print result
js = json.loads(result)
self.assertEqual(js['state'],-2)
self.assertIn('拦截请求授权出错',js['message'])
def test_upload_noncerror(self):
        '''Incorrect nonce'''
self.url = self.base_url + self.url_path
self.signature = generateSignature(self.nonce, "POST", self.url)
register_openers()
data, headers = multipart_encode({'filename': open(r'D:\test.jpg', 'rb')})
request = urllib2.Request(self.url, data=data, headers=headers)
request.add_header('nonce', self.nonce+'e')
request.add_header('signature', self.signature)
request.add_header('User-Agent', 'chunmiapp')
response = urllib2.urlopen(request)
result = response.read()
print result
js = json.loads(result)
self.assertEqual(js['state'],-2)
self.assertIn('拦截请求授权出错',js['message'])
if __name__ == '__main__':
# unittest.main()
testunit = unittest.TestSuite()
testunit.addTest(uploadTest('test_upload_success'))
testunit.addTest(uploadTest('test_upload_null'))
testunit.addTest(uploadTest('test_upload_panull'))
testunit.addTest(uploadTest('test_upload_signerror'))
testunit.addTest(uploadTest('test_upload_noncerror'))
fp = open('./upload.html', 'wb')
runner = HTMLTestRunner(stream=fp,
                            title=u'Image upload API test report',
                            description=u'Test case results:')
runner.run(testunit)
fp.close()
| [
"[email protected]"
] | |
9fef8d5dc64926520d73300c46c095a61f502d6b | 9045393c9ace1cfb50b4c03efaafcf815ccce472 | /tests/test_flow.py | 5ea479c7f7be45454b6d21e1c7f9b4ed9adb4aaf | [
"MIT"
] | permissive | pombredanne/single_file_module-project | 554aa3aaa63e32eb1c5473085b968b1160eef9ec | 243f4c9cd9e6ef04ec7e8a48a0fed9645d6933b2 | refs/heads/master | 2021-01-23T06:06:07.098548 | 2017-07-14T21:53:35 | 2017-07-14T21:53:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 661 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
import pytest
from sfm import flow
def bet_and_win(lower=1, upper=100, threshold=50):
value = random.randint(lower, upper)
if value > threshold:
return value
else:
raise Exception("%s point, You Lose!" % value)
def test_try_ntime():
# Successful case
value = flow.try_ntime(10000, bet_and_win, 1, 10, 5)
assert value > 5
# Unsuccessful case
with pytest.raises(Exception):
value = flow.try_ntime(1, bet_and_win, 1, 10000, 9999)
if __name__ == "__main__":
import os
pytest.main([os.path.basename(__file__), "--tb=native", "-s", ])
| [
"[email protected]"
] | |
e3863a265db5c7c39dd3f86727766c78fc33f2b4 | d498d66fbe9bae2f86161f5c157d52c9433c6a8c | /mysite/mysite/urls.py | a577c6e46d6f5fde2d75a5260689f6c54dccb1ee | [] | no_license | sulembutproton/promosys | 9330176d175f29d1e86991d93570981f6cad8317 | 2f67f08e2f07ab9ae0f25d9ac67905ebb1bbbda2 | refs/heads/master | 2023-03-11T20:31:06.152814 | 2021-03-01T15:52:31 | 2021-03-01T15:52:31 | 343,463,484 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,291 | py | """
mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
import os
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls import url
from django.contrib.auth import views as auth_views
from django.views.static import serve
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
urlpatterns = [
path('admin/', admin.site.urls), # Keep
path('', include('ads.urls')), # Change to ads.urls
path('accounts/', include('django.contrib.auth.urls')), # Keep
url(r'^oauth/', include('social_django.urls', namespace='social')), # Keep
]
"""
# Sample applications
path('hello/', include('hello.urls')),
path('users/', include('users.urls')),
path('tracks/', include('tracks.urls')),
path('views/', include('views.urls')),
path('route/', include('route.urls', namespace='nsroute')),
path('tmpl/', include('tmpl.urls')),
path('gview/', include('gview.urls')),
path('session/', include('session.urls')),
path('authz/', include('authz.urls')),
path('getpost/', include('getpost.urls')),
path('form/', include('form.urls')),
path('crispy/', include('crispy.urls')),
path('myarts/', include('myarts.urls')),
path('menu/', include('menu.urls')),
path('forums/', include('forums.urls')),
path('pics/', include('pics.urls')),
path('favs/', include('favs.urls')),
path('favsql/', include('favsql.urls')),
path('rest/', include('rest.urls')),
path('autos/', include('autos.urls')),
path('usermodel/', include('usermodel.urls')),
path('chat/', include('chat.urls')),
path('util/', include('util.urls')),
path('well/', include('well.urls')),
path('tagme/', include('tagme.urls')),
"""
# Serve the static HTML
"""
urlpatterns += [
url(r'^site/(?P<path>.*)$', serve,
{'document_root': os.path.join(BASE_DIR, 'site'),
'show_indexes': True},
name='site_path'
),
]
"""
# Serve the favicon - Keep for later
urlpatterns += [
path('favicon.ico', serve, {
'path': 'favicon.ico',
'document_root': os.path.join(BASE_DIR, 'ads/static'),
}
),
]
# Switch to social login if it is configured - Keep for later
try:
from . import github_settings
social_login = 'registration/login_social.html'
urlpatterns.insert(0,
path('accounts/login/', auth_views.LoginView.as_view(template_name=social_login))
)
print('Using', social_login, 'as the login template')
except:
print('Using registration/login.html as the login template')
# References
# https://docs.djangoproject.com/en/3.0/ref/urls/#include
| [
"[email protected]"
] | |
5b3c899903378bc31aeee6e2a698d7b316d2b9ed | a1352de184b2a60295b90ba5472579c6dc8abb29 | /misc/python/strucdump.py | a7ea13d9294eccf04d1da1d6211a77fe61beb026 | [] | no_license | gunmetalbackupgooglecode/corkami | 2b5e99b1cfd94f716e35293e915d536e468aec75 | 1a52e71ec205f1cb7ce493789474209dd1b47911 | refs/heads/master | 2016-09-16T06:33:52.261541 | 2015-02-11T18:41:13 | 2015-02-11T18:41:13 | 32,167,637 | 25 | 11 | null | null | null | null | UTF-8 | Python | false | false | 2,465 | py | #simple hex structure viewer
#TODO: classify!
# Ange Albertini, BSD Licence, 2011
import struct
import sys
fn = sys.argv[1]
last = -1
lastdata = []
lastrend = -1
INDENT = "\t"
COLS = 2
tags_types = [
('BOGUSTYPE', 50829),
]
TAGS = dict([(i[1], i[0]) for i in tags_types] + tags_types)
for i,j in TAGS.iteritems():
TAGS[j] = i
def ph(start, end, cmt=None, skip=None, ccb=None):
    global r, last, lastdata, lastrend, INDENT
if end > len(r):
end = len(r)
if cmt is None:
cmt = ""
if ccb is not None:
cmt = parseformat(r[start:end], ccb) + " " + cmt
cmt = cmt.splitlines()
rstart = (start / (16*COLS)) * (16*COLS)
    rend = ((end + (16*COLS) - 1) / (16*COLS)) * (16*COLS)  # round end up to a whole display row
heads = range(rstart, rend, (16*COLS))
if skip is None:
skip = len(heads)
elif skip == -1:
skip = 1
non_skipped = True
for line, head in enumerate(heads):
if line > skip and line < len(heads) - skip:
if non_skipped:
print INDENT + "[..]"
non_skipped = False
continue
if head==lastrend and line == 0:
print INDENT + " ",
else:
print INDENT + "%03x:" % head,
for i in range((16*COLS)):
if (head + i < start) or (head + i > end - 1):
print " ",
else:
print "%02x" % ord(r[head + i]),
print("// " + cmt[line] if line < len(cmt) else "")
last = end
lastdata = r[start:end]
lastrend = heads[-1]
fcuts = []
with open(fn, "rb") as f:
r = f.read()
def tag_cb(d):
return "0x%02x (%s)" % (d, TAGS[d])
def small_hex(d):
if 0 <= d < 10:
return "%i" % d
else:
return "0x%X" % d
def types(d):
return "%s (%s)" % (small_hex(d), {1:"Byte", 2:"Ascii", 3:"Short", 4:"Long", 5:"Rational"}[d])
def dec(d):
return "%i" % d
STRUCTURE = [["H,Tag", tag_cb], ["H,Type", types], ["I,Count", small_hex], ["I,ValOffset", small_hex]]
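# Each STRUCTURE entry is "<struct format char>,<field name>", optionally paired
# with a formatter callback; parseformat() walks the buffer field by field using
# struct.calcsize. The Tag/Type/Count/ValOffset layout matches a TIFF/EXIF IFD entry.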
def parseformat(d,f):
s = []
for f in f:
type_, name = f[0].split(",")
size = struct.calcsize(type_)
val = struct.unpack(type_, d[:size])[0]
d = d[size:]
if len(f) == 1:
s.append("%s:0x%x" % (name, val))
else:
s.append("%s:%s" % (name, f[1](val)))
return ", ".join(s)
| [
"[email protected]"
] | |
2aa508143b15c41cf17a324f991bc0fe83031d58 | 5c2e4266abf6d2be9102d5309bf94071a1eae1db | /02 高级语法系列/cp 爬虫/基础/v18.py | 478a6d18945b1ac16874c9eb6b549f405533e545 | [] | no_license | 13834319675/python | 8176d5da47136b9b3ec290eaa0b699c6b1e7a8ab | 3e6f04670f6f01006f827794865488dd40bca380 | refs/heads/master | 2021-07-11T18:29:12.894401 | 2021-07-05T08:29:27 | 2021-07-05T08:29:27 | 171,112,613 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,756 | py |
'''
Cracking the Youdao Dictionary translation API
V1
'''
from urllib import request, parse
def youdao():
k = input("请输入要翻译的内容:")
url = "http://fanyi.youdao.com/translate?smartresult=dict&smartresult=rule"
data = {
"i": k,
"from":"AUTO",
"to": "AUTO",
"smartresult": "dict",
"client": "fanyideskweb",
"salt": "1523100789519",
"sign": "b8a55a436686cd89873fa46514ccedbe",
"doctype": "json",
"version": "2.1",
"keyfrom": "fanyi.web",
"action":"FY_BY_REALTIME",
"typoResult": "false"
}
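    # NOTE: salt/sign were captured from a single browser session; the live
    # service derives sign from the payload and a timestamp, so a stale pair
    # may be rejected (assumption based on how this request was captured).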
    # the data parameter must be bytes
data = parse.urlencode(data).encode()
headers = {
"Accept": "application/json,text/javascript,*/*;q=0.01",
#"Accept-Encoding": "gzip,deflate",
"Accept-Language": "zh-CN,zh;q=0.9",
"Connection": "keep-alive",
"Content-Length": "200",
"Content-Type": "application/x-www-form-urlencoded;charset=UTF-8",
"Cookie": "[email protected];JSESSIONID=aaaTLWzfvp5Hfg9mAhFkw;OUTFOX_SEARCH_USER_ID_NCOO=1999296830.4784973;___rl__test__cookies=1523100789517",
"Host": "fanyi.youdao.com",
"Origin": "http://fanyi.youdao.com",
"Referer": "http://fanyi.youdao.com/",
"User-Agent": "Mozilla/5.0( X11; Linux x86_64) AppleWebKit/537.36(KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36 X-Requested-With: XMLHttpRequest"
}
req = request.Request(url=url, data=data, headers=headers)
rsp = request.urlopen(req)
html = rsp.read().decode()
print(html)
if __name__ == '__main__':
youdao()
| [
"[email protected]"
] | |
4e47cceaecf1e3209157eb0200600ae705e2fc5e | 747f759311d404af31c0f80029e88098193f6269 | /addons/olap/cube/levels/level_normal.py | 28102ac873d17b2195509cea402caaffd1a5a756 | [] | no_license | sgeerish/sirr_production | 9b0d0f7804a928c0c582ddb4ccb7fcc084469a18 | 1081f3a5ff8864a31b2dcd89406fac076a908e78 | refs/heads/master | 2020-05-19T07:21:37.047958 | 2013-09-15T13:03:36 | 2013-09-15T13:03:36 | 9,648,444 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 70 | py | /home/openerp/production/extra-addons/olap/cube/levels/level_normal.py | [
"[email protected]"
] | |
62750b17337dd5468a944195eea64784f470b4ac | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/303/usersdata/284/92253/submittedfiles/testes.py | e388e395e0c9c5be69b317d970dd592eb7e72f48 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,020 | py | linha = []
coluna = []
for linha in range(0, 3, 1):
    for coluna in range(0, 3, 1):
        print('%d %d' % (linha, coluna))
import random
def solicitaSimboloDoHumano():
letra = 0
while not (letra == 'O' or letra == 'X'):
print('Qual símbolo você deseja utilizar no jogo? ')
letra = input().upper()
if letra == 'X':
return ['X','O']
else:
return ['O','X']
def sorteioPrimeiraJogada():
if random.randint(1,2) == 1:
return 'Computador'
else:
return 'Jogador'
def jogadaHumana(tabuleiro):
movimento = 0
    while movimento not in '1 2 3 4 5 6 7 8 9'.split() or not vazio(tabuleiro, int(movimento)):
        print('Qual a sua jogada, {}?'.format(nome))
movimento = input()
return int(movimento)
def jogadaComputador(tabuleiro, letraComputador):
if letraComputador == 'X':
letraJogador = 'O'
else:
letraJogador = 'X'
for i in range(1, 10):
copy = mostraTabuleiro(tabuleiro)
if vazio(copy, i):
movimentacao(copy, letraComputador, i)
if verificaVencedor(copy, letraComputador):
return i
for i in range(1, 10):
copy = mostraTabuleiro(tabuleiro)
if vazio(copy, i):
movimentacao(copy, letraJogador, i)
if verificaVencedor(copy, letraJogador):
return i
movimento = movAleatoria(tabuleiro, [1, 3, 7, 9])
if movimento != None:
return movimento
if vazio(tabuleiro, 5):
return 5
return movAleatoria(tabuleiro, [2, 4, 6, 8])
#def validaJogada()
def mostraTabuleiro(tabuleiro):
dupeTabuleiro = []
for i in tabuleiro:
dupeTabuleiro.append(i)
return dupeTabuleiro
def verificaVencedor(tabuleiro, letra):
return ((tabuleiro[7] == letra and tabuleiro[8] == letra and tabuleiro[9] == letra) or
(tabuleiro[4] == letra and tabuleiro[5] == letra and tabuleiro[6] == letra) or
(tabuleiro[1] == letra and tabuleiro[2] == letra and tabuleiro[3] == letra) or
(tabuleiro[7] == letra and tabuleiro[4] == letra and tabuleiro[1] == letra) or
(tabuleiro[8] == letra and tabuleiro[5] == letra and tabuleiro[2] == letra) or
(tabuleiro[9] == letra and tabuleiro[6] == letra and tabuleiro[3] == letra) or
(tabuleiro[7] == letra and tabuleiro[5] == letra and tabuleiro[3] == letra) or
(tabuleiro[9] == letra and tabuleiro[5] == letra and tabuleiro[1] == letra))
#################################################################################
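# Board convention: tabuleiro has 10 slots and only indices 1-9 are played,
# laid out 7-8-9 / 4-5-6 / 1-2-3 (see desenhaTabuleiro below).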
def vazio(tabuleiro, movimento):
return tabuleiro[movimento] == ' '
def desenhaTabuleiro(tabuleiro):
print(' ' + tabuleiro[7] + ' | ' + tabuleiro[8] + ' | ' + tabuleiro[9])
print(' ' + tabuleiro[4] + ' | ' + tabuleiro[5] + ' | ' + tabuleiro[6])
print(' ' + tabuleiro[1] + ' | ' + tabuleiro[2] + ' | ' + tabuleiro[3])
def jogarNovamente():
print('Você deseja jogar novamente? ')
return input().lower().startswith('sim')
def movimentacao(tabuleiro, letra, movimento):
tabuleiro[movimento] = letra
def movAleatoria(tabuleiro, movimentosList):
movPossiveis = []
for i in movimentosList:
if vazio(tabuleiro, i):
movPossiveis.append(i)
if len(movPossiveis) != 0:
return random.choice(movPossiveis)
else:
return None
def completo(tabuleiro):
for i in range(1, 10):
if vazio(tabuleiro, i):
return False
return True
print('Bem vindo ao JogoDaVelha do grupo X')
nome = input('Qual o seu nome (ou apelido)? ')
while True:
tabul = [' '] * 10
letraJogador, letraComputador = solicitaSimboloDoHumano()
turn = sorteioPrimeiraJogada()
print('Vencedor do sorteio para início do jogo: {}'.format(turn))
rodando = True
while rodando:
if turn == 'Jogador':
desenhaTabuleiro(tabul)
movimento = jogadaHumana(tabul)
movimentacao(tabul, letraJogador, movimento)
if verificaVencedor(tabul, letraJogador):
desenhaTabuleiro(tabul)
print('Vencedor: {}'.format(nome))
rodando = False
else:
if completo(tabul):
desenhaTabuleiro(tabul)
print('Deu Velha!')
break
else:
turn = 'Computador'
else:
movimento = jogadaComputador(tabul, letraComputador)
movimentacao(tabul, letraComputador, movimento)
if verificaVencedor(tabul, letraComputador):
desenhaTabuleiro(tabul)
print('Vencedor: Computador')
rodando = False
else:
if completo(tabul):
desenhaTabuleiro(tabul)
print('Deu Velha!')
break
else:
turn = 'Jogador'
if not jogarNovamente():
break
| [
"[email protected]"
] | |
1f52fc7b98a2980e9da9429774fccee2b8e054d2 | ba8d566fe1fca8584601d70e209fef358d5aea16 | /pyntcloud/sampling/s_mesh.py | 5eb979a4acd73d38bf46e280431bc1d48fbb5e2b | [] | no_license | threerivers3d-jc/pyntcloud | 6d316954fdedbd0d336e51ca0c887913f077c6cb | 81311d4cbca037a755353dc5fcf80acad9189513 | refs/heads/master | 2021-01-21T15:26:53.939132 | 2017-06-21T17:41:48 | 2017-06-21T17:41:48 | 91,844,119 | 0 | 0 | null | 2017-05-19T20:46:30 | 2017-05-19T20:46:30 | null | UTF-8 | Python | false | false | 3,658 | py | import numpy as np
import pandas as pd
from ..base import Sampling
from ..geometry.areas import triangle_area_multi
class Sampling_Mesh(Sampling):
"""
"""
def __init__(self, pyntcloud, rgb=False, normals=False):
super().__init__(pyntcloud)
self.rgb = rgb
self.normals = normals
def extract_info(self):
v1, v2, v3 = self.pyntcloud.get_mesh_vertices(rgb=self.rgb, normals=self.normals)
self.v1_xyz = v1[:, :3]
self.v2_xyz = v2[:, :3]
self.v3_xyz = v3[:, :3]
if self.rgb:
self.v1_rgb = v1[:, 3:6]
self.v2_rgb = v2[:, 3:6]
self.v3_rgb = v3[:, 3:6]
if self.normals:
self.v1_normals = v1[:, 6:]
self.v2_normals = v2[:, 6:]
self.v3_normals = v3[:, 6:]
elif self.normals:
self.v1_normals = v1[:, 3:6]
self.v2_normals = v2[:, 3:6]
self.v3_normals = v3[:, 3:6]
class RandomMesh(Sampling_Mesh):
""" Sample points adjusting probabilities according to triangle area.
Parameters
----------
n: int
Number of points to be sampled.
rgb: bool, optional
Default: False
Indicates if rgb values will be also sampled.
normals: bool, optional
Default: False
Indicates if normals will be also sampled.
"""
def __init__(self, pyntcloud, n, rgb=False, normals=False):
super().__init__(pyntcloud, rgb, normals)
self.n = n
def compute(self):
areas = triangle_area_multi(self.v1_xyz, self.v2_xyz, self.v3_xyz)
probabilities = areas / np.sum(areas)
random_idx = np.random.choice(np.arange(len(areas)), size=self.n, p=probabilities)
v1_xyz = self.v1_xyz[random_idx]
v2_xyz = self.v2_xyz[random_idx]
v3_xyz = self.v3_xyz[random_idx]
# (n, 1) the 1 is for broadcasting
u = np.random.rand(self.n, 1)
v = np.random.rand(self.n, 1)
is_a_problem = u + v > 1
u[is_a_problem] = 1 - u[is_a_problem]
v[is_a_problem] = 1 - v[is_a_problem]
result = pd.DataFrame()
result_xyz = (v1_xyz * u) + (v2_xyz * v) + ((1 - (u + v)) * v3_xyz)
result_xyz = result_xyz.astype(np.float32)
result["x"] = result_xyz[:,0]
result["y"] = result_xyz[:,1]
result["z"] = result_xyz[:,2]
if self.rgb:
v1_rgb = self.v1_rgb[random_idx]
v2_rgb = self.v2_rgb[random_idx]
v3_rgb = self.v3_rgb[random_idx]
result_rgb = (v1_rgb * u) + (v2_rgb * v) + ((1 - (u + v)) * v3_rgb)
result_rgb = result_rgb.astype(np.uint8)
result["red"] = result_rgb[:,0]
result["green"] = result_rgb[:,1]
result["blue"] = result_rgb[:,2]
if self.normals:
v1_normals = self.v1_normals[random_idx]
v2_normals = self.v2_normals[random_idx]
v3_normals = self.v3_normals[random_idx]
sum_normals = v1_normals + v2_normals + v3_normals
result_normals = sum_normals / np.linalg.norm(sum_normals, axis=1)[..., None]
result_normals = result_normals.astype(np.float32)
result["nx"] = result_normals[:,0]
result["ny"] = result_normals[:,1]
result["nz"] = result_normals[:,2]
return result | [
"[email protected]"
] | |
f10d28f05fae8ab19c89469f1599948f39a0b6c6 | 9fbab0fd689ba1abbae4439a0e5bda315932f177 | /tools/effectiveT3/effectiveT3.py | 0254f325166f39f946564fa379f1c4f90dbfa794 | [
"MIT"
] | permissive | Imoteph/pico_galaxy | 437cb5c9b231896fcfcafec902a0ed1f1c9646b3 | 2c81bd9f238aacf7fb2ac2b027706eff31d23cc5 | refs/heads/master | 2021-01-11T23:00:08.239492 | 2016-11-04T12:06:36 | 2016-11-04T12:06:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,603 | py | #!/usr/bin/env python
"""Wrapper for EffectiveT3 v1.0.1 for use in Galaxy.
This script takes exactly five command line arguments:
* model name (e.g. TTSS_STD-1.0.1.jar)
* threshold (selective or sensitive)
* an input protein FASTA filename
* output tabular filename
It then calls the standalone Effective T3 v1.0.1 program (not the
webservice), and reformats the semi-colon separated output into
tab separated output for use in Galaxy.
"""
import sys
import os
import subprocess
# The Galaxy auto-install via tool_dependencies.xml will set this environment variable
effective_t3_dir = os.environ.get("EFFECTIVET3", "/opt/EffectiveT3/")
effective_t3_jar = os.path.join(effective_t3_dir, "TTSS_GUI-1.0.1.jar")
if "-v" in sys.argv or "--version" in sys.argv:
# TODO - Get version of the JAR file dynamically?
print("Wrapper v0.0.16, TTSS_GUI-1.0.1.jar")
sys.exit(0)
if len(sys.argv) != 5:
sys.exit("Require four arguments: model, threshold, input protein FASTA file & output tabular file")
model, threshold, fasta_file, tabular_file = sys.argv[1:]
if not os.path.isfile(fasta_file):
sys.exit("Input FASTA file not found: %s" % fasta_file)
if threshold not in ["selective", "sensitive"] \
and not threshold.startswith("cutoff="):
sys.exit("Threshold should be selective, sensitive, or cutoff=..., not %r" % threshold)
def clean_tabular(raw_handle, out_handle):
"""Clean up Effective T3 output to make it tabular."""
count = 0
positive = 0
errors = 0
for line in raw_handle:
if not line or line.startswith("#") \
or line.startswith("Id; Description; Score;"):
continue
assert line.count(";") >= 3, repr(line)
# Normally there will just be three semi-colons, however the
# original FASTA file's ID or description might have had
# semi-colons in it as well, hence the following hackery:
try:
id_descr, score, effective = line.rstrip("\r\n").rsplit(";", 2)
# Cope when there was no FASTA description
if "; " not in id_descr and id_descr.endswith(";"):
id = id_descr[:-1]
descr = ""
else:
id, descr = id_descr.split("; ", 1)
except ValueError:
sys.exit("Problem parsing line:\n%s\n" % line)
parts = [s.strip() for s in [id, descr, score, effective]]
out_handle.write("\t".join(parts) + "\n")
count += 1
if float(score) < 0:
errors += 1
if effective.lower() == "true":
positive += 1
return count, positive, errors
def run(cmd):
# Avoid using shell=True when we call subprocess to ensure if the Python
# script is killed, so too is the child process.
try:
child = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except Exception, err:
sys.exit("Error invoking command:\n%s\n\n%s\n" % (" ".join(cmd), err))
# Use .communicate as can get deadlocks with .wait(),
stdout, stderr = child.communicate()
return_code = child.returncode
if return_code or stderr.startswith("Exception in thread"):
cmd_str = " ".join(cmd) # doesn't quote spaces etc
if stderr and stdout:
sys.exit("Return code %i from command:\n%s\n\n%s\n\n%s" % (return_code, cmd_str, stdout, stderr))
else:
sys.exit("Return code %i from command:\n%s\n%s" % (return_code, cmd_str, stderr))
if not os.path.isdir(effective_t3_dir):
sys.exit("Effective T3 folder not found: %r" % effective_t3_dir)
if not os.path.isfile(effective_t3_jar):
sys.exit("Effective T3 JAR file not found: %r" % effective_t3_jar)
if not os.path.isdir(os.path.join(effective_t3_dir, "module")):
sys.exit("Effective T3 module folder not found: %r" % os.path.join(effective_t3_dir, "module"))
effective_t3_model = os.path.join(effective_t3_dir, "module", model)
if not os.path.isfile(effective_t3_model):
sys.stderr.write("Contents of %r is %s\n"
% (os.path.join(effective_t3_dir, "module"),
", ".join(repr(p) for p in os.listdir(os.path.join(effective_t3_dir, "module")))))
sys.stderr.write("Main JAR was found: %r\n" % effective_t3_jar)
sys.exit("Effective T3 model JAR file not found: %r" % effective_t3_model)
# We will have write access whereever the output should be,
temp_file = os.path.abspath(tabular_file + ".tmp")
# Use absolute paths since will change current directory...
tabular_file = os.path.abspath(tabular_file)
fasta_file = os.path.abspath(fasta_file)
cmd = ["java", "-jar", effective_t3_jar,
"-f", fasta_file,
"-m", model,
"-t", threshold,
"-o", temp_file,
"-q"]
try:
# Must run from directory above the module subfolder:
os.chdir(effective_t3_dir)
except Exception:
sys.exit("Could not change to Effective T3 folder: %s" % effective_t3_dir)
run(cmd)
if not os.path.isfile(temp_file):
sys.exit("ERROR - No output file from Effective T3")
out_handle = open(tabular_file, "w")
out_handle.write("#ID\tDescription\tScore\tEffective\n")
data_handle = open(temp_file)
count, positive, errors = clean_tabular(data_handle, out_handle)
data_handle.close()
out_handle.close()
os.remove(temp_file)
if errors:
print("%i sequences, %i positive, %i errors"
% (count, positive, errors))
else:
print("%i/%i sequences positive" % (positive, count))
if count and count == errors:
# Galaxy will still allow them to see the output file
sys.exit("All your sequences gave an error code")
| [
"[email protected]"
] | |
622607fa7a7c60daa2e8c156f9d58d46032e3a4a | f1f21ba2236da38a49a8185ce33b3ce4a4424c1d | /apps/drug_target_interaction/moltrans_dti/preprocess.py | 3919994d6fa4be6c6b1b0d8c44ecb5fd98b3b5e8 | [
"Apache-2.0"
] | permissive | PaddlePaddle/PaddleHelix | 75a07c2f14475e56e72f4573b2cf82a91d1cbfda | e6ab0261eb719c21806bbadfd94001ecfe27de45 | refs/heads/dev | 2023-08-05T03:34:55.009355 | 2023-08-01T09:30:44 | 2023-08-01T09:30:44 | 314,704,349 | 771 | 197 | Apache-2.0 | 2023-08-01T09:15:07 | 2020-11-21T00:53:39 | Python | UTF-8 | Python | false | false | 5,501 | py | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Data preprocessing
"""
from helper import utils
import paddle
from paddle import io
import os
import numpy as np
import pandas as pd
import codecs
from subword_nmt.apply_bpe import BPE
# Set global variable, drug max position, target max position
D_MAX = 50
T_MAX = 545
drug_vocab_path = './vocabulary/drug_bpe_chembl_freq_100.txt'
drug_codes_bpe = codecs.open(drug_vocab_path)
drug_bpe = BPE(drug_codes_bpe, merges=-1, separator='')
drug_temp = pd.read_csv('./vocabulary/subword_list_chembl_freq_100.csv')
drug_index2word = drug_temp['index'].values
drug_idx = dict(zip(drug_index2word, range(0, len(drug_index2word))))
target_vocab_path = './vocabulary/target_bpe_uniprot_freq_500.txt'
target_codes_bpe = codecs.open(target_vocab_path)
target_bpe = BPE(target_codes_bpe, merges=-1, separator='')
target_temp = pd.read_csv('./vocabulary/subword_list_uniprot_freq_500.csv')
target_index2word = target_temp['index'].values
target_idx = dict(zip(target_index2word, range(0, len(target_index2word))))
def drug_encoder(input_smiles):
"""
Drug Encoder
Args:
input_smiles: input drug sequence.
Returns:
v_d: padded drug sequence.
temp_mask_d: masked drug sequence.
"""
temp_d = drug_bpe.process_line(input_smiles).split()
try:
idx_d = np.asarray([drug_idx[i] for i in temp_d])
    except KeyError:  # subword not found in the vocabulary
idx_d = np.array([0])
flag = len(idx_d)
if flag < D_MAX:
v_d = np.pad(idx_d, (0, D_MAX - flag), 'constant', constant_values=0)
temp_mask_d = [1] * flag + [0] * (D_MAX - flag)
else:
v_d = idx_d[:D_MAX]
temp_mask_d = [1] * D_MAX
return v_d, np.asarray(temp_mask_d)
def target_encoder(input_seq):
"""
Target Encoder
Args:
input_seq: input target sequence.
Returns:
v_t: padded target sequence.
temp_mask_t: masked target sequence.
"""
temp_t = target_bpe.process_line(input_seq).split()
try:
idx_t = np.asarray([target_idx[i] for i in temp_t])
    except KeyError:  # subword not found in the vocabulary
idx_t = np.array([0])
flag = len(idx_t)
if flag < T_MAX:
v_t = np.pad(idx_t, (0, T_MAX - flag), 'constant', constant_values=0)
temp_mask_t = [1] * flag + [0] * (T_MAX - flag)
else:
v_t = idx_t[:T_MAX]
temp_mask_t = [1] * T_MAX
return v_t, np.asarray(temp_mask_t)
def concordance_index1(y, f):
"""
Compute the concordance index (CI)
Args:
y (ndarray): 1-dim ndarray representing the Kd from the ground truth.
f (ndarray): 1-dim ndarray representing the predicted Kd from the model.
Returns:
ci (float): the concordance index.
"""
ind = np.argsort(y)
y = y[ind]
f = f[ind]
i = len(y) - 1
j = i - 1
z = 0.0
S = 0.0
while i > 0:
while j >= 0:
if y[i] > y[j]:
z = z + 1
u = f[i] - f[j]
if u > 0:
S = S + 1
elif u == 0:
S = S + 0.5
j = j - 1
i = i - 1
j = i - 1
ci = S / z
return ci
class DataEncoder(io.Dataset):
"""
Data Encoder
"""
def __init__(self, ids, label, dti_data):
"""
Initialization
"""
super(DataEncoder, self).__init__()
self.ids = ids
self.label = label
self.data = dti_data
def __len__(self):
"""
Get size
"""
return len(self.ids)
def __getitem__(self, idx):
"""
Get embeddings of drug and target, label
"""
idx = self.ids[idx]
d_input = self.data.iloc[idx]['SMILES']
t_input = self.data.iloc[idx]['Target Sequence']
res = []
d_out, mask_d_out = drug_encoder(d_input)
res.append(d_out)
res.append(mask_d_out)
t_out, mask_t_out = target_encoder(t_input)
res.append(t_out)
res.append(mask_t_out)
labels = self.label[idx]
res.append(labels)
return res
class DataEncoderTest(io.Dataset):
"""
Data Encoder for Test
"""
def __init__(self, ids, dti_data):
"""
Initialization
"""
super(DataEncoderTest, self).__init__()
self.ids = ids
self.data = dti_data
def __len__(self):
"""
Get size
"""
return len(self.ids)
def __getitem__(self, idx):
"""
Get embeddings of drug and target
"""
idx = self.ids[idx]
d_input = self.data.iloc[idx]['SMILES']
t_input = self.data.iloc[idx]['Target Sequence']
res = []
d_out, mask_d_out = drug_encoder(d_input)
res.append(d_out)
res.append(mask_d_out)
t_out, mask_t_out = target_encoder(t_input)
res.append(t_out)
res.append(mask_t_out)
return res | [
"[email protected]"
] | |
bf27c415d0043fd1bc4f0ca0022fb76d57c961a7 | c73e8286579e7ba673440bac44bd527d416beaef | /MyNeutronSimHitAnalyzer/test_myneutronsimhitanalyzer_cfg.py | fefea3af0908ab0115fa557d29761f7fc67dfbf8 | [] | no_license | pietverwilligen/MyAnalyzers | 6815cec717f2ede3f44070466e33c9b292d5b452 | ccd04521747625d82a19174ebebb6a2cba53ddf2 | refs/heads/master | 2023-05-24T12:42:17.134907 | 2023-05-18T11:55:05 | 2023-05-18T11:55:05 | 16,079,295 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 5,417 | py | import FWCore.ParameterSet.Config as cms
process = cms.Process("Demo")
process.load("FWCore.MessageService.MessageLogger_cfi")
# process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
# process.load('Configuration.Geometry.GeometryExtended_cff')
# process.load('Configuration.Geometry.GeometryExtendedPostLS1_cff')
# process.load('Configuration.Geometry.GeometryExtended2015Reco_cff')
# process.load('Configuration.Geometry.GeometryExtended2015_cff')
# process.load('Configuration.Geometry.GeometryExtended2023MuonReco_cff')
# process.load('Configuration.Geometry.GeometryExtended2023Muon_cff')
# process.load('Configuration.Geometry.GeometryExtended2023D17Reco_cff')
# process.load('Configuration.Geometry.GeometryExtended2023D17_cff')
# process.load('Configuration.StandardSequences.MagneticField_38T_PostLS1_cff') # ... assume not necessary anymore ...
# process.load('Configuration.Geometry.GeometryExtended2018Reco_cff')
# process.load('Configuration.Geometry.GeometryExtended2018_cff')
process.load('Configuration.Geometry.GeometryExtended2026D99Reco_cff')
process.load('Configuration.Geometry.GeometryExtended2026D99_cff')
# process.load('Geometry.CommonDetUnit.globalTrackingGeometry_cfi')
# process.load("Geometry.MuonNumbering.muonNumberingInitialization_cfi")
process.load("Geometry.RPCGeometry.rpcGeometry_cfi") # ... needed? see if I can get rid of it ...
process.load("Geometry.CSCGeometry.cscGeometry_cfi")
process.load("Geometry.DTGeometry.dtGeometry_cfi")
# process.load("Geometry.GEMGeometry.gemGeometry_cfi") # ... does not exist ...
process.load("Alignment.CommonAlignmentProducer.FakeAlignmentSource_cfi")
# Load Events from python file
# ---------------------------------------------------------------------------------------
# option A
# ---------------------------------------------------------------------------------------
# process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
# readFiles = cms.untracked.vstring()
# secFiles = cms.untracked.vstring()
# source = cms.Source ("PoolSource",fileNames = readFiles, secondaryFileNames = secFiles)
# ---------------------------------------------------------------------------------------
# option B
# ---------------------------------------------------------------------------------------
# process.load("MinBias_Phase2_14TeV_TuneCP5_100k_Neutron_XS_2026D99_1E4s")
process.load("Test_MinBias_Phase2_14TeV_TuneCP5_100k_Neutron_XS_2026D99_1E4s")
# ---------------------------------------------------------------------------------------
# option C
# ---------------------------------------------------------------------------------------
# process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(1) )
# process.source = cms.Source ("PoolSource",
# fileNames = cms.untracked.vstring('/store/user/piet/NeutronBackground/MinBias_Phase2_14TeV_GEN_SIM_XS_2026D99mod_100k_1E4s_13X_v1/crab_MinBias_Phase2_14TeV_100k_1E4s_XS_13X_v1/230504_162117/0000/step1_228.root'))
# ---------------------------------------------------------------------------------------
process.demo = cms.EDAnalyzer('MyNeutronSimHitAnalyzer',
# ---------
# PdfFileNameBase = cms.untracked.string("MyNeutronSimHistograms_Run2_Neutron_XS_1E4s"),
# RootFileName = cms.untracked.string("MyNeutronSimHistograms_Run2_Neutron_XS_1E4s.root"),
# ---------
# PdfFileNameBase = cms.untracked.string("MyNeutronSimHistograms_Run2_Neutron_XS_1E4s_SH30eV"),
# RootFileName = cms.untracked.string("MyNeutronSimHistograms_Run2_Neutron_XS_1E4s_SH30eV.root"),
# ---------
# PdfFileNameBase = cms.untracked.string("MyNeutronSimHistograms_Run2_Neutron_XS_1E4s_Test"),
# RootFileName = cms.untracked.string("MyNeutronSimHistograms_Run2_Neutron_XS_1E4s_Test.root"),
# ---------
PdfFileNameBase = cms.untracked.string("MyNeutronSimHistograms_Phase2_2026D99_Neutron_XS_1E4s"),
RootFileName = cms.untracked.string("MyNeutronSimHistograms_Phase2_2026D99_Neutron_XS_1E4s.root"),
# ---------
BunchSpacing = cms.untracked.double(25.0),
COMEnergy = cms.untracked.double(13.0),
MaxSimTime = cms.untracked.double(10000000000000.0), # 10000s = 10^13 ns [in ns]
# MaxSimTime = cms.untracked.double(1000000000000.0), # 1000s = 10^12 ns [in ns]
# MaxSimTime = cms.untracked.double(100000000000.0), # 100s = 10^11 ns [in ns]
# MaxSimTime = cms.untracked.double(10000000000.0), # 10s = 10^10 ns [in ns]
# MaxSimTime = cms.untracked.double(100000000.0), # 100ms = 10^8 ns [in ns]
EDepCut30eV = cms.untracked.bool(True),
PhysicsDebug = cms.untracked.bool(True),
TechnicDebug = cms.untracked.bool(True),
)
process.p = cms.Path(process.demo)
| [
"[email protected]"
] | |
763c3a099affc2ad5ad0175153483332b443d153 | d0efa2026b7ed22ff4f9aa76c27ae2474c30f26d | /openapi_client/models/location.py | f6293ef80984ebfdac4c0af34fa105e138e19331 | [] | no_license | begum-akbay/Python | 2075650e0ddbf1c51823ebd749742646bf221603 | fe8b47e29aae609b7510af2d21e53b8a575857d8 | refs/heads/master | 2023-03-28T00:11:00.997194 | 2021-03-25T16:38:17 | 2021-03-25T16:38:17 | 351,499,957 | 0 | 0 | null | 2021-03-25T16:38:17 | 2021-03-25T16:15:16 | Python | UTF-8 | Python | false | false | 7,016 | py | # coding: utf-8
"""
Payment Gateway API Specification.
The documentation here is designed to provide all of the technical guidance required to consume and integrate with our APIs for payment processing. To learn more about our APIs please visit https://docs.firstdata.com/org/gateway. # noqa: E501
The version of the OpenAPI document: 21.1.0.20210122.001
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class Location(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'location_id': 'str',
'merchant_address': 'FraudAddress',
'hierarchy': 'str',
'timezone_offset': 'str',
'user_defined': 'object'
}
attribute_map = {
'location_id': 'locationId',
'merchant_address': 'merchantAddress',
'hierarchy': 'hierarchy',
'timezone_offset': 'timezoneOffset',
'user_defined': 'userDefined'
}
def __init__(self, location_id=None, merchant_address=None, hierarchy=None, timezone_offset=None, user_defined=None): # noqa: E501
"""Location - a model defined in OpenAPI""" # noqa: E501
self._location_id = None
self._merchant_address = None
self._hierarchy = None
self._timezone_offset = None
self._user_defined = None
self.discriminator = None
if location_id is not None:
self.location_id = location_id
if merchant_address is not None:
self.merchant_address = merchant_address
if hierarchy is not None:
self.hierarchy = hierarchy
if timezone_offset is not None:
self.timezone_offset = timezone_offset
if user_defined is not None:
self.user_defined = user_defined
@property
def location_id(self):
"""Gets the location_id of this Location. # noqa: E501
The unique ID of this location. # noqa: E501
:return: The location_id of this Location. # noqa: E501
:rtype: str
"""
return self._location_id
@location_id.setter
def location_id(self, location_id):
"""Sets the location_id of this Location.
The unique ID of this location. # noqa: E501
:param location_id: The location_id of this Location. # noqa: E501
:type: str
"""
self._location_id = location_id
@property
def merchant_address(self):
"""Gets the merchant_address of this Location. # noqa: E501
:return: The merchant_address of this Location. # noqa: E501
:rtype: FraudAddress
"""
return self._merchant_address
@merchant_address.setter
def merchant_address(self, merchant_address):
"""Sets the merchant_address of this Location.
:param merchant_address: The merchant_address of this Location. # noqa: E501
:type: FraudAddress
"""
self._merchant_address = merchant_address
@property
def hierarchy(self):
"""Gets the hierarchy of this Location. # noqa: E501
Free-text field to describe a hierarchy the merchant would like to provide. # noqa: E501
:return: The hierarchy of this Location. # noqa: E501
:rtype: str
"""
return self._hierarchy
@hierarchy.setter
def hierarchy(self, hierarchy):
"""Sets the hierarchy of this Location.
Free-text field to describe a hierarchy the merchant would like to provide. # noqa: E501
:param hierarchy: The hierarchy of this Location. # noqa: E501
:type: str
"""
self._hierarchy = hierarchy
@property
def timezone_offset(self):
"""Gets the timezone_offset of this Location. # noqa: E501
The timezone offset from UTC to the merchants timezone configuration, specified in the format +hh:mm. # noqa: E501
:return: The timezone_offset of this Location. # noqa: E501
:rtype: str
"""
return self._timezone_offset
@timezone_offset.setter
def timezone_offset(self, timezone_offset):
"""Sets the timezone_offset of this Location.
The timezone offset from UTC to the merchants timezone configuration, specified in the format +hh:mm. # noqa: E501
:param timezone_offset: The timezone_offset of this Location. # noqa: E501
:type: str
"""
self._timezone_offset = timezone_offset
@property
def user_defined(self):
"""Gets the user_defined of this Location. # noqa: E501
A JSON object that can carry any additional information about the location that might be helpful for fraud detection. # noqa: E501
:return: The user_defined of this Location. # noqa: E501
:rtype: object
"""
return self._user_defined
@user_defined.setter
def user_defined(self, user_defined):
"""Sets the user_defined of this Location.
A JSON object that can carry any additional information about the location that might be helpful for fraud detection. # noqa: E501
:param user_defined: The user_defined of this Location. # noqa: E501
:type: object
"""
self._user_defined = user_defined
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Location):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
ad7e6bc02120ef80805bcd33c41a5689fdc3b3ae | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/222/users/4327/codes/1685_2471.py | 3839f412181a0170517221e3c9d2dc2bcfbbf2a6 | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 361 | py | ida=int(input("idade:"))
imc=float(input("imc:"))
print("Entradas: {} anos e IMC {}".format(ida,imc))
if (ida <= 0 or ida > 130) or imc <= 0:
print("Dados invalidos")
elif(ida<45 and imc<22):
print("Risco: Baixo")
elif(ida>=45 and imc<22):
print("Risco: Medio")
elif(ida<45 and imc>=22):
print("Risco: Medio")
elif(ida>=45 and imc>=22):
print("Risco: Alto") | [
"[email protected]"
] | |
245ae97ad6a1378e1321ff246b20b6073791fbf7 | d30855895ee0c6ddaef493039dd0e0f1298eeae6 | /demo3.py | 05468c0c352ed93b62578658570a7927fdf16d41 | [
"GPL-1.0-or-later",
"GPL-3.0-only",
"MIT"
] | permissive | Ezhil-Language-Foundation/open-tamil | f5f28463bff4400aa131b4a428e8f3e17aa63997 | 8ea745440f96fe587cf0959d12e990ad7923e60e | refs/heads/main | 2022-12-23T13:50:19.758812 | 2022-12-16T21:56:02 | 2022-12-16T21:56:02 | 14,263,826 | 246 | 72 | MIT | 2022-09-24T17:49:10 | 2013-11-09T19:48:48 | JavaScript | UTF-8 | Python | false | false | 531 | py | # -*- coding: utf-8 -*-
## (C) 2019 Muthiah Annamalai,
## This module is part of solthiruthi project under open-tamil umbrella.
## This code maybe used/distributed under MIT LICENSE.
from solthiruthi.dictionary import DictionaryBuilder, TamilVU
from solthiruthi.tamil99kbd import inv_confusion_matrix as kbd_cm
from solthiruthi.typographical import corrections
TVU, _ = DictionaryBuilder.create(TamilVU)
wl = corrections("அன்பம்", TVU, kbd_cm, ed=2)
for c in wl:
print(("u'%s'," % c))
print(("L = %d" % len(wl)))
| [
"[email protected]"
] |