blob_id (stringlengths 40-40) | directory_id (stringlengths 40-40) | path (stringlengths 3-616) | content_id (stringlengths 40-40) | detected_licenses (sequencelengths 0-112) | license_type (stringclasses 2 values) | repo_name (stringlengths 5-115) | snapshot_id (stringlengths 40-40) | revision_id (stringlengths 40-40) | branch_name (stringclasses 777 values) | visit_date (timestamp[us] 2015-08-06 10:31:46 - 2023-09-06 10:44:38) | revision_date (timestamp[us] 1970-01-01 02:38:32 - 2037-05-03 13:00:00) | committer_date (timestamp[us] 1970-01-01 02:38:32 - 2023-09-06 01:08:06) | github_id (int64 4.92k - 681M, ⌀) | star_events_count (int64 0 - 209k) | fork_events_count (int64 0 - 110k) | gha_license_id (stringclasses 22 values) | gha_event_created_at (timestamp[us] 2012-06-04 01:52:49 - 2023-09-14 21:59:50, ⌀) | gha_created_at (timestamp[us] 2008-05-22 07:58:19 - 2023-08-21 12:35:19, ⌀) | gha_language (stringclasses 149 values) | src_encoding (stringclasses 26 values) | language (stringclasses 1 value) | is_vendor (bool, 2 classes) | is_generated (bool, 2 classes) | length_bytes (int64 3 - 10.2M) | extension (stringclasses 188 values) | content (stringlengths 3 - 10.2M) | authors (sequencelengths 1-1) | author_id (stringlengths 1-132)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d33b2b4cab54b838414fd70c755f3bcd6fb1580f | 5d34d74965504c363dc294c1ba97a46393759995 | /channels/tech_weekly_radar/app.py | c5371872c92041105e68e3e47f6e22824e230e65 | [
"MIT"
] | permissive | Nalorokk/reddit2telegram | 7f898b7d17771e9de98c7f176a5a1d071f6d47d9 | 28bfc1271f40b219ee7a34e8338fa93f0d44cbd2 | refs/heads/master | 2020-03-18T08:29:33.946768 | 2018-05-23T04:25:52 | 2018-05-23T04:25:52 | 134,513,083 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,893 | py | #encoding:utf-8
import csv
import importlib
import random
import datetime
import pymongo
import yaml
from utils import SupplyResult
subreddit = 'all'
t_channel = '@r_channels'
def get_active_period(r2t, channel_name):
min_cursor = r2t.stats.find({'channel' : channel_name.lower()}).sort([('ts', pymongo.ASCENDING)]).limit(1)
min_ts = min_cursor.next()['ts']
max_cursor = r2t.stats.find({'channel' : channel_name.lower()}).sort([('ts', pymongo.DESCENDING)]).limit(1)
max_ts = max_cursor.next()['ts']
diff = max_ts - min_ts
return diff.days
def get_newly_active(r2t, channels_list):
newly_active = list()
for channel in channels_list:
days_active = get_active_period(r2t, channel)
if days_active <= 31:
newly_active.append(channel)
return newly_active
def get_top_growers_for_last_week(r2t, channels_list):
top_growers = dict()
now = datetime.datetime.now()
for channel in channels_list:
week_ago_cursor = r2t.stats.find({
'channel': channel.lower(),
'ts': {'$gte': now - datetime.timedelta(days=7)}
}).sort([('ts', pymongo.ASCENDING)]).limit(100)
for stat_record in week_ago_cursor:
if 'members_cnt' in stat_record:
week_ago_members_cnt = stat_record['members_cnt']
break
current_cursor = r2t.stats.find({'channel': channel.lower()}).sort([('ts', pymongo.DESCENDING)]).limit(100)
for stat_record in current_cursor:
if 'members_cnt' in stat_record:
current_members_cnt = stat_record['members_cnt']
break
grow = current_members_cnt - week_ago_members_cnt
if grow >= 10:
top_growers[channel] = grow
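    # `sorted` over a dict iterates its keys; key=top_growers.get orders the
    # channels by weekly growth, reverse=True puts the biggest gainers first,
    # and [:3] keeps only the top three channel names (the counts are dropped).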
return sorted(top_growers, key=top_growers.get, reverse=True)[:3]
def send_post(submission, r2t):
config_filename = 'configs/prod.yml'
with open(config_filename) as config_file:
        config = yaml.load(config_file.read(), Loader=yaml.SafeLoader)
channels_list = list()
with open(config['cron_file']) as tsv_file:
tsv_reader = csv.DictReader(tsv_file, delimiter='\t')
for row in tsv_reader:
submodule_name = row['submodule_name']
submodule = importlib.import_module('channels.{}.app'.format(submodule_name))
channel_name = submodule.t_channel
if ('@' in channel_name) and (channel_name not in ['@r_channels_test', '@r_channels']):
channels_list.append(channel_name)
newly_active = get_newly_active(r2t, channels_list)
text_to_send = '<b>Weekend news</b>\n\n'
if len(newly_active) > 0:
text_to_send += '🎉 Welcome to newly active channels: {channels_list}. 🎈🎈\n\n'.format(channels_list=', '.join(newly_active))
text_to_send += '🏆 Channel of the week: {channel_name}. Join and enjoy!\n\n'.format(channel_name=random.choice(channels_list))
top_growers = get_top_growers_for_last_week(r2t, channels_list)
if len(top_growers) > 0:
text_to_send += '🔥 Hottest channels of the week: {channels}.\n\n'.format(channels=', '.join(top_growers))
list_of_channels = ['{n}. {channel}'.format(n=str(i + 1).zfill(2), channel=channel)
for i, channel in enumerate(random.sample(channels_list, k=len(channels_list)))]
text_to_send += '⬇️ All active channels:\n{list_of_channels}\n\n'.format(list_of_channels='\n'.join(list_of_channels))
text_to_send += '🙋\nQ: How can I help?\nA: Promote your favorite channels!\n\n'
text_to_send += 'Q: How to make similar channels?\nA: Ask here or use manual at https://github.com/Fillll/reddit2telegram.\n\n'
text_to_send += 'Q: Where to donate?\nA: http://bit.ly/r2t_donate'
r2t.send_text(text_to_send, parse_mode='HTML')
# It's not a proper supply, so just stop.
return SupplyResult.STOP_THIS_SUPPLY
| [
"[email protected]"
] | |
71c30464092ca759bbb801e6282b2bc827f19be1 | ceb620c4be8b34f4aa08156226187db081fc3b55 | /loca_13/vat_retention/models/retention_vat.py | a78c159f4bcd7d8ea5616ae7b08ad7533a48d860 | [] | no_license | hjrhjr/entrenamiento_13_odoo_ref | f73e292b91d085473283f63a88ccd2363a03d9bf | 9a492c006d9c0aab68d0b095281dafda97ebdfda | refs/heads/main | 2023-08-25T06:46:39.075724 | 2021-10-19T14:51:27 | 2021-10-19T14:51:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,751 | py | # -*- coding: utf-8 -*-
import logging
from odoo import api, fields, models, _
from odoo.exceptions import UserError, ValidationError
from datetime import datetime
_logger = logging.getLogger('__name__')
class InvoiceLineInherit(models.Model):
_inherit = 'account.move.line'
retention_id = fields.Many2one('vat.retention', string='VAT Retention')
class VatRetentionTaxLines(models.Model):
"""This model is about tax withheld in a invoice."""
_name = 'vat.retention.tax.lines'
name = fields.Char(string='Tax name', size=40)
tax_id = fields.Many2one('account.tax', string="Tax")
company_id = fields.Many2one('res.company', string='Company')
vat_ret_line_id = fields.Many2one('vat.retention.invoice.line', ondelete="cascade",string='vat_ret_line_id')
base_tax = fields.Float(string='Base tax')
tax_amount = fields.Float(string='Tax amount')
amount_withheld = fields.Float(string='Amount withheld')
class VatRetentionInvoiceLine(models.Model):
"""This model is for a line invoices withholed."""
_name = 'vat.retention.invoice.line'
def formato_fecha(self):
fecha = str(self.invoice_id.invoice_date)
fecha_aux=fecha
ano=fecha_aux[0:4]
mes=fecha[5:7]
dia=fecha[8:10]
resultado=dia+"/"+mes+"/"+ano
return resultado
def float_format(self,valor):
#valor=self.base_tax
if valor:
result = '{:,.2f}'.format(valor)
result = result.replace(',','*')
result = result.replace('.',',')
result = result.replace('*','.')
else:
result = "0,00"
return result
def valida_excento(self,id_tax,id_retention):
tipo=self.tax_id.aliquot
valor_excento=0
cant_reduced=0
cant_general=0
cant_additional=0
resultado=''
lista_det = self.env['vat.retention.invoice.line'].search([('retention_id','=',self.retention_id.id)])
for det in lista_det:
if det.tax_id.amount==0:
valor_excento=valor_excento+det.amount_untaxed
if det.tax_id.aliquot=='reduced':
cant_reduced=cant_reduced+1
if det.tax_id.aliquot=='general':
cant_general=cant_general+1
if det.tax_id.aliquot=='additional':
cant_additional=cant_additional+1
if tipo=='general' and cant_general>0:
resultado=str(self.float_format(valor_excento))
if tipo=='reduced' and cant_reduced>0 and cant_general==0:
resultado=str(self.float_format(valor_excento))
if tipo=='additional' and cant_additional>0 and cant_reduced==0 and cant_general==0:
resultado=str(self.float_format(valor_excento))
return str(resultado)
#@api.depends('amount_vat_ret', 'retention_rate')
    def _compute_amount_withheld(self):
        """This function computes the VAT retention (currently disabled)."""
        return 0
#amount = (self.amount_vat_ret * self.retention_rate) / 100
#_logger.info('\n\n\n amount %s \n\n\n', amount)
#self.retention_amount = amount
#voucher = self.env['vat.retention'].search([('id', '=', self.retention_id.id)])
#_logger.info("\n\n\n voucher %s\n\n\n",voucher)
#voucher.vat_retentioned = amount
name = fields.Char(string='Description')
retention_id = fields.Many2one('vat.retention', string='Vat retention')
amount_untaxed = fields.Float(string='Amount untaxed')
invoice_number = fields.Char(string='Invoice number')
amount_vat_ret = fields.Float(string='Amount tax')
retention_amount = fields.Float(string='Retention', readonly=True, store=True)
retention_rate = fields.Float(string='Rate', help="The retention rate can vary between 75% al 100% depending on the taxpayer.")
move_id = fields.Many2one('account.move', string='Asiento')
invoice_id = fields.Many2one('account.move', string='Invoice', ondelete='restrict', help="Retention invoice")
tax_line_ids = fields.One2many('vat.retention.tax.lines', 'vat_ret_line_id', string='tax lines')
    # field still to be added
    # tax_book_id = fields.Many2one('tax.book', string="Tax book")
    # fields to be removed
    tax_id = fields.Many2one('account.tax', string='Tax')
    # SQL constraint still to be added
# _sql_constraints = [
# ('one_name', 'unique (invoice_id)', 'message')
# ]
class RetentionVat(models.Model):
"""This is a main model for rentetion vat control."""
_name = 'vat.retention'
_inherit = ['mail.thread', 'mail.activity.mixin']
journal_id=fields.Char(string='journal_id')
move_id = fields.Many2one('account.move', string='Id del movimiento')
"""def unlink(self):
for vat in self:
if vat.state=='posted':
raise UserError(_("El comprobante de retencion IVA ya esta Publicado, No se puede eliminar"))
return super(RetentionVat,self).unlink() """
def formato_fecha2(self):
fecha = str(self.voucher_delivery_date)
fecha_aux=fecha
ano=fecha_aux[0:4]
mes=fecha[5:7]
dia=fecha[8:10]
resultado=dia+"/"+mes+"/"+ano
return resultado
def periodo(self):
fecha = str(self.voucher_delivery_date)
fecha_aux=fecha
ano=fecha_aux[0:4]
mes=fecha[5:7]
dia=fecha[8:10]
resultado=ano+"-"+mes
return resultado
def float_format2(self,valor):
#valor=self.base_tax
if valor:
result = '{:,.2f}'.format(valor)
result = result.replace(',','*')
result = result.replace('.',',')
result = result.replace('*','.')
else:
result = "0,00"
return result
def doc_cedula(self,aux):
#nro_doc=self.partner_id.vat
busca_partner = self.env['res.partner'].search([('id','=',aux)])
for det in busca_partner:
tipo_doc=busca_partner.doc_type
nro_doc=str(busca_partner.vat)
nro_doc=nro_doc.replace('V','')
nro_doc=nro_doc.replace('v','')
nro_doc=nro_doc.replace('E','')
nro_doc=nro_doc.replace('e','')
nro_doc=nro_doc.replace('G','')
nro_doc=nro_doc.replace('g','')
nro_doc=nro_doc.replace('J','')
nro_doc=nro_doc.replace('j','')
nro_doc=nro_doc.replace('P','')
nro_doc=nro_doc.replace('p','')
nro_doc=nro_doc.replace('-','')
if tipo_doc=="v":
tipo_doc="V"
if tipo_doc=="e":
tipo_doc="E"
if tipo_doc=="g":
tipo_doc="G"
if tipo_doc=="j":
tipo_doc="J"
if tipo_doc=="p":
tipo_doc="P"
resultado=str(tipo_doc)+"-"+str(nro_doc)
return resultado
#raise UserError(_('cedula: %s')%resultado)
#@api.depends('retention_line_ids.retention_amount')
def _amount_all(self):
""" It shows total in this form view"""
return 0
#amount = 0
#retention = 0
#for invoice in self.retention_line_ids:
# amount += invoice.amount_untaxed
# retention += invoice.retention_amount
#self.amount_untaxed = amount
#self.vat_retentioned = retention
@api.model
def _type(self):
"""Return invoice type."""
return self._context.get('type', 'in_refund')
    # Sequential voucher number, as required by SENIAT
name = fields.Char(string='Voucher number', default='New')
    # supplier data
partner_id = fields.Many2one('res.partner', string='Partner')
rif = fields.Char(string='RIF')
    # voucher issue and delivery dates
accouting_date = fields.Date(string='Accounting date', help='Voucher generation date', readonly="True")
voucher_delivery_date = fields.Date(string='Voucher delivery date')
    # invoice data
invoice_id = fields.Many2one('account.move', string="Invoice")
invoice_number = fields.Char(string='Invoice Number')
invoice_ctrl_num = fields.Char(string='Invoice control number')
company_id = fields.Many2one('res.company', string="Company", default=lambda self: self.env.company)
    # retentions applied
retention_line_ids = fields.One2many('vat.retention.invoice.line', 'retention_id', string='Retention')
    # totals
amount_untaxed = fields.Float(string='Importe Base', help='This concept is tax base')
vat_retentioned = fields.Float(string='VAT retentioned')
    # accounting data
# journal_id = fields.Many2one('account.journal', string='Journal')
currency_id = fields.Many2one('res.currency', string='Currency')
account_id = fields.Many2one('account.account', string='Account')
manual=fields.Boolean(default=True)
line_ids = fields.One2many('account.move.line', 'retention_id', string='Invoice lines',
copy=True, readonly=True,
states={'draft': [('readonly', False)]})
type = fields.Selection(selection=[
('out_invoice', 'Customer Invoice'),
('in_invoice','Supplier Invoince'),
('in_refund','Suplier Refund'),
('out_refund','Customer Refund'),
('in_receipt','Nota Debito cliente'),
('out_receipt','Nota Debito proveedor'),
], string="Type invoice", store=True, default=_type)
    # other information
state = fields.Selection(selection=[
('draft', 'Draft'),
('posted', 'Posted'),
# ('done', 'Done'),
('cancel', 'Cancelled')
], string='Status', readonly=True, copy=False, tracking=True,
default='draft')
is_supplier = fields.Boolean(string='Supplier')
is_customer = fields.Boolean(string='Customer')
description = fields.Char(string="Description", help="Description about this voucher.")
@api.onchange('partner_id')
def _rif(self):
if self.partner_id:
_logger.info("\n\n\n RIF \n\n\n")
self.rif = self.partner_id.vat
else:
self.rif = ''
def action_cancel(self):
if self.invoice_id.state == 'cancel':
self.write({'state': 'cancel'})
else:
raise ValidationError("Debe cancelar primero la factura")
#@api.model
def cargar_fact(self):
if not self.invoice_id.id:
raise UserError(_(' Debe Seleccionar una Factura Interna'))
if self.invoice_id.id:
map_id = self.env['account.move'].search([('id','=',self.invoice_id.id)],order="id asc")
#raise UserError(_(' map_id:%s')%map_id)
            #self.rif=map_id.name  # watch out: this is the key part
if not map_id.partner_id.ret_agent:
raise UserError(_(' La empresa %s no esta configurada como agente de retencion iva')%map_id.partner_id.name)
else:
if map_id.vat_ret_id.id:
raise UserError(_(' Esta Factura ya tiene asignado un comprobante de retencion'))
if not map_id.vat_ret_id:
acum_iva=0
acum_mon_ret=0
retention = self.env['vat.retention']
self.rif=map_id.rif
self.partner_id=map_id.partner_id.id
self.accouting_date=datetime.now()
self.voucher_delivery_date=datetime.now()
self.invoice_number=map_id.invoice_number
self.move_id= self.invoice_id.id,
self.journal_id=self.invoice_id.journal_id.id
self.type=self.invoice_id.type
self.invoice_ctrl_num=self.invoice_id.invoice_ctrl_number
self.manual=False
lista_movline = self.env['account.move.line'].search([('move_id','=',self.invoice_id.id)])
for det_mov_line in lista_movline:
importe_base=det_mov_line.price_subtotal
monto_total=det_mov_line.price_total
monto_iva=(monto_total-importe_base)
acum_iva=acum_iva+monto_iva
monto_retenido=(monto_iva*map_id.partner_id.vat_retention_rate/100)
acum_mon_ret=acum_mon_ret+monto_retenido
ret_lines = self.env['vat.retention.invoice.line']
values = {
'name': self.invoice_id.name,
'invoice_id': self.invoice_id.id,
'move_id': self.invoice_id.id,
'invoice_number': map_id.invoice_number,
'amount_untaxed': importe_base,
'retention_amount':monto_retenido,
'amount_vat_ret':monto_iva,
'retention_rate':map_id.partner_id.vat_retention_rate,
'retention_id':self.id,
'tax_id':det_mov_line.tax_ids.id,
}
if monto_iva!=0:
ret_line = ret_lines.create(values)
self.amount_untaxed=acum_iva
self.vat_retentioned=acum_mon_ret
map_id.write({
'vat_ret_id':self.id,
})
def action_posted(self):
#raise UserError(_('ID MOVE = %s')%self)
if not self.voucher_delivery_date:
raise ValidationError("Debe establecer una fecha de entrega")
self.state = 'posted'
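        # posting flow: build the retention journal entry, create its two move
        # lines, then post the move (journals that post at bank reconciliation
        # time are skipped by the filter below)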
nombre_ret_iva = self.get_name()
id_move=self.registro_movimiento_retencion(nombre_ret_iva)
idv_move=id_move.id
valor=self.registro_movimiento_linea_retencion(idv_move,nombre_ret_iva)
moves= self.env['account.move'].search([('id','=',idv_move)])
moves.filtered(lambda move: move.journal_id.post_at != 'bank_rec').post()
##self.concilio_saldo_pendiente()
def action_draft(self):
#self.state = 'draft'
for item in self:
_logger.info("\n\n\n\n self %s \n\n\n\n", type(self))
_logger.info("\n\n\n self %s \n\n\n", self)
# @api.onchange('partner_id')
def get_address_partner(self):
location = ''
streets = ''
if self.partner_id:
location = self._get_state_and_city()
streets = self._get_streets()
return (streets + " " + location)
def _get_state_and_city(self):
state = ''
city = ''
if self.partner_id.state_id:
state = "Edo." + " " + str(self.partner_id.state_id.name or '')
_logger.info("\n\n\n state %s \n\n\n", state)
if self.partner_id.city:
city = str(self.partner_id.city or '')
# _logger.info("\n\n\n city %s\n\n\n", city)
result = city + " " + state
_logger.info("\n\n\n result %s \n\n\n", result)
return result
def _get_streets(self):
street2 = ''
av = ''
if self.partner_id.street:
av = str(self.partner_id.street or '')
if self.partner_id.street2:
street2 = str(self.partner_id.street2 or '')
result = av + " " + street2
return result
def get_company_address(self):
location = ''
streets = ''
if self.company_id:
streets = self._get_company_street()
location = self._get_company_state_city()
_logger.info("\n\n\n street %s location %s\n\n\n", streets, location)
return (streets + " " + location)
def _get_company_street(self):
street2 = ''
av = ''
if self.company_id.street:
av = str(self.company_id.street or '')
if self.company_id.street2:
street2 = str(self.company_id.street2 or '')
result = av + " " + street2
return result
def _get_company_state_city(self):
state = ''
city = ''
if self.company_id.state_id:
state = "Edo." + " " + str(self.company_id.state_id.name or '')
_logger.info("\n\n\n state %s \n\n\n", state)
if self.company_id.city:
city = str(self.company_id.city or '')
_logger.info("\n\n\n city %s\n\n\n", city)
result = city + " " + state
_logger.info("\n\n\n result %s \n\n\n", result)
return result
#def unlink(self):
"""Throw an exception if the retention voucher is not in cancel state."""
#for voucher in self:
#raise ValidationError(_("No se pueden eliminar comprobantes"))
@api.model
def create(self, vals):
partners=vals['type']
#partners=vals['partners']
#del vals['partners']
if vals.get('name', 'New') == 'New':
_logger.info("\n\n\n vals.get.tpye %s \n\n\n", vals.get('type', 'in_invoice'))
if partners=='in_invoice' or partners=='in_refund' or partners=='in_receipt':
vals['name'] = self.env['ir.sequence'].next_by_code('purchase.vat.retention.voucher.number') or '/'
_logger.info("\n\n\n vals[name] %s \n\n\n",vals['name'])
else:
vals['name']= '00000000'
return super().create(vals)
def conv_div_extranjera(self,valor):
self.invoice_id.currency_id.id
fecha_contable_doc=self.invoice_id.date
monto_factura=self.invoice_id.amount_total
valor_aux=0
#raise UserError(_('moneda compañia: %s')%self.company_id.currency_id.id)
if self.invoice_id.currency_id.id!=self.company_id.currency_id.id:
tasa= self.env['res.currency.rate'].search([('currency_id','=',self.invoice_id.currency_id.id),('name','<=',self.invoice_id.date)],order="name asc")
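            # rates come back sorted ascending by date; the loop below keeps the
            # last rate whose date is on or before the invoice accounting date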
for det_tasa in tasa:
if fecha_contable_doc>=det_tasa.name:
valor_aux=det_tasa.rate
rate=round(1/valor_aux,2) # LANTA
#rate=round(valor_aux,2) # ODOO SH
resultado=valor/rate
else:
resultado=valor
return resultado
def registro_movimiento_retencion(self,consecutivo_asiento):
#raise UserError(_('darrell = %s')%self.partner_id.vat_retention_rate)
name = consecutivo_asiento
signed_amount_total=0
amont_totall=self.vat_retentioned #self.conv_div_extranjera(self.vat_retentioned)
#amount_itf = round(float(total_monto) * float((igtf_porcentage / 100.00)),2)
if self.type=="in_invoice" or self.type=="in_receipt":
signed_amount_total=amont_totall
if self.type=="out_invoice" or self.type=="out_receipt":
signed_amount_total=(-1*amont_totall)
if self.type=="out_invoice" or self.type=="out_refund" or self.type=="out_receipt":
id_journal=self.partner_id.ret_jrl_id.id
rate_valor=self.partner_id.vat_retention_rate
if self.type=="in_invoice" or self.type=="in_refund" or self.type=="in_receipt":
if self.env.company.confg_ret_proveedores=="c":#loca14
id_journal=self.env.company.partner_id.ret_jrl_id.id#loca14
rate_valor=self.env.company.partner_id.vat_retention_rate#loca14
if self.env.company.confg_ret_proveedores=="p":#loca14
id_journal=self.partner_id.ret_jrl_id.id
rate_valor=self.partner_id.vat_retention_rate
#raise UserError(_('papa = %s')%signed_amount_total)
value = {
'name': name,
            'date': self.move_id.date,  # done
            #'amount_total':self.vat_retentioned,  # done
            'partner_id': self.partner_id.id,  # done
            'journal_id':id_journal,
            'ref': "Retención del %s %% IVA de la Factura %s" % (rate_valor,self.move_id.name),
            #'amount_total':self.vat_retentioned,  # done
            #'amount_total_signed':signed_amount_total,  # done
            'type': "entry",  # this field is what allows changing and storing values
'vat_ret_id': self.id,
'company_id':self.env.company.id,#loca14
#'currency_id':self.invoice_id.currency_id.id,
}
#raise UserError(_('value= %s')%value)
move_obj = self.env['account.move']
move_id = move_obj.create(value)
#raise UserError(_('move_id= %s')%move_id)
return move_id
def registro_movimiento_linea_retencion(self,id_movv,consecutivo_asiento):
#raise UserError(_('ID MOVE = %s')%id_movv)
name = consecutivo_asiento
valores = self.vat_retentioned #self.conv_div_extranjera(self.vat_retentioned) #VALIDAR CONDICION
#raise UserError(_('valores = %s')%valores)
cero = 0.0
if self.type=="out_invoice" or self.type=="out_refund" or self.type=="out_receipt":
            cuenta_ret_cliente=self.partner_id.account_ret_receivable_id.id  # customer retention account
            cuenta_ret_proveedor=self.partner_id.account_ret_payable_id.id  # supplier retention account
cuenta_clien_cobrar=self.partner_id.property_account_receivable_id.id
cuenta_prove_pagar = self.partner_id.property_account_payable_id.id
rate_valor=self.partner_id.vat_retention_rate
if self.type=="in_invoice" or self.type=="in_refund" or self.type=="in_receipt":
if self.env.company.confg_ret_proveedores=="c":#loca14
                cuenta_ret_cliente=self.env.company.partner_id.account_ret_receivable_id.id  # loca14 customer retention account
                cuenta_ret_proveedor=self.env.company.partner_id.account_ret_payable_id.id  # loca14 supplier retention account
cuenta_clien_cobrar=self.env.company.partner_id.property_account_receivable_id.id #loca14
cuenta_prove_pagar = self.env.company.partner_id.property_account_payable_id.id #loca14
rate_valor=self.env.company.partner_id.vat_retention_rate #loca14
if self.env.company.confg_ret_proveedores=="p": #loca14
                cuenta_ret_cliente=self.partner_id.account_ret_receivable_id.id  # customer retention account
                cuenta_ret_proveedor=self.partner_id.account_ret_payable_id.id  # supplier retention account
cuenta_clien_cobrar=self.partner_id.property_account_receivable_id.id
cuenta_prove_pagar = self.partner_id.property_account_payable_id.id
rate_valor=self.partner_id.vat_retention_rate
tipo_empresa=self.move_id.type
#raise UserError(_('papa = %s')%tipo_empresa)
if tipo_empresa=="in_invoice" or tipo_empresa=="in_receipt":#aqui si la empresa es un proveedor
cuenta_haber=cuenta_ret_proveedor
cuenta_debe=cuenta_prove_pagar
if tipo_empresa=="in_refund":
cuenta_haber=cuenta_prove_pagar
cuenta_debe=cuenta_ret_proveedor
if tipo_empresa=="out_invoice" or tipo_empresa=="out_receipt":# aqui si la empresa es cliente
cuenta_haber=cuenta_clien_cobrar
cuenta_debe=cuenta_ret_cliente
if tipo_empresa=="out_refund":
cuenta_haber=cuenta_ret_cliente
cuenta_debe=cuenta_clien_cobrar
balances=cero-valores
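        # double-entry pair: first the credit leg on cuenta_haber is created
        # below, then the mirrored debit leg on cuenta_debe, built by reusing
        # the same `value` dict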
value = {
'name': name,
'ref' : "Retención del %s %% IVA de la Factura %s" % (rate_valor,self.move_id.name),
'move_id': int(id_movv),
'date': self.move_id.date,
'partner_id': self.partner_id.id,
'account_id': cuenta_haber,
#'currency_id':self.invoice_id.currency_id.id,
#'amount_currency': 0.0,
#'date_maturity': False,
'credit': valores,
            'debit': 0.0,  # zero goes here; when debit carries the value, account_move picks it up
            'balance':-valores,  # negative sign
'price_unit':balances,
'price_subtotal':balances,
'price_total':balances,
}
move_line_obj = self.env['account.move.line']
move_line_id1 = move_line_obj.create(value)
balances=valores-cero
value['account_id'] = cuenta_debe
        value['credit'] = 0.0  # zero goes here
value['debit'] = valores
value['balance'] = valores
value['price_unit'] = balances
value['price_subtotal'] = balances
value['price_total'] = balances
move_line_id2 = move_line_obj.create(value)
def concilio_saldo_pendiente(self):
id_retention=self.id
tipo_empresa=self.move_id.type
if tipo_empresa=="in_invoice" or tipo_empresa=="out_refund":#aqui si la empresa es un proveedor
type_internal="payable"
if tipo_empresa=="out_invoice" or tipo_empresa=="in_refund":# aqui si la empresa es cliente
type_internal="receivable"
busca_movimientos = self.env['account.move'].search([('vat_ret_id','=',id_retention)])
for det_movimientos in busca_movimientos:
busca_line_mov = self.env['account.move.line'].search([('move_id','=',det_movimientos.id),('account_internal_type','=',type_internal)])
if busca_line_mov.credit==0:
id_move_debit=busca_line_mov.id
monto_debit=busca_line_mov.debit
if busca_line_mov.debit==0:
id_move_credit=busca_line_mov.id
monto_credit=busca_line_mov.credit
if tipo_empresa=="in_invoice" or tipo_empresa=="out_refund":
monto=monto_debit
if tipo_empresa=="out_invoice" or tipo_empresa=="in_refund":
monto=monto_credit
value = {
'debit_move_id':id_move_debit,
'credit_move_id':id_move_credit,
'amount':monto,
'max_date':self.accouting_date,
}
self.env['account.partial.reconcile'].create(value)
#raise UserError(_('concilia = %s')%busca_movimientos)
def get_name(self):
        '''Builds the journal entry name. If the sequence is not created yet,
        creates one with the code 'l10n_ve_cuenta_retencion_iva'.'''
self.ensure_one()
SEQUENCE_CODE = 'l10n_ve_cuenta_retencion_iva'
company_id = 1
IrSequence = self.env['ir.sequence'].with_context(force_company=1)
name = IrSequence.next_by_code(SEQUENCE_CODE)
        # if a sequence does not exist yet for this company, create one
if not name:
IrSequence.sudo().create({
'prefix': 'RET_IVA/',
'name': 'Localización Venezolana Retenciones IVA %s' % 1,
'code': SEQUENCE_CODE,
'implementation': 'no_gap',
'padding': 8,
'number_increment': 1,
'company_id': self.env.company.id,#loca14
})
name = IrSequence.next_by_code(SEQUENCE_CODE)
return name
| [
"[email protected]"
] | |
7c901dd586db3c2163b73b8f6fe6fb98eb5601eb | 10b3d1ce02eaa4908dc16ca378ddfb1955b2d625 | /MV3D_TF_release/lib/datasets/kitti_mv3d.py | 89c9798b9fcbd959dd7851e2ed8b0745a38e18fc | [
"MIT",
"BSD-3-Clause"
] | permissive | ZiningWang/Sparse_Pooling | 7281aa0d974849eac8c48faa5ba08519b091ef6e | f46882832d0e2fed5ab4a0af15cead44fd3c6faa | refs/heads/master | 2023-05-26T08:47:16.232822 | 2023-05-20T08:39:11 | 2023-05-20T08:39:11 | 141,640,800 | 56 | 21 | null | null | null | null | UTF-8 | Python | false | false | 40,261 | py | # WZN: Note here we unify all LIDAR points to camera frame!!!
__author__ = 'yuxiang' # derived from honda.py by fyang
import datasets
import datasets.kitti_mv3d
import os
import time
import PIL
import datasets.imdb
import numpy as np
from matplotlib import pyplot as plt
import scipy.sparse
from utils.cython_bbox import bbox_overlaps
from utils.boxes_grid import get_boxes_grid
import subprocess
import pickle
from fast_rcnn.config import cfg
import math
from rpn_msr.generate_anchors import generate_anchors_bv
from utils.transform import camera_to_lidar_cnr, lidar_to_corners_single, computeCorners3D, lidar_3d_to_bv, lidar_cnr_to_3d,bv_anchor_to_lidar,lidar_cnr_to_camera_bv,lidar_cnr_to_bv_cnr
class kitti_mv3d(datasets.imdb):
def __init__(self, image_set, kitti_path=None,object_name='cars'):
datasets.imdb.__init__(self, image_set)
self._image_set = image_set
# self._kitti_path = '$Faster-RCNN_TF/data/KITTI'
self._kitti_path = self._get_default_path() if kitti_path is None \
else kitti_path
# self._data_path = '$Faster-RCNN_TF/data/KITTI/object'
self._data_path = os.path.join(self._kitti_path, 'object')
self._set_label_dir()
self.set_object(object_name)
'''
if object_name=='cars':
#for cars
self._classes = ('__background__', 'Car', 'Van', 'Truck', 'Tram')#, 'Pedestrian', 'Cyclist')
self._class_to_ind = dict(zip(self.classes, [0,1,1,1,1]))
elif object_name=='peds':
#for peds and cyclists
#self.num_classes = 3 #0 for background, 1 ped, 2 for cyc, 3 for non-interested region
self._classes = ('__background__', 'Pedestrian')
self._class_to_ind = dict(zip(self.classes, [0,1]))
else:
assert False, 'invalid training object'
'''
self._image_ext = '.png'
self._lidar_ext = '.npy'
self._lidar_pc_ext = '.npy'
self._subset = object_name
self._image_index = self._load_image_set_index()
# Default to roidb handler
self._roidb_handler = self.gt_roidb
self.config = {'top_k': 100000}
# statistics for computing recall
# self._num_boxes_all = np.zeros(self.num_classes, dtype=np.int)
# self._num_boxes_covered = np.zeros(self.num_classes, dtype=np.int)
# self._num_boxes_proposal = 0
assert os.path.exists(self._kitti_path), \
'KITTI path does not exist: {}'.format(self._kitti_path)
assert os.path.exists(self._data_path), \
'Path does not exist: {}'.format(self._data_path)
def set_object(self,object_name):
if object_name=='cars':
#for cars
self._classes = ('__background__', 'Car', 'Van', 'Truck', 'Tram')#, 'Pedestrian', 'Cyclist')
self.classes_write = ('Car','Pedestrian')
self._class_to_ind = dict(zip(self.classes, [0,1,1,1,1]))
elif object_name=='peds':
#for peds and cyclists
            #self.num_classes = 3 #0 for background, 1 ped, 2 for cyc, -1 for non-interested region, -2 for person_sitting (because they have bv_boxes)
self._classes = ('__background__', 'Pedestrian','Person_sitting') #,'DontCare'
self.classes_write = ('Car', 'Pedestrian')
self._class_to_ind = dict(zip(self.classes, [0,1,1])) # I think treating them as 1 makes more positives, that's good #,-1
else:
assert False, 'invalid training object'
self._subset = object_name
self._roidb_handler = self.gt_roidb
def image_path_at(self, i):
"""
Return the absolute path to image i in the image sequence.
"""
return self.image_path_from_index(self.image_index[i])
def lidar_pc_path_at(self, i):
if self._image_set == 'test':
prefix = 'testing/lidar_pc' #for voxel
else:
prefix = 'training/lidar_pc' #for voxel
# lidar_bv_path = '$Faster-RCNN_TF/data/KITTI/object/training/lidar_bv/000000.npy'
lidar_path = os.path.join(self._data_path, prefix, self.image_index[i] + self._lidar_pc_ext)
assert os.path.exists(lidar_path), \
'Path does not exist: {}'.format(lidar_path)
return lidar_path
def lidar_path_at(self, i):
"""
Return the absolute path to lidar i in the lidar sequence.
"""
return self.lidar_path_from_index(self.image_index[i])
def calib_at(self, i):
"""
Return the calib sequence.
"""
index = self.image_index[i]
calib_ori = self._load_kitti_calib(index)
calib = np.zeros((4, 12))
calib[0,:] = calib_ori['P2'].reshape(12)
calib[1,:] = calib_ori['P3'].reshape(12)
calib[2,:9] = calib_ori['R0'].reshape(9)
calib[3,:] = calib_ori['Tr_velo2cam'].reshape(12)
return calib
def image_path_from_index(self, index):
"""
Construct an image path from the image's "index" identifier.
"""
# set the prefix
if self._image_set == 'test':
prefix = 'testing/image_2'
else:
prefix = 'training/image_2'
# image_path = '$Faster-RCNN_TF/data/KITTI/object/training/image_2/000000.png'
image_path = os.path.join(self._data_path, prefix, index + self._image_ext)
assert os.path.exists(image_path), \
'Path does not exist: {}'.format(image_path)
return image_path
def lidar_path_from_index(self, index):
"""
Construct an image path from the image's "index" identifier.
"""
# set the prefix
if self._image_set == 'test':
prefix = 'testing/lidar_bv' #for MV3D
else:
prefix = 'training/lidar_bv' #for MV3D
# lidar_bv_path = '$Faster-RCNN_TF/data/KITTI/object/training/lidar_bv/000000.npy'
lidar_bv_path = os.path.join(self._data_path, prefix, index + self._lidar_ext)
assert os.path.exists(lidar_bv_path), \
'Path does not exist: {}'.format(lidar_bv_path)
return lidar_bv_path
def _load_image_set_index(self):
"""
Load the indexes listed in this dataset's image set file.
"""
# image_set_file = '$Faster-RCNN_TF/data/KITTI/ImageSets/train.txt'
image_set_file = os.path.join(self._kitti_path, 'ImageSets',self._image_set + '.txt')
self.list_dir = image_set_file
assert os.path.exists(image_set_file), \
'Path does not exist: {}'.format(image_set_file)
with open(image_set_file) as f:
#WZN: return lines without '\n'
image_index = [x.rstrip('\n') for x in f.readlines()]
print ('image sets length: ', len(image_index))
return image_index
def _get_default_path(self):
"""
Return the default path where KITTI is expected to be installed.
"""
return os.path.join(datasets.ROOT_DIR, 'data', 'KITTI')
def gt_roidb(self):
"""
Return the database of ground-truth regions of interest.
This function loads/saves from/to a cache file to speed up future calls.
WZN: first time read Kitti labels, and save a cache
"""
cache_file = os.path.join(self.cache_path, self.name +'_' +self._subset + '_gt_roidb.pkl')
if os.path.exists(cache_file):
with open(cache_file, 'rb') as fid:
roidb = pickle.load(fid)
print ('{} gt roidb loaded from {}'.format(self.name, cache_file))
return roidb
gt_roidb = [self._load_kitti_annotation(index)
for index in self.image_index]
with open(cache_file, 'wb') as fid:
pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
print ('wrote gt roidb to {}'.format(cache_file))
return gt_roidb
def _load_kitti_calib(self, index):
"""
load projection matrix
"""
if self._image_set == 'test':
prefix = 'testing/calib'
else:
prefix = 'training/calib'
calib_dir = os.path.join(self._data_path, prefix, index + '.txt')
# P0 = np.zeros(12, dtype=np.float32)
# P1 = np.zeros(12, dtype=np.float32)
# P2 = np.zeros(12, dtype=np.float32)
# P3 = np.zeros(12, dtype=np.float32)
# R0 = np.zeros(9, dtype=np.float32)
# Tr_velo_to_cam = np.zeros(12, dtype=np.float32)
# Tr_imu_to_velo = np.zeros(12, dtype=np.float32)
# j = 0
with open(calib_dir) as fi:
lines = fi.readlines()
# assert(len(lines) == 8)
# obj = lines[0].strip().split(' ')[1:]
# P0 = np.array(obj, dtype=np.float32)
# obj = lines[1].strip().split(' ')[1:]
# P1 = np.array(obj, dtype=np.float32)
obj = lines[2].strip().split(' ')[1:]
P2 = np.array(obj, dtype=np.float32)
obj = lines[3].strip().split(' ')[1:]
P3 = np.array(obj, dtype=np.float32)
obj = lines[4].strip().split(' ')[1:]
R0 = np.array(obj, dtype=np.float32)
obj = lines[5].strip().split(' ')[1:]
Tr_velo_to_cam = np.array(obj, dtype=np.float32)
# obj = lines[6].strip().split(' ')[1:]
# P0 = np.array(obj, dtype=np.float32)
return {'P2' : P2.reshape(3,4),
'P3' : P3.reshape(3,4),
'R0' : R0.reshape(3,3),
'Tr_velo2cam' : Tr_velo_to_cam.reshape(3, 4)}
def _set_label_dir(self):
self.gt_dir = os.path.join(self._data_path, 'training/label_2')
def _load_kitti_annotation(self, index):
"""
Load image and bounding boxes info from txt file in the KITTI
format.
WZN: The non-interested area (Dontcare) is just ignored (treated same as background)
"""
if self._image_set == 'test':
return {'ry' : np.array([]),
'lwh' : np.array([]),
'boxes' : np.array([]), #xy box in image
#'boxes_bv' : boxes_bv, #xy box in bird view
'boxes_3D_cam' : np.array([]), #[xyz_center, lwh] in 3D, cam frame
#'boxes_3D' : boxes3D_lidar, #[xyz_center, lwh] in 3D, absolute
'boxes3D_cam_corners' : np.array([]), #8 corners of box in 3D, cam frame
#'boxes_corners' : boxes3D_corners, #8 corners of box in 3D
#'boxes_bv_corners' : boxes_bv_corners, #4 corners of box in bird view
'gt_classes': np.array([]), #classes
'gt_overlaps' : np.array([]), #default 1, changed later
'xyz' : np.array([]),
'alphas' :np.array([]),
'diff_level': np.array([]),
'flipped' : False}
else:
# filename = '$Faster-RCNN_TF/data/KITTI/object/training/label_2/000000.txt'
filename = os.path.join(self.gt_dir, index + '.txt')
# print("Loading: ", filename)
# calib
calib = self._load_kitti_calib(index)
Tr = np.dot(calib['R0'],calib['Tr_velo2cam'])
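            # Tr maps homogeneous velodyne points into the rectified camera
            # frame: x_cam = R0 @ (Tr_velo2cam @ x_velo); all 3D boxes below are
            # converted through this 3x4 matrix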
# print 'Loading: {}'.format(filename)
with open(filename, 'r') as f:
lines = f.readlines()
num_objs = len(lines)
translation = np.zeros((num_objs, 3), dtype=np.float32)
rys = np.zeros((num_objs), dtype=np.float32)
lwh = np.zeros((num_objs, 3), dtype=np.float32)
boxes = np.zeros((num_objs, 4), dtype=np.float32)
boxes_bv = np.zeros((num_objs, 4), dtype=np.float32)
boxes3D = np.zeros((num_objs, 6), dtype=np.float32)
boxes3D_lidar = np.zeros((num_objs, 6), dtype=np.float32)
boxes3D_cam_cnr = np.zeros((num_objs, 24), dtype=np.float32)
boxes3D_corners = np.zeros((num_objs, 24), dtype=np.float32)
alphas = np.zeros((num_objs), dtype=np.float32)
gt_classes = np.zeros((num_objs), dtype=np.int32)
overlaps = np.zeros((num_objs, self.num_classes), dtype=np.float32)
# new difficulty level for in training evaluation
diff_level = np.zeros((num_objs), dtype=np.int32)
# print(boxes3D.shape)
# Load object bounding boxes into a data frame.
ix = -1
for line in lines:
obj = line.strip().split(' ')
try:
#WZN.strip() removes white spaces
cls = self._class_to_ind[obj[0].strip()]
# print cls
except:
continue
# ignore objects with undetermined difficult level
level = self._get_obj_level(obj)
if level > 3:
continue
ix += 1
# 0-based coordinates
alpha = float(obj[3])
x1 = float(obj[4])
y1 = float(obj[5])
x2 = float(obj[6])
y2 = float(obj[7])
h = float(obj[8])
w = float(obj[9])
l = float(obj[10])
tx = float(obj[11])
ty = float(obj[12])
tz = float(obj[13])
ry = float(obj[14])
diff_level[ix]=level
if obj[0].strip() == 'Person_sitting':
diff_level[ix]=-1
rys[ix] = ry
lwh[ix, :] = [l, w, h]
alphas[ix] = alpha
translation[ix, :] = [tx, ty, tz]
boxes[ix, :] = [x1, y1, x2, y2]
boxes3D[ix, :] = [tx, ty, tz, l, w, h]
# convert boxes3D cam to 8 corners(cam)
boxes3D_cam_cnr_single = computeCorners3D(boxes3D[ix, :], ry)
boxes3D_cam_cnr[ix, :] = boxes3D_cam_cnr_single.reshape(24)
# convert 8 corners(cam) to 8 corners(lidar)
boxes3D_corners[ix, :] = camera_to_lidar_cnr(boxes3D_cam_cnr_single, Tr)
                # convert 8 corners(cam) to lidar boxes3D; note this is not invertible because we LOSE ry!
boxes3D_lidar[ix, :] = lidar_cnr_to_3d(boxes3D_corners[ix, :], lwh[ix,:])
# convert 8 corners(lidar) to lidar bird view
boxes_bv[ix, :] = lidar_3d_to_bv(boxes3D_lidar[ix, :])
# boxes3D_corners[ix, :] = lidar_to_corners_single(boxes3D_lidar[ix, :])
gt_classes[ix] = cls
overlaps[ix, cls] = 1.0
rys.resize(ix+1)
lwh.resize(ix+1, 3)
translation.resize(ix+1, 3)
alphas.resize(ix+1)
boxes.resize(ix+1, 4)
boxes_bv.resize(ix+1, 4)
boxes3D.resize(ix+1, 6)
boxes3D_lidar.resize(ix+1, 6)
boxes3D_cam_cnr.resize(ix+1, 24)
boxes3D_corners.resize(ix+1, 24)
boxes_bv_corners = lidar_cnr_to_bv_cnr(boxes3D_corners)
gt_classes.resize(ix+1)
# print(self.num_classes)
overlaps.resize(ix+1, self.num_classes)
diff_level.resize(ix+1)
# if index == '000142':
# print(index)
# print(overlaps)
overlaps = scipy.sparse.csr_matrix(overlaps)
# if index == '000142':
# print(overlaps)
#if ix>=0:
# print index
return {'ry' : rys,
'lwh' : lwh,
'boxes' : boxes, #xy box in image
#'boxes_bv' : boxes_bv, #xy box in bird view
'boxes_3D_cam' : boxes3D, #[xyz_center, lwh] in 3D, cam frame
#'boxes_3D' : boxes3D_lidar, #[xyz_center, lwh] in 3D, absolute
'boxes3D_cam_corners' : boxes3D_cam_cnr, #8 corners of box in 3D, cam frame
#'boxes_corners' : boxes3D_corners, #8 corners of box in 3D
#'boxes_bv_corners' : boxes_bv_corners, #4 corners of box in bird view
'gt_classes': gt_classes, #classes
'gt_overlaps' : overlaps, #default 1, changed later
'xyz' : translation,
'alphas' :alphas,
'diff_level': diff_level,
'flipped' : False}
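    # KITTI devkit difficulty levels (easy/moderate/hard): minimum 2D box
    # height 40/25/25 px, maximum truncation 0.15/0.30/0.50, maximum
    # occlusion level 0/1/2; anything beyond that maps to level 4 (ignored)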
    def _get_obj_level(self, obj):
        height = float(obj[7]) - float(obj[5]) + 1
        truncation = float(obj[1])
        occlusion = float(obj[2])
        if height >= 40 and truncation <= 0.15 and occlusion <= 0:
            return 1
        elif height >= 25 and truncation <= 0.3 and occlusion <= 1:
            return 2
        #WZN: changed from <=2 to <2
        elif height >= 25 and truncation <= 0.5 and occlusion < 2:
            return 3
        else:
            return 4
def _write_kitti_results_file(self, all_boxes, all_boxes3D):
# use_salt = self.config['use_salt']
# comp_id = ''
# if use_salt:
# comp_id += '{}'.format(os.getpid())
#WZN: only write 2D detection result.
path = os.path.join(datasets.ROOT_DIR, 'kitti/results', 'kitti_' + self._subset + '_' + self._image_set + '_' \
+ '-' + time.strftime('%m-%d-%H-%M-%S',time.localtime(time.time())), 'data')
if os.path.exists(path):
pass
else:
os.makedirs(path)
for im_ind, index in enumerate(self.image_index):
filename = os.path.join(path, index + '.txt')
with open(filename, 'wt') as f:
for cls_ind, cls in enumerate(self.classes):
if cls=='__background__' or cls=='DontCare' or cls=='Person_sitting':
continue
dets = all_boxes[cls_ind][im_ind]
# dets3D = all_boxes3D[cls_ind][im_ind]
# alphas = all_alphas[cls_ind][im_ind]
                    if len(dets) == 0:
continue
# the KITTI server expects 0-based indices
for k in range(dets.shape[0]):
# TODO
# alpha = dets3D[k, 0] - np.arctan2(dets3D[k, 4], dets3D[k, 6])
alpha = 0
# WZN: .lower() changes letters to lower case.
f.write('{:s} -1 -1 {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} -1 -1 -1 -1 -1 -1 -1 -1\n' \
.format(cls.lower(), alpha, \
dets[k, 0], dets[k, 1], dets[k, 2], dets[k, 3]))
return path
def _write_kitti_results_bv_file(self, all_2d_boxes, all_ry, all_bv_boxes, calibs, all_scores,result_path=None):
# use_salt = self.config['use_salt']
# comp_id = ''
# if use_salt:
# comp_id += '{}'.format(os.getpid())
#WZN: only write 2D detection result.
if result_path is None:
result_path = 'kitti/results'
else:
result_path = 'kitti/results/'+result_path
path = os.path.join(datasets.ROOT_DIR, result_path, 'kitti_' + self._subset + '_' + self._image_set + '_' \
+ '-' + time.strftime('%m-%d-%H-%M-%S',time.localtime(time.time())), 'data')
if os.path.exists(path):
pass
else:
os.makedirs(path)
print_debug=0
for im_ind, index in enumerate(self.image_index):
filename = os.path.join(path, index + '.txt')
with open(filename, 'wt') as f:
for cls_ind, cls_name in enumerate(self.classes_write):
if cls_name == '__background__':
continue
dets2d = all_2d_boxes[cls_ind][im_ind] # should be [x1,y1,x2,y2]
if dets2d is None:
continue
#print im_ind, len(all_2d_boxes[cls_ind])
rys = all_ry[cls_ind][im_ind]
calib = calibs[im_ind]
scores = all_scores[cls_ind][im_ind].reshape([-1])
R0 = calib[2,:9].reshape((3,3))
Tr_velo2cam = calib[3,:].reshape((3,4))
#print R0, Tr_velo2cam
Tr = np.dot(R0,Tr_velo2cam)
detslidar = bv_anchor_to_lidar(all_bv_boxes[cls_ind][im_ind]) # should be [x,y,z,l,w,h] in lidar
dets_bv_cam = np.zeros((detslidar.shape[0],4))
ry_bv = np.zeros(detslidar.shape[0])
for iry, ry in enumerate(rys):
detscorner = lidar_to_corners_single(detslidar[iry,:],ry) # should be corners in lidar
dets_bv_cam[iry,:],ry_bv[iry] = lidar_cnr_to_camera_bv(detscorner, Tr)
# the KITTI server expects 0-based indices
alpha = 0
k=0
#if print_debug==0:
# print cls_name.lower(), alpha, dets2d[k, 0], dets2d[k, 1], dets2d[k, 2], dets2d[k, 3],dets_bv_cam[k,3],dets_bv_cam[k,2],dets_bv_cam[k,0],dets_bv_cam[k,1],ry_bv[k], scores[k]
# print scores.shape,ry_bv.shape
# print_debug=1
for k in range(dets2d.shape[0]):
# WZN: .lower() changes letters to lower case.
f.write('{:s} -1 -1 {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} -1000 {:.2f} {:.2f} {:.2f} -1000 {:.2f} {:.2f} {:.3f}\n' \
.format(cls_name.lower(), alpha, \
dets2d[k, 0], dets2d[k, 1], dets2d[k, 2], dets2d[k, 3],\
dets_bv_cam[k,3],dets_bv_cam[k,2],dets_bv_cam[k,0],dets_bv_cam[k,1],\
ry_bv[k], scores[k]*1000)) # WZN: *1000 is used in MSCNN, does not change result but makes it more readble
return path
def _write_kitti_results_bv_cnr_file(self, all_2d_boxes, all_ry, all_3d_cnrs, calibs, all_scores,result_path=None):
# use_salt = self.config['use_salt']
# comp_id = ''
# if use_salt:
# comp_id += '{}'.format(os.getpid())
#WZN: only write 2D detection result.
if result_path is None:
result_path = 'kitti/results'
else:
result_path = 'kitti/results/'+result_path
path = os.path.join(datasets.ROOT_DIR, result_path, 'kitti_' + self._subset + '_' + self._image_set + '_' \
+ '-' + time.strftime('%m-%d-%H-%M-%S',time.localtime(time.time())), 'data')
if os.path.exists(path):
pass
else:
os.makedirs(path)
print_debug=0
for im_ind, index in enumerate(self.image_index):
filename = os.path.join(path, index + '.txt')
with open(filename, 'wt') as f:
for cls_ind, cls_name in enumerate(self.classes_write):
if cls_name == '__background__':
continue
dets2d = all_2d_boxes[cls_ind][im_ind] # should be [x1,y1,x2,y2]
if dets2d is None:
continue
#print im_ind, len(all_2d_boxes[cls_ind])
rys = all_ry[cls_ind][im_ind]
calib = calibs[im_ind]
scores = all_scores[cls_ind][im_ind].reshape([-1])
R0 = calib[2,:9].reshape((3,3))
Tr_velo2cam = calib[3,:].reshape((3,4))
#print R0, Tr_velo2cam
Tr = np.dot(R0,Tr_velo2cam)
#detslidar = bv_anchor_to_lidar(all_bv_boxes[cls_ind][im_ind]) # should be [x,y,z,l,w,h] in lidar
detscorners = all_3d_cnrs[cls_ind][im_ind]
dets_bv_cam = np.zeros((detscorners.shape[0],4))
ry_bv = np.zeros(detscorners.shape[0])
for iry, ry in enumerate(rys):
#detscorner = lidar_to_corners_single(detslidar[iry,:],ry) # should be corners in lidar
dets_bv_cam[iry,:],ry_bv[iry] = lidar_cnr_to_camera_bv(detscorners[iry,:], Tr)
# the KITTI server expects 0-based indices
alpha = 0
k=0
#if print_debug==0:
# print cls_name.lower(), alpha, dets2d[k, 0], dets2d[k, 1], dets2d[k, 2], dets2d[k, 3],dets_bv_cam[k,3],dets_bv_cam[k,2],dets_bv_cam[k,0],dets_bv_cam[k,1],ry_bv[k], scores[k]
# print scores.shape,ry_bv.shape
# print_debug=1
for k in range(dets2d.shape[0]):
# WZN: .lower() changes letters to lower case.
f.write('{:s} -1 -1 {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} -1000 {:.2f} {:.2f} {:.2f} -1000 {:.2f} {:.2f} {:.3f}\n' \
.format(cls_name.lower(), alpha, \
dets2d[k, 0], dets2d[k, 1], dets2d[k, 2], dets2d[k, 3],\
dets_bv_cam[k,3],dets_bv_cam[k,2],dets_bv_cam[k,0],dets_bv_cam[k,1],\
ry_bv[k], scores[k]*1000)) # WZN: *1000 is used in MSCNN, does not change result but makes it more readble
return path
def _write_kitti_results_voxel_file(self, all_ry, all_3d_bbox, all_scores,result_path=None):
#WZN: only write 2D detection result. difference is here the corners are already in camera frame
if result_path is None:
result_path = 'kitti/results'
else:
result_path = 'kitti/results/'+result_path
path = os.path.join(datasets.ROOT_DIR, result_path, 'kitti_' + self._subset + '_' + self._image_set + '_' \
+ '-' + time.strftime('%m-%d-%H-%M-%S',time.localtime(time.time())), 'data')
if os.path.exists(path):
pass
else:
os.makedirs(path)
print_debug=0
for im_ind, index in enumerate(self.image_index):
filename = os.path.join(path, index + '.txt')
with open(filename, 'wt') as f:
for cls_ind, cls_name in enumerate(self.classes_write):
if cls_name == '__background__':
continue
rys = all_ry[cls_ind][im_ind]
if rys is None:
continue
dets_3d_bbox = all_3d_bbox[cls_ind][im_ind]
scores = all_scores[cls_ind][im_ind].reshape([-1])
dets_bv_cam = dets_3d_bbox[:,[0,2,3,4]]
#dets2d =
ry_bv = rys
# the KITTI server expects 0-based indices
alpha = 0
k=0
#if print_debug==0:
# print cls_name.lower(), alpha, dets2d[k, 0], dets2d[k, 1], dets2d[k, 2], dets2d[k, 3],dets_bv_cam[k,3],dets_bv_cam[k,2],dets_bv_cam[k,0],dets_bv_cam[k,1],ry_bv[k], scores[k]
# print scores.shape,ry_bv.shape
# print_debug=1
for k in range(ry_bv.shape[0]):
# WZN: .lower() changes letters to lower case.
f.write('{:s} -1 -1 {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} -1000 {:.2f} {:.2f} {:.2f} -1000 {:.2f} {:.2f} {:.3f}\n' \
.format(cls_name.lower(), alpha, \
0,0,100,100,\
dets_bv_cam[k,3],dets_bv_cam[k,2],dets_bv_cam[k,0],dets_bv_cam[k,1],\
ry_bv[k], scores[k]*1000)) # WZN: *1000 is used in MSCNN, does not change result but makes it more readble #dets2d[k, 0], dets2d[k, 1], dets2d[k, 2], dets2d[k, 3],\
return path
def _write_corners_results_file(self, all_boxes, all_boxes3D):
# use_salt = self.config['use_salt']
# comp_id = ''
# if use_salt:
# comp_id += '{}'.format(os.getpid())
#WZN: looks like this is still not usable
path = os.path.join(datasets.ROOT_DIR, 'kitti/results_cnr', 'kitti_' + self._subset + '_' + self._image_set + '_' \
+ '-' + time.strftime('%m-%d-%H-%M-%S',time.localtime(time.time())), 'data')
if os.path.exists(path):
pass
else:
os.makedirs(path)
for im_ind, index in enumerate(self.image_index):
filename = os.path.join(path, index + '.npy')
with open(filename, 'wt') as f:
for cls_ind, cls in enumerate(self.classes_write):
if cls == '__background__':
continue
dets = all_boxes[cls_ind][im_ind]
dets3D = all_boxes3D[cls_ind][im_ind]
# alphas = all_alphas[cls_ind][im_ind]
                    if len(dets) == 0:
continue
# the KITTI server expects 0-based indices
for k in range(dets.shape[0]):
obj = np.hstack((dets[k], dets3D[k, 1:]))
# print obj.shape
np.save(filename, obj)
# # TODO
# alpha = dets3D[k, 0] - np.arctan2(dets3D[k, 4], dets3D[k, 6])
# f.write('{:s} -1 -1 {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.2f} {:.3f}\n' \
# .format(cls.lower(), alpha, \
# dets[k, 0], dets[k, 1], dets[k, 2], dets[k, 3], \
# dets3D[k, 2], dets3D[k, 3], dets3D[k, 1], \
# dets3D[k, 4], dets3D[k, 5], dets3D[k, 6], dets3D[k, 0], dets[k, 4]))
print ('Done')
# return path
def _do_eval(self, path, output_dir='output'):
#WZN: do 2D evaluation
cmd = os.path.join(datasets.ROOT_DIR, 'kitti/eval/cpp/evaluate_object {}'.format(os.path.dirname(path)))
print('Running:\n{}'.format(cmd))
status = subprocess.call(cmd, shell=True)
def _do_eval_bv(self, path, output_dir='output'):
#WZN: do 2D evaluation
cmd = os.path.join(datasets.ROOT_DIR, 'kitti/eval/cpp/evaluate_bv {} {} {}'.format(self.gt_dir,os.path.dirname(path),self.list_dir))
print('Running:\n{}'.format(cmd))
status = subprocess.call(cmd, shell=True)
def evaluate_detections(self, all_boxes, all_boxes3D, output_dir):
#WZN: call write result and 2D evaluation, no more fancy things
self._write_kitti_results_file(all_boxes, all_boxes3D)
# path = self._write_kitti_results_file(all_boxes, all_boxes3D)
# if self._image_set != 'test':
# self._do_eval(path)
# multiple threshold to get PR-curve
def _do_validation_bv(self,boxes_bv,gt_blob,scores=None,thres=0.5,ignore=0.05,DEBUG=False):
diff_level = gt_blob['diff_level']
#ignored_height = gt_blob['ignored_height'] #same as cpp for eval (40,25,25 in pixels, but here transformed)
#the processed bv_boxes, first we only do eval here because polygon intersection is not easy
#diff_level is the difficulty of ground truth in KITTI, should be the same as gt_box. {-1,0,1,2}, -1 can be ignored
positive_ind = gt_blob['gt_boxes_bv'][:,4]>0
diff_level = diff_level[positive_ind]
#print diff_level.T
gt_bv = gt_blob['gt_boxes_bv'][positive_ind,0:4]
bbox_bv = boxes_bv[:,0:4]
#filter low scores
assert not(scores is None), 'no score to produce PR-curve'
scores = np.reshape(scores,[-1])
bbox_bv = bbox_bv[scores>ignore,:]
scores = scores[scores>ignore]
#print scores.shape, scores.size , gt_bv.shape
##sort so that we can accumulately calculate
#ind_sort = np.argsort(scores)
if scores.size>0 and gt_bv.shape[0]>0:
overlaps_all = bbox_overlaps(
np.ascontiguousarray(bbox_bv, dtype=np.float),
np.ascontiguousarray(gt_bv, dtype=np.float))
else:
overlaps_all = np.zeros([scores.size,gt_bv.shape[0]])
t_score_range = np.arange(0.04,0.87,0.02)
nt = t_score_range.shape[0]
recalls = np.zeros((nt,3))
precisions = np.zeros((nt,3))
gt_nums = np.zeros((nt,3))
pos_nums = np.zeros((nt,3))
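        # sweep the score thresholds; for each KITTI difficulty bucket count
        # TP/FP/FN at every threshold to trace out a precision-recall curve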
for diff in [1,2,3]:
idiff = diff-1
ind_diff = np.logical_and(diff_level>0,diff_level<=diff)
for it in range(nt):
t_score = t_score_range[it]
ind_score = scores>t_score
scores_above = scores[ind_score]
overlaps = overlaps_all[ind_score,:]
if scores_above.shape[0]==0:
tp = 0
fp = 0
if gt_bv[ind_diff,:].shape[0]>0:
fn = np.sum(ind_diff)
#return 0.0,0.0,gt_bv.shape[0],0
#recall=0.0; precision=0.0; gt_num=gt_bv.shape[0]; pos_num=0
else:
fn = 0
#return 0.0,0.0,0,0
#recall=0.0; precision=0.0; gt_num=0; pos_num=0
elif gt_bv.shape[0]==0:
tp = 0
fn = 0
fp = bbox_bv.shape[0]
else:
# NOTE this is looser than actual eval!!
argmax_overlaps = overlaps.argmax(axis=1)
max_overlaps = overlaps[np.arange(overlaps.shape[0]), argmax_overlaps]
ind1 = max_overlaps<thres #if <0.5, definitely false positive
#ind2 = a #if >2 positive for one gt, it's fp but now ignore that because we have very low NMS thre
fp = np.sum(ind1)
if gt_bv[ind_diff,:].shape[0]==0:
tp = 0
fn = 0
#return 0.0,0.0,0,fp
#recall=0.0; precision=0.0; gt_num=0; pos_num=fp
else:
#argmax_overlaps = overlaps.argmax(axis=1)
gt_argmax_overlaps = overlaps[:,ind_diff].argmax(axis=0)
gt_max_overlaps = overlaps[:,ind_diff][gt_argmax_overlaps,
np.arange(overlaps[:,ind_diff].shape[1])]
if DEBUG:
#print 'prop_max_overlaps:',overlaps[np.arange(overlaps.shape[0]), argmax_overlaps]
print ('gt_max_overlaps:', gt_max_overlaps)
print (gt_max_overlaps>=thres)
print (np.sum(gt_max_overlaps>=thres))
tp = np.sum(gt_max_overlaps>=thres)
fn = np.sum(gt_max_overlaps<thres)
gt_num = tp+fn
pos_num = tp+fp
if gt_num==0:
recall = 1
else:
recall = float(tp)/gt_num
if pos_num==0:
precision = 1
else:
precision = float(tp)/pos_num
recalls[it,idiff] = recall
precisions[it,idiff] = precision
gt_nums[it,idiff] = gt_num
pos_nums[it,idiff] = pos_num
##the unprocessed 3d_corners project to bv
#gt_cnr = gt_blob['gt_boxes_corners']
return recalls,precisions,gt_nums,pos_nums
def _calc_AP(self,recalls,precisions,plot_file=None):
legends = ['Easy','Moderate','Hard']
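        # AP is computed with the rectangle rule over the sorted PR points:
        # sort by recall, then sum precision * (recall increment); this
        # approximates the area under the curve without the interpolated
        # 11/40-point scheme of the official KITTI evaluation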
if len(recalls.shape)==1:
ind_sort = np.argsort(recalls)
recalls = recalls[ind_sort]
precisions = precisions[ind_sort]
delta_recalls = recalls-np.hstack((0,recalls[0:-1]))
AP = np.sum(delta_recalls*precisions)
            if not(plot_file is None):
                plt.plot(recalls, precisions)
                plt.xlabel('recall')
                plt.ylabel('precision')
                plt.savefig(plot_file)
                plt.close()
else:
AP = np.zeros(recalls.shape[1])
for j in range(recalls.shape[1]):
ind_sort = np.argsort(recalls[:,j])
recalls_j = recalls[ind_sort,j]
precisions_j = precisions[ind_sort,j]
delta_recalls = recalls_j-np.hstack((0,recalls_j[0:-1]))
AP[j] = np.sum(delta_recalls*precisions_j)
if not(plot_file is None):
plt.plot(np.hstack((0,recalls_j,recalls_j[-1])),np.hstack((precisions_j[0],precisions_j,0)),label=legends[j])
#plt.hold(True)
plt.xlabel('recall')
plt.xlim((0.0,1.0))
plt.ylabel('precision')
plt.ylim((0.0,1.0))
plt.legend()
plt.savefig(plot_file)
plt.close()
return AP
''' one threshold
def _do_validation_bv(self,boxes_bv,gt_blob,scores=None,thres=0.5,ignore=0.2,DEBUG=False):
#the processed bv_boxes, first we only do eval here because polygon intersection is not easy
positive_ind = gt_blob['gt_boxes_bv'][:,4]>0
gt_bv = gt_blob['gt_boxes_bv'][positive_ind,0:4]
bbox_bv = boxes_bv[:,0:4]
#filter low scores
if scores != None:
bbox_bv = bbox_bv[scores>ignore,:]
if bbox_bv.shape[0]==0:
tp = 0
fp = 0
if gt_bv.shape[0]>0:
return 0.0,0.0,gt_bv.shape[0],0
else:
return 0.0,0.0,0,0
elif gt_bv.shape[0]==0:
tp = 0
fp = bbox_bv.shape[0]
fn = 0
return 0.0,0.0,0,fp
else:
overlaps = bbox_overlaps(
np.ascontiguousarray(bbox_bv, dtype=np.float),
np.ascontiguousarray(gt_bv, dtype=np.float))
argmax_overlaps = overlaps.argmax(axis=1)
gt_argmax_overlaps = overlaps.argmax(axis=0)
gt_max_overlaps = overlaps[gt_argmax_overlaps,
np.arange(overlaps.shape[1])]
if DEBUG:
print 'prop_max_overlaps:',overlaps[np.arange(overlaps.shape[0]), argmax_overlaps]
print 'gt_max_overlaps:', gt_max_overlaps
print gt_max_overlaps>=thres
print np.sum(gt_max_overlaps>=thres)
tp = np.sum(gt_max_overlaps>=thres)
fn = np.sum(gt_max_overlaps<thres)
fp = bbox_bv.shape[0]-tp
gt_num = tp+fn
pos_num = tp+fp
recall = float(tp)/gt_num
precision = float(tp)/pos_num
#the unprocessed 3d_corners project to bv
gt_cnr = gt_blob['gt_boxes_corners']
return recall,precision,gt_num,pos_num
'''
if __name__ == '__main__':
d = datasets.kitti_mv3d('train')
res = d.roidb
from IPython import embed; embed()
| [
"[email protected]"
] | |
5139804e41100a6589ddaa48f989cb0aab6176c5 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_309/ch28_2020_03_24_22_36_16_011013.py | 793207e861aac9f5ffad8b0007ec0a9447743a06 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111 | py | exp = 0
soma = 0
while exp < 100:
    conta = 1 / 2 ** exp
    soma += conta
    exp += 1
print(soma)  # partial sums of the geometric series 1 + 1/2 + 1/4 + ... approach 2
| [
"[email protected]"
] | |
7a0a3a277b02addb1e326d10fb728c20339483e7 | d9a11615b57624a47e4719222ffd346eedbbabc1 | /tests/test_flow.py | cbb43664378e920dfe878bdfd884a44676142e9b | [] | no_license | mattjegan/pyzerem | 79461659521bf98551d8b54e74861a0609db29e3 | d3fe9fb54454b14747cc1d238961a93b854aee46 | refs/heads/master | 2021-04-28T21:12:13.909647 | 2018-02-19T11:13:54 | 2018-02-19T11:13:54 | 121,944,907 | 0 | 0 | null | 2018-02-18T11:19:44 | 2018-02-18T11:19:44 | null | UTF-8 | Python | false | false | 1,568 | py |
from zerem import Flow, Slot, process
class TestFlow(object):
def test_slots_register(self):
"""
Tests that slot is added to the flows available slots
"""
class MyFlow(Flow):
slot = Slot()
m = MyFlow()
assert getattr(m, '__flow_available') == {
'slot': 0,
}
def test_processes_register(self):
"""
Test that the process is added to the flows processes/watchers
"""
class MyFlow(Flow):
@process
def step1(self):
pass
m = MyFlow()
assert getattr(m, '__flow_watchers') == [
(['self'], m.step1),
]
def test_setattr_triggers_methods(self):
"""
Tests that setting a slot triggers appropriate processes
"""
class MyFlow(Flow):
slot = Slot()
triggered = False
@process
def step1(self, slot):
self.triggered = True
m = MyFlow()
m.slot = 'test_value'
assert m.triggered is True
def test_setattr_does_not_trigger_when_wrong_args(self):
"""
Tests that setting a slot does not trigger processes it shouldn't
"""
class MyFlow(Flow):
slot = Slot()
triggered = False
@process
def step1(self, slot, nonexistant):
self.triggered = True
m = MyFlow()
m.slot = 'test_value'
assert m.triggered is False
| [
"[email protected]"
] | |
a7c8d7bddd7e6040690e8f4603b1523914061ddc | 113f8ae533a75e9f2fdc1728661af0f19c8460a6 | /template_advanced_demos/venv/bin/django-admin | d4b372e021a8bb698113792a0a8c99472edaa67e | [] | no_license | PeterM358/Python-web-2021 | cf08beaa3330495afc53e640f4a2aaf0429049e9 | a3b7e1d1be0cc85675aaff646917d4f5b7f97b00 | refs/heads/master | 2023-07-09T15:09:08.868548 | 2021-07-24T13:49:22 | 2021-07-24T13:49:22 | 382,328,747 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 319 | #!/Users/petermihailov/django-test/template_advanced_demos/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from django.core.management import execute_from_command_line
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(execute_from_command_line())
| [
"estestveno"
] | estestveno |
|
d1d0508de70a0ada37a1c3e68468cb649846a73f | 9a423dfb84041a926970e10afad93f15619a34d8 | /backend/google_helpers/utils.py | cc3592f7ee47ae2ca715dbd7623e04aa1cc1fb21 | [] | no_license | Babalwa01/Tilde | 3c2d6295b3d5e8a0cce1331f657ad835688a4db5 | 8eaffeb2c6b78aec4f0d6b5f573106e0a705ae53 | refs/heads/master | 2023-05-28T23:06:49.205259 | 2021-05-18T08:41:14 | 2021-05-18T08:41:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,651 | py | import logging
import pandas as pd
from functools import lru_cache
import re
from timezone_helpers import timestamp_str_to_tz_aware_datetime
from google_helpers.constants import TIMESTAMP_FORMAT, TIMEZONE_NAME
def timestamp_to_datetime(timestamp):
return timestamp_str_to_tz_aware_datetime(
timestamp=timestamp, zone_name=TIMEZONE_NAME, dt_format=TIMESTAMP_FORMAT
)
def fetch_sheet(sheet: str = None, url: str = None):
print(f"Fetching sheet: {sheet} {url}")
service = authorize()
if sheet:
book = service.open(sheet)
elif url:
book = service.open_by_url(url)
logging.info(f"fetched sheet {sheet}")
sheet = book.sheet1 # choose the first sheet
return pd.DataFrame(sheet.get_all_records())
def authorize():
import json
from oauth2client.client import SignedJwtAssertionCredentials
import gspread
import os
# insert name of json service account key
SCOPE = [
"https://spreadsheets.google.com/feeds",
"https://www.googleapis.com/auth/drive",
]
SECRETS_FILE = os.getenv("GOOGLE_SHEETS_CREDENTIALS_FILE")
if not SECRETS_FILE:
raise Exception(
"Missing environmental variable: GOOGLE_SHEETS_CREDENTIALS_FILE"
)
# Based on docs here - http://gspread.readthedocs.org/en/latest/oauth2.html
# Load in the secret JSON key in working directory (must be a service account)
json_key = json.load(open(SECRETS_FILE))
# Authenticate using the signed key
credentials = SignedJwtAssertionCredentials(
json_key["client_email"], json_key["private_key"], SCOPE
)
ret = gspread.authorize(credentials)
return ret
# def date_from_args(date): # Not tz aware
# if type(date) is datetime.datetime:
# return date.date()
# for dt_format in [
# "%m/%d/%Y %H:%M:%S",
# "%m/%d/%Y %H:%M",
# "%m/%d/%Y",
# "%d/%m/%Y",
# "%d/%m/%Y %H:%M",
# "%d/%m/%Y %H:%M:%S",
# "%Y/%m/%d %H:%M:%S",
# ]:
# try:
# return datetime.datetime.strptime(date, dt_format).date()
# except ValueError:
# pass
# raise Exception(f"date '{date}' not allowed")
# def timestamp_to_date(timestamp): # Not tz aware
# return timestamp_to_datetime(timestamp).date()
def clean_project_url_part(df, source_col, dest_col):
def mapper(row):
found = re.match(".*(projects/.*$)", str(row[source_col]))
if found:
return found.groups()[0]
return ""
df[dest_col] = df.apply(mapper, axis=1)
df = df[df[source_col].str.contains("projects/")]
return df
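# e.g. a source-column value such as 'https://example.com/org/board/projects/my-project'
# maps to 'projects/my-project' in dest_col (the URL above is purely illustrative)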
| [
"[email protected]"
] | |
25f36707799253f370eeb2ff989176d7430e52ac | 0c84cc9a2c06594e01835a617a7d5866f9db68a4 | /importing-example/example_2/use_animals.py | a55375cf73a3a4e3806d644da20f0e1ba7b9f72f | [] | no_license | 01-Jacky/Python-Things | a508ac4161c0f836fb793bd07e8c69ff0f3d6e1d | 5153a27cdf9dc17ec3344c2774674c7f92156cf6 | refs/heads/master | 2021-03-19T16:59:50.000741 | 2018-04-04T23:48:46 | 2018-04-04T23:48:46 | 100,906,338 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 314 | py | # Import classes from your brand new package
from package.Animals import Mammals
from package.Animals import Birds
# Create an object of Mammals class & call a method of it
myMammal = Mammals()
myMammal.printMembers()
# Create an object of Birds class & call a method of it
myBird = Birds()
myBird.printMembers() | [
"[email protected]"
] | |
ce370ccbf928822cc6b71d9384369bbb1ba2af0d | 5bdc4f88b0825593e5fd52477112fb8ff9cb9d6b | /sparse/util/util.py | aef978b3429068c0199c91b899a6bbef8493ba95 | [
"MIT"
] | permissive | ruiatelsevier/sparse-hyper | 5a59bf201dde74f3410d7371bc93e5b99b3c6425 | b38f537b149a9940b46bb90c82e0f8b1552c471e | refs/heads/master | 2020-07-30T23:38:45.476081 | 2019-09-23T12:28:57 | 2019-09-23T12:28:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,968 | py | import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.patches import Circle, Wedge, Polygon, Ellipse, Rectangle
from matplotlib.collections import PatchCollection
from matplotlib.axes import Axes
import os, errno, random, time, string, sys
import torch
from torch import nn
from torch import FloatTensor, LongTensor
from torch.autograd import Variable
from torch.utils.data import sampler, dataloader
from collections.abc import Iterable
import torchvision
from collections import OrderedDict
import subprocess
import numpy as np
import math
from enum import Enum
tics = []
DEBUG = False
class Bias(Enum):
"""
"""
    # No bias is used.
NONE = 1
# The bias is returned as a single dense tensor of floats.
DENSE = 2
# The bias is returned in sparse format, in the same way as the weight matrix is.
SPARSE = 3
def kl_loss(zmean, zlsig):
"""
Computes the KL loss term for a VAE.
:param zmean: batch of z means
:param zlsig: batch of z sigma vectors
:return:
"""
b, l = zmean.size()
kl = 0.5 * torch.sum(zlsig.exp() - zlsig + zmean.pow(2) - 1, dim=1)
assert kl.size() == (b,)
return kl
def kl_batch(batch):
"""
Computes the KL loss between the standard normal MVN and a diagonal MVN fitted to the batch
:param batch:
:return:
"""
b, d = batch.size()
mean = batch.mean(dim=0, keepdim=True)
batch = batch - mean
diacov = torch.bmm(batch.view(d, 1, b), batch.view(d, b, 1)).squeeze() / (b - 1)
logvar = torch.log(diacov)
return -0.5 * torch.sum(1 + logvar - mean.pow(2) - logvar.exp())
def vae_sample(zmean, zlsig, eps=None):
b, l = zmean.size()
if eps is None:
eps = torch.randn(b, l, device='cuda' if zmean.is_cuda else 'cpu')
eps = Variable(eps)
return zmean + eps * (zlsig * 0.5).exp()
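# typical reparameterization-trick usage during VAE training (sketch; decoder and
# reconstruction_loss stand in for whatever the surrounding model defines):
#   z = vae_sample(zmean, zlsig)
#   loss = reconstruction_loss(decoder(z), x) + kl_loss(zmean, zlsig).mean()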
def tic():
tics.append(time.time())
def toc():
if len(tics)==0:
return None
else:
return time.time()-tics.pop()
def norm(x):
"""
Normalize a tensor to a tensor with unit norm (treating first dim as batch dim)
:param x:
:return:
"""
b = x.size()[0]
n = torch.norm(x.view(b, -1), p=2, dim=1)
while len(n.size()) < len(x.size()):
n = n.unsqueeze(1)
n.expand_as(x)
return x/n
def makedirs(directory):
"""
Ensure that all directories in the given path exist.
:param directory:
"""
try:
os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise
def sample(collection, k, required):
"""
Sample, without replacement, k elements from 'collection', ensuring that 'required' are always contained in the
sample (but never twice).
currently only works if collection and required contain only unique elements
:param k:
:param collection:
:param required:
:return:
"""
if(k + len(required) > len(collection)):
# use rejection sampling
sample = list(collection)
while len(sample) > k:
ri = random.choice(range(len(sample)))
if sample[ri] not in required:
del(sample[ri])
return sample
else:
required = set(required)
sample0 = set(random.sample(collection, k + len(required)))
sample = list(sample0 - required)
while len(sample) > k - len(required):
ri = random.choice(range(len(sample)))
del(sample[ri])
sample.extend(required)
return sample
#
# if __name__ == '__main__':
#
# print('.')
# print(sample(range(6), 5, [0, 1, 2]))
# print('.')
# print(sample(range(100), 6, [0, 1, 2]))
# print(sample(range(100), 6, [0, 1, 2]))
# print(sample(range(100), 6, [0, 1, 2]))
# print('.')
def sparsemult(use_cuda):
return SparseMultGPU.apply if use_cuda else SparseMultCPU.apply
class SparseMultCPU(torch.autograd.Function):
"""
Sparse matrix multiplication with gradients over the value-vector
Does not work with batch dim.
"""
@staticmethod
def forward(ctx, indices, values, size, vector):
# print(type(size), size, list(size), intlist(size))
# print(indices.size(), values.size(), torch.Size(intlist(size)))
matrix = torch.sparse.FloatTensor(indices, values, torch.Size(intlist(size)))
ctx.indices, ctx.matrix, ctx.vector = indices, matrix, vector
return torch.mm(matrix, vector.unsqueeze(1))
@staticmethod
def backward(ctx, grad_output):
grad_output = grad_output.data
# -- this will break recursive autograd, but it's the only way to get grad over sparse matrices
i_ixs = ctx.indices[0,:]
j_ixs = ctx.indices[1,:]
output_select = grad_output.view(-1)[i_ixs]
vector_select = ctx.vector.view(-1)[j_ixs]
grad_values = output_select * vector_select
grad_vector = torch.mm(ctx.matrix.t(), grad_output).t()
return None, Variable(grad_values), None, Variable(grad_vector)
class SparseMultGPU(torch.autograd.Function):
"""
Sparse matrix multiplication with gradients over the value-vector
Does not work with batch dim.
"""
@staticmethod
def forward(ctx, indices, values, size, vector):
# print(type(size), size, list(size), intlist(size))
matrix = torch.cuda.sparse.FloatTensor(indices, values, torch.Size(intlist(size)))
ctx.indices, ctx.matrix, ctx.vector = indices, matrix, vector
return torch.mm(matrix, vector.unsqueeze(1))
@staticmethod
def backward(ctx, grad_output):
grad_output = grad_output.data
# -- this will break recursive autograd, but it's the only way to get grad over sparse matrices
i_ixs = ctx.indices[0,:]
j_ixs = ctx.indices[1,:]
output_select = grad_output.view(-1)[i_ixs]
vector_select = ctx.vector.view(-1)[j_ixs]
grad_values = output_select * vector_select
grad_vector = torch.mm(ctx.matrix.t(), grad_output).t()
return None, Variable(grad_values), None, Variable(grad_vector)
def nvidia_smi():
command = 'nvidia-smi'
return subprocess.check_output(command, shell=True)
def orth_loss(batch_size, x_size, model, use_cuda):
"""
:param batch_size:
:param x_size:
:param model:
:param use_cuda:
:return:
"""
x_size = (batch_size,) + x_size
x1o, x2o = torch.randn(x_size), torch.randn(x_size)
# normalize to unit tensors
x1o, x2o = norm(x1o), norm(x2o)
if use_cuda:
x1o, x2o = x1o.cuda(), x2o.cuda()
x1o, x2o = Variable(x1o), Variable(x2o)
y1 = model(x1o)
y2 = model(x2o)
x1 = x1o.view(batch_size, 1, -1)
x2 = x2o.view(batch_size, 1, -1)
y1 = y1.view(batch_size, 1, -1)
y2 = y2.view(batch_size, 1, -1)
print('x1 v y1', x1[0, :], y1[0, ])
xnorm = torch.bmm(x1, x2.transpose(1, 2))
ynorm = torch.bmm(y1, y2.transpose(1, 2))
loss = torch.sum(torch.pow((xnorm - ynorm), 2)) / batch_size
return loss, x1o, x2o
def bmultinomial(mat, num_samples=1, replacement=False):
"""
Take multinomial samples from a batch of matrices with multinomial parameters on the
rows
:param mat:
:param num_samples:
:param replacement:
:return:
"""
batches, rows, columns = mat.size()
mat = mat.view(1, -1, columns).squeeze(0)
sample = torch.multinomial(mat, num_samples, replacement)
return sample.view(batches, rows, num_samples), sample
def bsoftmax(input):
b, r, c = input.size()
input = input.view(1, -1, c)
    input = nn.functional.softmax(input.squeeze(0), dim=-1).unsqueeze(0)  # explicit dim (implicit dim is deprecated)
return input.view(b, r, c)
def contains_nan(input):
if (not isinstance(input, torch.Tensor)) and isinstance(input, Iterable):
for i in input:
if contains_nan(i):
return True
return False
else:
return bool((input != input).sum() > 0)
#
# if __name__ == '__main__':
#
#
# i = torch.LongTensor([[0, 16, 1],
# [2, 0, 2]])
# v = torch.FloatTensor([1, 1, 1])
#
# matrix = torch.sparse.FloatTensor(i, v, torch.Size((16, 16)))
def od(lst):
od = OrderedDict()
for i, elem in enumerate(lst):
od[str(i)] = elem
return od
class Lambda(nn.Module):
def __init__(self, lambd):
super(Lambda, self).__init__()
self.lambd = lambd
def forward(self, x):
return self.lambd(x)
class Debug(nn.Module):
def __init__(self, lambd):
super(Debug, self).__init__()
self.lambd = lambd
def forward(self, x):
self.lambd(x)
return x
class Flatten(nn.Module):
def forward(self, input):
return input.view(input.size(0), -1)
def flatten(input):
return input.view(input.size(0), -1)
class NoActivation(nn.Module):
def forward(self, input):
return input
def prod(tuple):
result = 1
for v in tuple:
result *= v
return result
def add_noise(input, std=0.1):
"""
In-place
:param input:
:param std:
:return:
"""
noise = torch.cuda.FloatTensor(input.size()) if input.is_cuda else FloatTensor(input.size())
noise.normal_(std=std)
return input + noise
def corrupt_(input, prop=0.3):
"""
Sets a random proportion of the input to zero
:param input:
:param prop:
:return:
"""
t0 = time.time()
FT = torch.cuda.FloatTensor if input.is_cuda else torch.FloatTensor
mask = FT(input.size())
mask.uniform_()
mask.sub_(prop).ceil_()
input.mul_(mask)
def rstring(n):
return ''.join(random.choices(string.ascii_uppercase + string.digits, k=n))
def count_params(model):
sum = 0
for tensor in model.parameters():
sum += prod(tensor.size())
return sum
def logit(x):
"""
Inverse of the sigmoid function.
:param x:
:return:
"""
if type(x) == float:
return math.log(x / (1 - x))
return torch.log(x/ (1 - x))
def inv(i, mx=28):
"""
Inverse of the sigmoid-based scaling function.
:param i:
:param mx: Max value. Should broadcast
:return:
"""
sc = (i/(mx-1)) * 0.9999 + 0.00005
return logit(sc)
def sigmoid(x):
return 1 / (1 + math.exp(-x))
class ChunkSampler(sampler.Sampler):
"""Samples elements sequentially from some offset, using a fixed permutation
initial source: https://github.com/pytorch/vision/issues/168
Arguments:
num_samples: # of desired datapoints
start: offset where we should start selecting from
"""
def __init__(self, start, num, total, seed = 0):
self.start = start
self.num = num
self.random = random.Random(seed)
self.l = list(range(total))
self.random.shuffle(self.l)
def __iter__(self):
return iter(self.l[self.start : self.start + self.num])
def __len__(self):
return self.num
def bmult(width, height, num_indices, batchsize, use_cuda):
"""
?
:param width:
:param height:
:param num_indices:
:param batchsize:
:param use_cuda:
:return:
"""
bmult = torch.cuda.LongTensor([height, width]) if use_cuda else LongTensor([height, width])
m = torch.cuda.LongTensor(range(batchsize)) if use_cuda else LongTensor(range(batchsize))
bmult = bmult.unsqueeze(0).unsqueeze(0)
m = m.unsqueeze(1).unsqueeze(1)
bmult = bmult.expand(batchsize, num_indices, 2)
m = m.expand(batchsize, num_indices, 2)
return m * bmult
def intlist(tensor):
"""
A slow and stupid way to turn a tensor into an iterable over ints
:param tensor:
:return:
"""
if type(tensor) is list:
return tensor
tensor = tensor.squeeze()
assert len(tensor.size()) == 1
s = tensor.size()[0]
l = [None] * s
for i in range(s):
l[i] = int(tensor[i])
return l
def totensor(dataset, batch_size=512, shuffle=True, maxclass=None):
"""
Takes a dataset and loads the whole thing into a tensor
:param dataset:
:return:
"""
loader = torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=shuffle, num_workers=2)
index = 0
for i, batch in enumerate(loader):
batch, labels = batch[0], batch[1]
if maxclass is not None:
batch = batch[labels <= maxclass]
if i == 0:
size = list(batch.size())
size[0] = len(dataset)
result = torch.zeros(*size)
result[index:index+batch.size(0)] = batch
index += batch.size(0)
return result
class Reshape(nn.Module):
def __init__(self, shape):
super().__init__()
self.shape = shape
def forward(self, input):
return input.view( (input.size(0),) + self.shape)
def normalize(indices, values, size, row=True, cuda=None, epsilon=0.00000001):
"""
Row or column normalizes a sparse matrix, defined by the given indices and values. Expects a batch dimension.
:param indices: (b, k, 2) LongTensor of index tuples
:param values: length-k vector of values
:param size: dimensions of the matrix
:param row: If true, we normalize the rows, otherwise the columns
:return: The normalized values (the indices stay the same)
"""
if cuda is None:
cuda = indices.is_cuda
dv = 'cuda' if cuda else 'cpu'
spm = sparsemult(cuda)
b, k, r = indices.size()
assert r == 2
# unroll the batch dimension
# (think if this as putting all the matrices in the batch along the diagonal of one huge matrix)
ran = torch.arange(b, device=dv).unsqueeze(1).expand(b, 2)
ran = ran * torch.tensor(size, device=dv).unsqueeze(0).expand(b, 2)
offset = ran.unsqueeze(1).expand(b, k, 2).contiguous().view(-1, 2)
indices = indices.view(-1, 2)
indices = indices + offset
values = values.view(-1)
if row:
ones = torch.ones((b*size[1],), device=dv)
else:
ones = torch.ones((b*size[0],), device=dv)
# transpose the matrix
indices = torch.cat([indices[:, 1:2], indices[:, 0:1]], dim=1)
sums = spm(indices.t(), values, torch.tensor(size, device=dv)*b, ones) # row/column sums
# select the sums corresponding to each index
div = torch.index_select(sums, 0, indices[:, 0]).squeeze() + epsilon
return (values/div).view(b, k)
# if __name__ == "__main__":
# tind = torch.tensor([[[0, 0],[0, 1], [4, 4], [4, 3]], [[0, 1],[1, 0],[0, 2], [2, 0]]])
# tv = torch.tensor([[0.5, 0.5, 0.4, 0.6], [0.5, 1, 0.5, 1]])
#
# print(normalize(tind, tv, (5, 5)))
# print(normalize(tind, tv, (5, 5), row=False))
def duplicates(tuples):
"""
Takes a tensor of integer tuples, and for each tuple that occurs multiple times marks all but one of the occurences
as duplicate.
:param tuples: A (batch, k, r)-tensor of containing a batch of k r-dimensional integer tuples
:return: A size (batch, k) byte tensor. When used as a mask, this masks out all duplicates.
"""
dv = 'cuda' if tuples.is_cuda else 'cpu'
b, k, r = tuples.size()
unique = nunique(tuples)
sorted, sort_idx = torch.sort(unique, dim=1)
_, unsort_idx = torch.sort(sort_idx, dim=1)
mask = sorted[:, 1:] == sorted[:, :-1]
    mask = torch.cat([torch.zeros(b, 1, dtype=torch.bool, device=dv), mask], dim=1)  # dtype changed from uint8 to bool
return torch.gather(mask, 1, unsort_idx)
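# e.g. duplicates(torch.tensor([[[0, 0], [0, 1], [0, 0]]])) -> tensor([[False, False, True]])
# (assuming a stable sort, so only the later occurrence of a repeated tuple is masked)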
def nduplicates(tuples):
"""
Takes a tensor of integer tuples, and for each tuple that occurs multiple times marks all
but one of the occurrences as duplicate.
:param tuples: A (..., k, r)-tensor of containing a batch of k r-dimensional integer tuples
:return: A size (..., k) byte tensor. When used as a mask, this masks out all duplicates.
"""
init, k, r = tuples.size()[:-2], tuples.size()[-2], tuples.size()[-1]
tuples = tuples.view(-1, k, r)
mask = duplicates(tuples)
return mask.view(*init, k)
def scatter_imgs(latents, images, size=None, ax=None, color=None, alpha=1.0):
assert(latents.shape[0] == images.shape[0])
if ax is None:
fig = plt.figure(figsize=(16, 16))
ax = fig.add_subplot(111)
ax.set_xlim(0, 1e-7)
ax.set_ylim(0, 1e-7)
if color is None:
color = np.asarray([0.0, 0.0, 0.0, 1.0])
else:
color = np.asarray(color)
# print(color)
xmn, ymn = np.min(latents, axis=0)
xmx, ymx = np.max(latents, axis=0)
oxmn, oxmx = ax.get_xlim()
oymn, oymx = ax.get_ylim()
ax.set_xlim(min(oxmn, xmn), max(oxmx, xmx))
ax.set_ylim(min(oymn, ymn), max(oymx, ymx))
# print(ax.get_xlim(), ax.get_ylim())
if size is None:
size = (xmx - xmn)/np.sqrt(latents.shape[0])
size *= 0.5
n, h, w, c = images.shape
aspect = h/w
images = images * (1.0 - color[:3])
images = 1.0 - images
for i in range(n):
x, y = latents[i, 0:2]
im = images[i, :]
ax.imshow(im, extent=(x, x + size, y, y + size*aspect), alpha=alpha)
ax.scatter(latents[:, 0], latents[:, 1], linewidth=0, s=2, color=color)
return ax, size
def linmoid(x, inf_in, up):
"""
Squeeze the given input into the range (0, up). All points are translated linearly, except those above and below the
inflection points (on the input range), which are squeezed through a sigmoid function.
:param input:
:param inflections:
:param range:
:return:
"""
ilow = x < inf_in[0]
ihigh = x > inf_in[1]
# linear transform
s = (up - 1)/(inf_in[1] - inf_in[0])
y = x * s + 0.5 - inf_in[0] * s
scale = s * 4
y[ilow] = torch.sigmoid((x[ilow] - inf_in[0])*scale)
y[ihigh] = torch.sigmoid((x[ihigh] - inf_in[1])*scale) - 0.5 + (up - 0.5)
return y
# if __name__ == "__main__":
# x = torch.linspace(-0.5, 1.5, 1000)
# y = linmoid(x, inf_in=(0.25, 0.75), up=3)
#
# plt.scatter(x.numpy(), y.numpy(), s=2)
# plt.ylim([0, 3])
#
# clean()
# plt.savefig('test_linmoid.png')
def split(offset, depth):
dv = 'cuda' if offset.is_cuda else 'cpu'
b, n, s = offset.size()
bn = b*n
offset = offset.view(bn, s)
numbuckets = 2 ** depth # number of buckets in the input
bsize = s // numbuckets # size of the output buckets
lo = torch.arange(numbuckets, device=dv, dtype=torch.long) * bsize # minimum index of each downbucket
lo = lo[None, :, None].expand(bn, numbuckets, bsize).contiguous().view(bn, -1)
hi = torch.arange(numbuckets, device=dv, dtype=torch.long) * bsize + bsize//2 # minimum index of each upbucket
hi = hi[None, :, None].expand(bn, numbuckets, bsize).contiguous().view(bn, -1)
upchoices = offset.long()
downchoices = 1 - upchoices
numupchoices = upchoices.view(bn, numbuckets, bsize).cumsum(dim=2).view(bn, -1)
numdownchoices = downchoices.view(bn, numbuckets, bsize).cumsum(dim=2).view(bn, -1)
result = torch.zeros(bn, s, dtype=torch.long, device=dv)
# print(result.dtype, upchoices.dtype, hi.dtype, numupchoices.dtype)
result = result + upchoices * (hi + numupchoices - 1)
result = result + downchoices * (lo + numdownchoices - 1)
# If offset is not arranged correctly (equal numbers of ups and downs per bucket)
# we get a non-permutation. This is fine, but we must clamp the result to make sure the
# indices are still legal
result = result.clamp(0, s-1)
return result.view(b, n, s)
def sample_offsets(batch, num, size, depth, cuda=False):
dv = 'cuda' if cuda else 'cpu'
numbuckets = 2 ** depth # number of buckets in the input
bsize = size // numbuckets # size of the input buckets
ordered = torch.tensor([0,1], dtype=torch.uint8, device=dv)[None, None, None, :, None].expand(batch, num, numbuckets, 2, bsize // 2)
ordered = ordered.contiguous().view(batch, num, numbuckets, bsize)
# shuffle the buckets
ordered = ordered.view(batch * num * numbuckets, bsize)
ordered = shuffle_rows(ordered)
ordered = ordered.view(batch, num, numbuckets, bsize)
return ordered.contiguous().view(batch, num, -1)
shufflecache = {}
cache_size = 500_000
def shuffle_rows(x):
r, c = x.size()
if c not in shufflecache:
cached = torch.zeros(cache_size, c, dtype=torch.long, device='cpu')
for i in range(cache_size):
cached[i, :] = torch.randperm(c)
shufflecache[c] = cached
cache = shufflecache[c]
rows = random.sample(range(cache_size), k=r)
sample = cache[rows, :]
if x.is_cuda:
sample = sample.cuda()
out = x.gather(dim=1, index=sample)
if x.is_cuda:
out = out.cuda()
return out
# def bunique(tuples):
# """
# Like unique/2, but for batched tuples.
#
# :param tuples: A (b, k, d) tensor of a batch of (k, d) matrices containing d dimensional integer tuples
# :return: A (b, k, d, 1) tensor
# """
#
# b, k, d = tuples.size()
# tuples = tuples.view(b * k, d)
#
# un = unique(tuples)
#
# return un.view(b, k)
def nunique(tuples):
"""
:param tuples: A (..., d) tensor containing d dimensional integer tuples
:return: A (..., 1) tensor containing a unique single integer for every integer tuple
"""
init, d = tuples.size()[:-1], tuples.size()[-1]
tuples = tuples.view(-1, d)
un = unique(tuples)
return un.view(*init)
def unique(tuples):
"""
Takes a (b, s)-matrix and returns a (b, 1)-matrix with a unique integer for each row.
Uses the cantor tuple function: https://en.wikipedia.org/wiki/Pairing_function#Cantor_pairing_function
:param tuples: A matrix of size (b, s)
:return: A matrix of size (b, 1).
"""
b, s = tuples.size()
if s == 1:
return tuples
if s == 2:
k1, k2 = tuples[:, 0], tuples[:, 1]
res = ((k1 + k2) * (k1 + k2 + 1)) / 2 + k2
return res[:, None]
sub = unique(tuples[:, 1:])
res = torch.cat([tuples[:, 0:1], sub], dim=1)
return unique(res)
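# e.g. unique(torch.tensor([[1, 2]])) -> tensor([[8.]]), since the Cantor pairing of
# (1, 2) is (1 + 2) * (1 + 2 + 1) / 2 + 2 = 8 (the true division yields a float tensor)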
def xent(out, tgt):
"""
Binary cross-entropy. Manual implementation so we get gradient over both inputs
:param out:
:param tgt:
:return:
"""
assert out.size() == tgt.size()
out = out.clamp(0, 1)
tgt = tgt.clamp(0, 1)
return - tgt * (out + 1e-10).log() - (1.0 - tgt) * (1.0 - out + 1e-10).log()
if __name__ == '__main__':
#
# size = 8
# offset = torch.tensor([1, 1, 0, 1, 1, 0, 0, 0]).byte()
# offset = torch.tensor([[0, 0, 1, 0, 1, 1, 1, 0], [0, 1, 0, 1, 0, 1, 1, 0]]).byte()
offset = torch.tensor([[0, 1, 1, 0]]).byte()
indices = split(offset[:, None, :], 0)
print(indices)
#
# # print(sample_offsets(3, 4, 16, 3))
# #
# # print(unique(torch.tensor( [[1,2,3,4],[4,3,2,1],[1,2,3,4]] )))
# #
# #
# indices = torch.tensor([[[0, 0], [1, 1]], [[0, 1], [1, 0]]])
# values = torch.tensor([[1.0, 1.0], [1.0, 1.0]])
# inputs = torch.tensor([[[1.0, 2.0, 2.0, 2.0, 2.0], [3.0, 4.0, 2.0, 2.0, 2.0]], [[1.0, 2.0, 2.0, 2.0, 2.0], [3.0, 4.0, 4.0, 4.0, 4.0]]])
#
# print(inputs.size())
#
# print(batchmm(indices, values, (2,2), inputs))
def wrapmod(x, mod):
neg = x < 0.0
y = x.fmod(mod)
y[neg] = mod + y[neg]
return y
def interpolation_grid(size=(10, 10)):
"""
Returns an (h, v, 4) grid, where each point produces a weighted combination of the
four corner points. Taking the convex combination of tensors using these factors, will
result in a linear interpolation grid.
Corner points are enumerated in clockwise fashion, starting top left.
:param size: h, v
:return:
"""
h, v = size
g1, g2 = torch.meshgrid((torch.linspace(0, 1, h), torch.linspace(0, 1, v)))
g1, g2 = g1[:, :, None], g2[:, :, None]
p1, p2 = 1.0 - g1, 1.0 - g2
return torch.cat([p1*p2, p1*g2, g1*g2, g1*p2], dim=2)
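# e.g. interpolation_grid((2, 2))[0, 0] is tensor([1., 0., 0., 0.]): each grid point
# holds convex weights over the four corners (clockwise from top left), summing to 1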
def unsqueezen(input, n):
"""
Adds n singular dimensions at the start of the
:param input:
:param n:
:return:
"""
for _ in range(n):
input = input.unsqueeze(0)
return input
class CConv2d(nn.Module):
"""
Implementation of the CoordConv layer from https://arxiv.org/abs/1807.03247
"""
def __init__(self, in_channels, out_channels, kernel_size, res=None, stride=1, padding=0, dilation=1, groups=1, bias=True):
super().__init__()
# layer to which we'll defer the actual convolution
self.master = nn.Conv2d(in_channels + 2, out_channels, kernel_size, stride, padding, dilation, groups, bias)
if res is None:
self.coords = None
else:
            self.register_buffer('coords', coordinates(res))  # name first, then tensor
def forward(self, x):
b, c, h, w = x.size()
cuda = x.is_cuda
# get the coordinate channels
if self.coords is None:
coords = coordinates(x.size()[-2:], cuda)
else:
coords = self.coords
bcoords = coords[None, :, :, :].expand(b, 2, h, w)
x = torch.cat([bcoords, x], dim=1)
return self.master(x)
def coordinates(res, cuda=False):
"""
Compute the coordinate channels for a given resolution.
:param res:
:return:
"""
dv = 'cuda' if cuda else 'cpu'
h, w = res
hvec, wvec = torch.arange(h, device=dv, dtype=torch.float), torch.arange(w, device=dv, dtype=torch.float)
hvec, wvec = hvec / (h - 1), wvec / (w - 1)
hmat, wmat = hvec[None, :, None].expand(1, h, w), wvec[None, None, :].expand(1, h, w)
return torch.cat([hmat, wmat], dim=0).to(torch.float)
def d(tensor=None):
"""
Returns a device string either for the best available device,
or for the device corresponding to the argument
:param tensor:
:return:
"""
if tensor is None:
return 'cuda' if torch.cuda.is_available() else 'cpu'
return 'cuda' if tensor.is_cuda else 'cpu'
def here(subpath=None):
"""
:return: the path in which the package resides (the directory containing the 'sparse' dir)
"""
if subpath is None:
return os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))
return os.path.abspath(os.path.join(os.path.dirname(__file__), '../..', subpath))
def flip(x):
"""
Takes a batch of matrix indices (continuous or discrete, can be negative) and returns a version
where anything above the diagonal is flipped to be below the diagonal.
    The last dimension should be two; any preceding dimensions are taken to be batch dimensions.
:param x: The batch of matrices to flip
:return: The flipped matrices
"""
assert x.size(-1) == 2
bdims = x.size()[:-1]
x = x.view(-1, 2)
toflip = x[:, 0] < x[:, 1]
t = x[:, 0].clone() # store 0 indices temporarily
y = x.clone()
y[toflip, 0] = x[toflip, 1].clone()
y[toflip, 1] = t[toflip]
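    # e.g. flip(torch.tensor([[1, 3], [3, 1]])) gives tensor([[3, 1], [3, 1]]): pairs
    # above the diagonal (row < column) are mirrored below it, the rest stay unchanged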
return y.view(*(bdims + (2,))) | [
"[email protected]"
] | |
043f39369ddb5869a0f589beb403b66748b3f3a0 | ceedf463269728f0257030671917f9fc979c720a | /popula.py | 8a2b370eed09596f677c02302927860324171dcd | [] | no_license | weltonvaz/Zumbis | 4a8bc213b2d7380b0ef4f3672c6a36b45f3f5c0a | da760e9f258c03660a2eae1439190ce36dee716d | refs/heads/master | 2021-01-19T08:33:58.430648 | 2015-04-17T11:59:11 | 2015-04-17T11:59:11 | 32,888,135 | 0 | 4 | null | null | null | null | UTF-8 | Python | false | false | 507 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Population growth problem
# Developed by Evaldo Junior (InFog)
# http://evaldojunior.com.br/blog
popA, popB, anos = 80000, 200000, 0
cresA, cresB = 0.03, 0.015  # growth rates of 3% and 1.5% per year
while (popA < popB):
anos += 1
popA = popA + (popA * cresA)
popB = popB + (popB * cresB)
print("Após %i anos o país A ultrapassou o país B em número de habitantes." % anos)
print("País A: %.0f" % popA)
print("País B: %.0f" % popB)
| [
"[email protected]"
] | |
65a01fd1f09658838b02901d836cc99d3fe44dd1 | ed37a985a7411fb3b8f29282a81f1d823f8f4afc | /pascal_triangle/implementations/cython/base.py | 5289433918abcc9fb01106fd869644cc623a41fb | [] | no_license | dmugtasimov/pascal_triangle | 5b310451582f6fc2ddc74f316259c6ec9fc4ec4b | 875deac43300a42560f0433a92e5f1e0475bb754 | refs/heads/master | 2021-06-16T10:55:11.338999 | 2017-04-11T17:20:54 | 2017-04-11T17:20:54 | 35,548,062 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | from pascal_triangle.implementations.base import PascalTriangleBase
class CyPascalTriangleBase(PascalTriangleBase):
language = 'Cython'
| [
"[email protected]"
] | |
5aabfdaa690e6d5f51e29d29af16c5f7bbebe551 | f9c7969c8649c484f2460fb245a3d5bd6870fa5a | /ch07/exercises/exercise 50.py | 57914cc3e70dcbd399eceb03ac689bf9eefd314c | [] | no_license | Pshypher/tpocup | 78cf97d51259bfea944dc205b9644bb1ae4ab367 | b05b05728713637b1976a8203c2c97dbbfbb6a94 | refs/heads/master | 2022-05-18T13:11:31.417205 | 2020-01-07T13:50:06 | 2020-01-07T13:50:06 | 260,133,112 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 792 | py | # Program written to transfer the elements of a list to another
# list whilst rearranging the order in which the elements appear
# Unless stated otherwise, variables are assumed to be of type int
def transform(list1, list2, r1, r2):
"""Removes items from list1 in the slice r1:r2, appends them onto list2
in reverse order; Returns the resulting list."""
slice_lst = list1[r1:r2] # r1 < r2
slice_lst.reverse() # reverse the order of the slice
list2.extend(slice_lst) # add the elements sliced from list1
# now reversed to list2
return list2
# Test that the function above works as expected
list1 = [1,2,3,4,5,6,7,8,9]
list2 = [100,200]
transform(list1, list2, 4, 7)
print(list2) # displays [100,200,7,6,5]
| [
"[email protected]"
] | |
6a71ee61962bf5aaad4affa272e4d5ea139738fa | 56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e | /CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/DY4JetsToLL_M-50_TuneZ2Star_8TeV-madgraph/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0_1377544840/HTT_24Jul_newTES_manzoni_Up_Jobs/Job_106/run_cfg.py | 9695dd7ce6e979864c86ead25607ebeee3e6d533 | [] | no_license | rmanzoni/HTT | 18e6b583f04c0a6ca10142d9da3dd4c850cddabc | a03b227073b2d4d8a2abe95367c014694588bf98 | refs/heads/master | 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,495 | py | import FWCore.ParameterSet.Config as cms
import os,sys
sys.path.append('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/DY4JetsToLL_M-50_TuneZ2Star_8TeV-madgraph/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0_1377544840/HTT_24Jul_newTES_manzoni_Up_Jobs')
from base_cfg import *
process.source = cms.Source("PoolSource",
noEventSort = cms.untracked.bool(True),
inputCommands = cms.untracked.vstring('keep *',
'drop cmgStructuredPFJets_cmgStructuredPFJetSel__PAT'),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck'),
fileNames = cms.untracked.vstring('/store/cmst3/user/cmgtools/CMG/DY4JetsToLL_M-50_TuneZ2Star_8TeV-madgraph/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_572.root',
'/store/cmst3/user/cmgtools/CMG/DY4JetsToLL_M-50_TuneZ2Star_8TeV-madgraph/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_573.root',
'/store/cmst3/user/cmgtools/CMG/DY4JetsToLL_M-50_TuneZ2Star_8TeV-madgraph/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_574.root',
'/store/cmst3/user/cmgtools/CMG/DY4JetsToLL_M-50_TuneZ2Star_8TeV-madgraph/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_575.root',
'/store/cmst3/user/cmgtools/CMG/DY4JetsToLL_M-50_TuneZ2Star_8TeV-madgraph/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_576.root')
)
| [
"[email protected]"
] | |
1a0e532b26b8e1f4e25a0bdf0c0d61114323d61c | e7b7cc34f77c71e61aa0fa05bcc62f54fc2fc0e1 | /String/test_q014_longest_common_prefix.py | 5c11b0ca85d14ca6dca237e3305afcd9f12663cf | [] | no_license | sevenhe716/LeetCode | 41d2ef18f5cb317858c9b69d00bcccb743cbdf48 | 4a1747b6497305f3821612d9c358a6795b1690da | refs/heads/master | 2020-03-16T16:12:27.461172 | 2019-04-22T13:27:54 | 2019-04-22T13:27:54 | 130,221,784 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 660 | py | import unittest
from String.q014_longest_common_prefix import SolutionF
class TestLongestCommonPrefix(unittest.TestCase):
"""Test q014_longest_common_prefix.py"""
def test_longest_common_prefix(self):
s = SolutionF()
self.assertEqual('fl', s.longestCommonPrefix(["flower", "flow", "flight"]))
self.assertEqual('', s.longestCommonPrefix(["flower", "flow", ""]))
self.assertEqual('f', s.longestCommonPrefix(["flower", "flow", "f"]))
self.assertEqual('', s.longestCommonPrefix(["dog", "racecar", "car"]))
self.assertEqual('', s.longestCommonPrefix([]))
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
6ee5824a7f0a33926ee6524a24a32398e7a7a209 | e82a5480b960abc154025168a27742149ae74de3 | /Leetcode/Dynamic Programming/Medium/1043_partition_array_for_maximum_sum.py | ea7a058f3689100fc3140e54fb2a74d56b88cb62 | [] | no_license | harshsodi/DSA | 8e700f0284f5f3c5559a7e385b82e0a6c96d3363 | 18f82f9b17a287abe3f318118691b62607e61ff9 | refs/heads/master | 2021-07-07T23:42:50.750471 | 2020-09-11T03:16:41 | 2020-09-11T03:16:41 | 186,679,663 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 918 | py | # Runtime: 448 ms, faster than 41.60% of Python online submissions for Partition Array for Maximum Sum.
# Memory Usage: 11.8 MB, less than 100.00% of Python online submissions for Partition Array for Maximum Sum.
class Solution(object):
def maxSumAfterPartitioning(self, A, K):
"""
:type A: List[int]
:type K: int
:rtype: int
"""
n = len(A)
dp = [0 for _ in A]
dp[0] = A[0]
for i in range(1, n):
cmax = A[i]
for j in range(0, K):
c = i - j
if c < 0:
break
if c == 0:
prev = 0
else:
prev = dp[c-1]
cmax = max(cmax, A[c])
dp[i] = max(dp[i], cmax * (j+1) + prev)
return dp[n-1] | [
"[email protected]"
] | |
bcbe4e83dec0fe91a1870110287f8df495d3f9c4 | 737c0920b33fddb3fc7b6ff7287f06faaf9958bb | /models/temp/common_spec_2.py | e47cda732a65a69386b22619f5cf0ec7033e294e | [] | no_license | Willamjie/CCWH-ACB | aa51b412adccf0078bc2f575dd47e22cd2daa689 | e15176c9d74c1b9232d72d79114f0bf6aa0d315e | refs/heads/main | 2023-02-25T14:30:57.389888 | 2021-02-02T14:08:06 | 2021-02-02T14:08:06 | 335,209,023 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,308 | py | # This file contains modules common to various models
from utils.utils import *
from models.DConv import DOConv2d
def autopad(k, p=None): # kernel, padding
# Pad to 'same'
if p is None:
p = k // 2 if isinstance(k, int) else [x // 2 for x in k] # auto-pad
return p
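# e.g. autopad(3) -> 1 and autopad((3, 5)) -> [1, 2], i.e. 'same' padding per kernel dim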
def DWConv(c1, c2, k=1, s=1, act=True):
# Depthwise convolution
return Conv(c1, c2, k, s, g=math.gcd(c1, c2), act=act)
class Conv(nn.Module):
# Standard convolution
def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups
super(Conv, self).__init__()
self.conv = DOConv2d(c1, c2, k, s, autopad(k, p), groups=g, bias=False)
self.bn = nn.BatchNorm2d(c2)
self.act = nn.LeakyReLU(0.1, inplace=True) if act else nn.Identity()
def forward(self, x):
return self.act(self.bn(self.conv(x)))
def fuseforward(self, x):
return self.act(self.conv(x))
class Bottleneck(nn.Module):
# Standard bottleneck
def __init__(self, c1, c2, shortcut=True, g=1, e=0.5): # ch_in, ch_out, shortcut, groups, expansion
super(Bottleneck, self).__init__()
c_ = int(c2 * e) # hidden channels
self.cv1 = Conv(c1, c_, 1, 1)
self.cv2 = Conv(c_, c2, 3, 1, g=g)
self.add = shortcut and c1 == c2
def forward(self, x):
return x + self.cv2(self.cv1(x)) if self.add else self.cv2(self.cv1(x))
class BottleneckCSP(nn.Module):
# CSP Bottleneck https://github.com/WongKinYiu/CrossStagePartialNetworks
def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5): # ch_in, ch_out, number, shortcut, groups, expansion
super(BottleneckCSP, self).__init__()
c_ = int(c2 * e) # hidden channels
self.cv1 = Conv(c1, c_, 1, 1)
self.cv2 = nn.Conv2d(c1, c_, 1, 1, bias=False)
self.cv3 = nn.Conv2d(c_, c_, 1, 1, bias=False)
self.cv4 = Conv(2 * c_, c2, 1, 1)
self.bn = nn.BatchNorm2d(2 * c_) # applied to cat(cv2, cv3)
self.act = nn.LeakyReLU(0.1, inplace=True)
self.m = nn.Sequential(*[Bottleneck(c_, c_, shortcut, g, e=1.0) for _ in range(n)])
def forward(self, x):
y1 = self.cv3(self.m(self.cv1(x)))
y2 = self.cv2(x)
return self.cv4(self.act(self.bn(torch.cat((y1, y2), dim=1))))
class SPP(nn.Module):
# Spatial pyramid pooling layer used in YOLOv3-SPP
def __init__(self, c1, c2, k=(5, 9, 13)):
super(SPP, self).__init__()
c_ = c1 // 2 # hidden channels
self.cv1 = Conv(c1, c_, 1, 1)
self.cv2 = Conv(c_ * (len(k) + 1), c2, 1, 1)
self.m = nn.ModuleList([nn.MaxPool2d(kernel_size=x, stride=1, padding=x // 2) for x in k])
def forward(self, x):
x = self.cv1(x)
return self.cv2(torch.cat([x] + [m(x) for m in self.m], 1))
class Focus(nn.Module):
# Focus wh information into c-space
def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True): # ch_in, ch_out, kernel, stride, padding, groups
super(Focus, self).__init__()
self.conv = Conv(c1 * 4, c2, k, s, p, g, act)
def forward(self, x): # x(b,c,w,h) -> y(b,4c,w/2,h/2)
return self.conv(torch.cat([x[..., ::2, ::2], x[..., 1::2, ::2], x[..., ::2, 1::2], x[..., 1::2, 1::2]], 1))
class Concat(nn.Module):
# Concatenate a list of tensors along dimension
def __init__(self, dimension=1):
super(Concat, self).__init__()
self.d = dimension
def forward(self, x):
return torch.cat(x, self.d)
class Flatten(nn.Module):
# Use after nn.AdaptiveAvgPool2d(1) to remove last 2 dimensions
@staticmethod
def forward(x):
return x.view(x.size(0), -1)
class Classify(nn.Module):
# Classification head, i.e. x(b,c1,20,20) to x(b,c2)
def __init__(self, c1, c2, k=1, s=1, p=None, g=1): # ch_in, ch_out, kernel, stride, padding, groups
super(Classify, self).__init__()
self.aap = nn.AdaptiveAvgPool2d(1) # to x(b,c1,1,1)
self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g, bias=False) # to x(b,c2,1,1)
self.flat = Flatten()
def forward(self, x):
z = torch.cat([self.aap(y) for y in (x if isinstance(x, list) else [x])], 1) # cat if list
return self.flat(self.conv(z)) # flatten to x(b,c2)
| [
"[email protected]"
] | |
2efc75247312dda6a4b3a75b13341709c8291fe0 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p4VQE/R4/benchmark/startPyquil304.py | bbb13e55f149de899519a4a930fcac157e7752b1 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,220 | py | # qubit number=4
# total number=13
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
prog = Program() # circuit begin
prog += H(0) # number=1
prog += H(1) # number=2
prog += H(2) # number=3
prog += CNOT(0,2) # number=7
prog += X(2) # number=8
prog += CNOT(0,2) # number=9
prog += CNOT(3,1) # number=10
prog += H(3) # number=4
prog += Y(3) # number=5
prog += X(0) # number=11
prog += X(0) # number=12
# circuit end
return prog
def summrise_results(bitstrings) -> dict:
d = {}
for l in bitstrings:
if d.get(l) is None:
d[l] = 1
else:
d[l] = d[l] + 1
return d
if __name__ == '__main__':
prog = make_circuit()
qvm = get_qc('4q-qvm')
results = qvm.run_and_measure(prog,1024)
bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
bitstrings = [''.join(map(str, l)) for l in bitstrings]
writefile = open("../data/startPyquil304.csv","w")
print(summrise_results(bitstrings),file=writefile)
writefile.close()
| [
"[email protected]"
] | |
95e1ea6aacd9e0b2bfe38b9c9de93cfd60182a95 | 51108a50ffb48ad154f587c230045bb783f22240 | /bfgame/factories/recipes/base.py | f4d2fbb68a4894ed9e44bdc747ab6a6932072734 | [
"MIT"
] | permissive | ChrisLR/BasicDungeonRL | c90bd0866c457557cccbad24e14689d5d6db7b00 | b293d40bd9a0d3b7aec41b5e1d58441165997ff1 | refs/heads/master | 2021-06-15T13:56:53.888646 | 2019-08-05T16:33:57 | 2019-08-05T16:33:57 | 104,269,987 | 3 | 0 | MIT | 2019-08-05T16:28:23 | 2017-09-20T21:35:19 | Python | UTF-8 | Python | false | false | 161 | py | class Recipe(object):
name = ""
base_object_type = None
depends_on = []
@staticmethod
def build_components(object_type, game):
pass
| [
"[email protected]"
] | |
94912c9ed339cdf676610f0ca9397675dcf1e0ec | f9a8ee37334771f37edda863db08a7dcccc9522f | /AtCoder/Contest/ABC/ZONeエナジー プログラミングコンテスト/abc200E.py | dc144abd33d75de438010a0aa9fffe4eff052492 | [] | no_license | shimmee/competitive-programming | 25b008ee225858b7b208c3f3ca7681e33f6c0190 | 894f0b7d557d6997789af3fcf91fe65a33619080 | refs/heads/master | 2023-06-07T13:07:17.850769 | 2021-07-05T17:20:47 | 2021-07-05T17:20:47 | 331,076,780 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 529 | py | # ZONeエナジー プログラミングコンテスト E
# URL:
# Date:
# ---------- Ideas ----------
#
# ------------------- Solution --------------------
#
# ------------------- Answer --------------------
#code:python
# ------------------ Sample Input -------------------
# ----------------- Length of time ------------------
#
# -------------- Editorial / my impression -------------
#
# ----------------- Category ------------------
#AtCoder
#AC_with_editorial #解説AC
#wanna_review #hard復習 #復習したい
| [
"[email protected]"
] | |
c723f5fdff701d3e8e5da3916b313407906b7a1e | 377e3a552fb807febc18ce036af77edbce93ca19 | /searching algo/exponential_search.py | 776eff5ca5b6589ddfd1b7d6f43e0a9e8c67c45e | [] | no_license | souravs17031999/100dayscodingchallenge | 940eb9b6d6037be4fc0dd5605f9f808614085bd9 | d05966f3e6875a5ec5a8870b9d2627be570d18d9 | refs/heads/master | 2022-10-29T11:05:46.762554 | 2022-09-28T13:04:32 | 2022-09-28T13:04:32 | 215,993,823 | 44 | 12 | null | 2022-08-18T14:58:50 | 2019-10-18T09:55:03 | Python | UTF-8 | Python | false | false | 1,882 | py | # Program to search the element using exponential search algorithm
# IDEA: repeatedly double an index until it bounds a range [i//2, i] that must contain
# the key, then run binary search inside that range (already ordered) to check whether
# the key actually exists
# TIME: O(lg(i)), where i is the index of the element being searched for (if present),
# assuming the list is already sorted; compared to plain binary search this is much
# faster, especially when the key lies near the beginning of the list
def binary_search(l, start, end, key):
while(start <= end):
middle = (start + end) // 2
if key < l[middle]:
end = middle - 1
elif key > l[middle]:
start = middle + 1
else:
return middle
return -1
# function to implement exponential search
def exponential_search(arr, key):
# base case
if arr[0] == key:
return 0
# starting with 1th index
i = 1
n = len(arr)
    # find the first probe index i (a power of two) whose element exceeds the key:
    # while arr[i] <= key, keep doubling the range, which is valid because the list
    # is sorted; the invariant i < n - 1 keeps the probe from running out of bounds
while i < n - 1 and arr[i] <= key:
i *= 2
        # print(i)  # debug: uncomment to watch the probe index double
# lower bound will be i/2 , since we already have doubled then we have found greater number,
# and higher bound will be whatever last greater number index we have found
return binary_search(arr, i//2, i, key)
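# worked trace (for the driver below): with arr = [2, 3, 4, 10, 20] and key = 10,
# i doubles 1 -> 2 -> 4 (arr[4] = 20 > 10 ends the loop), so binary_search runs
# over indices i//2..i = 2..4 and returns index 3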
# driver function
if __name__ == '__main__':
arr = [2, 3, 4, 10, 20]
key = 10
index = exponential_search(arr, key)
if index == -1:
print("element not found !")
else:
print(f"element found at : {index}")
| [
"[email protected]"
] | |
ff4e0d6e7c0b10941ced2e6a74ccfc027bb1206b | b50f8de2f35858f866b8f7d54da2994e5b59a391 | /src/dataload/sources/entrez/entrez_genomic_pos.py | 839f8410cfc258609ad333cca65c85f34f67dca0 | [
"Apache-2.0"
] | permissive | SuLab/mygene.info | 455127c4e0bcae61eb36d76496dfc4139be0f584 | 506d7b1d2a7e4de55bdebba8671dc8a09fc303b2 | refs/heads/master | 2020-06-03T11:27:34.021692 | 2017-06-12T20:58:45 | 2017-06-12T20:58:45 | 54,933,630 | 20 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,148 | py | '''
Populates MICROBE gene entries with genomic position data
Currently updates the 120 microbial taxids that are NCBI Reference Sequences
run get_ref_microbe_taxids function to get an updated file for TAXIDS_FILE
when it's necessary.
'''
import os.path
from biothings.utils.common import (dump, loadobj, get_timestamp)
from utils.dataload import (tab2list, load_start, load_done)
from dataload import get_data_folder
DATA_FOLDER = get_data_folder('entrez')
print('DATA_FOLDER: ' + DATA_FOLDER)
__metadata__ = {
'__collection__': 'entrez_genomic_pos',
}
TAXIDS_FILE = os.path.join(DATA_FOLDER, "../ref_microbe_taxids.pyobj")
DATAFILE = os.path.join(DATA_FOLDER, 'gene/gene2refseq.gz')
def load_genedoc(self):
"""
Loads gene data from NCBI's refseq2gene.gz file.
Parses it based on genomic position data and refseq status provided by the
list of taxids from get_ref_microbe_taxids() as lookup table
:return:
"""
taxids = loadobj(TAXIDS_FILE)
taxid_set = set(taxids)
load_start(DATAFILE)
def _includefn(ld):
return ld[0] in taxid_set # match taxid from taxid_set
cols_included = [0, 1, 7, 9, 10, 11] # 0-based col idx
gene2genomic_pos_li = tab2list(DATAFILE, cols_included, header=1,
includefn=_includefn)
count = 0
last_id = None
for gene in gene2genomic_pos_li:
count += 1
strand = 1 if gene[5] == '+' else -1
_id = gene[1]
mgi_dict = {
'_id': _id,
'genomic_pos': {
'start': int(gene[3]),
'end': int(gene[4]),
'chr': gene[2],
'strand': strand
}
}
if _id != last_id:
# rows with dup _id will be skipped
yield mgi_dict
last_id = _id
load_done('[%d]' % count)
def get_mapping(self):
mapping = {
"genomic_pos": {
"dynamic": False,
"type": "nested",
"properties": {
"chr": {"type": "string"},
"start": {"type": "long"},
"end": {"type": "long"},
"strand": {
"type": "byte",
"index": "no"
},
},
},
}
return mapping
def get_ref_microbe_taxids():
"""
Downloads the latest bacterial genome assembly summary from the NCBI genome
ftp site and generate a list of taxids of the bacterial reference genomes.
:return:
"""
import urllib.request
import csv
urlbase = 'ftp://ftp.ncbi.nlm.nih.gov'
urlextension = '/genomes/refseq/bacteria/assembly_summary.txt'
assembly = urllib.request.urlopen(urlbase + urlextension)
datareader = csv.reader(assembly.read().decode().splitlines(), delimiter="\t")
taxid = []
for row in datareader:
if len(row) == 1 and row[0].startswith("#"):
continue
if row[4] in ['reference genome','representative genome']:
taxid.append(row[5])
ts = get_timestamp()
dump(taxid, "ref_microbe_taxids_{}.pyobj".format(ts))
return taxid
| [
"[email protected]"
] | |
0ae246e21eb23160ee3be8dc5060109d11903209 | 26f862c5f17fd97beb38be35b4b5937673929c9b | /swagger_client/models/system_object.py | f2f7c5ffd1642cfd9026a3adcb69acada10676a8 | [] | no_license | m-wendt/swagger-client | bf146841fa4e7eb6add01c09822eb01d89defa5e | 2db96983a900dbb1f5d32c5e66d190e5c0d9b3dc | refs/heads/master | 2020-11-25T22:06:23.487954 | 2019-12-18T15:56:21 | 2019-12-18T15:56:21 | 228,865,549 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,441 | py | # coding: utf-8
"""
Save.TV API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class SystemObject(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
}
attribute_map = {
}
def __init__(self): # noqa: E501
"""SystemObject - a model defined in Swagger""" # noqa: E501
self.discriminator = None
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(SystemObject, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, SystemObject):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
cd1b616721d53514d80440788a48f49edc7432fc | 6189f34eff2831e3e727cd7c5e43bc5b591adffc | /WebMirror/management/rss_parser_funcs/feed_parse_extractMayomtlHomeBlog.py | b24bf15613f06fa3f5fec756e2f050bb98d368d9 | [
"BSD-3-Clause"
] | permissive | fake-name/ReadableWebProxy | 24603660b204a9e7965cfdd4a942ff62d7711e27 | ca2e086818433abc08c014dd06bfd22d4985ea2a | refs/heads/master | 2023-09-04T03:54:50.043051 | 2023-08-26T16:08:46 | 2023-08-26T16:08:46 | 39,611,770 | 207 | 20 | BSD-3-Clause | 2023-09-11T15:48:15 | 2015-07-24T04:30:43 | Python | UTF-8 | Python | false | false | 938 | py | def extractMayomtlHomeBlog(item):
'''
Parser for 'mayomtl.home.blog'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('My Lover Was Stolen, And I Was Kicked Out Of The Hero’s Party, But I Awakened To The EX Skill “Fixed Damage” And Became Invincible. Now, Let’s Begin Some Revenge',
'My Lover Was Stolen, And I Was Kicked Out Of The Hero’s Party, But I Awakened To The EX Skill “Fixed Damage” And Became Invincible. Now, Let’s Begin Some Revenge', 'translated'),
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False | [
"[email protected]"
] | |
ebf9e16d0dc1998f35d44ba5017f92cdab150035 | d77af24d09dc00a4b7d3e0bdc14b7d2727c96495 | /RouToolPa/Tools/AsseblyQC/PurgeDups.py | 68fc6d159a6e577e289bfe832a2558a7f6313423 | [] | no_license | mahajrod/RouToolPa | 14ee0f7fce78c53e8639e770caa6ffb0dfd82fce | 9b0cd0f0817a23cd3f37b3a55f83ce2d8abc71d8 | refs/heads/master | 2023-08-19T19:15:49.876175 | 2023-08-12T12:27:39 | 2023-08-12T12:27:39 | 181,844,151 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 5,532 | py | #!/usr/bin/env python
from pathlib import Path
import pandas as pd
import numpy as np
from RouToolPa.Routines import MathRoutines
from RouToolPa.Tools.Abstract import Tool
class PurgeDups(Tool):
def __init__(self, path="", max_threads=4):
Tool.__init__(self, "augustus", path=path, max_threads=max_threads)
def convert_coverage_file_to_bed(self, input_file, output_prefix):
length_dict = {}
coverage_dict = {}
mean_coverage_dict = {}
median_coverage_dict = {}
with self.metaopen(input_file, "r", buffering=100000000) as in_fd, \
self.metaopen(output_prefix + ".bed", "w", buffering=100000000) as out_fd:
scaffold, length = in_fd.readline()[1:].split()
length_dict[scaffold] = int(length)
coverage_dict[scaffold] = {}
for line in in_fd:
if line[0] == ">":
scaffold, length = line[1:].split()
length_dict[scaffold] = int(length)
coverage_dict[scaffold] = {}
continue
#print(line)
value_list = list(map(int, line.strip().split()))
value_list[0] -= 1 # convert to zero-based and half open coordinates
out_fd.write("{0}\t{1}\n".format(scaffold, "\t".join(map(str, value_list))))
#print(value_list)
if value_list[-1] not in coverage_dict[scaffold]:
coverage_dict[scaffold][value_list[-1]] = value_list[1] - value_list[0]
else:
coverage_dict[scaffold][value_list[-1]] += value_list[1] - value_list[0]
for scaffold in coverage_dict:
median_coverage_dict[scaffold] = MathRoutines.median_from_dict(coverage_dict[scaffold])
mean_coverage_dict[scaffold] = MathRoutines.mean_from_dict(coverage_dict[scaffold])
stat_df = pd.DataFrame.from_dict(length_dict, columns=["length", ], orient='index').sort_values(by=["length"], ascending=False)
stat_df.index.name = "scaffold"
stat_df["mean_cov"] = pd.Series(mean_coverage_dict)
stat_df["median_cov"] = pd.Series(median_coverage_dict)
stat_df.to_csv(output_prefix + ".stat", sep="\t", header=False, index=True)
stat_df[["length"]].to_csv(output_prefix + ".len", sep="\t", header=False, index=True)
return stat_df
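    # expected input is a purge_dups-style per-base coverage file (illustrative sketch):
    #   >scaffold_1 1000
    #   1 500 12
    #   501 1000 25
    # i.e. '>name length' headers followed by 1-based 'start end coverage' rows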
def add_lengths_to_dups_bed(self, input_file, length_file, output_file):
if isinstance(length_file, (str, Path)):
length_df = pd.read_csv(length_file, sep="\t", header=None, index_col=0, names=["scaffold", "length"])
else:
length_df = length_file
dups_bed_df = pd.read_csv(input_file, sep="\t", header=None, index_col=0, names=["scaffold", "start", "end", "type", "overlapping_scaffold"])
dups_bed_df["overlap_len"] = dups_bed_df["end"] - dups_bed_df["start"]
dups_bed_df["scaffold_len"] = length_df["length"]
dups_bed_df["overlapping_scaffold_len"] = dups_bed_df["overlapping_scaffold"].apply(lambda s: length_df.loc[s, "length"])
dups_bed_df["overlap_faction"] = dups_bed_df["overlap_len"] / dups_bed_df["scaffold_len"]
dups_bed_df["overlap_faction_overlapping_scaffold"] = dups_bed_df["overlap_len"] / dups_bed_df["overlapping_scaffold_len"]
def count_fraction(df):
scaffold_len = df["scaffold_len"].iloc[0]
sorted_df = df[["start", "end"]].sort_values(by=["start", "end"])
fraction_df = [list(sorted_df.iloc[0])]
for row in sorted_df.itertuples(index=False):
if row[0] <= fraction_df[-1][1]:
if row[1] > fraction_df[-1][1]:
fraction_df[-1][1] = row[1]
else:
fraction_df.append(list(row))
fraction_df = pd.DataFrame(fraction_df, columns=["start", "end"])
fraction_df["fraction"] = (fraction_df["end"] - fraction_df["start"]) / scaffold_len
return sum(fraction_df["fraction"])
haplo_fraction_df = dups_bed_df[["start", "end", "scaffold_len"]].groupby(by='scaffold').apply(count_fraction)
dups_bed_df["cumulative_overlap_fraction"] = haplo_fraction_df
with open(output_file, "w") as out_fd:
out_fd.write("#{0}\n".format("\t".join(["scaffold", "start", "end", "type", "overlapping_scaffold",
"overlap_len", "scaffold_len", "overlapping_scaffold_len",
"overlap_faction", "overlap_faction_overlapping_scaffold",
"cumulative_overlap_fraction"])))
dups_bed_df.to_csv(out_fd, sep="\t", header=False, index=True, na_rep=".")
#print(haplo_fraction_df)
return dups_bed_df
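    # Example usage (a sketch; the file names are assumptions):
    #
    #   purge.add_lengths_to_dups_bed("dups.bed", "coverage.len", "dups.with_len.bed")
    #
    # "dups.bed" is the purge_dups output (scaffold, start, end, type,
    # overlapping_scaffold); the ".len" file is produced by
    # convert_coverage_file_to_bed() above.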
"""
def count_contig_fraction_in_haplotype(self, input_file_with_len, output_file):
if isinstance(input_file_with_len, (str, Path)):
haplo_df = pd.read_csv(input_file_with_len, sep="\t", header=None, index_col=0,
names=["scaffold", "start", "end", "type", "overlapping_scaffold",
"overlap_len", "scaffold_len", "overlapping_scaffold_len",
"overlap_faction,%", "overlap_faction_overlapping_scaffold,%"])
else:
haplo_df = input_file_with_len
print(haplo_df)
"""
| [
"[email protected]"
] | |
# ===== bgoonz/UsefulResourceRepo2.0 :: /MY_REPOS/JAMSTACK-TEMPLATES/_EVENMOAR/USERS/GREGBER/argos/examples/with-django/screenshot/urls.py (Python, MIT) =====
from django.conf.urls import url
from . import views
urlpatterns = [url(r"^$", views.index, name="index")]
# ===== kakekakeka/ballistica :: /assets/src/ba_data/python/bastd/game/kingofthehill.py (Python, MIT) =====
# Copyright (c) 2011-2020 Eric Froemling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------
"""Defines the King of the Hill game."""
# ba_meta require api 6
# (see https://ballistica.net/wiki/meta-tag-system)
from __future__ import annotations
import weakref
from enum import Enum
from typing import TYPE_CHECKING
import ba
from bastd.actor.flag import Flag
from bastd.actor.playerspaz import PlayerSpaz
from bastd.actor.scoreboard import Scoreboard
from bastd.gameutils import SharedObjects
if TYPE_CHECKING:
from weakref import ReferenceType
from typing import Any, Type, List, Dict, Optional, Sequence, Union
class FlagState(Enum):
"""States our single flag can be in."""
NEW = 0
UNCONTESTED = 1
CONTESTED = 2
HELD = 3
class Player(ba.Player['Team']):
"""Our player type for this game."""
def __init__(self) -> None:
self.time_at_flag = 0
class Team(ba.Team[Player]):
"""Our team type for this game."""
def __init__(self, time_remaining: int) -> None:
self.time_remaining = time_remaining
# ba_meta export game
class KingOfTheHillGame(ba.TeamGameActivity[Player, Team]):
"""Game where a team wins by holding a 'hill' for a set amount of time."""
name = 'King of the Hill'
description = 'Secure the flag for a set length of time.'
available_settings = [
ba.IntSetting(
'Hold Time',
min_value=10,
default=30,
increment=10,
),
ba.IntChoiceSetting(
'Time Limit',
choices=[
('None', 0),
('1 Minute', 60),
('2 Minutes', 120),
('5 Minutes', 300),
('10 Minutes', 600),
('20 Minutes', 1200),
],
default=0,
),
ba.FloatChoiceSetting(
'Respawn Times',
choices=[
('Shorter', 0.25),
('Short', 0.5),
('Normal', 1.0),
('Long', 2.0),
('Longer', 4.0),
],
default=1.0,
),
]
scoreconfig = ba.ScoreConfig(label='Time Held')
@classmethod
def supports_session_type(cls, sessiontype: Type[ba.Session]) -> bool:
return issubclass(sessiontype, ba.MultiTeamSession)
@classmethod
def get_supported_maps(cls, sessiontype: Type[ba.Session]) -> List[str]:
return ba.getmaps('king_of_the_hill')
def __init__(self, settings: dict):
super().__init__(settings)
shared = SharedObjects.get()
self._scoreboard = Scoreboard()
self._swipsound = ba.getsound('swip')
self._tick_sound = ba.getsound('tick')
self._countdownsounds = {
10: ba.getsound('announceTen'),
9: ba.getsound('announceNine'),
8: ba.getsound('announceEight'),
7: ba.getsound('announceSeven'),
6: ba.getsound('announceSix'),
5: ba.getsound('announceFive'),
4: ba.getsound('announceFour'),
3: ba.getsound('announceThree'),
2: ba.getsound('announceTwo'),
1: ba.getsound('announceOne')
}
self._flag_pos: Optional[Sequence[float]] = None
self._flag_state: Optional[FlagState] = None
self._flag: Optional[Flag] = None
self._flag_light: Optional[ba.Node] = None
self._scoring_team: Optional[ReferenceType[Team]] = None
self._hold_time = int(settings['Hold Time'])
self._time_limit = float(settings['Time Limit'])
self._flag_region_material = ba.Material()
self._flag_region_material.add_actions(
conditions=('they_have_material', shared.player_material),
actions=(
('modify_part_collision', 'collide', True),
('modify_part_collision', 'physical', False),
('call', 'at_connect',
ba.Call(self._handle_player_flag_region_collide, True)),
('call', 'at_disconnect',
ba.Call(self._handle_player_flag_region_collide, False)),
))
# Base class overrides.
self.default_music = ba.MusicType.SCARY
def get_instance_description(self) -> Union[str, Sequence]:
return 'Secure the flag for ${ARG1} seconds.', self._hold_time
def get_instance_description_short(self) -> Union[str, Sequence]:
return 'secure the flag for ${ARG1} seconds', self._hold_time
def create_team(self, sessionteam: ba.SessionTeam) -> Team:
return Team(time_remaining=self._hold_time)
def on_begin(self) -> None:
super().on_begin()
shared = SharedObjects.get()
self.setup_standard_time_limit(self._time_limit)
self.setup_standard_powerup_drops()
self._flag_pos = self.map.get_flag_position(None)
ba.timer(1.0, self._tick, repeat=True)
self._flag_state = FlagState.NEW
Flag.project_stand(self._flag_pos)
self._flag = Flag(position=self._flag_pos,
touchable=False,
color=(1, 1, 1))
self._flag_light = ba.newnode('light',
attrs={
'position': self._flag_pos,
'intensity': 0.2,
'height_attenuated': False,
'radius': 0.4,
'color': (0.2, 0.2, 0.2)
})
# Flag region.
flagmats = [self._flag_region_material, shared.region_material]
ba.newnode('region',
attrs={
'position': self._flag_pos,
'scale': (1.8, 1.8, 1.8),
'type': 'sphere',
'materials': flagmats
})
self._update_flag_state()
def _tick(self) -> None:
self._update_flag_state()
# Give holding players points.
for player in self.players:
if player.time_at_flag > 0:
self.stats.player_scored(player,
3,
screenmessage=False,
display=False)
if self._scoring_team is None:
scoring_team = None
else:
scoring_team = self._scoring_team()
if scoring_team:
if scoring_team.time_remaining > 0:
ba.playsound(self._tick_sound)
scoring_team.time_remaining = max(0,
scoring_team.time_remaining - 1)
self._update_scoreboard()
if scoring_team.time_remaining > 0:
assert self._flag is not None
self._flag.set_score_text(str(scoring_team.time_remaining))
# Announce numbers we have sounds for.
numsound = self._countdownsounds.get(scoring_team.time_remaining)
if numsound is not None:
ba.playsound(numsound)
            # Time fully held; this team wins, so end the game.
if scoring_team.time_remaining <= 0:
self.end_game()
def end_game(self) -> None:
results = ba.GameResults()
for team in self.teams:
results.set_team_score(team, self._hold_time - team.time_remaining)
self.end(results=results, announce_delay=0)
def _update_flag_state(self) -> None:
holding_teams = set(player.team for player in self.players
if player.time_at_flag)
prev_state = self._flag_state
assert self._flag_light
assert self._flag is not None
assert self._flag.node
if len(holding_teams) > 1:
self._flag_state = FlagState.CONTESTED
self._scoring_team = None
self._flag_light.color = (0.6, 0.6, 0.1)
self._flag.node.color = (1.0, 1.0, 0.4)
elif len(holding_teams) == 1:
holding_team = list(holding_teams)[0]
self._flag_state = FlagState.HELD
self._scoring_team = weakref.ref(holding_team)
self._flag_light.color = ba.normalized_color(holding_team.color)
self._flag.node.color = holding_team.color
else:
self._flag_state = FlagState.UNCONTESTED
self._scoring_team = None
self._flag_light.color = (0.2, 0.2, 0.2)
self._flag.node.color = (1, 1, 1)
if self._flag_state != prev_state:
ba.playsound(self._swipsound)
def _handle_player_flag_region_collide(self, colliding: bool) -> None:
try:
player = ba.getcollision().opposingnode.getdelegate(
PlayerSpaz, True).getplayer(Player, True)
except ba.NotFoundError:
return
        # Different parts of us can collide, so a single value isn't enough;
        # also don't count it if we're dead (flying heads shouldn't be able to
        # win the game :-).
if colliding and player.is_alive():
player.time_at_flag += 1
else:
player.time_at_flag = max(0, player.time_at_flag - 1)
self._update_flag_state()
def _update_scoreboard(self) -> None:
for team in self.teams:
self._scoreboard.set_team_value(team,
team.time_remaining,
self._hold_time,
countdown=True)
def handlemessage(self, msg: Any) -> Any:
if isinstance(msg, ba.PlayerDiedMessage):
super().handlemessage(msg) # Augment default.
# No longer can count as time_at_flag once dead.
player = msg.getplayer(Player)
player.time_at_flag = 0
self._update_flag_state()
self.respawn_player(player)
# ===== frankobe/lintcode :: /121_word-ladder-ii/word-ladder-ii.py (Python, no license) =====
# coding:utf-8
'''
@Copyright:LintCode
@Author: frankobe
@Problem: http://www.lintcode.com/problem/word-ladder-ii
@Language: Python
@Datetime: 15-09-10 00:37
'''
from string import ascii_lowercase
from collections import defaultdict
class Solution:
# @param start, a string
# @param end, a string
# @param dict, a set of string
# @return a list of lists of string
def findLadders(self, start, end, dict):
dict.add(end)
level = {start}
size = len(start)
parents = defaultdict(set)
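        # BFS one level at a time; parents[word] holds every predecessor of
        # `word` on some shortest path, so all shortest ladders can be rebuilt.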
while level and end not in parents:
next_level = defaultdict(set)
for node in level:
for char in ascii_lowercase:
for i in range(size):
n = node[:i]+char+node[i+1:]
if n in dict and n not in parents:
next_level[n].add(node)
level = next_level
parents.update(next_level)
res = [[end]]
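        # Walk back from `end` through parents to expand every shortest ladder.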
while res and res[0][0] != start:
res = [[p]+r for r in res for p in parents[r[0]]]
return res
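# Example (a sketch; the order of ladders in the result may vary):
#   Solution().findLadders("hit", "cog", {"hot", "dot", "dog", "lot", "log"})
#   -> [['hit', 'hot', 'dot', 'dog', 'cog'], ['hit', 'hot', 'lot', 'log', 'cog']]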
# ===== fossabot/LineAlpha-Full-Ver :: /Gen/ChannelService.py (Python, no license) =====
#
# Autogenerated by Thrift Compiler (0.9.3)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
import logging
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except ImportError:
fastbinary = None
class Iface:
def approveChannelAndIssueChannelToken(self, channelId):
"""
Parameters:
- channelId
"""
pass
def approveChannelAndIssueRequestToken(self, channelId, otpId):
"""
Parameters:
- channelId
- otpId
"""
pass
def fetchNotificationItems(self, localRev):
"""
Parameters:
- localRev
"""
pass
def getApprovedChannels(self, lastSynced, locale):
"""
Parameters:
- lastSynced
- locale
"""
pass
def getChannelInfo(self, channelId, locale):
"""
Parameters:
- channelId
- locale
"""
pass
def getChannelNotificationSetting(self, channelId, locale):
"""
Parameters:
- channelId
- locale
"""
pass
def getChannelNotificationSettings(self, locale):
"""
Parameters:
- locale
"""
pass
def getChannels(self, lastSynced, locale):
"""
Parameters:
- lastSynced
- locale
"""
pass
def getDomains(self, lastSynced):
"""
Parameters:
- lastSynced
"""
pass
def getFriendChannelMatrices(self, channelIds):
"""
Parameters:
- channelIds
"""
pass
def getNotificationBadgeCount(self, localRev):
"""
Parameters:
- localRev
"""
pass
def issueChannelToken(self, channelId):
"""
Parameters:
- channelId
"""
pass
def issueRequestToken(self, channelId, otpId):
"""
Parameters:
- channelId
- otpId
"""
pass
def issueRequestTokenWithAuthScheme(self, channelId, otpId, authScheme, returnUrl):
"""
Parameters:
- channelId
- otpId
- authScheme
- returnUrl
"""
pass
def reserveCoinUse(self, request, locale):
"""
Parameters:
- request
- locale
"""
pass
def revokeChannel(self, channelId):
"""
Parameters:
- channelId
"""
pass
def syncChannelData(self, lastSynced, locale):
"""
Parameters:
- lastSynced
- locale
"""
pass
def updateChannelNotificationSetting(self, setting):
"""
Parameters:
- setting
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def approveChannelAndIssueChannelToken(self, channelId):
"""
Parameters:
- channelId
"""
self.send_approveChannelAndIssueChannelToken(channelId)
return self.recv_approveChannelAndIssueChannelToken()
def send_approveChannelAndIssueChannelToken(self, channelId):
self._oprot.writeMessageBegin('approveChannelAndIssueChannelToken', TMessageType.CALL, self._seqid)
args = approveChannelAndIssueChannelToken_args()
args.channelId = channelId
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_approveChannelAndIssueChannelToken(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = approveChannelAndIssueChannelToken_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "approveChannelAndIssueChannelToken failed: unknown result")
def approveChannelAndIssueRequestToken(self, channelId, otpId):
"""
Parameters:
- channelId
- otpId
"""
self.send_approveChannelAndIssueRequestToken(channelId, otpId)
return self.recv_approveChannelAndIssueRequestToken()
def send_approveChannelAndIssueRequestToken(self, channelId, otpId):
self._oprot.writeMessageBegin('approveChannelAndIssueRequestToken', TMessageType.CALL, self._seqid)
args = approveChannelAndIssueRequestToken_args()
args.channelId = channelId
args.otpId = otpId
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_approveChannelAndIssueRequestToken(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = approveChannelAndIssueRequestToken_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "approveChannelAndIssueRequestToken failed: unknown result")
def fetchNotificationItems(self, localRev):
"""
Parameters:
- localRev
"""
self.send_fetchNotificationItems(localRev)
return self.recv_fetchNotificationItems()
def send_fetchNotificationItems(self, localRev):
self._oprot.writeMessageBegin('fetchNotificationItems', TMessageType.CALL, self._seqid)
args = fetchNotificationItems_args()
args.localRev = localRev
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_fetchNotificationItems(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = fetchNotificationItems_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchNotificationItems failed: unknown result")
def getApprovedChannels(self, lastSynced, locale):
"""
Parameters:
- lastSynced
- locale
"""
self.send_getApprovedChannels(lastSynced, locale)
return self.recv_getApprovedChannels()
def send_getApprovedChannels(self, lastSynced, locale):
self._oprot.writeMessageBegin('getApprovedChannels', TMessageType.CALL, self._seqid)
args = getApprovedChannels_args()
args.lastSynced = lastSynced
args.locale = locale
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getApprovedChannels(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getApprovedChannels_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getApprovedChannels failed: unknown result")
def getChannelInfo(self, channelId, locale):
"""
Parameters:
- channelId
- locale
"""
self.send_getChannelInfo(channelId, locale)
return self.recv_getChannelInfo()
def send_getChannelInfo(self, channelId, locale):
self._oprot.writeMessageBegin('getChannelInfo', TMessageType.CALL, self._seqid)
args = getChannelInfo_args()
args.channelId = channelId
args.locale = locale
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getChannelInfo(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getChannelInfo_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getChannelInfo failed: unknown result")
def getChannelNotificationSetting(self, channelId, locale):
"""
Parameters:
- channelId
- locale
"""
self.send_getChannelNotificationSetting(channelId, locale)
return self.recv_getChannelNotificationSetting()
def send_getChannelNotificationSetting(self, channelId, locale):
self._oprot.writeMessageBegin('getChannelNotificationSetting', TMessageType.CALL, self._seqid)
args = getChannelNotificationSetting_args()
args.channelId = channelId
args.locale = locale
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getChannelNotificationSetting(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getChannelNotificationSetting_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getChannelNotificationSetting failed: unknown result")
def getChannelNotificationSettings(self, locale):
"""
Parameters:
- locale
"""
self.send_getChannelNotificationSettings(locale)
return self.recv_getChannelNotificationSettings()
def send_getChannelNotificationSettings(self, locale):
self._oprot.writeMessageBegin('getChannelNotificationSettings', TMessageType.CALL, self._seqid)
args = getChannelNotificationSettings_args()
args.locale = locale
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getChannelNotificationSettings(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getChannelNotificationSettings_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getChannelNotificationSettings failed: unknown result")
def getChannels(self, lastSynced, locale):
"""
Parameters:
- lastSynced
- locale
"""
self.send_getChannels(lastSynced, locale)
return self.recv_getChannels()
def send_getChannels(self, lastSynced, locale):
self._oprot.writeMessageBegin('getChannels', TMessageType.CALL, self._seqid)
args = getChannels_args()
args.lastSynced = lastSynced
args.locale = locale
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getChannels(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getChannels_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getChannels failed: unknown result")
def getDomains(self, lastSynced):
"""
Parameters:
- lastSynced
"""
self.send_getDomains(lastSynced)
return self.recv_getDomains()
def send_getDomains(self, lastSynced):
self._oprot.writeMessageBegin('getDomains', TMessageType.CALL, self._seqid)
args = getDomains_args()
args.lastSynced = lastSynced
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getDomains(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getDomains_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getDomains failed: unknown result")
def getFriendChannelMatrices(self, channelIds):
"""
Parameters:
- channelIds
"""
self.send_getFriendChannelMatrices(channelIds)
return self.recv_getFriendChannelMatrices()
def send_getFriendChannelMatrices(self, channelIds):
self._oprot.writeMessageBegin('getFriendChannelMatrices', TMessageType.CALL, self._seqid)
args = getFriendChannelMatrices_args()
args.channelIds = channelIds
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getFriendChannelMatrices(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getFriendChannelMatrices_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getFriendChannelMatrices failed: unknown result")
def getNotificationBadgeCount(self, localRev):
"""
Parameters:
- localRev
"""
self.send_getNotificationBadgeCount(localRev)
return self.recv_getNotificationBadgeCount()
def send_getNotificationBadgeCount(self, localRev):
self._oprot.writeMessageBegin('getNotificationBadgeCount', TMessageType.CALL, self._seqid)
args = getNotificationBadgeCount_args()
args.localRev = localRev
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getNotificationBadgeCount(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getNotificationBadgeCount_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "getNotificationBadgeCount failed: unknown result")
def issueChannelToken(self, channelId):
"""
Parameters:
- channelId
"""
self.send_issueChannelToken(channelId)
return self.recv_issueChannelToken()
def send_issueChannelToken(self, channelId):
self._oprot.writeMessageBegin('issueChannelToken', TMessageType.CALL, self._seqid)
args = issueChannelToken_args()
args.channelId = channelId
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_issueChannelToken(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = issueChannelToken_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "issueChannelToken failed: unknown result")
def issueRequestToken(self, channelId, otpId):
"""
Parameters:
- channelId
- otpId
"""
self.send_issueRequestToken(channelId, otpId)
return self.recv_issueRequestToken()
def send_issueRequestToken(self, channelId, otpId):
self._oprot.writeMessageBegin('issueRequestToken', TMessageType.CALL, self._seqid)
args = issueRequestToken_args()
args.channelId = channelId
args.otpId = otpId
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_issueRequestToken(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = issueRequestToken_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "issueRequestToken failed: unknown result")
def issueRequestTokenWithAuthScheme(self, channelId, otpId, authScheme, returnUrl):
"""
Parameters:
- channelId
- otpId
- authScheme
- returnUrl
"""
self.send_issueRequestTokenWithAuthScheme(channelId, otpId, authScheme, returnUrl)
return self.recv_issueRequestTokenWithAuthScheme()
def send_issueRequestTokenWithAuthScheme(self, channelId, otpId, authScheme, returnUrl):
self._oprot.writeMessageBegin('issueRequestTokenWithAuthScheme', TMessageType.CALL, self._seqid)
args = issueRequestTokenWithAuthScheme_args()
args.channelId = channelId
args.otpId = otpId
args.authScheme = authScheme
args.returnUrl = returnUrl
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_issueRequestTokenWithAuthScheme(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = issueRequestTokenWithAuthScheme_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "issueRequestTokenWithAuthScheme failed: unknown result")
def reserveCoinUse(self, request, locale):
"""
Parameters:
- request
- locale
"""
self.send_reserveCoinUse(request, locale)
return self.recv_reserveCoinUse()
def send_reserveCoinUse(self, request, locale):
self._oprot.writeMessageBegin('reserveCoinUse', TMessageType.CALL, self._seqid)
args = reserveCoinUse_args()
args.request = request
args.locale = locale
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_reserveCoinUse(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = reserveCoinUse_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "reserveCoinUse failed: unknown result")
def revokeChannel(self, channelId):
"""
Parameters:
- channelId
"""
self.send_revokeChannel(channelId)
self.recv_revokeChannel()
def send_revokeChannel(self, channelId):
self._oprot.writeMessageBegin('revokeChannel', TMessageType.CALL, self._seqid)
args = revokeChannel_args()
args.channelId = channelId
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_revokeChannel(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = revokeChannel_result()
result.read(iprot)
iprot.readMessageEnd()
if result.e is not None:
raise result.e
return
def syncChannelData(self, lastSynced, locale):
"""
Parameters:
- lastSynced
- locale
"""
self.send_syncChannelData(lastSynced, locale)
return self.recv_syncChannelData()
def send_syncChannelData(self, lastSynced, locale):
self._oprot.writeMessageBegin('syncChannelData', TMessageType.CALL, self._seqid)
args = syncChannelData_args()
args.lastSynced = lastSynced
args.locale = locale
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_syncChannelData(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = syncChannelData_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
raise TApplicationException(TApplicationException.MISSING_RESULT, "syncChannelData failed: unknown result")
def updateChannelNotificationSetting(self, setting):
"""
Parameters:
- setting
"""
self.send_updateChannelNotificationSetting(setting)
self.recv_updateChannelNotificationSetting()
def send_updateChannelNotificationSetting(self, setting):
self._oprot.writeMessageBegin('updateChannelNotificationSetting', TMessageType.CALL, self._seqid)
args = updateChannelNotificationSetting_args()
args.setting = setting
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_updateChannelNotificationSetting(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = updateChannelNotificationSetting_result()
result.read(iprot)
iprot.readMessageEnd()
if result.e is not None:
raise result.e
return
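# Example client wiring (a sketch, not part of the generated code; the endpoint
# URL, custom header, and channel id below are illustrative assumptions):
#
#   from thrift.transport import THttpClient
#   from thrift.protocol import TCompactProtocol
#
#   transport = THttpClient.THttpClient('https://example.com/CH')  # hypothetical endpoint
#   transport.setCustomHeaders({'X-Line-Access': 'AUTH_TOKEN'})    # hypothetical auth header
#   client = Client(TCompactProtocol.TCompactProtocol(transport))
#   token = client.approveChannelAndIssueChannelToken('CHANNEL_ID')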
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["approveChannelAndIssueChannelToken"] = Processor.process_approveChannelAndIssueChannelToken
self._processMap["approveChannelAndIssueRequestToken"] = Processor.process_approveChannelAndIssueRequestToken
self._processMap["fetchNotificationItems"] = Processor.process_fetchNotificationItems
self._processMap["getApprovedChannels"] = Processor.process_getApprovedChannels
self._processMap["getChannelInfo"] = Processor.process_getChannelInfo
self._processMap["getChannelNotificationSetting"] = Processor.process_getChannelNotificationSetting
self._processMap["getChannelNotificationSettings"] = Processor.process_getChannelNotificationSettings
self._processMap["getChannels"] = Processor.process_getChannels
self._processMap["getDomains"] = Processor.process_getDomains
self._processMap["getFriendChannelMatrices"] = Processor.process_getFriendChannelMatrices
self._processMap["getNotificationBadgeCount"] = Processor.process_getNotificationBadgeCount
self._processMap["issueChannelToken"] = Processor.process_issueChannelToken
self._processMap["issueRequestToken"] = Processor.process_issueRequestToken
self._processMap["issueRequestTokenWithAuthScheme"] = Processor.process_issueRequestTokenWithAuthScheme
self._processMap["reserveCoinUse"] = Processor.process_reserveCoinUse
self._processMap["revokeChannel"] = Processor.process_revokeChannel
self._processMap["syncChannelData"] = Processor.process_syncChannelData
self._processMap["updateChannelNotificationSetting"] = Processor.process_updateChannelNotificationSetting
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
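  # Example server wiring (a sketch; MyHandler is a hypothetical class that
  # implements Iface, and the port is arbitrary):
  #
  #   from thrift.transport import TSocket, TTransport
  #   from thrift.server import TServer
  #
  #   server = TServer.TSimpleServer(Processor(MyHandler()),
  #                                  TSocket.TServerSocket(port=9090),
  #                                  TTransport.TBufferedTransportFactory(),
  #                                  TBinaryProtocol.TBinaryProtocolFactory())
  #   server.serve()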
def process_approveChannelAndIssueChannelToken(self, seqid, iprot, oprot):
args = approveChannelAndIssueChannelToken_args()
args.read(iprot)
iprot.readMessageEnd()
result = approveChannelAndIssueChannelToken_result()
try:
result.success = self._handler.approveChannelAndIssueChannelToken(args.channelId)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("approveChannelAndIssueChannelToken", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_approveChannelAndIssueRequestToken(self, seqid, iprot, oprot):
args = approveChannelAndIssueRequestToken_args()
args.read(iprot)
iprot.readMessageEnd()
result = approveChannelAndIssueRequestToken_result()
try:
result.success = self._handler.approveChannelAndIssueRequestToken(args.channelId, args.otpId)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("approveChannelAndIssueRequestToken", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_fetchNotificationItems(self, seqid, iprot, oprot):
args = fetchNotificationItems_args()
args.read(iprot)
iprot.readMessageEnd()
result = fetchNotificationItems_result()
try:
result.success = self._handler.fetchNotificationItems(args.localRev)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("fetchNotificationItems", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getApprovedChannels(self, seqid, iprot, oprot):
args = getApprovedChannels_args()
args.read(iprot)
iprot.readMessageEnd()
result = getApprovedChannels_result()
try:
result.success = self._handler.getApprovedChannels(args.lastSynced, args.locale)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getApprovedChannels", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getChannelInfo(self, seqid, iprot, oprot):
args = getChannelInfo_args()
args.read(iprot)
iprot.readMessageEnd()
result = getChannelInfo_result()
try:
result.success = self._handler.getChannelInfo(args.channelId, args.locale)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getChannelInfo", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getChannelNotificationSetting(self, seqid, iprot, oprot):
args = getChannelNotificationSetting_args()
args.read(iprot)
iprot.readMessageEnd()
result = getChannelNotificationSetting_result()
try:
result.success = self._handler.getChannelNotificationSetting(args.channelId, args.locale)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getChannelNotificationSetting", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getChannelNotificationSettings(self, seqid, iprot, oprot):
args = getChannelNotificationSettings_args()
args.read(iprot)
iprot.readMessageEnd()
result = getChannelNotificationSettings_result()
try:
result.success = self._handler.getChannelNotificationSettings(args.locale)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getChannelNotificationSettings", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getChannels(self, seqid, iprot, oprot):
args = getChannels_args()
args.read(iprot)
iprot.readMessageEnd()
result = getChannels_result()
try:
result.success = self._handler.getChannels(args.lastSynced, args.locale)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getChannels", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getDomains(self, seqid, iprot, oprot):
args = getDomains_args()
args.read(iprot)
iprot.readMessageEnd()
result = getDomains_result()
try:
result.success = self._handler.getDomains(args.lastSynced)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getDomains", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getFriendChannelMatrices(self, seqid, iprot, oprot):
args = getFriendChannelMatrices_args()
args.read(iprot)
iprot.readMessageEnd()
result = getFriendChannelMatrices_result()
try:
result.success = self._handler.getFriendChannelMatrices(args.channelIds)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getFriendChannelMatrices", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getNotificationBadgeCount(self, seqid, iprot, oprot):
args = getNotificationBadgeCount_args()
args.read(iprot)
iprot.readMessageEnd()
result = getNotificationBadgeCount_result()
try:
result.success = self._handler.getNotificationBadgeCount(args.localRev)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getNotificationBadgeCount", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_issueChannelToken(self, seqid, iprot, oprot):
args = issueChannelToken_args()
args.read(iprot)
iprot.readMessageEnd()
result = issueChannelToken_result()
try:
result.success = self._handler.issueChannelToken(args.channelId)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("issueChannelToken", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_issueRequestToken(self, seqid, iprot, oprot):
args = issueRequestToken_args()
args.read(iprot)
iprot.readMessageEnd()
result = issueRequestToken_result()
try:
result.success = self._handler.issueRequestToken(args.channelId, args.otpId)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("issueRequestToken", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_issueRequestTokenWithAuthScheme(self, seqid, iprot, oprot):
args = issueRequestTokenWithAuthScheme_args()
args.read(iprot)
iprot.readMessageEnd()
result = issueRequestTokenWithAuthScheme_result()
try:
result.success = self._handler.issueRequestTokenWithAuthScheme(args.channelId, args.otpId, args.authScheme, args.returnUrl)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("issueRequestTokenWithAuthScheme", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_reserveCoinUse(self, seqid, iprot, oprot):
args = reserveCoinUse_args()
args.read(iprot)
iprot.readMessageEnd()
result = reserveCoinUse_result()
try:
result.success = self._handler.reserveCoinUse(args.request, args.locale)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("reserveCoinUse", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_revokeChannel(self, seqid, iprot, oprot):
args = revokeChannel_args()
args.read(iprot)
iprot.readMessageEnd()
result = revokeChannel_result()
try:
self._handler.revokeChannel(args.channelId)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("revokeChannel", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_syncChannelData(self, seqid, iprot, oprot):
args = syncChannelData_args()
args.read(iprot)
iprot.readMessageEnd()
result = syncChannelData_result()
try:
result.success = self._handler.syncChannelData(args.lastSynced, args.locale)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("syncChannelData", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_updateChannelNotificationSetting(self, seqid, iprot, oprot):
args = updateChannelNotificationSetting_args()
args.read(iprot)
iprot.readMessageEnd()
result = updateChannelNotificationSetting_result()
try:
self._handler.updateChannelNotificationSetting(args.setting)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except ChannelException as e:
msg_type = TMessageType.REPLY
result.e = e
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("updateChannelNotificationSetting", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class approveChannelAndIssueChannelToken_args:
"""
Attributes:
- channelId
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'channelId', None, None, ), # 1
)
def __init__(self, channelId=None,):
self.channelId = channelId
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.channelId = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
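  # (All generated argument/result structs below follow this same pattern:
  # read() walks the wire fields by id and skips unknown ones, while write()
  # serializes only the fields that are set.)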
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('approveChannelAndIssueChannelToken_args')
if self.channelId is not None:
oprot.writeFieldBegin('channelId', TType.STRING, 1)
oprot.writeString(self.channelId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.channelId)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class approveChannelAndIssueChannelToken_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (ChannelToken, ChannelToken.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ChannelToken()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('approveChannelAndIssueChannelToken_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class approveChannelAndIssueRequestToken_args:
"""
Attributes:
- channelId
- otpId
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'channelId', None, None, ), # 1
(2, TType.STRING, 'otpId', None, None, ), # 2
)
def __init__(self, channelId=None, otpId=None,):
self.channelId = channelId
self.otpId = otpId
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.channelId = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.otpId = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('approveChannelAndIssueRequestToken_args')
if self.channelId is not None:
oprot.writeFieldBegin('channelId', TType.STRING, 1)
oprot.writeString(self.channelId)
oprot.writeFieldEnd()
if self.otpId is not None:
oprot.writeFieldBegin('otpId', TType.STRING, 2)
oprot.writeString(self.otpId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.channelId)
value = (value * 31) ^ hash(self.otpId)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class approveChannelAndIssueRequestToken_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('approveChannelAndIssueRequestToken_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class fetchNotificationItems_args:
"""
Attributes:
- localRev
"""
thrift_spec = (
None, # 0
None, # 1
(2, TType.I64, 'localRev', None, None, ), # 2
)
def __init__(self, localRev=None,):
self.localRev = localRev
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 2:
if ftype == TType.I64:
self.localRev = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('fetchNotificationItems_args')
if self.localRev is not None:
oprot.writeFieldBegin('localRev', TType.I64, 2)
oprot.writeI64(self.localRev)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.localRev)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class fetchNotificationItems_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (NotificationFetchResult, NotificationFetchResult.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = NotificationFetchResult()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('fetchNotificationItems_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
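# A minimal usage sketch for the structs above (assumptions: this module also
# defines the generated service client class `Client`, the standard Apache
# Thrift Python runtime is installed, and host/port are placeholders -- none of
# this is confirmed by the surrounding code). Wrapped in a function so that
# importing this module stays side-effect free.
def _example_fetch_notification_items():
    from thrift.transport import TSocket, TTransport
    from thrift.protocol import TBinaryProtocol
    # Plain buffered transport + binary protocol, matching the protocol
    # checks in the generated read()/write() methods above.
    transport = TTransport.TBufferedTransport(TSocket.TSocket('example.invalid', 9090))
    protocol = TBinaryProtocol.TBinaryProtocol(transport)
    client = Client(protocol)  # assumed generated client name
    transport.open()
    try:
        # localRev corresponds to fetchNotificationItems_args.localRev
        # (field id 2, I64); the reply deserializes into
        # fetchNotificationItems_result, raising ChannelException on error.
        return client.fetchNotificationItems(localRev=0)
    finally:
        transport.close()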
class getApprovedChannels_args:
"""
Attributes:
- lastSynced
- locale
"""
thrift_spec = (
None, # 0
None, # 1
(2, TType.I64, 'lastSynced', None, None, ), # 2
(3, TType.STRING, 'locale', None, None, ), # 3
)
def __init__(self, lastSynced=None, locale=None,):
self.lastSynced = lastSynced
self.locale = locale
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 2:
if ftype == TType.I64:
self.lastSynced = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.locale = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getApprovedChannels_args')
if self.lastSynced is not None:
oprot.writeFieldBegin('lastSynced', TType.I64, 2)
oprot.writeI64(self.lastSynced)
oprot.writeFieldEnd()
if self.locale is not None:
oprot.writeFieldBegin('locale', TType.STRING, 3)
oprot.writeString(self.locale)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.lastSynced)
value = (value * 31) ^ hash(self.locale)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getApprovedChannels_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (ApprovedChannelInfos, ApprovedChannelInfos.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ApprovedChannelInfos()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getApprovedChannels_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
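# getApprovedChannels above -- like getChannels, getDomains and syncChannelData
# further down -- carries a `lastSynced` I64 at field id 2, presumably a
# server-issued revision/timestamp for incremental sync (with 0 requesting a
# full snapshot). That reading is an interpretation of the field name, not
# something this file confirms.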
class getChannelInfo_args:
"""
Attributes:
- channelId
- locale
"""
thrift_spec = (
None, # 0
None, # 1
(2, TType.STRING, 'channelId', None, None, ), # 2
(3, TType.STRING, 'locale', None, None, ), # 3
)
def __init__(self, channelId=None, locale=None,):
self.channelId = channelId
self.locale = locale
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 2:
if ftype == TType.STRING:
self.channelId = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.locale = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getChannelInfo_args')
if self.channelId is not None:
oprot.writeFieldBegin('channelId', TType.STRING, 2)
oprot.writeString(self.channelId)
oprot.writeFieldEnd()
if self.locale is not None:
oprot.writeFieldBegin('locale', TType.STRING, 3)
oprot.writeString(self.locale)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.channelId)
value = (value * 31) ^ hash(self.locale)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getChannelInfo_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (ChannelInfo, ChannelInfo.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ChannelInfo()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getChannelInfo_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getChannelNotificationSetting_args:
"""
Attributes:
- channelId
- locale
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'channelId', None, None, ), # 1
(2, TType.STRING, 'locale', None, None, ), # 2
)
def __init__(self, channelId=None, locale=None,):
self.channelId = channelId
self.locale = locale
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.channelId = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.locale = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getChannelNotificationSetting_args')
if self.channelId is not None:
oprot.writeFieldBegin('channelId', TType.STRING, 1)
oprot.writeString(self.channelId)
oprot.writeFieldEnd()
if self.locale is not None:
oprot.writeFieldBegin('locale', TType.STRING, 2)
oprot.writeString(self.locale)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.channelId)
value = (value * 31) ^ hash(self.locale)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getChannelNotificationSetting_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (ChannelNotificationSetting, ChannelNotificationSetting.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ChannelNotificationSetting()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getChannelNotificationSetting_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getChannelNotificationSettings_args:
"""
Attributes:
- locale
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'locale', None, None, ), # 1
)
def __init__(self, locale=None,):
self.locale = locale
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.locale = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getChannelNotificationSettings_args')
if self.locale is not None:
oprot.writeFieldBegin('locale', TType.STRING, 1)
oprot.writeString(self.locale)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.locale)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getChannelNotificationSettings_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.LIST, 'success', (TType.STRUCT,(ChannelNotificationSetting, ChannelNotificationSetting.thrift_spec)), None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_etype638, _size635) = iprot.readListBegin()
for _i639 in xrange(_size635):
_elem640 = ChannelNotificationSetting()
_elem640.read(iprot)
self.success.append(_elem640)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getChannelNotificationSettings_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
for iter641 in self.success:
iter641.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
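# The _etypeNNN/_sizeNNN/_elemNNN/_iterNNN names above are compiler-generated
# temporaries for (de)serializing container fields; the numeric suffixes are
# unique across the generated module and carry no meaning beyond that.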
class getChannels_args:
"""
Attributes:
- lastSynced
- locale
"""
thrift_spec = (
None, # 0
None, # 1
(2, TType.I64, 'lastSynced', None, None, ), # 2
(3, TType.STRING, 'locale', None, None, ), # 3
)
def __init__(self, lastSynced=None, locale=None,):
self.lastSynced = lastSynced
self.locale = locale
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 2:
if ftype == TType.I64:
self.lastSynced = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.locale = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getChannels_args')
if self.lastSynced is not None:
oprot.writeFieldBegin('lastSynced', TType.I64, 2)
oprot.writeI64(self.lastSynced)
oprot.writeFieldEnd()
if self.locale is not None:
oprot.writeFieldBegin('locale', TType.STRING, 3)
oprot.writeString(self.locale)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.lastSynced)
value = (value * 31) ^ hash(self.locale)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getChannels_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (ChannelInfos, ChannelInfos.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ChannelInfos()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getChannels_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getDomains_args:
"""
Attributes:
- lastSynced
"""
thrift_spec = (
None, # 0
None, # 1
(2, TType.I64, 'lastSynced', None, None, ), # 2
)
def __init__(self, lastSynced=None,):
self.lastSynced = lastSynced
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 2:
if ftype == TType.I64:
self.lastSynced = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getDomains_args')
if self.lastSynced is not None:
oprot.writeFieldBegin('lastSynced', TType.I64, 2)
oprot.writeI64(self.lastSynced)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.lastSynced)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getDomains_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (ChannelDomains, ChannelDomains.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ChannelDomains()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getDomains_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getFriendChannelMatrices_args:
"""
Attributes:
- channelIds
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'channelIds', (TType.STRING,None), None, ), # 1
)
def __init__(self, channelIds=None,):
self.channelIds = channelIds
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.channelIds = []
(_etype645, _size642) = iprot.readListBegin()
for _i646 in xrange(_size642):
_elem647 = iprot.readString()
self.channelIds.append(_elem647)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getFriendChannelMatrices_args')
if self.channelIds is not None:
oprot.writeFieldBegin('channelIds', TType.LIST, 1)
oprot.writeListBegin(TType.STRING, len(self.channelIds))
for iter648 in self.channelIds:
oprot.writeString(iter648)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.channelIds)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getFriendChannelMatrices_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (FriendChannelMatricesResponse, FriendChannelMatricesResponse.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = FriendChannelMatricesResponse()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getFriendChannelMatrices_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getNotificationBadgeCount_args:
"""
Attributes:
- localRev
"""
thrift_spec = (
None, # 0
None, # 1
(2, TType.I64, 'localRev', None, None, ), # 2
)
def __init__(self, localRev=None,):
self.localRev = localRev
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 2:
if ftype == TType.I64:
self.localRev = iprot.readI64()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getNotificationBadgeCount_args')
if self.localRev is not None:
oprot.writeFieldBegin('localRev', TType.I64, 2)
oprot.writeI64(self.localRev)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.localRev)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getNotificationBadgeCount_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.I32, 'success', None, None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I32:
self.success = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getNotificationBadgeCount_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I32, 0)
oprot.writeI32(self.success)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
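# Contrast with the struct-valued results earlier in the file: primitive
# results such as getNotificationBadgeCount's I32 (and the STRING results
# below) are read and written inline via readI32()/writeI32() and friends,
# while struct results delegate to the nested type's read()/write().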
class issueChannelToken_args:
"""
Attributes:
- channelId
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'channelId', None, None, ), # 1
)
def __init__(self, channelId=None,):
self.channelId = channelId
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.channelId = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('issueChannelToken_args')
if self.channelId is not None:
oprot.writeFieldBegin('channelId', TType.STRING, 1)
oprot.writeString(self.channelId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.channelId)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class issueChannelToken_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (ChannelToken, ChannelToken.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ChannelToken()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('issueChannelToken_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class issueRequestToken_args:
"""
Attributes:
- channelId
- otpId
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'channelId', None, None, ), # 1
(2, TType.STRING, 'otpId', None, None, ), # 2
)
def __init__(self, channelId=None, otpId=None,):
self.channelId = channelId
self.otpId = otpId
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.channelId = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.otpId = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('issueRequestToken_args')
if self.channelId is not None:
oprot.writeFieldBegin('channelId', TType.STRING, 1)
oprot.writeString(self.channelId)
oprot.writeFieldEnd()
if self.otpId is not None:
oprot.writeFieldBegin('otpId', TType.STRING, 2)
oprot.writeString(self.otpId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.channelId)
value = (value * 31) ^ hash(self.otpId)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class issueRequestToken_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('issueRequestToken_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
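# A minimal serialization sketch (assumption: only the standard Apache Thrift
# Python runtime; no live service needed). It round-trips one of the structs
# defined above through the plain binary protocol, exercising the
# write()/read() slow paths rather than the fastbinary-accelerated branch.
def _example_roundtrip_issue_request_token_args():
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    out_buf = TTransport.TMemoryBuffer()
    issueRequestToken_args(channelId='123456789', otpId='otp-placeholder').write(
        TBinaryProtocol.TBinaryProtocol(out_buf))
    # Feed the serialized bytes back through a fresh buffer and parse them.
    in_proto = TBinaryProtocol.TBinaryProtocol(
        TTransport.TMemoryBuffer(out_buf.getvalue()))
    parsed = issueRequestToken_args()
    parsed.read(in_proto)
    return parsed  # equals the original struct, via the generated __eq__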
class issueRequestTokenWithAuthScheme_args:
"""
Attributes:
- channelId
- otpId
- authScheme
- returnUrl
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'channelId', None, None, ), # 1
(2, TType.STRING, 'otpId', None, None, ), # 2
(3, TType.LIST, 'authScheme', (TType.STRING,None), None, ), # 3
(4, TType.STRING, 'returnUrl', None, None, ), # 4
)
def __init__(self, channelId=None, otpId=None, authScheme=None, returnUrl=None,):
self.channelId = channelId
self.otpId = otpId
self.authScheme = authScheme
self.returnUrl = returnUrl
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.channelId = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.otpId = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.LIST:
self.authScheme = []
(_etype652, _size649) = iprot.readListBegin()
for _i653 in xrange(_size649):
_elem654 = iprot.readString()
self.authScheme.append(_elem654)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.returnUrl = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('issueRequestTokenWithAuthScheme_args')
if self.channelId is not None:
oprot.writeFieldBegin('channelId', TType.STRING, 1)
oprot.writeString(self.channelId)
oprot.writeFieldEnd()
if self.otpId is not None:
oprot.writeFieldBegin('otpId', TType.STRING, 2)
oprot.writeString(self.otpId)
oprot.writeFieldEnd()
if self.authScheme is not None:
oprot.writeFieldBegin('authScheme', TType.LIST, 3)
oprot.writeListBegin(TType.STRING, len(self.authScheme))
for iter655 in self.authScheme:
oprot.writeString(iter655)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.returnUrl is not None:
oprot.writeFieldBegin('returnUrl', TType.STRING, 4)
oprot.writeString(self.returnUrl)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.channelId)
value = (value * 31) ^ hash(self.otpId)
value = (value * 31) ^ hash(self.authScheme)
value = (value * 31) ^ hash(self.returnUrl)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class issueRequestTokenWithAuthScheme_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (RequestTokenResponse, RequestTokenResponse.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = RequestTokenResponse()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('issueRequestTokenWithAuthScheme_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class reserveCoinUse_args:
"""
Attributes:
- request
- locale
"""
thrift_spec = (
None, # 0
None, # 1
(2, TType.STRUCT, 'request', (CoinUseReservation, CoinUseReservation.thrift_spec), None, ), # 2
(3, TType.STRING, 'locale', None, None, ), # 3
)
def __init__(self, request=None, locale=None,):
self.request = request
self.locale = locale
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 2:
if ftype == TType.STRUCT:
self.request = CoinUseReservation()
self.request.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.locale = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('reserveCoinUse_args')
if self.request is not None:
oprot.writeFieldBegin('request', TType.STRUCT, 2)
self.request.write(oprot)
oprot.writeFieldEnd()
if self.locale is not None:
oprot.writeFieldBegin('locale', TType.STRING, 3)
oprot.writeString(self.locale)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.request)
value = (value * 31) ^ hash(self.locale)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class reserveCoinUse_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRING, 'success', None, None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('reserveCoinUse_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class revokeChannel_args:
"""
Attributes:
- channelId
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'channelId', None, None, ), # 1
)
def __init__(self, channelId=None,):
self.channelId = channelId
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.channelId = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('revokeChannel_args')
if self.channelId is not None:
oprot.writeFieldBegin('channelId', TType.STRING, 1)
oprot.writeString(self.channelId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.channelId)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
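# revokeChannel returns void, so its _result struct below has no success slot:
# field id 0 is None in the thrift_spec, a successful call deserializes to an
# empty struct, and only the ChannelException field (id 1) is populated on
# error.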
class revokeChannel_result:
"""
Attributes:
- e
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, e=None,):
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('revokeChannel_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class syncChannelData_args:
"""
Attributes:
- lastSynced
- locale
"""
thrift_spec = (
None, # 0
None, # 1
(2, TType.I64, 'lastSynced', None, None, ), # 2
(3, TType.STRING, 'locale', None, None, ), # 3
)
def __init__(self, lastSynced=None, locale=None,):
self.lastSynced = lastSynced
self.locale = locale
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 2:
if ftype == TType.I64:
self.lastSynced = iprot.readI64()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.locale = iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('syncChannelData_args')
if self.lastSynced is not None:
oprot.writeFieldBegin('lastSynced', TType.I64, 2)
oprot.writeI64(self.lastSynced)
oprot.writeFieldEnd()
if self.locale is not None:
oprot.writeFieldBegin('locale', TType.STRING, 3)
oprot.writeString(self.locale)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.lastSynced)
value = (value * 31) ^ hash(self.locale)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class syncChannelData_result:
"""
Attributes:
- success
- e
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (ChannelSyncDatas, ChannelSyncDatas.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, success=None, e=None,):
self.success = success
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ChannelSyncDatas()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('syncChannelData_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class updateChannelNotificationSetting_args:
"""
Attributes:
- setting
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'setting', (TType.STRUCT,(ChannelNotificationSetting, ChannelNotificationSetting.thrift_spec)), None, ), # 1
)
def __init__(self, setting=None,):
self.setting = setting
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.setting = []
(_etype659, _size656) = iprot.readListBegin()
for _i660 in xrange(_size656):
_elem661 = ChannelNotificationSetting()
_elem661.read(iprot)
self.setting.append(_elem661)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('updateChannelNotificationSetting_args')
if self.setting is not None:
oprot.writeFieldBegin('setting', TType.LIST, 1)
oprot.writeListBegin(TType.STRUCT, len(self.setting))
for iter662 in self.setting:
iter662.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.setting)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class updateChannelNotificationSetting_result:
"""
Attributes:
- e
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', (ChannelException, ChannelException.thrift_spec), None, ), # 1
)
def __init__(self, e=None,):
self.e = e
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = ChannelException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('updateChannelNotificationSetting_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.e)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other) | [
"[email protected]"
] | |
3a867c97d04bc12c43529626104a44e5cde357d0 | 5982a9c9c9cb682ec9732f9eeb438b62c61f2e99 | /Problem_131/my_solution.py | 0957503ab542faeb851bc44ae52794dc24263800 | [] | no_license | chenshanghao/LeetCode_learning | 6fdf98473be8f2240dd86d5586bbd1bbb95d6b0c | acf2395f3b946054009d4543f2a13e83402323d3 | refs/heads/master | 2021-10-23T05:23:01.970535 | 2019-03-15T05:08:54 | 2019-03-15T05:08:54 | 114,688,902 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 787 | py | class Solution(object):
def partition(self, s):
"""
:type s: str
:rtype: List[List[str]]
"""
self.result = []
if len(s) == 0: return self.result
self.backtrack([],0,s)
return self.result
def IsPalindrome(self, string):
i, j = 0, len(string)-1
while(i<=j):
if string[i]!= string[j]:
return False
i+=1
j-=1
return True
def backtrack(self, temp, start, s):
if start >= len(s):
self.result.append(temp[:])
for i in range(start,len(s)):
if self.IsPalindrome(s[start:i+1]):
temp.append(s[start:i+1])
self.backtrack(temp, i+1, s)
temp.pop() | [
"[email protected]"
] | |
e97d900e1e7624fde472f0927a9acdd56581b60c | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/59/usersdata/216/26475/submittedfiles/testes.py | 54f602251a4e6970b2390dc096dbe706d12bd81d | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 154 | py | # -*- coding: utf-8 -*-
from __future__ import division
#COMECE AQUI ABAIXO
a= float(input('digite a'))
b= float(input('digite b'))
c=(a+b)/2
print (c) | [
"[email protected]"
] | |
ecdbc102e9f8ef980acd46a217b272a871a16550 | 1efeed0fa970b05801a29ccfdc90c52bb571dd02 | /venv/bin/pip3.7 | eb97d48a42293fd28c8d07856e8ff04e1a0b9bce | [] | no_license | williamwang0/MusicGen | 2e7fe5d9b2d35d1406b8951a86a5eac6d704571e | b6411505d1fd29e13ca93e3975f3de106ad4a7d0 | refs/heads/master | 2020-07-08T15:48:33.840412 | 2020-05-27T17:30:38 | 2020-05-27T17:30:38 | 203,717,161 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 416 | 7 | #!/home/albertczhang/Desktop/Projects/Music-Gen/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3.7'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip3.7')()
)
| [
"[email protected]"
] | |
dc763b74c1dc61594084c2e1bd2444d4edaf96d4 | 9c0f691393abbeb5754e1624e0c48dfcdf857352 | /2017/Helpers/day_02.py | ce9e1ba0627d0318c61d59f26c208b83c5be9430 | [] | no_license | seligman/aoc | d0aac62eda3e6adc3c96229ca859bd2274398187 | 9de27ff2e13100770a3afa4595b15565d45bb6bc | refs/heads/master | 2023-04-02T16:45:19.032567 | 2023-03-22T15:05:33 | 2023-03-22T15:05:33 | 230,493,583 | 17 | 10 | null | null | null | null | UTF-8 | Python | false | false | 1,393 | py | #!/usr/bin/env python3
import itertools
DAY_NUM = 2
DAY_DESC = 'Day 2: Corruption Checksum'
def calc(log, values):
values = [[int(y) for y in x.replace('\t', ' ').split(' ')] for x in values]
ret = 0
ret2 = 0
for row in values:
a, b = min(row), max(row)
ret += b - a
for a, b in itertools.combinations(row, 2):
if b > a:
a, b = b, a
if a % b == 0:
ret2 += a // b
log("Second form: " + str(ret2))
return ret
def test(log):
values = [
"5 1 9 5",
"7 5 3",
"2 4 6 8",
]
if calc(log, values) == 18:
return True
else:
return False
def run(log, values):
log(calc(log, values))
if __name__ == "__main__":
import sys, os
def find_input_file():
for fn in sys.argv[1:] + ["input.txt", f"day_{DAY_NUM:0d}_input.txt", f"day_{DAY_NUM:02d}_input.txt"]:
for dn in [[], ["Puzzles"], ["..", "Puzzles"]]:
cur = os.path.join(*(dn + [fn]))
if os.path.isfile(cur): return cur
fn = find_input_file()
if fn is None: print("Unable to find input file!\nSpecify filename on command line"); exit(1)
print(f"Using '{fn}' as input file:")
with open(fn) as f: values = [x.strip("\r\n") for x in f.readlines()]
print(f"Running day {DAY_DESC}:")
run(print, values)
| [
"[email protected]"
] | |
6015c71d15294c4e5332fac46c344a18bee9eddd | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/216/usersdata/354/113785/submittedfiles/av2_p3_civil.py | e7ed6d32a539f56ce9f534bf54484c4e125a9e91 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 546 | py | # -*- coding: utf-8 -*-
import numpy as np
ordem=int(input('digite a dimensao n da matriz: '))
x=int(input('digite a linha do numero: '))
y=int(input('digite a coluna do numero: '))
matriz=np.zeros((ordem,ordem))
for i in range(0,ordem,1):
for j in range(0,ordem,1):
matriz[i,j]=int(input('digite os valores da matriz: '))
#LINHA
i=x
soma=0
for j in range(0,ordem,1):
if j!=y:
soma=soma+matriz[i,j]
#COLUNA
j=y
soma1=0
for i in range(0,ordem,1):
if i!=x:
soma1=soma1+matriz[i,j]
peso=soma+soma1
print(peso) | [
"[email protected]"
] | |
2bbc1196c0025f188016b75fc7993a434729f616 | 6af81c1e3853255f064ce58e848b34211decdd23 | /test/top/api/rest/HotelsSearchRequest.py | f79b816b5737e298bbab342a8a4f1a00cb4bc2be | [] | no_license | dacy413/TBAutoTool | d472445f54f0841f2cd461d48ec6181ae2182d92 | ca7da4638d38dd58e38c680ee03aaccf575bce7b | refs/heads/master | 2016-09-06T16:13:01.633177 | 2015-02-01T00:04:50 | 2015-02-01T00:04:50 | 29,625,228 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 438 | py | '''
Created by auto_sdk on 2015-01-20 12:36:26
'''
from top.api.base import RestApi
class HotelsSearchRequest(RestApi):
def __init__(self,domain='gw.api.taobao.com',port=80):
RestApi.__init__(self,domain, port)
self.city = None
self.country = None
self.district = None
self.domestic = None
self.name = None
self.page_no = None
self.province = None
def getapiname(self):
return 'taobao.hotels.search'
| [
"[email protected]"
] | |
70c76db1ec07449c468c62369074bb65be67d7f4 | 7920ac571217d627aad1ed8fa0b87ef1436cdb28 | /casepro/cases/migrations/0006_auto_20150508_0912.py | ba0c4ea6984a0959cd1e04d511e9dab37ee86a50 | [
"BSD-3-Clause"
] | permissive | rapidpro/casepro | 34777e5373822d41ff2e5f3995f86d009c2d1e7c | 66177c00b06b2bd6e6cad2b648feb8f28f592add | refs/heads/main | 2023-07-20T00:16:09.616516 | 2023-07-06T21:46:31 | 2023-07-06T21:46:31 | 32,147,348 | 23 | 30 | BSD-3-Clause | 2023-07-19T07:44:59 | 2015-03-13T09:31:47 | Python | UTF-8 | Python | false | false | 1,662 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("cases", "0005_auto_20150424_1427")]
operations = [
migrations.CreateModel(
name="CaseEvent",
fields=[
("id", models.AutoField(verbose_name="ID", serialize=False, auto_created=True, primary_key=True)),
("event", models.CharField(max_length=1, choices=[("R", "Contact replied")])),
("created_on", models.DateTimeField(db_index=True)),
],
),
migrations.AlterField(
model_name="case",
name="opened_on",
field=models.DateTimeField(help_text="When this case was opened", auto_now_add=True, db_index=True),
),
migrations.AlterField(
model_name="caseaction", name="created_on", field=models.DateTimeField(auto_now_add=True, db_index=True)
),
migrations.AlterField(
model_name="messageaction",
name="action",
field=models.CharField(
max_length=1,
choices=[
("F", "Flag"),
("N", "Un-flag"),
("L", "Label"),
("U", "Remove Label"),
("A", "Archive"),
("R", "Restore"),
],
),
),
migrations.AddField(
model_name="caseevent",
name="case",
field=models.ForeignKey(related_name="events", to="cases.Case", on_delete=models.PROTECT),
),
]
| [
"[email protected]"
] | |
05cd6f081aaaff3e3da677689d1dfab5b16c2bc3 | 114ad0c49ba0ca0f5d8b95151da45d54a7f05a04 | /zion/urls.py | 682b6af9853493bf4c7a20985f1b29cf2beb7d57 | [] | no_license | gray-adeyi/zion | b10cbd09588b1fdea86fff38be79c4dde1d5043c | c0d1ac64db978f8277932a82a15fa1f3f2b188f6 | refs/heads/master | 2023-01-10T00:50:17.121785 | 2020-11-11T19:24:37 | 2020-11-11T19:24:37 | 312,034,548 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,003 | py | """zion URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('core.urls')),
]
if settings.DEBUG == True:
media_url = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += media_url
| [
"[email protected]"
] | |
9364a7ea7ee0ec5d468df263e0c925c4f78ff3a7 | 9afbb6993450d1e0c3bae68e86844bd06d4419ee | /global_var/g11.py | 32a8a7f1014ea83d6ac59211fac7c41d0e675748 | [] | no_license | Jigar710/Python_Programs | 6f331caac30878655d4cca4ad97d4214c0262088 | 714a6306487eb6712f32ccb51b6a2407a81873fa | refs/heads/main | 2023-02-25T12:24:44.874199 | 2021-01-28T15:43:24 | 2021-01-28T15:43:24 | 332,869,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 53 | py | def m1():
global a
a = 10
print(a)
m1()
print(a) | [
"[email protected]"
] | |
61cbca6ae98bfe29817aceebbeaa860f7fd52ced | 3d02b79ce53a83fc0086decb2e933c767c3d55bc | /IPython/parallel/controller/scheduler.py | 00ba2f0560ea955ebb8b3f3da811083b6c61616d | [
"BSD-3-Clause"
] | permissive | minrk/ipython-py3k | 47f8a65ac9767599568c94f01540364b8593a35d | 094195eaf203e88c58f71e834bf0153842ac852c | refs/heads/master | 2023-06-08T10:51:37.423469 | 2011-07-02T01:57:42 | 2011-07-02T01:57:42 | 1,989,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,626 | py | """The Python scheduler for rich scheduling.
The Pure ZMQ scheduler does not allow routing schemes other than LRU,
nor does it check msg_id DAG dependencies. For those, a slightly slower
Python Scheduler exists.
Authors:
* Min RK
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2010-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#----------------------------------------------------------------------
# Imports
#----------------------------------------------------------------------
import logging
import sys
from datetime import datetime, timedelta
from random import randint, random
from types import FunctionType
try:
import numpy
except ImportError:
numpy = None
import zmq
from zmq.eventloop import ioloop, zmqstream
# local imports
from IPython.external.decorator import decorator
from IPython.config.application import Application
from IPython.config.loader import Config
from IPython.utils.traitlets import Instance, Dict, List, Set, Int, Enum
from IPython.parallel import error
from IPython.parallel.factory import SessionFactory
from IPython.parallel.util import connect_logger, local_logger
from .dependency import Dependency
@decorator
def logged(f,self,*args,**kwargs):
# print ("#--------------------")
self.log.debug("scheduler::%s(*%s,**%s)", f.__name__, args, kwargs)
# print ("#--")
return f(self,*args, **kwargs)
#----------------------------------------------------------------------
# Chooser functions
#----------------------------------------------------------------------
def plainrandom(loads):
"""Plain random pick."""
n = len(loads)
return randint(0,n-1)
def lru(loads):
"""Always pick the front of the line.
The content of `loads` is ignored.
Assumes LRU ordering of loads, with oldest first.
"""
return 0
def twobin(loads):
"""Pick two at random, use the LRU of the two.
The content of loads is ignored.
Assumes LRU ordering of loads, with oldest first.
"""
n = len(loads)
a = randint(0,n-1)
b = randint(0,n-1)
return min(a,b)
def weighted(loads):
"""Pick two at random using inverse load as weight.
Return the less loaded of the two.
"""
# weight 0 a million times more than 1:
weights = 1./(1e-6+numpy.array(loads))
sums = weights.cumsum()
t = sums[-1]
x = random()*t
y = random()*t
idx = 0
idy = 0
while sums[idx] < x:
idx += 1
while sums[idy] < y:
idy += 1
if weights[idy] > weights[idx]:
return idy
else:
return idx
def leastload(loads):
"""Always choose the lowest load.
If the lowest load occurs more than once, the first
occurance will be used. If loads has LRU ordering, this means
the LRU of those with the lowest load is chosen.
"""
return loads.index(min(loads))
#---------------------------------------------------------------------
# Classes
#---------------------------------------------------------------------
# store empty default dependency:
MET = Dependency([])
class TaskScheduler(SessionFactory):
"""Python TaskScheduler object.
This is the simplest object that supports msg_id based
DAG dependencies. *Only* task msg_ids are checked, not
msg_ids of jobs submitted via the MUX queue.
"""
hwm = Int(0, config=True, shortname='hwm',
help="""specify the High Water Mark (HWM) for the downstream
socket in the Task scheduler. This is the maximum number
of allowed outstanding tasks on each engine."""
)
scheme_name = Enum(('leastload', 'pure', 'lru', 'plainrandom', 'weighted', 'twobin'),
'leastload', config=True, shortname='scheme', allow_none=False,
help="""select the task scheduler scheme [default: Python LRU]
Options are: 'pure', 'lru', 'plainrandom', 'weighted', 'twobin','leastload'"""
)
def _scheme_name_changed(self, old, new):
self.log.debug("Using scheme %r"%new)
self.scheme = globals()[new]
# input arguments:
scheme = Instance(FunctionType) # function for determining the destination
def _scheme_default(self):
return leastload
client_stream = Instance(zmqstream.ZMQStream) # client-facing stream
engine_stream = Instance(zmqstream.ZMQStream) # engine-facing stream
notifier_stream = Instance(zmqstream.ZMQStream) # hub-facing sub stream
mon_stream = Instance(zmqstream.ZMQStream) # hub-facing pub stream
# internals:
graph = Dict() # dict by msg_id of [ msg_ids that depend on key ]
retries = Dict() # dict by msg_id of retries remaining (non-neg ints)
# waiting = List() # list of msg_ids ready to run, but haven't due to HWM
depending = Dict() # dict by msg_id of (msg_id, raw_msg, after, follow)
pending = Dict() # dict by engine_uuid of submitted tasks
completed = Dict() # dict by engine_uuid of completed tasks
failed = Dict() # dict by engine_uuid of failed tasks
destinations = Dict() # dict by msg_id of engine_uuids where jobs ran (reverse of completed+failed)
clients = Dict() # dict by msg_id for who submitted the task
targets = List() # list of target IDENTs
loads = List() # list of engine loads
# full = Set() # set of IDENTs that have HWM outstanding tasks
all_completed = Set() # set of all completed tasks
all_failed = Set() # set of all failed tasks
all_done = Set() # set of all finished tasks=union(completed,failed)
all_ids = Set() # set of all submitted task IDs
blacklist = Dict() # dict by msg_id of locations where a job has encountered UnmetDependency
auditor = Instance('zmq.eventloop.ioloop.PeriodicCallback')
def start(self):
self.engine_stream.on_recv(self.dispatch_result, copy=False)
self._notification_handlers = dict(
registration_notification = self._register_engine,
unregistration_notification = self._unregister_engine
)
self.notifier_stream.on_recv(self.dispatch_notification)
self.auditor = ioloop.PeriodicCallback(self.audit_timeouts, 2e3, self.loop) # 1 Hz
self.auditor.start()
self.log.info("Scheduler started [%s]"%self.scheme_name)
def resume_receiving(self):
"""Resume accepting jobs."""
self.client_stream.on_recv(self.dispatch_submission, copy=False)
def stop_receiving(self):
"""Stop accepting jobs while there are no engines.
Leave them in the ZMQ queue."""
self.client_stream.on_recv(None)
#-----------------------------------------------------------------------
# [Un]Registration Handling
#-----------------------------------------------------------------------
def dispatch_notification(self, msg):
"""dispatch register/unregister events."""
try:
idents,msg = self.session.feed_identities(msg)
except ValueError:
self.log.warn("task::Invalid Message: %r"%msg)
return
try:
msg = self.session.unpack_message(msg)
except ValueError:
self.log.warn("task::Unauthorized message from: %r"%idents)
return
msg_type = msg['msg_type']
handler = self._notification_handlers.get(msg_type, None)
if handler is None:
self.log.error("Unhandled message type: %r"%msg_type)
else:
try:
handler(str(msg['content']['queue']))
except KeyError:
self.log.error("task::Invalid notification msg: %r"%msg)
def _register_engine(self, uid):
"""New engine with ident `uid` became available."""
# head of the line:
self.targets.insert(0,uid)
self.loads.insert(0,0)
# initialize sets
self.completed[uid] = set()
self.failed[uid] = set()
self.pending[uid] = {}
if len(self.targets) == 1:
self.resume_receiving()
# rescan the graph:
self.update_graph(None)
def _unregister_engine(self, uid):
"""Existing engine with ident `uid` became unavailable."""
if len(self.targets) == 1:
# this was our only engine
self.stop_receiving()
# handle any potentially finished tasks:
self.engine_stream.flush()
# don't pop destinations, because they might be used later
# map(self.destinations.pop, self.completed.pop(uid))
# map(self.destinations.pop, self.failed.pop(uid))
# prevent this engine from receiving work
idx = self.targets.index(uid)
self.targets.pop(idx)
self.loads.pop(idx)
# wait 5 seconds before cleaning up pending jobs, since the results might
# still be incoming
if self.pending[uid]:
dc = ioloop.DelayedCallback(lambda : self.handle_stranded_tasks(uid), 5000, self.loop)
dc.start()
else:
self.completed.pop(uid)
self.failed.pop(uid)
def handle_stranded_tasks(self, engine):
"""Deal with jobs resident in an engine that died."""
lost = self.pending[engine]
for msg_id in list(lost.keys()):
if msg_id not in self.pending[engine]:
# prevent double-handling of messages
continue
raw_msg = lost[msg_id][0]
idents,msg = self.session.feed_identities(raw_msg, copy=False)
parent = self.session.unpack(msg[1].bytes)
idents = [engine, idents[0]]
# build fake error reply
try:
raise error.EngineError("Engine %r died while running task %r"%(engine, msg_id))
except:
content = error.wrap_exception()
msg = self.session.msg('apply_reply', content, parent=parent, subheader={'status':'error'})
raw_reply = list(map(zmq.Message, self.session.serialize(msg, ident=idents)))
# and dispatch it
self.dispatch_result(raw_reply)
# finally scrub completed/failed lists
self.completed.pop(engine)
self.failed.pop(engine)
#-----------------------------------------------------------------------
# Job Submission
#-----------------------------------------------------------------------
def dispatch_submission(self, raw_msg):
"""Dispatch job submission to appropriate handlers."""
# ensure targets up to date:
self.notifier_stream.flush()
try:
idents, msg = self.session.feed_identities(raw_msg, copy=False)
msg = self.session.unpack_message(msg, content=False, copy=False)
except Exception:
self.log.error("task::Invaid task msg: %r"%raw_msg, exc_info=True)
return
# send to monitor
self.mon_stream.send_multipart(['intask']+raw_msg, copy=False)
header = msg['header']
msg_id = header['msg_id']
self.all_ids.add(msg_id)
# targets
targets = set(header.get('targets', []))
retries = header.get('retries', 0)
self.retries[msg_id] = retries
# time dependencies
after = header.get('after', None)
if after:
after = Dependency(after)
if after.all:
if after.success:
after = after.difference(self.all_completed)
if after.failure:
after = after.difference(self.all_failed)
if after.check(self.all_completed, self.all_failed):
# recast as empty set, if `after` already met,
# to prevent unnecessary set comparisons
after = MET
else:
after = MET
# location dependencies
follow = Dependency(header.get('follow', []))
# turn timeouts into datetime objects:
timeout = header.get('timeout', None)
if timeout:
timeout = datetime.now() + timedelta(0,timeout,0)
args = [raw_msg, targets, after, follow, timeout]
# validate and reduce dependencies:
for dep in after,follow:
if not dep: # empty dependency
continue
# check valid:
if msg_id in dep or dep.difference(self.all_ids):
self.depending[msg_id] = args
return self.fail_unreachable(msg_id, error.InvalidDependency)
# check if unreachable:
if dep.unreachable(self.all_completed, self.all_failed):
self.depending[msg_id] = args
return self.fail_unreachable(msg_id)
if after.check(self.all_completed, self.all_failed):
# time deps already met, try to run
if not self.maybe_run(msg_id, *args):
# can't run yet
if msg_id not in self.all_failed:
# could have failed as unreachable
self.save_unmet(msg_id, *args)
else:
self.save_unmet(msg_id, *args)
def audit_timeouts(self):
"""Audit all waiting tasks for expired timeouts."""
now = datetime.now()
for msg_id in list(self.depending.keys()):
# must recheck, in case one failure cascaded to another:
if msg_id in self.depending:
raw,after,targets,follow,timeout = self.depending[msg_id]
if timeout and timeout < now:
self.fail_unreachable(msg_id, error.TaskTimeout)
def fail_unreachable(self, msg_id, why=error.ImpossibleDependency):
"""a task has become unreachable, send a reply with an ImpossibleDependency
error."""
if msg_id not in self.depending:
self.log.error("msg %r already failed!", msg_id)
return
raw_msg,targets,after,follow,timeout = self.depending.pop(msg_id)
for mid in follow.union(after):
if mid in self.graph:
self.graph[mid].remove(msg_id)
# FIXME: unpacking a message I've already unpacked, but didn't save:
idents,msg = self.session.feed_identities(raw_msg, copy=False)
header = self.session.unpack(msg[1].bytes)
try:
raise why()
except:
content = error.wrap_exception()
self.all_done.add(msg_id)
self.all_failed.add(msg_id)
msg = self.session.send(self.client_stream, 'apply_reply', content,
parent=header, ident=idents)
self.session.send(self.mon_stream, msg, ident=['outtask']+idents)
self.update_graph(msg_id, success=False)
def maybe_run(self, msg_id, raw_msg, targets, after, follow, timeout):
"""check location dependencies, and run if they are met."""
blacklist = self.blacklist.setdefault(msg_id, set())
if follow or targets or blacklist or self.hwm:
# we need a can_run filter
def can_run(idx):
# check hwm
if self.hwm and self.loads[idx] == self.hwm:
return False
target = self.targets[idx]
# check blacklist
if target in blacklist:
return False
# check targets
if targets and target not in targets:
return False
# check follow
return follow.check(self.completed[target], self.failed[target])
indices = list(filter(can_run, list(range(len(self.targets)))))
if not indices:
# couldn't run
if follow.all:
# check follow for impossibility
dests = set()
relevant = set()
if follow.success:
relevant = self.all_completed
if follow.failure:
relevant = relevant.union(self.all_failed)
for m in follow.intersection(relevant):
dests.add(self.destinations[m])
if len(dests) > 1:
self.depending[msg_id] = (raw_msg, targets, after, follow, timeout)
self.fail_unreachable(msg_id)
return False
if targets:
# check blacklist+targets for impossibility
targets.difference_update(blacklist)
if not targets or not targets.intersection(self.targets):
self.depending[msg_id] = (raw_msg, targets, after, follow, timeout)
self.fail_unreachable(msg_id)
return False
return False
else:
indices = None
self.submit_task(msg_id, raw_msg, targets, follow, timeout, indices)
return True
def save_unmet(self, msg_id, raw_msg, targets, after, follow, timeout):
"""Save a message for later submission when its dependencies are met."""
self.depending[msg_id] = [raw_msg,targets,after,follow,timeout]
# track the ids in follow or after, but not those already finished
for dep_id in after.union(follow).difference(self.all_done):
if dep_id not in self.graph:
self.graph[dep_id] = set()
self.graph[dep_id].add(msg_id)
def submit_task(self, msg_id, raw_msg, targets, follow, timeout, indices=None):
"""Submit a task to any of a subset of our targets."""
if indices:
loads = [self.loads[i] for i in indices]
else:
loads = self.loads
idx = self.scheme(loads)
if indices:
idx = indices[idx]
target = self.targets[idx]
# print (target, map(str, msg[:3]))
# send job to the engine
self.engine_stream.send(target, flags=zmq.SNDMORE, copy=False)
self.engine_stream.send_multipart(raw_msg, copy=False)
# update load
self.add_job(idx)
self.pending[target][msg_id] = (raw_msg, targets, MET, follow, timeout)
# notify Hub
content = dict(msg_id=msg_id, engine_id=target)
self.session.send(self.mon_stream, 'task_destination', content=content,
ident=['tracktask',self.session.session])
#-----------------------------------------------------------------------
# Result Handling
#-----------------------------------------------------------------------
def dispatch_result(self, raw_msg):
"""dispatch method for result replies"""
try:
idents,msg = self.session.feed_identities(raw_msg, copy=False)
msg = self.session.unpack_message(msg, content=False, copy=False)
engine = idents[0]
try:
idx = self.targets.index(engine)
except ValueError:
pass # skip load-update for dead engines
else:
self.finish_job(idx)
except Exception:
self.log.error("task::Invaid result: %r", raw_msg, exc_info=True)
return
header = msg['header']
parent = msg['parent_header']
if header.get('dependencies_met', True):
success = (header['status'] == 'ok')
msg_id = parent['msg_id']
retries = self.retries[msg_id]
if not success and retries > 0:
# failed
self.retries[msg_id] = retries - 1
self.handle_unmet_dependency(idents, parent)
else:
del self.retries[msg_id]
# relay to client and update graph
self.handle_result(idents, parent, raw_msg, success)
# send to Hub monitor
self.mon_stream.send_multipart(['outtask']+raw_msg, copy=False)
else:
self.handle_unmet_dependency(idents, parent)
def handle_result(self, idents, parent, raw_msg, success=True):
"""handle a real task result, either success or failure"""
# first, relay result to client
engine = idents[0]
client = idents[1]
# swap_ids for XREP-XREP mirror
raw_msg[:2] = [client,engine]
# print (map(str, raw_msg[:4]))
self.client_stream.send_multipart(raw_msg, copy=False)
# now, update our data structures
msg_id = parent['msg_id']
self.blacklist.pop(msg_id, None)
self.pending[engine].pop(msg_id)
if success:
self.completed[engine].add(msg_id)
self.all_completed.add(msg_id)
else:
self.failed[engine].add(msg_id)
self.all_failed.add(msg_id)
self.all_done.add(msg_id)
self.destinations[msg_id] = engine
self.update_graph(msg_id, success)
def handle_unmet_dependency(self, idents, parent):
"""handle an unmet dependency"""
engine = idents[0]
msg_id = parent['msg_id']
if msg_id not in self.blacklist:
self.blacklist[msg_id] = set()
self.blacklist[msg_id].add(engine)
args = self.pending[engine].pop(msg_id)
raw,targets,after,follow,timeout = args
if self.blacklist[msg_id] == targets:
self.depending[msg_id] = args
self.fail_unreachable(msg_id)
elif not self.maybe_run(msg_id, *args):
# resubmit failed
if msg_id not in self.all_failed:
# put it back in our dependency tree
self.save_unmet(msg_id, *args)
if self.hwm:
try:
idx = self.targets.index(engine)
except ValueError:
pass # skip load-update for dead engines
else:
if self.loads[idx] == self.hwm-1:
self.update_graph(None)
def update_graph(self, dep_id=None, success=True):
"""dep_id just finished. Update our dependency
graph and submit any jobs that just became runable.
Called with dep_id=None to update entire graph for hwm, but without finishing
a task.
"""
# print ("\n\n***********")
# pprint (dep_id)
# pprint (self.graph)
# pprint (self.depending)
# pprint (self.all_completed)
# pprint (self.all_failed)
# print ("\n\n***********\n\n")
# update any jobs that depended on the dependency
jobs = self.graph.pop(dep_id, [])
# recheck *all* jobs if
# a) we have HWM and an engine just become no longer full
# or b) dep_id was given as None
if dep_id is None or self.hwm and any( [ load==self.hwm-1 for load in self.loads ]):
jobs = list(self.depending.keys())
for msg_id in jobs:
raw_msg, targets, after, follow, timeout = self.depending[msg_id]
if after.unreachable(self.all_completed, self.all_failed)\
or follow.unreachable(self.all_completed, self.all_failed):
self.fail_unreachable(msg_id)
elif after.check(self.all_completed, self.all_failed): # time deps met, maybe run
if self.maybe_run(msg_id, raw_msg, targets, MET, follow, timeout):
self.depending.pop(msg_id)
for mid in follow.union(after):
if mid in self.graph:
self.graph[mid].remove(msg_id)
#----------------------------------------------------------------------
# methods to be overridden by subclasses
#----------------------------------------------------------------------
def add_job(self, idx):
"""Called after self.targets[idx] just got the job with header.
Override with subclasses. The default ordering is simple LRU.
The default loads are the number of outstanding jobs."""
self.loads[idx] += 1
for lis in (self.targets, self.loads):
lis.append(lis.pop(idx))
def finish_job(self, idx):
"""Called after self.targets[idx] just finished a job.
Override with subclasses."""
self.loads[idx] -= 1
def launch_scheduler(in_addr, out_addr, mon_addr, not_addr, config=None,
logname='root', log_url=None, loglevel=logging.DEBUG,
identity=b'task', in_thread=False):
ZMQStream = zmqstream.ZMQStream
if config:
# unwrap dict back into Config
config = Config(config)
if in_thread:
# use instance() to get the same Context/Loop as our parent
ctx = zmq.Context.instance()
loop = ioloop.IOLoop.instance()
else:
# in a process, don't use instance()
# for safety with multiprocessing
ctx = zmq.Context()
loop = ioloop.IOLoop()
ins = ZMQStream(ctx.socket(zmq.XREP),loop)
ins.setsockopt(zmq.IDENTITY, identity)
ins.bind(in_addr)
outs = ZMQStream(ctx.socket(zmq.XREP),loop)
outs.setsockopt(zmq.IDENTITY, identity)
outs.bind(out_addr)
mons = zmqstream.ZMQStream(ctx.socket(zmq.PUB),loop)
mons.connect(mon_addr)
nots = zmqstream.ZMQStream(ctx.socket(zmq.SUB),loop)
nots.setsockopt(zmq.SUBSCRIBE, b'')
nots.connect(not_addr)
# setup logging.
if in_thread:
log = Application.instance().log
else:
if log_url:
log = connect_logger(logname, ctx, log_url, root="scheduler", loglevel=loglevel)
else:
log = local_logger(logname, loglevel)
scheduler = TaskScheduler(client_stream=ins, engine_stream=outs,
mon_stream=mons, notifier_stream=nots,
loop=loop, log=log,
config=config)
scheduler.start()
if not in_thread:
try:
loop.start()
except KeyboardInterrupt:
print ("interrupted, exiting...", file=sys.__stderr__)
| [
"[email protected]"
] | |
b453aed2c254c9389e6d16e6972bda279a7aa2b9 | cf3891c6122d21584bb6d7ad81c41e26755c1083 | /tests/gmprocess/subcommands/import_test.py | e189a580a0d4d0d4d0c4ed40a44128a0147e9ff5 | [
"Unlicense",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | mmoschetti-usgs/groundmotion-processing | 5cb6412eebe258dd3d30e085d68affc20553d744 | 944667e90b5a0a01f7017a676f60e2958b1eb902 | refs/heads/master | 2022-11-03T04:32:00.353837 | 2022-10-19T17:57:16 | 2022-10-19T18:37:23 | 186,485,732 | 0 | 0 | NOASSERTION | 2019-05-13T19:51:34 | 2019-05-13T19:51:34 | null | UTF-8 | Python | false | false | 2,014 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
import shutil
import pathlib
from gmprocess.utils import constants
def test_import(script_runner):
try:
# Need to create profile first.
cdir = constants.CONFIG_PATH_TEST
ddir = constants.TEST_DATA_DIR / "demo"
idir = constants.TEST_DATA_DIR / "import"
setup_inputs = io.StringIO(f"test\n{str(cdir)}\n{str(ddir)}\nname\[email protected]\n")
ret = script_runner.run("gmrecords", "projects", "-c", stdin=setup_inputs)
setup_inputs.close()
assert ret.success
# Test CESMD zip file
zfile = idir / "cesmd_test.zip"
ret = script_runner.run(
"gmrecords", "import", "-e", "nn00725272", "-p", str(zfile)
)
print("*** stdout ***")
print(ret.stdout)
print("*** stderr ***")
print(ret.stderr)
assert ret.success
raw_dir = ddir / "nn00725272" / "raw"
assert raw_dir.is_dir()
dst_files = list(pathlib.Path(raw_dir).glob("*"))
assert len(dst_files) == 23
# Test tar file of CWB data
tfile = idir / "test.tar.zip"
ret = script_runner.run(
"gmrecords", "import", "-e", "us6000e2mt", "-p", str(tfile)
)
assert ret.success
raw_dir = ddir / "us6000e2mt" / "raw"
assert raw_dir.is_dir()
dst_dats = list(raw_dir.glob("*.dat"))
assert len(dst_dats) == 19
# Test directory of files
dpath = idir / "dir"
ret = script_runner.run(
"gmrecords", "import", "-e", "us6000e2mt", "-p", str(dpath)
)
assert ret.success
except Exception as ex:
raise ex
finally:
shutil.rmtree(str(constants.CONFIG_PATH_TEST), ignore_errors=True)
# Remove created files
events = ["us6000e2mt", "nn00725272"]
for eid in events:
shutil.rmtree(str(ddir / eid), ignore_errors=True)
if __name__ == "__main__":
test_import()
| [
"[email protected]"
] | |
89302cc74ca6ac2bdca46b282f61fee632281c3a | ad02587a87ec19658d6a53bcf2a2f5e92149e7f4 | /django-stubs/core/serializers/__init__.pyi | fcc124753a89fb1b8460527fcb732507dc4e7f9c | [
"BSD-3-Clause"
] | permissive | Naddiseo/django-stubs | 32a944617aea5b0e2dc3b8ad4dfd191b9ca6198b | cff5ab463c911283a9c43a26a38cb7bd4deebbd5 | refs/heads/master | 2020-04-18T05:01:40.832084 | 2019-01-22T17:13:31 | 2019-01-22T17:13:31 | 167,261,510 | 0 | 0 | BSD-3-Clause | 2019-01-23T22:06:15 | 2019-01-23T22:06:15 | null | UTF-8 | Python | false | false | 1,396 | pyi | from collections import OrderedDict
from typing import Any, Callable, Dict, Iterator, List, Optional, Tuple, Type, Union
from django.apps.config import AppConfig
from django.core.serializers.base import Serializer, Deserializer
from django.db.models.base import Model
from django.db.models.query import QuerySet
BUILTIN_SERIALIZERS: Any
class BadSerializer:
internal_use_only: bool = ...
exception: ModuleNotFoundError = ...
def __init__(self, exception: ImportError) -> None: ...
def __call__(self, *args: Any, **kwargs: Any) -> Any: ...
def register_serializer(format: str, serializer_module: str, serializers: Optional[Dict[str, Any]] = ...) -> None: ...
def unregister_serializer(format: str) -> None: ...
def get_serializer(format: str) -> Union[Type[Serializer], BadSerializer]: ...
def get_serializer_formats() -> List[str]: ...
def get_public_serializer_formats() -> List[str]: ...
def get_deserializer(format: str) -> Union[Callable, Type[Deserializer]]: ...
def serialize(
format: str, queryset: Union[Iterator[Any], List[Model], QuerySet], **options: Any
) -> Optional[Union[List[OrderedDict], bytes, str]]: ...
def deserialize(format: str, stream_or_string: Any, **options: Any) -> Union[Iterator[Any], Deserializer]: ...
def sort_dependencies(
app_list: Union[List[Tuple[AppConfig, None]], List[Tuple[str, List[Type[Model]]]]]
) -> List[Type[Model]]: ...
| [
"[email protected]"
] | |
7442cc095982c595c26f2dc4f1297cb96e53d1b1 | c5f58af61e3577ded52acda210f4f664651b598c | /template/mmdetection/tools/inference.py | 1c3be13bc08f24a5ff7a2139b02780c446855c27 | [
"Apache-2.0",
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | hojihun5516/object-detection-level2-cv-02 | 0a4ee5cea9a77ef5d43fb61a4b37fe3a87cb0eac | bc8a08286935b31b8e7e597c4b1ca2cbbaeb9109 | refs/heads/master | 2023-08-31T09:50:59.150971 | 2021-10-16T15:00:19 | 2021-10-16T15:00:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,490 | py | import argparse
import os
import os.path as osp
import time
import warnings
import mmcv
import torch
from mmcv import Config, DictAction
from mmcv.cnn import fuse_conv_bn
from mmcv.parallel import MMDataParallel, MMDistributedDataParallel
from mmcv.runner import get_dist_info, init_dist, load_checkpoint, wrap_fp16_model
from mmdet.apis import multi_gpu_test, single_gpu_test
from mmdet.datasets import build_dataloader, build_dataset, replace_ImageToTensor
from mmdet.models import build_detector
import pandas as pd
from pandas import DataFrame
import numpy as np
from pycocotools.coco import COCO
def parse_args():
parser = argparse.ArgumentParser(description="MMDet test (and eval) a model")
# config 파일 경로 (학습 시킬 때 사용했던 config 파일, work_dir에도 복사되어있음)
parser.add_argument("config", help="test config file path")
# checkpoint가 저장되어있는 work_dir 경로
parser.add_argument("--work_dir", help="the directory to save the file containing evaluation metrics")
# 사용할 checkpoint epoch
parser.add_argument("--epoch", default="latest", help="Checkpoint file's epoch")
parser.add_argument("--show_score_thr", type=float, default=0.05, help="score threshold (default: 0.05)")
args = parser.parse_args()
return args
def make_csv(output, cfg):
# submission 양식에 맞게 output 후처리
prediction_strings = []
file_names = []
coco = COCO(cfg.data.test.ann_file)
img_ids = coco.getImgIds()
class_num = len(cfg.data.test.classes)
for i, out in enumerate(output):
prediction_string = ""
image_info = coco.loadImgs(coco.getImgIds(imgIds=i))[0]
for j in range(class_num):
for o in out[j]:
prediction_string += (
str(j)
+ " "
+ str(o[4])
+ " "
+ str(o[0])
+ " "
+ str(o[1])
+ " "
+ str(o[2])
+ " "
+ str(o[3])
+ " "
)
prediction_strings.append(prediction_string)
file_names.append(image_info["file_name"])
submission = pd.DataFrame()
submission["PredictionString"] = prediction_strings
submission["image_id"] = file_names
submission.to_csv(os.path.join(cfg.work_dir, "submission.csv"), index=None)
print(f"submission.csv is saved in {cfg.work_dir}")
def main():
args = parse_args()
cfg = Config.fromfile(args.config)
if args.work_dir:
cfg.work_dir = args.work_dir
cfg.data.test.test_mode = True
dataset = build_dataset(cfg.data.test)
data_loader = build_dataloader(
dataset,
samples_per_gpu=cfg.data.samples_per_gpu,
workers_per_gpu=cfg.data.workers_per_gpu,
dist=False,
shuffle=False,
)
checkpoint_path = os.path.join(cfg.work_dir, f"{args.epoch}.pth")
# build detector
cfg.model.train_cfg = None
model = build_detector(cfg.model, test_cfg=cfg.get("test_cfg"))
# ckpt load
checkpoint = load_checkpoint(model, checkpoint_path, map_location="cpu")
model.CLASSES = dataset.CLASSES
model = MMDataParallel(model.cuda(), device_ids=[0])
# cal ouput
output = single_gpu_test(model, data_loader, show_score_thr=args.show_score_thr)
make_csv(output, cfg)
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
0ffb1a2ee81207f529a86af9c5969f5b359151d8 | 92866897ac8b95067960f312aa92a4d02c7c81df | /environments/oc-p5/database.py | 93f99ef3da8506942db150a6ad42cd3bace69117 | [] | no_license | DenisLamalis/cours-python | 63fec725c038a50fd52f428152dbc1e0671dba53 | 1fc92b125969a2771633d6e8508138986163b6e7 | refs/heads/master | 2023-02-03T19:59:34.345181 | 2020-12-15T09:57:42 | 2020-12-15T09:57:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,841 | py | import mysql.connector
from config import *
from mysql.connector import errorcode
from tables import Tables
class Database:
""" """
def __init__(self):
""" """
self.host = HOST
self.user = USER
self.password = PASSWORD
self.db_name = 'PureBeurre'
self.tables = Tables()
def connection(self):
""" """
try:
self.connection = mysql.connector.connect(
host = self.host,
user = self.user,
password = self.password,
database = self.db_name)
self.mycursor = self.connection.cursor()
if (self.connection.is_connected()):
print(f"REUSSITE : Connection à la base {self.db_name} effectuée.")
return self.mycursor
except mysql.connector.Error as error:
print("ECHEC : impossible de me connecter, erreur : {}".format(error))
def db_create(self):
""" """
mycursor = self.connection()
try:
mycursor.execute("CREATE DATABASE {} DEFAULT CHARACTER SET 'utf8'".format(self.db_name))
print(f"REUSSITE : création de la base {self.db_name} effectuée.")
except mysql.connector.Error as err:
print("ECHEC : impossible de créer la base, erreur : {}".format(err))
exit(1)
def tables_create(self):
""" """
mycursor = self.connection()
for table_name in self.tables.TABLES:
table_description = self.tables.TABLES[table_name]
try:
mycursor.execute(table_description)
print("REUSSITE : la création de la table {} est effectuée.\n".format(table_name), end='')
except mysql.connector.Error as err:
print("ECHEC : impossible de créer la table, erreur : {}".format(error))
def load_nutriscore(self):
mycursor = self.connection()
try:
add_nutriscore = ("INSERT INTO nutriscore (nut_id, nut_type) VALUES (%s,%s)")
values = (1, 'A')
self.mycursor.execute(add_nutriscore, values)
values = (2, 'B')
self.mycursor.execute(add_nutriscore, values)
values = (3, 'C')
self.mycursor.execute(add_nutriscore, values)
values = (4, 'D')
self.mycursor.execute(add_nutriscore, values)
values = (5, 'E')
self.mycursor.execute(add_nutriscore, values)
self.connection.commit()
print("Les différents Nutriscore ont été chargés dans la base.")
except mysql.connector.Error as error:
print("Erreur lors du chargement : {}".format(error))
if __name__ == "__main__":
database = Database()
| [
"[email protected]"
] | |
dd121328da958577a0671baf5dbb8cbeb3a5c377 | 5c61990fc1a79f389111a3e449c1fadf65fc1b8c | /portnet_api/models/contract.py | 19c2fe0d6032fe1e126ae861a70ebe56c2177e77 | [] | no_license | brahim94/portnet | 3befb64009fd014b74e01151cc429a613d3d2f11 | f1120ce4806ba2fd7e26132ca918d1ce8b9ad32c | refs/heads/master | 2023-04-14T07:17:40.956207 | 2021-04-27T16:37:48 | 2021-04-27T16:37:48 | 356,211,308 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 34,894 | py | # -*- coding: utf-8 -*-
import json
import logging
import requests
import base64
import datetime
import time
from openerp import api, fields, models, _
from dateutil.relativedelta import relativedelta
from openerp.exceptions import ValidationError
_logger = logging.getLogger(__name__)
try:
    import xmltodict
except ImportError:
    xmltodict = None
    _logger.debug('xmltodict library not available. Please install the '
                  '"xmltodict" python package with "pip install xmltodict".')
class ResContract(models.Model):
_inherit = 'res.contract'
@api.model
def create_subscription(self, values):
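        """API entry point (called remotely by Portnet) that creates a
        package subscription from a plain ``values`` dict.

        Expected keys: ``name`` (subscription number), ``template_id``
        (package code), ``state`` ('A' = pending, 'D' = draft),
        ``date_start``, ``date``, ``partner_categ_id`` (operator role code),
        ``partner_id`` (operator code), ``date_create`` and optionally
        ``add_balance``.

        Returns ``{'success': <id>}`` on success, otherwise
        ``{'faultCode': 0, 'faultString': <reason>}``.
        """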
subscription_id = False
if values:
### Validate Data
if not values.get('name'):
return {'faultCode': 0, 'faultString': 'N° Souscriptions is required.'}
if not values.get('template_id'):
return {'faultCode': 0, 'faultString': 'Package is required.'}
if not values.get('state'):
return {'faultCode': 0, 'faultString': 'State is required.'}
if not values.get('date_start'):
return {'faultCode': 0, 'faultString': 'Date début is required.'}
if not values.get('date'):
return {'faultCode': 0, 'faultString': 'Date fin is required.'}
if not values.get('partner_categ_id'):
return {'faultCode': 0, 'faultString': "Rôle de l'opérateur is required."}
if not values.get('partner_id'):
return {'faultCode': 0, 'faultString': 'Opérateur is required.'}
if not values.get('date_create'):
return {'faultCode': 0, 'faultString': 'Create Date is required.'}
### Find Data From DB
template_id = self.search([('name', '=', values['template_id']), ('type_contract', '=', 'package'), ('is_template', '=', True)], limit=1)
if not template_id:
return {'faultCode': 0, 'faultString': 'template_id doesn’t exist in Odoo db'}
partner_categ_id = self.env['res.partner.category'].search([('code', '=', values['partner_categ_id'])], limit=1)
if not partner_categ_id:
return {'faultCode': 0, 'faultString': "partner_categ_id doesn’t exist in Odoo db"}
partner_id = self.env['res.partner'].search([('code', '=', values['partner_id']), ('categ_id', '=', partner_categ_id.id), ('customer', '=', True)], limit=1)
if not partner_id:
return {'faultCode': 0, 'faultString': 'partner_id doesn’t exist in Odoo db'}
### A = pending
### D = draft
state = False
if values['state'] == 'A':
state = 'pending'
elif values['state'] == 'D':
state = 'draft'
else:
return {'faultCode': 0, 'faultString': 'state doesn’t exist in Odoo db'}
date_start = str(values['date_start']).strip()
date = str(values['date']).strip()
date_create = str(values['date_create']).strip()
next_invoice_date = fields.Date.from_string(date_start) + relativedelta(months=template_id.periodicity_id.nb_months)
subscription_id = self.with_context(default_type_contract='package', default_is_template=False).create({
### API Fields
'name': values['name'],
'template_id': template_id.id if template_id else False,
'date_start': date_start,
'date': date,
'add_balance': values.get('add_balance') or 0,
'partner_categ_id': partner_categ_id.id if partner_categ_id else False,
'partner_id': partner_id.id if partner_id else False,
'date_create_portnet': date_create,
'state': state,
### Default Package Fields
'product_id': template_id.product_id.id,
'product_category_id': template_id.product_category_id.id,
'periodicity_id': template_id.periodicity_id.id,
'tacite': template_id.tacite,
'currency_id': template_id.currency_id.id,
'amount': template_id.amount,
'transaction_no': template_id.transaction_no,
'first_invoice_date': date_start,
'next_invoice_date': next_invoice_date,
'anticipated_invoice_date': next_invoice_date,
})
subscription_id.onchange_template_id()
subscription_id.message_post(body=_("Record created by API Services"))
if subscription_id:
return {'success': subscription_id.id}
else:
return {'faultCode': 0, 'faultString': 'Something went wrong!'}
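
    # Illustrative only -- a minimal sketch of how a client could call the
    # endpoint above over XML-RPC. Host, database, uid/password and the
    # sample codes are assumptions, not values shipped with this module:
    #
    #   import xmlrpclib
    #   models = xmlrpclib.ServerProxy('http://localhost:8069/xmlrpc/2/object')
    #   res = models.execute_kw('portnet_db', uid, 'password',
    #       'res.contract', 'create_subscription', [{
    #           'name': 'S000545',
    #           'template_id': 'POS-AGM-111125',
    #           'state': 'A',
    #           'date_start': '2020-05-30',
    #           'date': '2020-06-30',
    #           'partner_categ_id': 'IMPEXP',
    #           'partner_id': '3861',
    #           'date_create': '2020-05-30',
    #       }])
    #   # res is {'success': <id>} or {'faultCode': 0, 'faultString': ...}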
@api.model
def update_subscription(self, values):
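        """API entry point that updates an existing package subscription.

        ``name`` identifies the subscription, ``comment`` is posted in the
        record's chatter and ``date_write`` must be an ISO-8601 timestamp
        (``%Y-%m-%dT%H:%M:%S``). Optional keys: ``date_start``, ``date``,
        ``add_balance`` and ``state`` ('A' = pending, 'S' = suspend,
        'E' = expire, 'C' = closed).
        """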
subscription_id = False
if values:
### Validate Data
if not values.get('name'):
return {'faultCode': 0, 'faultString': 'N° Souscriptions is required.'}
if not values.get('comment'):
return {'faultCode': 0, 'faultString': 'Comment is required.'}
if not values.get('date_write'):
return {'faultCode': 0, 'faultString': 'Update Date is required.'}
### Find Data From DB
subscription_id = self.search([('name', '=', values['name']), ('type_contract', '=', 'package'), ('is_template', '=', False)], limit=1)
if not subscription_id:
return {'faultCode': 0, 'faultString': 'Subscription doesn’t exist in Odoo db'}
vals = {}
if values.get('date_start'):
vals.update({'date_start': str(values['date_start']).strip()})
if values.get('date'):
vals.update({'date': str(values['date']).strip()})
if values.get('add_balance'):
vals.update({'add_balance': values['add_balance'] or 0})
            # Only the first six struct_time fields (year..second) are valid
            # datetime arguments; slicing to 7 would pass tm_wday as the
            # microsecond.
            t = time.strptime(str(values['date_write']).strip(), "%Y-%m-%dT%H:%M:%S")
            date_write = datetime.datetime(*t[:6])
vals.update({'date_write_portnet': date_write})
if values.get('state'):
if values['state'] == 'A':
vals.update({'state': 'pending'})
elif values['state'] == 'S':
vals.update({'state': 'suspend'})
elif values['state'] == 'E':
vals.update({'state': 'expire'})
elif values['state'] == 'C':
vals.update({'state': 'closed'})
# vals.update({'description_package': values.get('comment')})
subscription_id.write(vals)
subscription_id.message_post(body=_("Record updated by API Services"))
subscription_id.message_post(body=_(values.get('comment').strip()))
if subscription_id:
return {'success': subscription_id.id}
else:
return {'faultCode': 0, 'faultString': 'Something went wrong!'}
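
    # The Portnet web services expect XML in the
    # http://www.portnet.ma/nouvelleTarification namespace; the helpers
    # below build that payload by plain string concatenation.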
def master_tag_start(self, tag):
data = "<%s xmlns=\"http://www.portnet.ma/nouvelleTarification\">" % tag
return data
def sub_tag_start(self, tag):
data = "<%s>" % tag
return data
def tag_end(self, tag):
data = "</%s>" % tag
return data
def new_line(self):
return '\n'
def get_tab(self):
return ''.ljust(4)
def get_tranches_lines(self, line_ids):
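        # Serialize each pricing bracket (tranche) as a <tranches> element
        # holding its <de>/<a> bounds and <frais> service fee.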
lines = ''
for line in line_ids:
lines += ''.join([
self.sub_tag_start('tranches'),
self.sub_tag_start('de'), (line.tranche_de_no or ''), self.tag_end('de'), self.new_line(), self.get_tab(),
self.sub_tag_start('a'), (line.tranche_a_no or ''), self.tag_end('a'), self.new_line(), self.get_tab(),
self.sub_tag_start('frais'), (line.frais_de_services or ''), self.tag_end('frais'), self.new_line(), self.get_tab(),
self.tag_end('tranches'),
])
return lines
@api.multi
def action_sync_GU(self):
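        """Push the subscription's validity dates to the Portnet single
        window (Guichet Unique) through the ``updateSouscription`` web
        service, authenticating with the HTTP Basic credentials stored on
        the company. Raises a :class:`ValidationError` carrying the
        returned message, description and GUID when Portnet answers with a
        non-200 status.
        """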
company_id = self.env.user.company_id
url = (("%s/crm/nouvelleTarification/updateSouscription") % (company_id.ip_address))
# payload = "<souscription xmlns=\"http://www.portnet.ma/nouvelleTarification\">\n <identifiant>S000545</identifiant>\n <codePackage>POS-AGM-111125</codePackage>\n <debutValidite>2020-05-30T09:00:00</debutValidite>\n <finValidite>2020-06-30T09:00:00</finValidite>\n <soldeSupplementaire>400</soldeSupplementaire>\n <statut>ACTIVE</statut>\n <typeOperateur>IMPEXP</typeOperateur>\n <codeOperateur>3861</codeOperateur>\n</souscription >"
headers = {
'authorization': "Basic %s" % (base64.b64encode(("%s:%s" % (company_id.user_id, company_id.password)).encode())).decode(),
'content-type': "application/xml",
}
payload = ''.join([
self.master_tag_start('souscription'), self.new_line(), self.get_tab(),
self.sub_tag_start('identifiant'), (self.name or ''), self.tag_end('identifiant'), self.new_line(), self.get_tab(),
self.sub_tag_start('debutValidite'), (fields.Datetime.from_string(self.date_start).strftime("%Y-%m-%dT%H:%M:%S")), self.tag_end('debutValidite'), self.new_line(), self.get_tab(),
self.sub_tag_start('finValidite'), (fields.Datetime.from_string(self.date).strftime("%Y-%m-%dT%H:%M:%S")), self.tag_end('finValidite'), self.new_line(), self.get_tab(),
self.sub_tag_start('dateModification'), (fields.Datetime.from_string(fields.Datetime.now()).strftime("%Y-%m-%dT%H:%M:%S")), self.tag_end('dateModification'), self.new_line(), self.get_tab(),
self.sub_tag_start('motif'), (str(self.description_package) or ''), self.tag_end('motif'), self.new_line(), self.get_tab(),
self.tag_end('souscription'),
])
response = requests.request("POST", url, headers=headers, data=payload)
if response.status_code != 200:
message = ''
description = ''
guid = ''
res = json.loads(json.dumps(xmltodict.parse(response.text, process_namespaces=True)))
if res and res.get('http://www.portnet.ma/nouvelleTarification:reponse') and res.get('http://www.portnet.ma/nouvelleTarification:reponse').get('http://www.portnet.ma/nouvelleTarification:description'):
message = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:message']
description = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:description']
guid = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:guid']
_logger.warning("\nERROR MESSAGE: \n\n %s \n\n" % str(response.text))
raise ValidationError("%s \n\n %s \nGUID: %s" % (message, description, guid))
self.write({'date_write_portnet': fields.Datetime.now(), 'date_sync_portnet': fields.Datetime.now()})
return True
@api.multi
def action_suspend(self):
company_id = self.env.user.company_id
url = (("%s/crm/nouvelleTarification/suspendSouscription") % (company_id.ip_address))
# payload = "<souscription xmlns=\"http://www.portnet.ma/nouvelleTarification\">\n <identifiant>S000545</identifiant>\n <codePackage>POS-AGM-111125</codePackage>\n <debutValidite>2020-05-30T09:00:00</debutValidite>\n <finValidite>2020-06-30T09:00:00</finValidite>\n <soldeSupplementaire>400</soldeSupplementaire>\n <statut>ACTIVE</statut>\n <typeOperateur>IMPEXP</typeOperateur>\n <codeOperateur>3861</codeOperateur>\n</souscription >"
headers = {
'authorization': "Basic %s" % (base64.b64encode(("%s:%s" % (company_id.user_id, company_id.password)).encode())).decode(),
'content-type': "application/xml",
}
payload = ''.join([
self.master_tag_start('souscription'), self.new_line(), self.get_tab(),
self.sub_tag_start('identifiant'), (self.name or ''), self.tag_end('identifiant'), self.new_line(), self.get_tab(),
self.sub_tag_start('statut'), ('SUSPENDU'), self.tag_end('statut'), self.new_line(), self.get_tab(),
self.sub_tag_start('dateSuspension'), (fields.Datetime.from_string(fields.Datetime.now()).strftime("%Y-%m-%dT%H:%M:%S")), self.tag_end('dateSuspension'), self.new_line(), self.get_tab(),
            self.sub_tag_start('motif'), (str(self.description_package or '')), self.tag_end('motif'), self.new_line(), self.get_tab(),
self.tag_end('souscription'),
])
response = requests.request("POST", url, headers=headers, data=payload)
if response.status_code != 200:
message = ''
description = ''
guid = ''
res = json.loads(json.dumps(xmltodict.parse(response.text, process_namespaces=True)))
if res and res.get('http://www.portnet.ma/nouvelleTarification:reponse') and res.get('http://www.portnet.ma/nouvelleTarification:reponse').get('http://www.portnet.ma/nouvelleTarification:description'):
message = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:message']
description = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:description']
guid = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:guid']
_logger.warning("\nERROR MESSAGE: \n\n %s \n\n" % str(response.text))
raise ValidationError("%s \n\n %s \nGUID: %s" % (message, description, guid))
self.write({'date_sync_portnet': fields.Datetime.now(), 'state': 'suspend'})
return True
@api.model
def create_package(self, values):
package_id = False
if values:
vals = {}
Currency = self.env['res.currency']
Product = self.env['product.product']
ProductCategory = self.env['product.category']
PartnerCategory = self.env['res.partner.category']
Periodicity = self.env['res.periodicity']
### Validate Data
if not values.get('name'):
return {'faultCode': 0, 'faultString': 'Code package is required.'}
if not values.get('partner_categ_id'):
return {'faultCode': 0, 'faultString': "Rôle de l'opérateur is required."}
if not values.get('active_package'):
return {'faultCode': 0, 'faultString': 'Active Package status is required.'}
if not values.get('criteria_factures'):
                return {'faultCode': 0, 'faultString': 'Critère de facturation is required.'}
if not values.get('parameter_decompte'):
                return {'faultCode': 0, 'faultString': 'Paramètre de décompte is required.'}
if not values.get('type_paiment'):
return {'faultCode': 0, 'faultString': 'Type paiement is required.'}
if values.get('transaction_no') and values['transaction_no'] == 'transaction_limit' and not values.get('transaction_no_limit'):
return {'faultCode': 0, 'faultString': 'Nombre de transactions is required.'}
if not values.get('periodicity_id'):
return {'faultCode': 0, 'faultString': 'Périodicité is required.'}
if not values.get('debut_validate'):
return {'faultCode': 0, 'faultString': 'Debut de validité is required.'}
if not values.get('validate_package'):
return {'faultCode': 0, 'faultString': 'Validité du package is required.'}
# if not values.get('tacite'):
# return {'faultCode': 0, 'faultString': 'Tacite de reconduction is required.'}
if not values.get('type_service'):
return {'faultCode': 0, 'faultString': 'Type de frais is required.'}
if not values.get('date_create'):
return {'faultCode': 0, 'faultString': 'Create Date is required.'}
### Find Data From DB
currency_id = Currency.search([('name', '=', 'MAD')], limit=1)
if not currency_id:
return {'faultCode': 0, 'faultString': 'Currency doesn’t exist in Odoo db'}
product_category_id = self.env.ref('product.product_category_all')
if not product_category_id:
product_category_id = ProductCategory.search([('name', '=', 'All')], limit=1)
if not product_category_id:
return {'faultCode': 0, 'faultString': 'Product Category doesn’t exist in Odoo db'}
product_id = Product.search([('name', '=', values['name'])], limit=1)
if not product_id:
product_id = Product.with_context(default_type='service', default_is_package=True, default_category_id=product_category_id.id).create({
'name': values['name'],
})
partner_categ_id = PartnerCategory.search([('code', '=', values['partner_categ_id'])], limit=1)
if not partner_categ_id:
return {'faultCode': 0, 'faultString': "partner_categ_id doesn’t exist in Odoo db"}
month = 0
if values['periodicity_id'] == 'Mensuel':
month = 1
elif values['periodicity_id'] == 'Trimestriel':
month = 3
elif values['periodicity_id'] == 'Semestriel':
month = 6
elif values['periodicity_id'] == 'Annuel':
month = 12
periodicity_id = Periodicity.search([('nb_months', '=', month)], limit=1)
if not periodicity_id:
return {'faultCode': 0, 'faultString': 'periodicity_id doesn’t exist in Odoo db'}
criteria_factures = False
if values['criteria_factures'] == "Titre d'importation":
criteria_factures = 'enable'
elif values['criteria_factures'] == "Escale":
criteria_factures = 'disable'
parameter_decompte = False
if values['parameter_decompte'] == "Envoi pour domiciliation":
parameter_decompte = 'enable'
elif values['parameter_decompte'] == "Envoi du manifeste":
parameter_decompte = 'disable'
if values['type_service'] == 'fix' and not values.get('service_fee'):
return {'faultCode': 0, 'faultString': 'service_fee is mandatory.'}
elif values['type_service'] == 'tranches' and not values.get('type_service_line_ids'):
return {'faultCode': 0, 'faultString': 'service_lines is mandatory.'}
if values.get('type_service_line_ids'):
service_lines = []
for line in values['type_service_line_ids']:
service_lines.append((0, 0, {'tranche_de_no': line[0], 'tranche_a_no': line[1], 'frais_de_services': line[2]}))
vals.update({'type_service_line_ids': service_lines})
date_create = str(values['date_create']).strip()
if values.get('transaction_no'):
vals.update({'transaction_no': values['transaction_no']})
vals.update({
'name': values['name'],
'currency_id': currency_id.id,
'product_category_id': product_category_id.id,
'product_id': product_id.id,
'partner_categ_id': partner_categ_id.id,
'active_package': values['active_package'],
'criteria_factures': criteria_factures,
'parameter_decompte': parameter_decompte,
'type_paiment': values['type_paiment'],
# 'transaction_no': values['transaction_no'],
'transaction_no_limit': values.get('transaction_no_limit'),
'amount': values.get('amount'),
'periodicity_id': periodicity_id.id,
'debut_validate': values['debut_validate'],
'validate_package': values['validate_package'],
                'tacite': values.get('tacite'),  # its required-check above is commented out, so avoid a KeyError
'type_service': values['type_service'],
'service_fee': values.get('service_fee'),
'description_package': values.get('description_package'),
'date_create_portnet': date_create,
})
package_id = self.with_context(default_type_contract='package', default_is_template=True).create(vals)
package_id.message_post(body=_("Record created by API Services"))
if package_id:
return {'success': package_id.id}
else:
return {'faultCode': 0, 'faultString': 'Something went wrong!'}
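    # Illustrative only -- how an external system might reach create_package over
    # Odoo's XML-RPC object endpoint. The URL, database, credentials, field values
    # and the model name ('res.contract' is inferred from the class name below)
    # are all placeholders:
    #   import xmlrpc.client
    #   models = xmlrpc.client.ServerProxy('https://erp.example.com/xmlrpc/2/object')
    #   result = models.execute_kw(db, uid, password, 'res.contract', 'create_package',
    #                              [{'name': 'POS-AGM-111125', 'partner_categ_id': 'IMPEXP',
    #                                'active_package': True, ...}])
    #   # result is {'success': <id>} or {'faultCode': 0, 'faultString': ...}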
@api.model
def update_package(self, values):
package_id = False
if values:
vals = {}
PartnerCategory = self.env['res.partner.category']
Periodicity = self.env['res.periodicity']
### Validate Data
if not values.get('name'):
return {'faultCode': 0, 'faultString': 'Code package is required.'}
if not values.get('comment'):
return {'faultCode': 0, 'faultString': 'Comment is required.'}
if not values.get('date_write'):
return {'faultCode': 0, 'faultString': 'Update Date is required.'}
package_id = self.search([('name', '=', values['name']), ('type_contract', '=', 'package'), ('is_template', '=', True)], limit=1)
if not package_id:
return {'faultCode': 0, 'faultString': 'Package doesn’t exist in Odoo db'}
### Find Data From DB
if values.get('partner_categ_id'):
partner_categ_id = PartnerCategory.search([('code', '=', values['partner_categ_id'])], limit=1)
if not partner_categ_id:
return {'faultCode': 0, 'faultString': "partner_categ_id doesn’t exist in Odoo db"}
vals.update({'partner_categ_id': partner_categ_id.id})
if values.get('active_package'):
vals.update({'active_package': values['active_package']})
if values.get('criteria_factures'):
if values['criteria_factures'] == "Titre d'importation":
vals.update({'criteria_factures': 'enable'})
elif values['criteria_factures'] == "Escale":
vals.update({'criteria_factures': 'disable'})
if values.get('parameter_decompte'):
if values['parameter_decompte'] == "Envoi pour domiciliation":
vals.update({'parameter_decompte': 'enable'})
elif values['parameter_decompte'] == "Envoi du manifeste":
vals.update({'parameter_decompte': 'disable'})
if values.get('type_paiment'):
vals.update({'type_paiment': values['type_paiment']})
if values.get('transaction_no'):
vals.update({'transaction_no': values['transaction_no']})
if values.get('transaction_no_limit'):
vals.update({'transaction_no_limit': values['transaction_no_limit']})
if values.get('amount'):
vals.update({'amount': values['amount']})
if values.get('periodicity_id'):
month = 0
if values['periodicity_id'] == 'Mensuel':
month = 1
elif values['periodicity_id'] == 'Trimestriel':
month = 3
elif values['periodicity_id'] == 'Semestriel':
month = 6
elif values['periodicity_id'] == 'Annuel':
month = 12
periodicity_id = Periodicity.search([('nb_months', '=', month)], limit=1)
if not periodicity_id:
return {'faultCode': 0, 'faultString': 'periodicity_id doesn’t exist in Odoo db'}
vals.update({'periodicity_id': periodicity_id.id})
if values.get('debut_validate'):
vals.update({'debut_validate': values['debut_validate']})
if values.get('validate_package'):
vals.update({'validate_package': values['validate_package']})
if values.get('type_service'):
if values['type_service'] == 'fix' and not values.get('service_fee'):
return {'faultCode': 0, 'faultString': 'service_fee is mandatory.'}
elif values['type_service'] == 'tranches' and not values.get('type_service_line_ids'):
return {'faultCode': 0, 'faultString': 'service_lines is mandatory.'}
vals.update({'type_service': values['type_service']})
if values.get('service_fee'):
vals.update({'service_fee': values['service_fee']})
if values.get('type_service_line_ids'):
service_lines = []
for line in values['type_service_line_ids']:
service_lines.append((0, 0, {'tranche_de_no': line[0], 'tranche_a_no': line[1], 'frais_de_services': line[2]}))
package_id.type_service_line_ids.unlink()
vals.update({'type_service_line_ids': service_lines})
if values.get('description_package'):
vals.update({'description_package': values['description_package']})
date_write = str(values['date_write']).strip()
vals.update({'date_write_portnet': date_write, 'tacite': values.get('tacite')})
package_id.write(vals)
package_id.message_post(body=_("Record updated by API Services"))
package_id.message_post(body=_((values['comment']).strip()))
if package_id:
return {'success': package_id.id}
else:
return {'faultCode': 0, 'faultString': 'Something went wrong!'}
@api.multi
def create_package_export(self):
package_code = False
contract_id = self
company_id = self.env.user.company_id
url = (("%s/crm/nouvelleTarification/createPackage") % (company_id.ip_address))
code_url = ("%s/crm/nouvelleTarification/identifiantPackage?roleOperateur=%s&typePaiement=%s") % (company_id.ip_address, str(contract_id.partner_categ_id.code), str(contract_id.type_paiment))
# payload = "<souscription xmlns=\"http://www.portnet.ma/nouvelleTarification\">\n <identifiant>S000545</identifiant>\n <codePackage>POS-AGM-111125</codePackage>\n <debutValidite>2020-05-30T09:00:00</debutValidite>\n <finValidite>2020-06-30T09:00:00</finValidite>\n <soldeSupplementaire>400</soldeSupplementaire>\n <statut>ACTIVE</statut>\n <typeOperateur>IMPEXP</typeOperateur>\n <codeOperateur>3861</codeOperateur>\n</souscription >"
headers = {
'authorization': "Basic %s" % (base64.b64encode(("%s:%s" % (company_id.user_id, company_id.password)).encode())).decode(),
'content-type': "application/xml",
}
### Get Package Code
response_code = requests.request("GET", code_url, headers=headers)
        result_sub_code = json.loads(json.dumps(xmltodict.parse(response_code.text, process_namespaces=True)))
if response_code.status_code == 200:
if result_sub_code and result_sub_code.get('http://www.portnet.ma/nouvelleTarification:reponse') and result_sub_code.get('http://www.portnet.ma/nouvelleTarification:reponse').get('http://www.portnet.ma/nouvelleTarification:message'):
package_code = result_sub_code['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:message']
else:
message_code = ''
description_code = ''
guid_code = ''
if result_sub_code and result_sub_code.get('http://www.portnet.ma/nouvelleTarification:reponse') and result_sub_code.get('http://www.portnet.ma/nouvelleTarification:reponse').get('http://www.portnet.ma/nouvelleTarification:description'):
message_code = result_sub_code['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:message']
description_code = result_sub_code['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:description']
guid_code = result_sub_code['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:guid']
_logger.warning("\nERROR MESSAGE: \n\n %s \n\n" % str(response_code.text))
raise ValidationError("%s \n\n %s \nGUID: %s" % (message_code, description_code, guid_code))
### Create Package After getting package number
payload = ''.join([
contract_id.master_tag_start('package'), contract_id.new_line(), contract_id.get_tab(),
contract_id.sub_tag_start('code'), (package_code), contract_id.tag_end('code'), contract_id.new_line(), contract_id.get_tab(),
contract_id.sub_tag_start('statut'), ('Actif'), contract_id.tag_end('statut'), contract_id.new_line(), contract_id.get_tab(),
contract_id.sub_tag_start('roleOperateur'), (contract_id.partner_categ_id.code or ''), contract_id.tag_end('roleOperateur'), contract_id.new_line(), contract_id.get_tab(),
contract_id.sub_tag_start('critereFacturation'), str(dict(self._fields['criteria_factures'].selection).get(contract_id.criteria_factures)), contract_id.tag_end('critereFacturation'), contract_id.new_line(), contract_id.get_tab(),
contract_id.sub_tag_start('parametreDecompte'), str(dict(self._fields['parameter_decompte'].selection).get(contract_id.parameter_decompte)), contract_id.tag_end('parametreDecompte'), contract_id.new_line(), contract_id.get_tab(),
contract_id.sub_tag_start('reconduction'), ('1' if contract_id.tacite else ''), contract_id.tag_end('reconduction'), contract_id.new_line(), contract_id.get_tab(),
contract_id.sub_tag_start('typePaiement'), (contract_id.type_paiment or ''), contract_id.tag_end('typePaiement'), contract_id.new_line(), contract_id.get_tab(),
contract_id.sub_tag_start('transactionAutorisee'), (contract_id.transaction_no or ''), contract_id.tag_end('transactionAutorisee'), contract_id.new_line(), contract_id.get_tab(),
            contract_id.sub_tag_start('nbreTransactions'), (str(contract_id.transaction_no_limit or '')), contract_id.tag_end('nbreTransactions'), contract_id.new_line(), contract_id.get_tab(),
            contract_id.sub_tag_start('abonnementBase'), (str(contract_id.amount or '')), contract_id.tag_end('abonnementBase'), contract_id.new_line(), contract_id.get_tab(),
contract_id.sub_tag_start('periodicite'), (contract_id.periodicity_id.name or ''), contract_id.tag_end('periodicite'), contract_id.new_line(), contract_id.get_tab(),
contract_id.sub_tag_start('validite'), (contract_id.validate_package or ''), contract_id.tag_end('validite'), contract_id.new_line(), contract_id.get_tab(),
contract_id.sub_tag_start('debutValidite'), (contract_id.debut_validate or ''), contract_id.tag_end('debutValidite'), contract_id.new_line(), contract_id.get_tab(),
contract_id.sub_tag_start('typeService'), (contract_id.type_service or ''), contract_id.tag_end('typeService'), contract_id.new_line(), contract_id.get_tab(),
            contract_id.sub_tag_start('fraisService'), (str(contract_id.service_fee or '')), contract_id.tag_end('fraisService'), contract_id.new_line(), contract_id.get_tab(),
(contract_id.get_tranches_lines(contract_id.type_service_line_ids)),
contract_id.sub_tag_start('dateCreation'), (fields.Datetime.from_string(contract_id.date_create_portnet or fields.Datetime.now()).strftime("%Y-%m-%dT%H:%M:%S")), contract_id.tag_end('dateCreation'), contract_id.new_line(), contract_id.get_tab(),
contract_id.sub_tag_start('description'), (contract_id.description_package or ''), contract_id.tag_end('description'), contract_id.new_line(), contract_id.get_tab(),
contract_id.tag_end('package'),
])
response = requests.request("POST", url, headers=headers, data=payload)
if response.status_code != 200:
message = ''
description = ''
guid = ''
res = json.loads(json.dumps(xmltodict.parse(response.text, process_namespaces=True)))
if res and res.get('http://www.portnet.ma/nouvelleTarification:reponse') and res.get('http://www.portnet.ma/nouvelleTarification:reponse').get('http://www.portnet.ma/nouvelleTarification:description'):
message = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:message']
description = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:description']
guid = res['http://www.portnet.ma/nouvelleTarification:reponse']['http://www.portnet.ma/nouvelleTarification:guid']
_logger.warning("\nERROR MESSAGE: \n\n %s \n\n" % str(response.text))
raise ValidationError("%s \n\n %s \nGUID: %s" % (message, description, guid))
else:
contract_id.write({'name': package_code, 'date_create_portnet': fields.Datetime.now(), 'date_sync_portnet': fields.Datetime.now()})
return True
@api.model
def create(self, values):
res = super(ResContract, self).create(values)
if self._context.get('default_is_template') and self._context['default_is_template'] == True and self._context.get('default_type_contract') and self._context['default_type_contract'] == 'package':
res.create_package_export()
return res | [
"[email protected]"
] | |
5812623a6b231e2bf8b445f6ffa642fcb04887cc | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_328/ch37_2020_03_25_19_56_06_004731.py | 24a51c11e2535e75fe75b9bbbcb27294953173b6 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 184 | py | a = (input("Digite aqui sua senha: "))
while True:
if a != "desisto":
a= (input("Digite aqui sua senha: "))
else:
print ("Você acertou a senha!")
break | [
"[email protected]"
] | |
165e7dc760f0dca474059f542342f73228ee2ee4 | 7aebfaec6957ad67523f1d8851856af88fb997a6 | /catkin_ws/build/robotiq/robotiq_modbus_rtu/catkin_generated/pkg.develspace.context.pc.py | 2fea7493bdaa8d6fad68cdd3e90a1c93c073d9a2 | [] | no_license | k-makihara/ROS | 918e79e521999085ab628b6bf27ec28a51a8ab87 | 45b60e0488a5ff1e3d8f1ca09bfd191dbf8c0508 | refs/heads/master | 2023-01-28T06:00:55.943392 | 2020-11-26T05:27:16 | 2020-11-26T05:27:16 | 316,127,707 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 384 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "rospy".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "robotiq_modbus_rtu"
PROJECT_SPACE_DIR = "/home/mslab/catkin_ws/devel"
PROJECT_VERSION = "1.0.0"
| [
"[email protected]"
] | |
f3d1de23c937418d9d66cee322518ae815b1b97d | 942ee5e8d54e8ebe9c5c841fbfdd1da652946944 | /1001-1500/1354.Construct Target Array With Multiple Sums.2.py | 937d34cc801eb03e13440f0ce5a28b99a2374341 | [] | no_license | kaiwensun/leetcode | 0129c174457f32887fbca078fb448adce46dd89d | 6b607f4aae3a4603e61f2e2b7480fdfba1d9b947 | refs/heads/master | 2023-08-31T07:30:50.459062 | 2023-08-27T07:59:16 | 2023-08-27T07:59:16 | 57,526,914 | 69 | 9 | null | 2023-08-20T06:34:41 | 2016-05-01T05:37:29 | Python | UTF-8 | Python | false | false | 640 | py | import heapq
class Solution(object):
def isPossible(self, target):
s = sum(target)
max_heap = [-t for t in target]
heapq.heapify(max_heap)
        while max_heap[0] != -1:                   # done once every element is back to 1
            top = -heapq.heappop(max_heap)         # current largest value
            snd = -max_heap[0] if max_heap else 0  # second largest value
            restored = top * 2 - s                 # top one step back = top - (sum of the others)
            diff = top - restored                  # = s - top, i.e. the sum of all other elements
            if top == snd or diff == 0:
                return False
            restored = snd + (top - snd) % -diff   # undo several steps at once: first value <= snd
            if restored < 1:
                return False
            s -= (top - restored)
            heapq.heappush(max_heap, -restored)
return True
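# Quick self-check (not part of the original solution); these are the official
# LeetCode 1354 examples:
if __name__ == '__main__':
    assert Solution().isPossible([9, 3, 5]) is True
    assert Solution().isPossible([1, 1, 1, 2]) is False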
| [
"[email protected]"
] | |
8acd7f0b84d63e7f93c370fddda0a52423c34f22 | 6a99547f767b942e2b51b79da0f23a990f3105d3 | /zyugyourobokon/build/navigation/catkin_generated/pkg.develspace.context.pc.py | 402bf6c8fe243528f6f26e4d058a75479241185b | [] | no_license | andokeisuke/NHK2020 | 588a1c0070bacaa98f10229252b40eb34c647345 | d6cb3f0c192141e9d87f4faaf7d1d4537ede4a3e | refs/heads/master | 2020-07-11T20:19:29.539799 | 2019-11-22T18:48:04 | 2019-11-22T18:48:04 | 204,635,311 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 375 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "navigation"
PROJECT_SPACE_DIR = "/home/ando/zyugyourobokon/devel"
PROJECT_VERSION = "0.0.0"
| [
"[email protected]"
] | |
bcb2d3d5b2956afcdde5f3be634d6e0742748d87 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02233/s835389106.py | 94c01a979ff6a34046d86dab98703089af7bc21b | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 140 | py | fib = [0 for i in range(45)]
fib[0] = 1
fib[1] = 1
for i in range(2, 45):
fib[i] = fib[i - 1] + fib[i - 2]
n = int(input())
print(fib[n])
| [
"[email protected]"
] | |
5b5eb5cda0fba8e8594dfdd2a26512967a17d5b7 | db861016e307fa7e1a57c1d07262b5d9c8051218 | /cookbook/ingredients/migrations/0001_initial.py | 91177488701b66d42e9147238624ca23682e9abb | [
"MIT"
] | permissive | mugagambi/cookbook-graphql-server | 794fedaf0d6c7fc5a7ffd21100d90c4f9ef16cba | d45044dc5e307d822e3338bcb3e4f8758c89a2f2 | refs/heads/master | 2021-01-25T14:33:55.992792 | 2018-03-03T17:18:52 | 2018-03-03T17:18:52 | 123,712,464 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,002 | py | # Generated by Django 2.0.2 on 2018-03-03 13:19
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
],
),
migrations.CreateModel(
name='Ingredient',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('notes', models.TextField()),
('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ingredients', to='ingredients.Category')),
],
),
]
| [
"[email protected]"
] | |
89502b0cd5d0b335a4a18aeb229341a774ad9d71 | 44d5b0a1f411ce14181f1bc8b09e3acbc800e9e1 | /routes1.py | a1fbc536fb60722dd5222cad0edc061e93379366 | [] | no_license | mukosh123/Librarysysrtem | 5d74988af1aaec31a007f5aaddd9d8e3855a7662 | e74ed3328bc50336df28ec45fdf3775051407a27 | refs/heads/master | 2021-01-22T10:40:15.775179 | 2017-02-16T14:42:41 | 2017-02-16T14:42:41 | 82,023,543 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,825 | py | from flask import *
import sqlite3
DATABASE = 'books.db'
app = Flask(__name__)
app.config.from_object(__name__)
def connect_db():
return sqlite3.connect(app.config['DATABASE'])
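# Optional sketch (assumes the g.db convention used in the views below): close a
# request's connection even when a view raises before reaching its own
# g.db.close() call. Closing an sqlite3 connection twice is harmless.
@app.teardown_appcontext
def close_db(exception):
    db = getattr(g, 'db', None)
    if db is not None:
        db.close()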
@app.route('/admin')
def admin():
g.db = connect_db()
cur = g.db.execute('select rep_title,category from reps')
books = [dict(rep_title=row[0],category=row[1]) for row in cur.fetchall()]
g.db.close()
return render_template('admin.html',books=books)
@app.route('/userlogin', methods=['GET', 'POST'])
def userlogin():
    error = None
    if request.method == 'POST':
        # both fields must match (the original 'or' accepted either one alone)
        if request.form['email'] == '[email protected]' and request.form['password'] == 'admin':
            return redirect(url_for('users'))
    return render_template('userlogin.html')
@app.route('/users')
def users():
g.db = connect_db()
cur = g.db.execute('select rep_title,category from reps')
books = [dict(rep_title=row[0],category=row[1]) for row in cur.fetchall()]
g.db.close()
return render_template('users.html',books=books)
@app.route('/borrow', methods=['GET', 'POST'])
def borrow():
    books = []
    if request.method == 'POST' and request.form.get('book'):
        g.db = connect_db()
        cur = g.db.execute('select rep_title,category from reps')
        books = [dict(rep_title=row[0], category=row[1]) for row in cur.fetchall()]
        g.db.close()
    # always return a response (the original fell through to None on GET)
    return render_template('borrow.html', books=books)
@app.route('/',methods=['GET','POST'])
def login():
error = None
if request.method == 'POST':
        if request.form['email'] != '[email protected]' or request.form['password'] != 'admin':
            error = 'Invalid credentials. Please try again.'
        else:
            return redirect(url_for('admin'))
    return render_template('login.html', error=error)
if __name__ == '__main__':
app.run()
| [
"admin"
] | admin |
b46eb8ad515541f7d2dca44fc8545ec091fa2544 | 726a548766a9db586806ef540dcf8ea4d0a82a60 | /Python3/unit_testing/pytest/phonebook/phonebook.py | c9f4a4f2dcbac826ca6232de245f48fa455d4e4b | [] | no_license | ArseniD/learn_python | 6fd735a594ff83ea97888d6688e474e94182ea74 | d73fc790514f50a2f61c5cc198073299b0c71277 | refs/heads/master | 2022-05-28T04:53:54.603475 | 2019-08-27T10:15:29 | 2019-08-27T10:15:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 576 | py | import os
class Phonebook():
def __init__(self, cachedir):
self.entries = {}
self.filename = "phonebook.txt"
        self.file_path = os.path.join(str(cachedir), self.filename)
        self.file_cache = open(self.file_path, "w")
def add(self, name, number):
self.entries[name] = number
def lookup(self, name):
return self.entries[name]
def names(self):
return self.entries.keys()
def numbers(self):
return self.entries.values()
def clear(self):
self.entries = {}
self.file_cache.close()
        os.remove(self.file_path)  # remove the cache file inside cachedir, not a relative path
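# Minimal usage sketch (hypothetical; mirrors how a pytest tmpdir fixture would
# drive this class):
if __name__ == '__main__':
    import tempfile
    with tempfile.TemporaryDirectory() as tmp:
        book = Phonebook(tmp)
        book.add('Bob', '12345')
        assert book.lookup('Bob') == '12345'
        book.clear()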
| [
"[email protected]"
] | |
d3ff1e8fd2b9310e9ac4b6e16b83c3b07946f17f | 349c4f37b6a003d10dd78d864395a0d596d24fe6 | /Learn_advanced/5201_container.py | 2c7ae775dfcca224e76bba0ef1aea78bf35bbcbc | [] | no_license | bwjubrother/Algorithms | 55c2980a4540a7e48cb3afd298cbd2e3d81c594e | 03daa2c778b1cc59ce1920363a27c88bec5ec289 | refs/heads/master | 2023-04-07T00:38:58.715786 | 2021-04-25T08:00:08 | 2021-04-25T08:00:08 | 279,016,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 434 | py | import sys
sys.stdin = open('5201.txt', 'r')
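# Hedged alternative (defined but not used below): sorting both lists once makes
# each test case O(n log n + m log m) instead of repeated max()/remove() scans.
def max_loaded_weight(cargo, trucks):
    cargo = sorted(cargo, reverse=True)
    trucks = sorted(trucks, reverse=True)
    total, t = 0, 0
    for c in cargo:
        if t == len(trucks):
            break
        if c <= trucks[t]:  # heaviest remaining cargo fits the strongest free truck
            total += c
            t += 1
    return total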
T = int(input())
for tc in range(T):
n, m = map(int, input().split())
ns = list(map(int, input().split()))
ms = list(map(int, input().split()))
ans = 0
    while ns and ms:  # greedily pair the heaviest cargo with the strongest truck
        if max(ns) <= max(ms):
            ans += max(ns)
            ns.remove(max(ns))
            ms.remove(max(ms))
        else:  # heaviest cargo exceeds every remaining truck's capacity: discard it
            ns.remove(max(ns))
print('#%d %d' % (tc+1, ans)) | [
"[email protected]"
] | |
0b6a6e23efe28a1af8e2c40f4e5303ad9ea84029 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/r4.py | 221ab5ce6aa1a15e08da2cf1163ece2f4e587d08 | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 82 | py | ii = [('LeakWTI3.py', 1), ('BackGNE.py', 1), ('BachARE.py', 1), ('SomeMMH.py', 4)] | [
"[email protected]"
] | |
01b617e6e058ce0246a7c101cf43bf4e1c81a5c1 | 7b798a55cf7bd42ab5d2d423ab77814c2564bd44 | /Easy/Longest Harmonious Subsequence.py | 6c543dadc03fa60cf5efa587aed5009fc279b69a | [] | no_license | addherbs/LeetCode | d933839eb0a2eb53c192f76c42152c6f3a6ef3f2 | cadd48225d93aa69745a94a214e55e7751996e19 | refs/heads/master | 2021-04-15T05:12:26.855696 | 2021-02-27T05:53:42 | 2021-02-27T05:53:42 | 126,174,823 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 368 | py | import collections
class Solution:
def findLHS(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
p = collections.Counter (nums)
ans = 0
for num in nums:
if (num + 1 in p):
ans = max (ans, p[num + 1] + p[num])
print (p, ans)
return ans | [
"[email protected]"
] | |
d781e56aa2a7dee0672b8024be04ffa77770c0c0 | b5c47c66fb48294460e72833e4f3c7ec0042ff0a | /tutorial/venv/lib/python3.9/site-packages/setuptools/_distutils/command/bdist_rpm.py | 9ce00ca35396d2e6d101f8bc3a258135e91566e5 | [] | no_license | MariaKireeva/projects | 19c3caae9ee5f6a92c69c8c61e152dbb8f5c704f | 8a1d030d1e8c094109b70c43216f8491df58a02d | refs/heads/main | 2023-08-27T21:26:11.517225 | 2021-09-22T17:39:46 | 2021-09-22T17:39:46 | 312,046,523 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,537 | py | """distutils.command.bdist_rpm
Implements the Distutils 'bdist_rpm' command (create RPM source and binary
distributions)."""
import subprocess, sys, os
from distutils.core import Command
from distutils.debug import DEBUG
from distutils.file_util import write_file
from distutils.errors import *
from distutils.sysconfig import get_python_version
from distutils import log
class bdist_rpm(Command):
description = "create an RPM distribution"
user_options = [
('bdist-base=', None,
"base directory for creating built distributions"),
('rpm-base=', None,
"base directory for creating RPMs (defaults to \"rpm\" under "
"--bdist-base; must be specified for RPM 2)"),
('dist-dir=', 'd',
"directory to put final RPM files in "
"(and .spec files if --spec-only)"),
('python=', None,
"path to Python interpreter to hard-code in the .spec news "
"(default: \"python\")"),
('fix-python', None,
"hard-code the exact path to the current Python interpreter in "
"the .spec news"),
('spec-only', None,
"only regenerate spec news"),
('source-only', None,
"only generate source RPM"),
('binary-only', None,
"only generate binary RPM"),
('use-bzip2', None,
"use bzip2 instead of gzip to create source distribution"),
# More meta-data: too RPM-specific to put in the setup script,
        # but needs to go in the .spec file -- so we make these options
# to "bdist_rpm". The idea is that packagers would put this
# info in setup.cfg, although they are of course free to
# supply it on the command line.
('distribution-name=', None,
"name of the (Linux) distribution to which this "
"RPM applies (*not* the name of the module distribution!)"),
('group=', None,
"package classification [default: \"Development/Libraries\"]"),
('release=', None,
"RPM release number"),
('serial=', None,
"RPM serial number"),
('vendor=', None,
"RPM \"vendor\" (eg. \"Joe Blow <[email protected]>\") "
"[default: maintainer or author from setup script]"),
('packager=', None,
"RPM packager (eg. \"Jane Doe <[email protected]>\") "
"[default: vendor]"),
('doc-files=', None,
"list of documentation files (space or comma-separated)"),
('changelog=', None,
"RPM changelog"),
('icon=', None,
"name of icon news"),
('provides=', None,
"capabilities provided by this package"),
('requires=', None,
"capabilities required by this package"),
('conflicts=', None,
"capabilities which conflict with this package"),
('build-requires=', None,
"capabilities required to build this package"),
('obsoletes=', None,
"capabilities made obsolete by this package"),
('no-autoreq', None,
"do not automatically calculate dependencies"),
# Actions to take when building RPM
('keep-temp', 'k',
"don't clean up RPM build directory"),
('no-keep-temp', None,
"clean up RPM build directory [default]"),
('use-rpm-opt-flags', None,
"compile with RPM_OPT_FLAGS when building from source RPM"),
('no-rpm-opt-flags', None,
"do not pass any RPM CFLAGS to compiler"),
('rpm3-mode', None,
"RPM 3 compatibility mode (default)"),
('rpm2-mode', None,
"RPM 2 compatibility mode"),
# Add the hooks necessary for specifying custom scripts
('prep-script=', None,
"Specify a script for the PREP phase of RPM building"),
('build-script=', None,
"Specify a script for the BUILD phase of RPM building"),
('pre-install=', None,
"Specify a script for the pre-INSTALL phase of RPM building"),
('install-script=', None,
"Specify a script for the INSTALL phase of RPM building"),
('post-install=', None,
"Specify a script for the post-INSTALL phase of RPM building"),
('pre-uninstall=', None,
"Specify a script for the pre-UNINSTALL phase of RPM building"),
('post-uninstall=', None,
"Specify a script for the post-UNINSTALL phase of RPM building"),
('clean-script=', None,
"Specify a script for the CLEAN phase of RPM building"),
('verify-script=', None,
"Specify a script for the VERIFY phase of the RPM build"),
# Allow a packager to explicitly force an architecture
('force-arch=', None,
"Force an architecture onto the RPM build process"),
('quiet', 'q',
"Run the INSTALL phase of RPM building in quiet mode"),
]
boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode',
'no-autoreq', 'quiet']
negative_opt = {'no-keep-temp': 'keep-temp',
'no-rpm-opt-flags': 'use-rpm-opt-flags',
'rpm2-mode': 'rpm3-mode'}
def initialize_options(self):
self.bdist_base = None
self.rpm_base = None
self.dist_dir = None
self.python = None
self.fix_python = None
self.spec_only = None
self.binary_only = None
self.source_only = None
self.use_bzip2 = None
self.distribution_name = None
self.group = None
self.release = None
self.serial = None
self.vendor = None
self.packager = None
self.doc_files = None
self.changelog = None
self.icon = None
self.prep_script = None
self.build_script = None
self.install_script = None
self.clean_script = None
self.verify_script = None
self.pre_install = None
self.post_install = None
self.pre_uninstall = None
self.post_uninstall = None
self.prep = None
self.provides = None
self.requires = None
self.conflicts = None
self.build_requires = None
self.obsoletes = None
self.keep_temp = 0
self.use_rpm_opt_flags = 1
self.rpm3_mode = 1
self.no_autoreq = 0
self.force_arch = None
self.quiet = 0
def finalize_options(self):
self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
if self.rpm_base is None:
if not self.rpm3_mode:
raise DistutilsOptionError(
"you must specify --rpm-base in RPM 2 mode")
self.rpm_base = os.path.join(self.bdist_base, "rpm")
if self.python is None:
if self.fix_python:
self.python = sys.executable
else:
self.python = "python3"
elif self.fix_python:
raise DistutilsOptionError(
"--python and --fix-python are mutually exclusive options")
if os.name != 'posix':
raise DistutilsPlatformError("don't know how to create RPM "
"distributions on platform %s" % os.name)
if self.binary_only and self.source_only:
raise DistutilsOptionError(
"cannot supply both '--source-only' and '--binary-only'")
# don't pass CFLAGS to pure python distributions
if not self.distribution.has_ext_modules():
self.use_rpm_opt_flags = 0
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
self.finalize_package_data()
def finalize_package_data(self):
self.ensure_string('group', "Development/Libraries")
self.ensure_string('vendor',
"%s <%s>" % (self.distribution.get_contact(),
self.distribution.get_contact_email()))
self.ensure_string('packager')
self.ensure_string_list('doc_files')
if isinstance(self.doc_files, list):
for readme in ('README', 'README.txt'):
if os.path.exists(readme) and readme not in self.doc_files:
self.doc_files.append(readme)
self.ensure_string('release', "1")
self.ensure_string('serial') # should it be an int?
self.ensure_string('distribution_name')
self.ensure_string('changelog')
# Format changelog correctly
self.changelog = self._format_changelog(self.changelog)
self.ensure_filename('icon')
self.ensure_filename('prep_script')
self.ensure_filename('build_script')
self.ensure_filename('install_script')
self.ensure_filename('clean_script')
self.ensure_filename('verify_script')
self.ensure_filename('pre_install')
self.ensure_filename('post_install')
self.ensure_filename('pre_uninstall')
self.ensure_filename('post_uninstall')
# XXX don't forget we punted on summaries and descriptions -- they
# should be handled here eventually!
# Now *this* is some meta-data that belongs in the setup script...
self.ensure_string_list('provides')
self.ensure_string_list('requires')
self.ensure_string_list('conflicts')
self.ensure_string_list('build_requires')
self.ensure_string_list('obsoletes')
self.ensure_string('force_arch')
def run(self):
if DEBUG:
print("before _get_package_data():")
print("vendor =", self.vendor)
print("packager =", self.packager)
print("doc_files =", self.doc_files)
print("changelog =", self.changelog)
# make directories
if self.spec_only:
spec_dir = self.dist_dir
self.mkpath(spec_dir)
else:
rpm_dir = {}
for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'):
rpm_dir[d] = os.path.join(self.rpm_base, d)
self.mkpath(rpm_dir[d])
spec_dir = rpm_dir['SPECS']
        # Spec file goes into 'dist_dir' if '--spec-only' specified,
# build/rpm.<plat> otherwise.
spec_path = os.path.join(spec_dir,
"%s.spec" % self.distribution.get_name())
self.execute(write_file,
(spec_path,
self._make_spec_file()),
"writing '%s'" % spec_path)
if self.spec_only: # stop if requested
return
# Make a source distribution and copy to SOURCES directory with
# optional icon.
saved_dist_files = self.distribution.dist_files[:]
sdist = self.reinitialize_command('sdist')
if self.use_bzip2:
sdist.formats = ['bztar']
else:
sdist.formats = ['gztar']
self.run_command('sdist')
self.distribution.dist_files = saved_dist_files
source = sdist.get_archive_files()[0]
source_dir = rpm_dir['SOURCES']
self.copy_file(source, source_dir)
if self.icon:
if os.path.exists(self.icon):
self.copy_file(self.icon, source_dir)
else:
raise DistutilsFileError(
"icon news '%s' does not exist" % self.icon)
# build package
log.info("building RPMs")
rpm_cmd = ['rpmbuild']
if self.source_only: # what kind of RPMs?
rpm_cmd.append('-bs')
elif self.binary_only:
rpm_cmd.append('-bb')
else:
rpm_cmd.append('-ba')
rpm_cmd.extend(['--define', '__python %s' % self.python])
if self.rpm3_mode:
rpm_cmd.extend(['--define',
'_topdir %s' % os.path.abspath(self.rpm_base)])
if not self.keep_temp:
rpm_cmd.append('--clean')
if self.quiet:
rpm_cmd.append('--quiet')
rpm_cmd.append(spec_path)
# Determine the binary rpm names that should be built out of this spec
        # file
        # Note that some of these may not be really built (if the file
        # list is empty)
nvr_string = "%{name}-%{version}-%{release}"
src_rpm = nvr_string + ".src.rpm"
non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
q_cmd = r"rpm -q --qf '%s %s\n' --specfile '%s'" % (
src_rpm, non_src_rpm, spec_path)
out = os.popen(q_cmd)
try:
binary_rpms = []
source_rpm = None
while True:
line = out.readline()
if not line:
break
l = line.strip().split()
assert(len(l) == 2)
binary_rpms.append(l[1])
                # The source rpm is named after the first entry in the spec file
if source_rpm is None:
source_rpm = l[0]
status = out.close()
if status:
raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))
finally:
out.close()
self.spawn(rpm_cmd)
if not self.dry_run:
if self.distribution.has_ext_modules():
pyversion = get_python_version()
else:
pyversion = 'any'
if not self.binary_only:
srpm = os.path.join(rpm_dir['SRPMS'], source_rpm)
assert(os.path.exists(srpm))
self.move_file(srpm, self.dist_dir)
filename = os.path.join(self.dist_dir, source_rpm)
self.distribution.dist_files.append(
('bdist_rpm', pyversion, filename))
if not self.source_only:
for rpm in binary_rpms:
rpm = os.path.join(rpm_dir['RPMS'], rpm)
if os.path.exists(rpm):
self.move_file(rpm, self.dist_dir)
filename = os.path.join(self.dist_dir,
os.path.basename(rpm))
self.distribution.dist_files.append(
('bdist_rpm', pyversion, filename))
def _dist_path(self, path):
return os.path.join(self.dist_dir, os.path.basename(path))
def _make_spec_file(self):
"""Generate the text of an RPM spec news and return it as a
list of strings (one per line).
"""
# definitions and headers
spec_file = [
'%define name ' + self.distribution.get_name(),
'%define version ' + self.distribution.get_version().replace('-','_'),
'%define unmangled_version ' + self.distribution.get_version(),
'%define release ' + self.release.replace('-','_'),
'',
'Summary: ' + self.distribution.get_description(),
]
# Workaround for #14443 which affects some RPM based systems such as
# RHEL6 (and probably derivatives)
vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}')
# Generate a potential replacement value for __os_install_post (whilst
# normalizing the whitespace to simplify the test for whether the
# invocation of brp-python-bytecompile passes in __python):
vendor_hook = '\n'.join([' %s \\' % line.strip()
for line in vendor_hook.splitlines()])
problem = "brp-python-bytecompile \\\n"
fixed = "brp-python-bytecompile %{__python} \\\n"
fixed_hook = vendor_hook.replace(problem, fixed)
if fixed_hook != vendor_hook:
spec_file.append('# Workaround for http://bugs.python.org/issue14443')
spec_file.append('%define __os_install_post ' + fixed_hook + '\n')
        # put locale summaries into spec file
        # XXX not supported for now (hard to put a dictionary
        # in a config file -- arg!)
#for locale in self.summaries.keys():
# spec_file.append('Summary(%s): %s' % (locale,
# self.summaries[locale]))
spec_file.extend([
'Name: %{name}',
'Version: %{version}',
'Release: %{release}',])
# XXX yuck! this filename is available from the "sdist" command,
        # but only after it has run: and we create the spec file before
# running "sdist", in case of --spec-only.
if self.use_bzip2:
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2')
else:
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')
spec_file.extend([
'License: ' + self.distribution.get_license(),
'Group: ' + self.group,
'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
'Prefix: %{_prefix}', ])
if not self.force_arch:
# noarch if no extension modules
if not self.distribution.has_ext_modules():
spec_file.append('BuildArch: noarch')
else:
spec_file.append( 'BuildArch: %s' % self.force_arch )
for field in ('Vendor',
'Packager',
'Provides',
'Requires',
'Conflicts',
'Obsoletes',
):
val = getattr(self, field.lower())
if isinstance(val, list):
spec_file.append('%s: %s' % (field, ' '.join(val)))
elif val is not None:
spec_file.append('%s: %s' % (field, val))
if self.distribution.get_url() != 'UNKNOWN':
spec_file.append('Url: ' + self.distribution.get_url())
if self.distribution_name:
spec_file.append('Distribution: ' + self.distribution_name)
if self.build_requires:
spec_file.append('BuildRequires: ' +
' '.join(self.build_requires))
if self.icon:
spec_file.append('Icon: ' + os.path.basename(self.icon))
if self.no_autoreq:
spec_file.append('AutoReq: 0')
spec_file.extend([
'',
'%description',
self.distribution.get_long_description()
])
        # put locale descriptions into spec file
        # XXX again, suppressed because config file syntax doesn't
# easily support this ;-(
#for locale in self.descriptions.keys():
# spec_file.extend([
# '',
# '%description -l ' + locale,
# self.descriptions[locale],
# ])
# rpm scripts
# figure out default build script
def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0]))
def_build = "%s build" % def_setup_call
if self.use_rpm_opt_flags:
def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build
# insert contents of files
# XXX this is kind of misleading: user-supplied options are files
        # that we open and interpolate into the spec file, but the defaults
# are just text that we drop in as-is. Hmmm.
install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT '
'--record=INSTALLED_FILES') % def_setup_call
script_options = [
('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
('build', 'build_script', def_build),
('install', 'install_script', install_cmd),
('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"),
('verifyscript', 'verify_script', None),
('pre', 'pre_install', None),
('post', 'post_install', None),
('preun', 'pre_uninstall', None),
('postun', 'post_uninstall', None),
]
for (rpm_opt, attr, default) in script_options:
            # Insert contents of file referred to, if no file is referred to
# use 'default' as contents of script
val = getattr(self, attr)
if val or default:
spec_file.extend([
'',
'%' + rpm_opt,])
if val:
with open(val) as f:
spec_file.extend(f.read().split('\n'))
else:
spec_file.append(default)
# files section
spec_file.extend([
'',
'%files -f INSTALLED_FILES',
'%defattr(-,root,root)',
])
if self.doc_files:
spec_file.append('%doc ' + ' '.join(self.doc_files))
if self.changelog:
spec_file.extend([
'',
'%changelog',])
spec_file.extend(self.changelog)
return spec_file
def _format_changelog(self, changelog):
"""Format the changelog correctly and convert it to a list of strings
"""
if not changelog:
return changelog
new_changelog = []
for line in changelog.strip().split('\n'):
line = line.strip()
if line[0] == '*':
new_changelog.extend(['', line])
elif line[0] == '-':
new_changelog.append(line)
else:
new_changelog.append(' ' + line)
# strip trailing newline inserted by first changelog entry
if not new_changelog[0]:
del new_changelog[0]
return new_changelog
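# Illustrative sketch (not part of distutils): _format_changelog turns
#   "* Sat Aug  9 2003 John Doe <jd@example.com>\n- Fixed thing\nextra detail"
# into
#   ['* Sat Aug  9 2003 John Doe <jd@example.com>', '- Fixed thing', '  extra detail']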
| [
"[email protected]"
] | |
ac79892ea7a04e438b9f617893c0aeddfc3de5db | 33daf4c69a8f46d7ad8d93eaa73fc60e36fd022d | /gestion/opos_2016/corregir_nombres.py | 74f44cc7b0761081bb11028fe73577b9f2112e9e | [] | no_license | OscarMaestre/estructurado | 81cfc9412b77d5015be1bebf66785c357746d8e2 | 7649747e48128cb9c17dee937574e9490fcc9087 | refs/heads/master | 2021-01-10T15:05:47.695362 | 2016-04-28T07:30:50 | 2016-04-28T07:30:50 | 53,923,820 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,058 | py | #!/usr/bin/env python3
#coding=utf-8
from utilidades.ficheros.GestorFicheros import GestorFicheros
modificaciones=[
("La Torre de Esteban Hambran", "Torre de Esteban Hambran"),
("Cortijos de Arriba", "Cortijo de Arriba"),
("Villafranca de los Caballeros", "Villafranca de los"),
("Las Ventas con Peña Aguilera", "Ventas Con Peña Aguilera"),
("Herrera de la Mancha", "Centro Penitenciario Herrera")
]
gf=GestorFicheros()
sql_modificar_origen="update rutas set origen='{0}' where origen='{1}';"
sql_modificar_destino="update rutas set destino='{0}' where destino='{1}';"
ARCHIVO_BD="rutas.db"
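# Hedged alternative (defined but not used by this script): the same updates
# without the echo/pipe round-trip, using the sqlite3 module with
# parameterized queries instead of string-formatted SQL.
def actualizar_con_sqlite3(pares, archivo_bd=ARCHIVO_BD):
    import sqlite3
    con = sqlite3.connect(archivo_bd)
    with con:
        for correcto, truncado in pares:
            con.execute("update rutas set origen=? where origen=?", (correcto, truncado))
            con.execute("update rutas set destino=? where destino=?", (correcto, truncado))
    con.close()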
for m in modificaciones:
    nombre_correcto = m[0]   # full name the DB should end up with
    nombre_truncado = m[1]   # value currently stored in the DB (often truncated)
    comando_sql_1 = sql_modificar_origen.format(nombre_correcto, nombre_truncado)
gf.ejecutar_comando(
"echo \"" + comando_sql_1+ "\"", "| sqlite3 "+ARCHIVO_BD
)
    comando_sql_2 = sql_modificar_destino.format(nombre_correcto, nombre_truncado)
gf.ejecutar_comando(
"echo \"" + comando_sql_2+ "\"", "| sqlite3 "+ARCHIVO_BD
) | [
"[email protected]"
] | |
95ec4bc862a5962847f4a2417b1f107854719b08 | 699c4c15667166788a4eaf9d9c1197cf3393986f | /backend/gatherspace/manage.py | e4bfc5db43a4bb74f93d01ea4c3060c182757b4a | [] | no_license | Merevoli-DatLuu/GatherSpace | 2fdd07f5b0a59d27ee9567de952880dc1242b54e | 4305f392fb8f810cfd193d9ba5b11ac94e3ea24d | refs/heads/master | 2023-08-31T18:09:31.645095 | 2021-09-22T05:05:04 | 2021-09-22T05:05:04 | 409,068,869 | 0 | 0 | null | 2021-09-28T08:51:43 | 2021-09-22T04:55:48 | Python | UTF-8 | Python | false | false | 667 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gatherspace.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
9951868b35d55b8b9969caede6c4916b987b0f5c | e2f0806ca1cdd887ea40d050a19fa2710427bd38 | /기본 문제/05주차_스택2/2167_2차원 배열의 합/강승훈.py | a590cd46cdd52cc0944208211c63cc1350c95b58 | [] | no_license | JY-Dev/AlgorithmStudy-1 | 001f94d80097c850c79eeb2bc86971a01aa5bd5d | 2ad1df0fd65c72a6f6d1feeba09f889000ff8c15 | refs/heads/main | 2023-08-21T18:38:18.235994 | 2021-09-28T07:07:11 | 2021-09-28T07:07:11 | 406,208,087 | 1 | 0 | null | 2021-09-14T03:14:32 | 2021-09-14T03:14:31 | null | UTF-8 | Python | false | false | 607 | py | from sys import stdin
# Read the input.
n,m = map(int, stdin.readline().split(" "))
arr = list(list(map(int, stdin.readline().split())) for _ in range(n))
test_case = int(stdin.readline().strip())
for _ in range(test_case):
    i1, j1, i2, j2 = map(int, stdin.readline().split(" "))  # query coordinates (1-indexed)
    sub_sum = 0  # accumulator for this query's answer
    for i in range(i1-1, i2):  # rows i1..i2, converted to 0-indexing
        for j in range(j1-1, j2):  # columns j1..j2
            sub_sum += arr[i][j]
    # Print the answer for each query.
print(sub_sum)
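# Hedged alternative (defined but not used above): with many queries, 2D prefix
# sums answer each query in O(1) after O(n*m) preprocessing.
def build_prefix(a, n, m):
    p = [[0] * (m + 1) for _ in range(n + 1)]
    for i in range(n):
        for j in range(m):
            p[i + 1][j + 1] = a[i][j] + p[i][j + 1] + p[i + 1][j] - p[i][j]
    return p  # query: p[i2][j2] - p[i1-1][j2] - p[i2][j1-1] + p[i1-1][j1-1]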
| [
"[email protected]"
] | |
979a71a39688b941580d1480aaa2802ebc8058a2 | 2af94f8a7609d47fdcea28a2132c4f8bacb103e3 | /src/services/service_manager.py | da578ee4f10fc46bdd44de29797f6e45099bc02f | [] | no_license | bernhara/DigiGateway4Raph | 685527723f0b306f387233c78d27fe9d78717c38 | f36ba29ef883d70f94b8609ff734b5dcde786c66 | refs/heads/master | 2020-07-05T19:56:27.027547 | 2019-08-19T06:10:46 | 2019-08-19T06:10:46 | 202,756,662 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,464 | py | ############################################################################
# #
# Copyright (c)2008, 2009, Digi International (Digi). All Rights Reserved. #
# #
# Permission to use, copy, modify, and distribute this software and its #
# documentation, without fee and without a signed licensing agreement, is #
# hereby granted, provided that the software is used on Digi products only #
# and that the software contain this copyright notice, and the following #
# two paragraphs appear in all copies, modifications, and distributions as #
# well. Contact Product Management, Digi International, Inc., 11001 Bren #
# Road East, Minnetonka, MN, +1 952-912-3444, for commercial licensing #
# opportunities for non-Digi products. #
# #
# DIGI SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED #
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A #
# PARTICULAR PURPOSE. THE SOFTWARE AND ACCOMPANYING DOCUMENTATION, IF ANY, #
# PROVIDED HEREUNDER IS PROVIDED "AS IS" AND WITHOUT WARRANTY OF ANY KIND. #
# DIGI HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, #
# ENHANCEMENTS, OR MODIFICATIONS. #
# #
# IN NO EVENT SHALL DIGI BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, #
# SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS, #
# ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF #
# DIGI HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. #
# #
############################################################################
"""\
Manages the loading and instances of individual services.
The ServiceManager allows for the dynamic loading of service
drivers as well as the ability to retrieve an instance of a service
by name.
The ServiceManager also provides an interface to start and stop
an instance as well as to query the instance for its configuration
parameters.
"""
# imports
from common.abstract_service_manager import AbstractServiceManager
# constants
# exception classes
# interface functions
# classes
class ServiceManager(AbstractServiceManager):
def __init__(self, core_services):
self.__core = core_services
self.__core.set_service("service_manager", self)
# Initialize our base class:
AbstractServiceManager.__init__(self, core_services, ('services',))
def driver_load(self, name):
"""\
Loads a service driver class dynamically.
If the driver has not been loaded previously, an unconfigured
instance of the driver will be created and managed by the
ServiceManager. If the driver has already been loaded
nothing will be done. In either case, this function will
return True.
If the service driver cannot be loaded for any reason, an
exception will be raised.
"""
return AbstractServiceManager.service_load(self, name)
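# Example wiring (illustrative only; assumes a configured core-services object
# and a hypothetical driver name):
#   mgr = ServiceManager(core)              # registers itself as "service_manager"
#   mgr.driver_load('services.my_service')  # load and instantiate the driver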
# internal functions & classes
| [
"[email protected]"
] | |
39231c851e4390fefee972dc33794a199ac03564 | 589b5eedb71d83c15d44fedf60c8075542324370 | /project/stock_project/alpha_model/alpha_factor/GrossProfitYOY.py | 7132439c922d47498483410e22ffd1b56dbe32b9 | [] | no_license | rlcjj/quant | 4c2be8a8686679ceb675660cb37fad554230e0d4 | c07e8f0f6e1580ae29c78c1998a53774a15a67e1 | refs/heads/master | 2020-03-31T07:15:48.111511 | 2018-08-27T05:29:00 | 2018-08-27T05:29:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,813 | py | import pandas as pd
import numpy as np
from quant.stock.stock import Stock
from quant.stock.date import Date
from quant.stock.stock_factor_operate import StockFactorOperate
def GrossProfitYOY(beg_date, end_date):
"""
    Factor description: year-over-year growth of the current quarter's gross profit.
    The effective dates follow the most recent financial-report disclosure date.
"""
# param
#################################################################################
factor_name = 'GrossProfitYOY'
ipo_num = 90
# read data
#################################################################################
income = Stock().get_factor_h5("OperatingIncome", None, "primary_mfc").T
cost = Stock().get_factor_h5("OperatingCost", None, "primary_mfc").T
[income, cost] = Stock().make_same_index_columns([income, cost])
gross_profit = income - cost
gross_profit_4 = gross_profit.shift(4)
gross_profit_yoy = gross_profit / gross_profit_4 - 1.0
gross_profit_yoy = gross_profit_yoy.T
gross_profit_yoy = StockFactorOperate().change_quarter_to_daily_with_report_date(gross_profit_yoy, beg_date, end_date)
    # data preprocessing
#################################################################################
pass
# calculate data daily
#################################################################################
res = gross_profit_yoy.T.dropna(how='all').T
# save data
#############################################################################
Stock().write_factor_h5(res, factor_name, "alpha_dfc")
return res
#############################################################################
if __name__ == '__main__':
from datetime import datetime
beg_date = '2004-01-01'
end_date = datetime.today()
data = GrossProfitYOY(beg_date, end_date)
print(data)
| [
"[email protected]"
] | |
51ceb3570f27107ac2da3e8147aab9eefffd42dc | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p3BR/R2/benchmark/startQiskit_noisy42.py | a8bfb10ba754da62197c75c385a3e667cff3009b | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,214 | py | # qubit number=3
# total number=7
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
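# Conventions of the two helpers above (illustrative values):
#   bitwise_xor('101', '011') -> '011'   (per-bit XOR, result string reversed)
#   bitwise_dot('101', '011') -> '1'     (inner product of the bit strings, mod 2)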
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
# oracle.draw('mpl', filename=(kernel + '-oracle.png'))
return oracle
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the Bernstein-Vazirani circuit
zero = np.binary_repr(0, n)
b = f(zero)
# initial n + 1 bits
input_qubit = QuantumRegister(n+1, "qc")
classicals = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classicals)
# inverse last one (can be omitted if using O_f^\pm)
prog.x(input_qubit[n])
# circuit begin
prog.h(input_qubit[1]) # number=1
prog.x(input_qubit[2]) # number=2
prog.cx(input_qubit[2],input_qubit[1]) # number=4
prog.z(input_qubit[2]) # number=3
# apply H to get superposition
for i in range(n):
prog.h(input_qubit[i])
prog.h(input_qubit[n])
prog.barrier()
# apply oracle O_f
oracle = build_oracle(n, f)
prog.append(
oracle.to_gate(),
[input_qubit[i] for i in range(n)] + [input_qubit[n]])
# apply H back (QFT on Z_2^n)
for i in range(n):
prog.h(input_qubit[i])
prog.barrier()
# measure
return prog
def get_statevector(prog: QuantumCircuit) -> Any:
state_backend = Aer.get_backend('statevector_simulator')
statevec = execute(prog, state_backend).result()
quantum_state = statevec.get_statevector()
qubits = round(log2(len(quantum_state)))
quantum_state = {
"|" + np.binary_repr(i, qubits) + ">": quantum_state[i]
for i in range(2 ** qubits)
}
return quantum_state
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
# Q: which backend should we use?
# get state vector
quantum_state = get_statevector(prog)
# get simulate results
# provider = IBMQ.load_account()
# backend = provider.get_backend(backend_str)
# qobj = compile(prog, backend, shots)
# job = backend.run(qobj)
# job.result()
backend = Aer.get_backend(backend_str)
# transpile/schedule -> assemble -> backend.run
results = execute(prog, backend, shots=shots).result()
counts = results.get_counts()
a = Counter(counts).most_common(1)[0][0][::-1]
return {
"measurements": counts,
# "state": statevec,
"quantum_state": quantum_state,
"a": a,
"b": b
}
def bernstein_test_1(rep: str):
"""011 . x + 1"""
a = "011"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_2(rep: str):
"""000 . x + 0"""
a = "000"
b = "0"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_3(rep: str):
"""111 . x + 1"""
a = "111"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
if __name__ == "__main__":
n = 2
a = "11"
b = "1"
f = lambda rep: \
bitwise_xor(bitwise_dot(a, rep), b)
prog = build_circuit(n, f)
sample_shot =4000
writefile = open("../data/startQiskit_noisy42.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
backend = FakeYorktown()
circuit1 = transpile(prog, FakeYorktown())
circuit1.h(qubit=2)
circuit1.x(qubit=3)
circuit1.measure_all()
info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
| [
"[email protected]"
] | |
1393b6316d1e0a3f66c872c6f21c11da41fbb9e9 | ad715f9713dc5c6c570a5ac51a18b11932edf548 | /tensorflow/lite/testing/op_tests/ceil.py | 02d6ab3f76b56e09d806372a7e08eef7ec137d0a | [
"LicenseRef-scancode-generic-cla",
"Apache-2.0",
"BSD-2-Clause"
] | permissive | rockzhuang/tensorflow | f1f31bc8edfa402b748c500efb97473c001bac95 | cb40c060b36c6a75edfefbc4e5fc7ee720273e13 | refs/heads/master | 2022-11-08T20:41:36.735747 | 2022-10-21T01:45:52 | 2022-10-21T01:45:52 | 161,580,587 | 27 | 11 | Apache-2.0 | 2019-01-23T11:00:44 | 2018-12-13T03:47:28 | C++ | UTF-8 | Python | false | false | 1,835 | py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test configs for ceil."""
import tensorflow.compat.v1 as tf
from tensorflow.lite.testing.zip_test_utils import create_tensor_data
from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
from tensorflow.lite.testing.zip_test_utils import register_make_test_function
@register_make_test_function()
def make_ceil_tests(options):
"""Make a set of tests to do ceil."""
test_parameters = [{
"input_dtype": [tf.float32],
"input_shape": [[], [1], [1, 2], [5, 6, 7, 8], [3, 4, 5, 6]],
}]
def build_graph(parameters):
"""Build the ceil op testing graph."""
input_value = tf.compat.v1.placeholder(
dtype=parameters["input_dtype"],
name="input1",
shape=parameters["input_shape"])
out = tf.math.ceil(input_value)
return [input_value], [out]
def build_inputs(parameters, sess, inputs, outputs):
input_value = create_tensor_data(parameters["input_dtype"],
parameters["input_shape"])
return [input_value], sess.run(outputs, feed_dict={inputs[0]: input_value})
make_zip_of_tests(options, test_parameters, build_graph, build_inputs)
| [
"[email protected]"
] | |
a1ff4766613a6a27fc4395c92e158607ac3292fc | 99c4d4a6592fded0e8e59652484ab226ac0bd38c | /code/batch-1/vse-naloge-brez-testov/DN4-Z-225.py | 9583739c790cad2cec42909834833817e30398cc | [] | no_license | benquick123/code-profiling | 23e9aa5aecb91753e2f1fecdc3f6d62049a990d5 | 0d496d649247776d121683d10019ec2a7cba574c | refs/heads/master | 2021-10-08T02:53:50.107036 | 2018-12-06T22:56:38 | 2018-12-06T22:56:38 | 126,011,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,597 | py | # Tu pišite svoje funkcije:
from math import *
def koordinate(ime, kraji):
    for kraj, x, y in kraji:
        if kraj == ime:
            return (x, y)
    return None
def razdalja_koordinat(x1, y1, x2, y2):
return sqrt((x1-x2) ** 2 + (y1-y2) ** 2)
def razdalja(ime1, ime2, kraji):
x1, y1 = koordinate(ime1, kraji)
x2, y2 = koordinate(ime2, kraji)
return razdalja_koordinat(x1, y1, x2, y2)
def v_dometu(ime, domet, kraji):
s = []
for mesto, x, y in kraji:
if mesto != ime:
if razdalja(ime, mesto, kraji) <= domet:
s.append(mesto)
return s
def najbolj_oddaljeni(ime, imena, kraji):
s = []
naj_r = 0
naj_k = ''
for kraj, x, y in kraji:
if kraj in imena:
r = razdalja(ime, kraj, kraji)
s.append((kraj, r))
for kraj, r in s:
if r > naj_r:
naj_r = r
naj_k = kraj
return naj_k
def zalijemo(ime, domet, kraji):
return najbolj_oddaljeni(ime, v_dometu(ime,domet,kraji), kraji)
def presek(s1, s2):
return list(set(s1).intersection(s2))
def skupno_zalivanje(ime1, ime2, domet, kraji):
    # The inner loops use their own variable names so they no longer shadow
    # `mesto`, which previously corrupted the `if mesto == ime2` check.
    mes1 = []
    mes2 = []
    for mesto, x, y in kraji:
        if mesto == ime1:
            for drugi, x2, y2 in kraji:
                if razdalja(drugi, ime1, kraji) <= domet:
                    mes1.append(drugi)
        if mesto == ime2:
            for drugi, x2, y2 in kraji:
                if razdalja(drugi, ime2, kraji) <= domet:
                    mes2.append(drugi)
    return presek(mes1, mes2)
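# Illustrative demo of the helpers above (town names and coordinates are made up):
if __name__ == '__main__':
    kraji = [('Brlog', 0, 0), ('Pleterje', 3, 4), ('Zaplana', 6, 8)]
    print(razdalja('Brlog', 'Pleterje', kraji))         # 5.0
    print(v_dometu('Brlog', 6, kraji))                  # ['Pleterje']
    print(skupno_zalivanje('Brlog', 'Zaplana', 10, kraji))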
| [
"[email protected]"
] | |
ce739380e97a96bf00fcdc9d4059e29f2e122645 | 099256b28df65fb7c90c077b060dca16b8655235 | /unsupervised_learning/0x03-hyperparameter_tuning/2-gp.py | 948f3c23d718cd4522f60f7ce711796142e5c0e1 | [] | no_license | Immaannn2222/holbertonschool-machine_learning | 1cebb9a889b363669bed7645d102dc56ab943c08 | 80bf8d3354702f7fb9f79bbb5ed7e00fc19f788d | refs/heads/master | 2023-08-01T05:35:00.180472 | 2021-09-22T20:28:17 | 2021-09-22T20:28:17 | 317,624,526 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,373 | py | #!/usr/bin/env python3
"""HYPERPARAÙETER"""
import numpy as np
class GaussianProcess:
""" represents a noiseless 1D Gaussian process"""
def __init__(self, X_init, Y_init, l=1, sigma_f=1):
"""class constructor"""
self.X = X_init
self.Y = Y_init
self.l = l
self.sigma_f = sigma_f
self.K = self.kernel(X_init, X_init)
def kernel(self, X1, X2):
"""calculates the covariance kernel matrix between two matrices"""
exp_term = (X1 - X2.T) ** 2
RBF = (((self.sigma_f) ** 2) * (np.exp(exp_term * (
-0.5 / self.l ** 2))))
return RBF
def predict(self, X_s):
"""predicts mean, standard deviation of points in a Gaussian process"""
K_ss = self.kernel(X_s, X_s)
K = self.kernel(self.X, self.X)
decompositon = np.linalg.cholesky(K)
K_k = self.kernel(self.X, X_s)
result = np.linalg.solve(decompositon, K_k)
mu = np.dot(result.T, np.linalg.solve(decompositon, self.Y)).reshape((
X_s.shape[0],))
s2 = np.diag(K_ss) - np.sum(result**2, axis=0)
return mu, s2
def update(self, X_new, Y_new):
"""updates a Gaussian Process"""
self.X = np.append(self.X, X_new).reshape(-1, 1)
self.Y = np.append(self.Y, Y_new).reshape(-1, 1)
self.K = self.kernel(self.X, self.X)
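# Minimal usage sketch (the sample data below is assumed, not part of the task):
if __name__ == '__main__':
    X_init = np.array([[-1.5], [0.0], [2.0]])
    Y_init = np.sin(X_init)
    gp = GaussianProcess(X_init, Y_init, l=0.6, sigma_f=2)
    X_s = np.linspace(-2, 2, 5).reshape(-1, 1)
    mu, s2 = gp.predict(X_s)  # posterior mean and variance at the query points
    gp.update(np.array([[1.0]]), np.array([[np.sin(1.0)]]))  # absorb one new sample
    print(mu, s2)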
| [
"[email protected]"
] | |
7d1bcd8e386680914a0800493669b944fd4b31b4 | 04e5b6df2ee3bcfb7005d8ec91aab8e380333ac4 | /Lib/objc/_BiomeFoundation.py | 46963683901ead01d0776eb2ce541ab36ad30317 | [
"MIT"
] | permissive | ColdGrub1384/Pyto | 64e2a593957fd640907f0e4698d430ea7754a73e | 7557485a733dd7e17ba0366b92794931bdb39975 | refs/heads/main | 2023-08-01T03:48:35.694832 | 2022-07-20T14:38:45 | 2022-07-20T14:38:45 | 148,944,721 | 884 | 157 | MIT | 2023-02-26T21:34:04 | 2018-09-15T22:29:07 | C | UTF-8 | Python | false | false | 324 | py | """
Classes from the 'BiomeFoundation' framework.
"""
try:
from rubicon.objc import ObjCClass
except ValueError:
def ObjCClass(name):
return None
def _Class(name):
try:
return ObjCClass(name)
except NameError:
return None
BMCoreAnalyticsEvents = _Class("BMCoreAnalyticsEvents")
| [
"[email protected]"
] | |
1a113e39024e17830518e548d9edbf161cb4665c | 6caab8d886e8bd302d1994ff663cf5ccb5e11522 | /MyNotes_01/Step02/1-Data_Structure/day03/demo04_order_set.py | bea67a7c9c1c6c6023282873b66b421d9bb4c5d7 | [] | no_license | ZimingGuo/MyNotes01 | 7698941223c79ee754b17296b9984b731858b238 | 55e6681da1a9faf9c0ec618ed60f5da9ecc6beb6 | refs/heads/master | 2022-07-30T21:30:32.100042 | 2020-05-19T16:59:09 | 2020-05-19T16:59:09 | 265,254,345 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,924 | py | # author: Ziming Guo
# time: 2020/3/14
'''
Summary of three in-place sorting routines: bubble, selection and insertion sort.
'''
# Bubble sort variant (a simple exchange sort): two nested loops
def bubble(list_):
for m in range(0, len(list_) - 1):
        for n in range(m + 1, len(list_)):  # note: the inner scan starts just after m
if list_[m] > list_[n]:
list_[m], list_[n] = list_[n], list_[m]
# Selection sort
def seq_ord(list_target):
"""
    Sort a list in ascending order using selection sort.
    Idea:
        1) find the smallest element among the remaining elements
        2) swap that element with the frontmost unsorted element
    :param list_target: the list to sort
"""
for m in range(0, len(list_target) - 1):
dict_min = {}
dict_min["val"] = list_target[m]
dict_min["ind"] = m
for i in range(m + 1, len(list_target)):
if list_target[i] < dict_min["val"]:
dict_min["val"] = list_target[i] # 找到了最小的元素,存数据
dict_min["ind"] = i # 找到了最小元素,存索引值
list_target[m], list_target[dict_min["ind"]] = dict_min["val"], list_target[m]
# Insertion sort
def insert_ord(list_target):
"""
    Idea:
        1. take each element in order and compare it with the elements before it
        2. once an element smaller than it is found, insert it right after that element
    Note: comparisons are made against the preceding elements.
    Note: the element is inserted, not swapped (insert).
    :param list_target: the list to sort
"""
for m in range(1, len(list_target)):
for n in range(m - 1, -1, -1):
if list_target[n] < list_target[m]:
list_target.insert(n + 1, list_target[m])
del list_target[m + 1]
break
elif n == 0:
list_target.insert(0, list_target[m])
del list_target[m + 1]
| [
"[email protected]"
] | |
61b4cdb93612dde44672fc2ceda9f4c5e7e07d60 | ae7f4a70a0bdb2e98d13c996c75d274241c25278 | /basics/bubble_sort.py | ec8166badc0e61cd877a955c07c700c7d8f6268f | [
"MIT"
] | permissive | zi-NaN/algorithm_exercise | 5d17e1f6c3cae89ed3c7523b344e55c5a10e3e62 | 817916a62774145fe6387b715f76c5badbf99197 | refs/heads/master | 2020-03-30T12:00:46.694490 | 2019-06-23T11:04:34 | 2019-06-23T11:04:34 | 151,204,296 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 309 | py | def bubble_sort(arr:'list'):
# inplace sort
for i in range(len(arr)-1):
for j in range(len(arr)-1, 0, -1):
if arr[j-1] > arr[j]:
arr[j-1], arr[j] = arr[j], arr[j-1]
return arr
# test
if __name__ == '__main__':
print(bubble_sort([1, 3, 2, 4])) | [
"[email protected]"
] | |
f0854c67101d14e317c013d042d3a110dd01e05b | c20a7a651e63c1e7b1c5e6b5c65c8150898bbaf2 | /KG/BiLSTM+CRF.py | cd232b9f936518d33947a35a03e4efc752e6c09d | [] | no_license | Nobody0321/MyCodes | 08dbc878ae1badf82afaf0c9fc608b70dfce5cea | b60e2b7a8f2ad604c7d28b21498991da60066dc3 | refs/heads/master | 2023-08-19T14:34:23.169792 | 2023-08-15T15:50:24 | 2023-08-15T15:50:24 | 175,770,050 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,542 | py | import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.optim as optim
def to_scalar(var):  # var is a Variable holding a single element
# returns a python float
return var.view(-1).data.tolist()[0]
def argmax(vec):
# return the argmax as a python int
_, idx = torch.max(vec, 1)
return to_scalar(idx)
def prepare_sequence(seq, to_ix):
idxs = [to_ix[w] for w in seq]
tensor = torch.LongTensor(idxs)
return autograd.Variable(tensor)
# Compute log sum exp in a numerically stable way for the forward algorithm
def log_sum_exp(vec):  # vec is a 1 x 5 Variable
max_score = vec[0, argmax(vec)]
    # max_score has dimension 1; max_score.view(1, -1) is 1 x 1; expanding it
    # to vec.size()[1] columns gives a 1 x 5 broadcast copy
    max_score_broadcast = max_score.view(1, -1).expand(1, vec.size()[1])  # vec.size() is 1 x 5
    # subtracting the max before exp/sum/log is the log-sum-exp trick: it
    # avoids overflow in exp() without changing the result
    return max_score + torch.log(torch.sum(torch.exp(vec - max_score_broadcast)))
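# Numerical illustration of the stabilisation above (assumed values):
#   log(exp(1000.) + exp(1001.)) overflows if evaluated naively, whereas
#   1001. + log(exp(-1.) + exp(0.)) evaluates to ~1001.3133 without overflow.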
class BiLSTM_CRF(nn.Module):
def __init__(self, vocab_size, tag_to_ix, embedding_dim, hidden_dim):
super(BiLSTM_CRF, self).__init__()
self.embedding_dim = embedding_dim
self.hidden_dim = hidden_dim
self.vocab_size = vocab_size
self.tag_to_ix = tag_to_ix
self.tagset_size = len(tag_to_ix)
self.word_embeds = nn.Embedding(vocab_size, embedding_dim)
self.lstm = nn.LSTM(embedding_dim, hidden_dim // 2, num_layers=1, bidirectional=True)
# Maps the output of the LSTM into tag space.
self.hidden2tag = nn.Linear(hidden_dim, self.tagset_size)
# Matrix of transition parameters. Entry i,j is the score of
# transitioning *to* i *from* j.
self.transitions = nn.Parameter(torch.randn(self.tagset_size, self.tagset_size))
# These two statements enforce the constraint that we never transfer
# to the start tag and we never transfer from the stop tag
self.transitions.data[tag_to_ix[START_TAG], :] = -10000
self.transitions.data[:, tag_to_ix[STOP_TAG]] = -10000
self.hidden = self.init_hidden()
def init_hidden(self):
return (autograd.Variable(torch.randn(2, 1, self.hidden_dim // 2)),
autograd.Variable(torch.randn(2, 1, self.hidden_dim // 2)))
    # total score over all possible tag sequences (forward algorithm)
def _forward_alg(self, feats):
# Do the forward algorithm to compute the partition function
init_alphas = torch.Tensor(1, self.tagset_size).fill_(-10000.)
# START_TAG has all of the score.
init_alphas[0][self.tag_to_ix[START_TAG]] = 0.
# Wrap in a variable so that we will get automatic backprop
        forward_var = autograd.Variable(init_alphas)  # forward_var for the initial state; updated at every step t
# Iterate through the sentence
        for feat in feats:  # feat has dimension 5
alphas_t = [] # The forward variables at this timestep
for next_tag in range(self.tagset_size):
# broadcast the emission score: it is the same regardless of
# the previous tag
                emit_score = feat[next_tag].view(1, -1).expand(1, self.tagset_size)  # shape 1 x 5
# the ith entry of trans_score is the score of transitioning to
# next_tag from i
                trans_score = self.transitions[next_tag].view(1, -1)  # shape 1 x 5
# The ith entry of next_tag_var is the value for the
# edge (i -> next_tag) before we do log-sum-exp
                # Understanding the first iteration:
                # trans_score holds the scores of transitioning from every other tag to tag B;
                # emit_score is the score of tag B from the LSTM output layer
                # (emit_score is 1 x 5 and all five values are identical)
next_tag_var = forward_var + trans_score + emit_score
# The forward variable for this tag is log-sum-exp of all the
# scores.
alphas_t.append(log_sum_exp(next_tag_var))
            forward_var = torch.cat(alphas_t).view(1, -1)  # per-tag scores of the 5 tags up to step (t-1)
terminal_var = forward_var + self.transitions[self.tag_to_ix[STOP_TAG]]
alpha = log_sum_exp(terminal_var)
return alpha
    # compute the LSTM emission features (feats)
def _get_lstm_features(self, sentence):
self.hidden = self.init_hidden()
#embeds = self.word_embeds(sentence).view(len(sentence), 1, -1)
embeds = self.word_embeds(sentence)
embeds = embeds.unsqueeze(1)
lstm_out, self.hidden = self.lstm(embeds, self.hidden)
lstm_out = lstm_out.view(len(sentence), self.hidden_dim)
lstm_feats = self.hidden2tag(lstm_out)
return lstm_feats
    # score of the provided gold tag sequence
def _score_sentence(self, feats, tags):
# Gives the score of a provided tag sequence
score = autograd.Variable(torch.Tensor([0]))
        tags = torch.cat([torch.LongTensor([self.tag_to_ix[START_TAG]]), tags])  # prepend START_TAG (index 3) to the tag sequence
for i, feat in enumerate(feats):
            # self.transitions[tags[i + 1], tags[i]] is the transition score from tag i to tag i+1
            # feat[tags[i + 1]]: feat is the step-i output with 5 values for
            # B, I, E, START_TAG and END_TAG; we take the value of the target tag
score = score + self.transitions[tags[i + 1], tags[i]] + feat[tags[i + 1]]
score = score + self.transitions[self.tag_to_ix[STOP_TAG], tags[-1]]
return score
    # decode: recover the best tag sequence and its score (Viterbi)
def _viterbi_decode(self, feats):
backpointers = []
# Initialize the viterbi variables in log space
init_vvars = torch.Tensor(1, self.tagset_size).fill_(-10000.)
init_vvars[0][self.tag_to_ix[START_TAG]] = 0
# forward_var at step i holds the viterbi variables for step i-1
forward_var = autograd.Variable(init_vvars)
for feat in feats:
bptrs_t = [] # holds the backpointers for this step
viterbivars_t = [] # holds the viterbi variables for this step
for next_tag in range(self.tagset_size):
# next_tag_var[i] holds the viterbi variable for tag i at the
# previous step, plus the score of transitioning
# from tag i to next_tag.
# We don't include the emission scores here because the max
# does not depend on them (we add them in below)
                next_tag_var = forward_var + self.transitions[next_tag]  # scores of moving from every tag (B, I, E, Start, End) to next_tag
best_tag_id = argmax(next_tag_var)
bptrs_t.append(best_tag_id)
viterbivars_t.append(next_tag_var[0][best_tag_id])
# Now add in the emission scores, and assign forward_var to the set
# of viterbi variables we just computed
            forward_var = (torch.cat(viterbivars_t) + feat).view(1, -1)  # max score of each of the 5 paths from step 0 to step (i-1)
            backpointers.append(bptrs_t)  # bptrs_t has 5 elements
# Transition to STOP_TAG
        terminal_var = forward_var + self.transitions[self.tag_to_ix[STOP_TAG]]  # transition scores from every tag to STOP_TAG
best_tag_id = argmax(terminal_var)
path_score = terminal_var[0][best_tag_id]
# Follow the back pointers to decode the best path.
best_path = [best_tag_id]
        for bptrs_t in reversed(backpointers):  # walk backwards to recover the best path
best_tag_id = bptrs_t[best_tag_id]
best_path.append(best_tag_id)
# Pop off the start tag (we dont want to return that to the caller)
start = best_path.pop()
assert start == self.tag_to_ix[START_TAG] # Sanity check
        best_path.reverse()  # flip the backwards path into forward order
return path_score, best_path
def neg_log_likelihood(self, sentence, tags):
feats = self._get_lstm_features(sentence)
forward_score = self._forward_alg(feats)
gold_score = self._score_sentence(feats, tags)
return forward_score - gold_score
def forward(self, sentence): # dont confuse this with _forward_alg above.
# Get the emission scores from the BiLSTM
lstm_feats = self._get_lstm_features(sentence)
# Find the best path, given the features.
score, tag_seq = self._viterbi_decode(lstm_feats)
return score, tag_seq
START_TAG = "<START>"
STOP_TAG = "<STOP>"
EMBEDDING_DIM = 5
HIDDEN_DIM = 4
# Make up some training data
training_data = [("the wall street journal reported today that apple corporation made money".split(), "B I I I O O O B I O O".split()),
("georgia tech is a university in georgia".split(), "B I O O O O B".split())]
word_to_ix = {}
for sentence, tags in training_data:
for word in sentence:
if word not in word_to_ix:
word_to_ix[word] = len(word_to_ix)
tag_to_ix = {"B": 0, "I": 1, "O": 2, START_TAG: 3, STOP_TAG: 4}
model = BiLSTM_CRF(len(word_to_ix), tag_to_ix, EMBEDDING_DIM, HIDDEN_DIM)
optimizer = optim.SGD(model.parameters(), lr=0.01, weight_decay=1e-4)
# Check predictions before training
# precheck_sent = prepare_sequence(training_data[0][0], word_to_ix)
# precheck_tags = torch.LongTensor([tag_to_ix[t] for t in training_data[0][1]])
# print(model(precheck_sent))
# Make sure prepare_sequence from earlier in the LSTM section is loaded
for epoch in range(1):  # kept at 1 for this toy data; real training would run many epochs
for sentence, tags in training_data:
# Step 1. Remember that Pytorch accumulates gradients.
# We need to clear them out before each instance
model.zero_grad()
# Step 2. Get our inputs ready for the network, that is,
# turn them into Variables of word indices.
sentence_in = prepare_sequence(sentence, word_to_ix)
targets = torch.LongTensor([tag_to_ix[t] for t in tags])
# Step 3. Run our forward pass.
neg_log_likelihood = model.neg_log_likelihood(sentence_in, targets)
# Step 4. Compute the loss, gradients, and update the parameters by
# calling optimizer.step()
neg_log_likelihood.backward()
optimizer.step()
# Check predictions after training
precheck_sent = prepare_sequence(training_data[0][0], word_to_ix)
print(model(precheck_sent)[0])  # score
print('^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^')
print(model(precheck_sent)[1]) #tag sequence | [
"[email protected]"
] | |
244d50cea282092239e50d4c7fae5eae2ae5d443 | d2a564ee5ecc46ad55ba4a17504dd79b26f77d0f | /educa/courses/migrations/0002_content_file_image_text_video.py | 672f9544b897995f2adb898129ce06f0b7bb6096 | [] | no_license | Da1anna/Educa | ab5eead0337a2447b87271a6a06c2bcfc61f09a2 | 736fd9840c66221212275f2cfa7374cb521e79ff | refs/heads/master | 2022-12-30T12:31:36.014607 | 2020-10-15T03:52:49 | 2020-10-15T03:52:49 | 303,141,101 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,898 | py | # Generated by Django 2.0.5 on 2020-09-18 14:12
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('courses', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Content',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('object_id', models.PositiveIntegerField()),
('content_type', models.ForeignKey(limit_choices_to={'model__in': ('text', 'image', 'file', 'video')}, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
('module', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='contents', to='courses.Module')),
],
),
migrations.CreateModel(
name='File',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=250)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('content', models.FileField(upload_to='files')),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='file_related', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Image',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=250)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('content', models.ImageField(upload_to='images')),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='image_related', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Text',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=250)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('content', models.TextField()),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='text_related', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
migrations.CreateModel(
name='Video',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=250)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('content', models.URLField()),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='video_related', to=settings.AUTH_USER_MODEL)),
],
options={
'abstract': False,
},
),
]
| [
"[email protected]"
] | |
96c732b39274d27dba371d3ba780deafa53399a0 | 8dbb2a3e2286c97b1baa3ee54210189f8470eb4d | /kubernetes-stubs/client/api/autoscaling_v1_api.pyi | 483ed490e8ae7275b864b7af091f1df71f67ac70 | [] | no_license | foodpairing/kubernetes-stubs | e4b0f687254316e6f2954bacaa69ff898a88bde4 | f510dc3d350ec998787f543a280dd619449b5445 | refs/heads/master | 2023-08-21T21:00:54.485923 | 2021-08-25T03:53:07 | 2021-08-25T04:45:17 | 414,555,568 | 0 | 0 | null | 2021-10-07T10:26:08 | 2021-10-07T10:26:08 | null | UTF-8 | Python | false | false | 5,142 | pyi | import typing
import kubernetes.client
class AutoscalingV1Api:
def __init__(
self, api_client: typing.Optional[kubernetes.client.ApiClient] = ...
) -> None: ...
def get_api_resources(self) -> kubernetes.client.V1APIResourceList: ...
def list_horizontal_pod_autoscaler_for_all_namespaces(
self,
*,
allow_watch_bookmarks: typing.Optional[bool] = ...,
_continue: typing.Optional[str] = ...,
field_selector: typing.Optional[str] = ...,
label_selector: typing.Optional[str] = ...,
limit: typing.Optional[int] = ...,
pretty: typing.Optional[str] = ...,
resource_version: typing.Optional[str] = ...,
timeout_seconds: typing.Optional[int] = ...,
watch: typing.Optional[bool] = ...
) -> kubernetes.client.V1HorizontalPodAutoscalerList: ...
def list_namespaced_horizontal_pod_autoscaler(
self,
namespace: str,
*,
pretty: typing.Optional[str] = ...,
allow_watch_bookmarks: typing.Optional[bool] = ...,
_continue: typing.Optional[str] = ...,
field_selector: typing.Optional[str] = ...,
label_selector: typing.Optional[str] = ...,
limit: typing.Optional[int] = ...,
resource_version: typing.Optional[str] = ...,
timeout_seconds: typing.Optional[int] = ...,
watch: typing.Optional[bool] = ...
) -> kubernetes.client.V1HorizontalPodAutoscalerList: ...
def create_namespaced_horizontal_pod_autoscaler(
self,
namespace: str,
body: kubernetes.client.V1HorizontalPodAutoscaler,
*,
pretty: typing.Optional[str] = ...,
dry_run: typing.Optional[str] = ...,
field_manager: typing.Optional[str] = ...
) -> kubernetes.client.V1HorizontalPodAutoscaler: ...
def delete_collection_namespaced_horizontal_pod_autoscaler(
self,
namespace: str,
*,
pretty: typing.Optional[str] = ...,
body: typing.Optional[kubernetes.client.V1DeleteOptions] = ...,
_continue: typing.Optional[str] = ...,
dry_run: typing.Optional[str] = ...,
field_selector: typing.Optional[str] = ...,
grace_period_seconds: typing.Optional[int] = ...,
label_selector: typing.Optional[str] = ...,
limit: typing.Optional[int] = ...,
orphan_dependents: typing.Optional[bool] = ...,
propagation_policy: typing.Optional[str] = ...,
resource_version: typing.Optional[str] = ...,
timeout_seconds: typing.Optional[int] = ...
) -> kubernetes.client.V1Status: ...
def read_namespaced_horizontal_pod_autoscaler(
self,
name: str,
namespace: str,
*,
pretty: typing.Optional[str] = ...,
exact: typing.Optional[bool] = ...,
export: typing.Optional[bool] = ...
) -> kubernetes.client.V1HorizontalPodAutoscaler: ...
def replace_namespaced_horizontal_pod_autoscaler(
self,
name: str,
namespace: str,
body: kubernetes.client.V1HorizontalPodAutoscaler,
*,
pretty: typing.Optional[str] = ...,
dry_run: typing.Optional[str] = ...,
field_manager: typing.Optional[str] = ...
) -> kubernetes.client.V1HorizontalPodAutoscaler: ...
def delete_namespaced_horizontal_pod_autoscaler(
self,
name: str,
namespace: str,
*,
pretty: typing.Optional[str] = ...,
body: typing.Optional[kubernetes.client.V1DeleteOptions] = ...,
dry_run: typing.Optional[str] = ...,
grace_period_seconds: typing.Optional[int] = ...,
orphan_dependents: typing.Optional[bool] = ...,
propagation_policy: typing.Optional[str] = ...
) -> kubernetes.client.V1Status: ...
def patch_namespaced_horizontal_pod_autoscaler(
self,
name: str,
namespace: str,
body: typing.Any,
*,
pretty: typing.Optional[str] = ...,
dry_run: typing.Optional[str] = ...,
field_manager: typing.Optional[str] = ...,
force: typing.Optional[bool] = ...
) -> kubernetes.client.V1HorizontalPodAutoscaler: ...
def read_namespaced_horizontal_pod_autoscaler_status(
self, name: str, namespace: str, *, pretty: typing.Optional[str] = ...
) -> kubernetes.client.V1HorizontalPodAutoscaler: ...
def replace_namespaced_horizontal_pod_autoscaler_status(
self,
name: str,
namespace: str,
body: kubernetes.client.V1HorizontalPodAutoscaler,
*,
pretty: typing.Optional[str] = ...,
dry_run: typing.Optional[str] = ...,
field_manager: typing.Optional[str] = ...
) -> kubernetes.client.V1HorizontalPodAutoscaler: ...
def patch_namespaced_horizontal_pod_autoscaler_status(
self,
name: str,
namespace: str,
body: typing.Any,
*,
pretty: typing.Optional[str] = ...,
dry_run: typing.Optional[str] = ...,
field_manager: typing.Optional[str] = ...,
force: typing.Optional[bool] = ...
) -> kubernetes.client.V1HorizontalPodAutoscaler: ...
| [
"[email protected]"
] | |
0cbd1c8aeac8d787abd3ecf791a38ec0389941b3 | 4569d707a4942d3451f3bbcfebaa8011cc5a128d | /privateticketsplugin/branches/0.10/privatetickets/report.py | e36569c4591e109ecf3d68f4f5d34b955ec69b4a | [] | no_license | woochica/trachacks | 28749b924c897747faa411876a3739edaed4cff4 | 4fcd4aeba81d734654f5d9ec524218b91d54a0e1 | refs/heads/master | 2021-05-30T02:27:50.209657 | 2013-05-24T17:31:23 | 2013-05-24T17:31:23 | 13,418,837 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,095 | py | from trac.core import *
from trac.web.api import IRequestFilter
from trac.ticket.report import ReportModule
from api import PrivateTicketsSystem
__all__ = ['PrivateTicketsReportFilter']
class PrivateTicketsReportFilter(Component):
"""Show only ticket the user is involved in in the reports."""
implements(IRequestFilter)
# IRequestFilter methods
def pre_process_request(self, req, handler):
if isinstance(handler, ReportModule) and \
not req.perm.has_permission('TICKET_VIEW') and \
req.args.get('format') in ('tab', 'csv'):
raise TracError('Access denied')
return handler
def post_process_request(self, req, template, content_type):
if req.args.get('DO_PRIVATETICKETS_FILTER') == 'report':
# Walk the HDF
fn = PrivateTicketsSystem(self.env).check_ticket_access
deleted = []
left = []
node = req.hdf.getObj('report.items')
if node is None:
return template, content_type
node = node.child()
while node:
i = node.name()
id = req.hdf['report.items.%s.ticket'%i]
if not fn(req, id):
deleted.append(i)
else:
left.append(i)
node = node.next()
# Delete the needed subtrees
for n in deleted:
req.hdf.removeTree('report.items.%s'%n)
# Recalculate this
req.hdf['report.numrows'] = len(left)
# Move the remaining items into their normal places
for src, dest in zip(left, xrange(len(left)+len(deleted))):
if src == dest: continue
req.hdf.getObj('report.items').copy(str(dest), req.hdf.getObj('report.items.%s'%src))
for n in xrange(len(left), len(left)+len(deleted)):
req.hdf.removeTree('report.items.%s'%n)
return template, content_type
| [
"coderanger@7322e99d-02ea-0310-aa39-e9a107903beb"
] | coderanger@7322e99d-02ea-0310-aa39-e9a107903beb |
f17d60f3ba2d4ccd6446efee607a59d13b9f6596 | b09db2bba8019b1d11720f1092304e5ce9948d91 | /lib/sqlalchemy/util/__init__.py | 273570357b09f600df0913bd840eed8f0a4f6efe | [
"MIT"
] | permissive | theosotr/sqlalchemy | 6da34f5e28859f4ae7479db4ca9963c8392d7ac8 | e1d4e59116bbf1a12bb6b3f57d33ddfc757d4567 | refs/heads/master | 2022-10-17T08:42:31.757925 | 2020-06-11T03:14:46 | 2020-06-11T03:14:46 | 271,558,840 | 0 | 0 | MIT | 2020-06-11T13:51:28 | 2020-06-11T13:51:28 | null | UTF-8 | Python | false | false | 6,629 | py | # util/__init__.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from collections import defaultdict # noqa
from contextlib import contextmanager # noqa
from functools import partial # noqa
from functools import update_wrapper # noqa
from ._collections import coerce_generator_arg # noqa
from ._collections import collections_abc # noqa
from ._collections import column_dict # noqa
from ._collections import column_set # noqa
from ._collections import EMPTY_SET # noqa
from ._collections import FacadeDict # noqa
from ._collections import flatten_iterator # noqa
from ._collections import has_dupes # noqa
from ._collections import has_intersection # noqa
from ._collections import IdentitySet # noqa
from ._collections import ImmutableContainer # noqa
from ._collections import immutabledict # noqa
from ._collections import ImmutableProperties # noqa
from ._collections import LRUCache # noqa
from ._collections import ordered_column_set # noqa
from ._collections import OrderedDict # noqa
from ._collections import OrderedIdentitySet # noqa
from ._collections import OrderedProperties # noqa
from ._collections import OrderedSet # noqa
from ._collections import PopulateDict # noqa
from ._collections import Properties # noqa
from ._collections import ScopedRegistry # noqa
from ._collections import ThreadLocalRegistry # noqa
from ._collections import to_column_set # noqa
from ._collections import to_list # noqa
from ._collections import to_set # noqa
from ._collections import unique_list # noqa
from ._collections import UniqueAppender # noqa
from ._collections import update_copy # noqa
from ._collections import WeakPopulateDict # noqa
from ._collections import WeakSequence # noqa
from .compat import b # noqa
from .compat import b64decode # noqa
from .compat import b64encode # noqa
from .compat import binary_type # noqa
from .compat import byte_buffer # noqa
from .compat import callable # noqa
from .compat import cmp # noqa
from .compat import cpython # noqa
from .compat import decode_backslashreplace # noqa
from .compat import dottedgetter # noqa
from .compat import has_refcount_gc # noqa
from .compat import inspect_getfullargspec # noqa
from .compat import int_types # noqa
from .compat import iterbytes # noqa
from .compat import itertools_filter # noqa
from .compat import itertools_filterfalse # noqa
from .compat import namedtuple # noqa
from .compat import next # noqa
from .compat import parse_qsl # noqa
from .compat import pickle # noqa
from .compat import print_ # noqa
from .compat import py2k # noqa
from .compat import py36 # noqa
from .compat import py37 # noqa
from .compat import py3k # noqa
from .compat import quote_plus # noqa
from .compat import raise_ # noqa
from .compat import raise_from_cause # noqa
from .compat import reduce # noqa
from .compat import reraise # noqa
from .compat import string_types # noqa
from .compat import StringIO # noqa
from .compat import text_type # noqa
from .compat import threading # noqa
from .compat import timezone # noqa
from .compat import TYPE_CHECKING # noqa
from .compat import u # noqa
from .compat import ue # noqa
from .compat import unquote # noqa
from .compat import unquote_plus # noqa
from .compat import win32 # noqa
from .compat import with_metaclass # noqa
from .compat import zip_longest # noqa
from .deprecations import deprecated # noqa
from .deprecations import deprecated_20 # noqa
from .deprecations import deprecated_20_cls # noqa
from .deprecations import deprecated_cls # noqa
from .deprecations import deprecated_params # noqa
from .deprecations import inject_docstring_text # noqa
from .deprecations import warn_deprecated # noqa
from .deprecations import warn_deprecated_20 # noqa
from .langhelpers import add_parameter_text # noqa
from .langhelpers import as_interface # noqa
from .langhelpers import asbool # noqa
from .langhelpers import asint # noqa
from .langhelpers import assert_arg_type # noqa
from .langhelpers import attrsetter # noqa
from .langhelpers import bool_or_str # noqa
from .langhelpers import chop_traceback # noqa
from .langhelpers import class_hierarchy # noqa
from .langhelpers import classproperty # noqa
from .langhelpers import clsname_as_plain_name # noqa
from .langhelpers import coerce_kw_type # noqa
from .langhelpers import constructor_copy # noqa
from .langhelpers import constructor_key # noqa
from .langhelpers import counter # noqa
from .langhelpers import decode_slice # noqa
from .langhelpers import decorator # noqa
from .langhelpers import dictlike_iteritems # noqa
from .langhelpers import duck_type_collection # noqa
from .langhelpers import ellipses_string # noqa
from .langhelpers import EnsureKWArgType # noqa
from .langhelpers import format_argspec_init # noqa
from .langhelpers import format_argspec_plus # noqa
from .langhelpers import generic_repr # noqa
from .langhelpers import get_callable_argspec # noqa
from .langhelpers import get_cls_kwargs # noqa
from .langhelpers import get_func_kwargs # noqa
from .langhelpers import getargspec_init # noqa
from .langhelpers import HasMemoized # noqa
from .langhelpers import hybridmethod # noqa
from .langhelpers import hybridproperty # noqa
from .langhelpers import iterate_attributes # noqa
from .langhelpers import map_bits # noqa
from .langhelpers import md5_hex # noqa
from .langhelpers import memoized_instancemethod # noqa
from .langhelpers import memoized_property # noqa
from .langhelpers import MemoizedSlots # noqa
from .langhelpers import methods_equivalent # noqa
from .langhelpers import monkeypatch_proxied_specials # noqa
from .langhelpers import NoneType # noqa
from .langhelpers import only_once # noqa
from .langhelpers import PluginLoader # noqa
from .langhelpers import portable_instancemethod # noqa
from .langhelpers import preload_module # noqa
from .langhelpers import preloaded # noqa
from .langhelpers import quoted_token_parser # noqa
from .langhelpers import safe_reraise # noqa
from .langhelpers import set_creation_order # noqa
from .langhelpers import string_or_unprintable # noqa
from .langhelpers import symbol # noqa
from .langhelpers import unbound_method_to_callable # noqa
from .langhelpers import warn # noqa
from .langhelpers import warn_exception # noqa
from .langhelpers import warn_limited # noqa
from .langhelpers import wrap_callable # noqa
SQLALCHEMY_WARN_20 = False
| [
"[email protected]"
] | |
72d39a6c4a2057758f588c718c98fe591544ee9e | 0cd64f3f67c6a3b130a788906da84ffc3d15396a | /Library/lib/python3.9/site-packages/sympy/stats/frv_types.py | 2baaa93e936929c304e4f1d3e880cd670228af8c | [
"MIT",
"BSD-3-Clause",
"0BSD",
"LicenseRef-scancode-free-unknown",
"GPL-1.0-or-later",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-python-cwi",
"Python-2.0"
] | permissive | Ryorama/codeapp | 32ef44a3e8058da9858924df211bf82f5f5018f1 | cf7f5753c6c4c3431d8209cbaacf5208c3c664fa | refs/heads/main | 2023-06-26T09:24:13.724462 | 2021-07-27T17:54:25 | 2021-07-27T17:54:25 | 388,520,626 | 0 | 0 | MIT | 2021-07-22T16:01:32 | 2021-07-22T16:01:32 | null | UTF-8 | Python | false | false | 22,777 | py | """
Finite Discrete Random Variables - Prebuilt variable types
Contains
========
FiniteRV
DiscreteUniform
Die
Bernoulli
Coin
Binomial
BetaBinomial
Hypergeometric
Rademacher
IdealSoliton
RobustSoliton
"""
from sympy import (S, sympify, Rational, binomial, cacheit, Integer,
Dummy, Eq, Intersection, Interval, log, Range,
Symbol, Lambda, Piecewise, Or, Gt, Lt, Ge, Le, Contains)
from sympy import beta as beta_fn
from sympy.stats.frv import (SingleFiniteDistribution,
SingleFinitePSpace)
from sympy.stats.rv import _value_check, Density, is_random
__all__ = ['FiniteRV',
'DiscreteUniform',
'Die',
'Bernoulli',
'Coin',
'Binomial',
'BetaBinomial',
'Hypergeometric',
'Rademacher',
'IdealSoliton',
'RobustSoliton',
]
def rv(name, cls, *args, **kwargs):
args = list(map(sympify, args))
dist = cls(*args)
if kwargs.pop('check', True):
dist.check(*args)
pspace = SingleFinitePSpace(name, dist)
if any(is_random(arg) for arg in args):
from sympy.stats.compound_rv import CompoundPSpace, CompoundDistribution
pspace = CompoundPSpace(name, CompoundDistribution(dist))
return pspace.value
class FiniteDistributionHandmade(SingleFiniteDistribution):
@property
def dict(self):
return self.args[0]
def pmf(self, x):
x = Symbol('x')
return Lambda(x, Piecewise(*(
[(v, Eq(k, x)) for k, v in self.dict.items()] + [(S.Zero, True)])))
@property
def set(self):
return set(self.dict.keys())
@staticmethod
def check(density):
for p in density.values():
_value_check((p >= 0, p <= 1),
"Probability at a point must be between 0 and 1.")
val = sum(density.values())
_value_check(Eq(val, 1) != S.false, "Total Probability must be 1.")
def FiniteRV(name, density, **kwargs):
r"""
Create a Finite Random Variable given a dict representing the density.
Parameters
==========
name : Symbol
Represents name of the random variable.
density: A dict
        Dictionary containing the pdf of the finite distribution
check : bool
If True, it will check whether the given density
integrates to 1 over the given set. If False, it
will not perform this check. Default is False.
Examples
========
>>> from sympy.stats import FiniteRV, P, E
>>> density = {0: .1, 1: .2, 2: .3, 3: .4}
>>> X = FiniteRV('X', density)
>>> E(X)
2.00000000000000
>>> P(X >= 2)
0.700000000000000
Returns
=======
RandomSymbol
"""
# have a default of False while `rv` should have a default of True
kwargs['check'] = kwargs.pop('check', False)
return rv(name, FiniteDistributionHandmade, density, **kwargs)
class DiscreteUniformDistribution(SingleFiniteDistribution):
@staticmethod
def check(*args):
# not using _value_check since there is a
# suggestion for the user
if len(set(args)) != len(args):
from sympy.utilities.iterables import multiset
from sympy.utilities.misc import filldedent
weights = multiset(args)
n = Integer(len(args))
for k in weights:
weights[k] /= n
raise ValueError(filldedent("""
Repeated args detected but set expected. For a
distribution having different weights for each
item use the following:""") + (
'\nS("FiniteRV(%s, %s)")' % ("'X'", weights)))
@property
def p(self):
return Rational(1, len(self.args))
@property # type: ignore
@cacheit
def dict(self):
return {k: self.p for k in self.set}
@property
def set(self):
return set(self.args)
def pmf(self, x):
if x in self.args:
return self.p
else:
return S.Zero
def DiscreteUniform(name, items):
r"""
Create a Finite Random Variable representing a uniform distribution over
the input set.
Parameters
==========
items: list/tuple
Items over which Uniform distribution is to be made
Examples
========
>>> from sympy.stats import DiscreteUniform, density
>>> from sympy import symbols
>>> X = DiscreteUniform('X', symbols('a b c')) # equally likely over a, b, c
>>> density(X).dict
{a: 1/3, b: 1/3, c: 1/3}
>>> Y = DiscreteUniform('Y', list(range(5))) # distribution over a range
>>> density(Y).dict
{0: 1/5, 1: 1/5, 2: 1/5, 3: 1/5, 4: 1/5}
Returns
=======
RandomSymbol
References
==========
.. [1] https://en.wikipedia.org/wiki/Discrete_uniform_distribution
.. [2] http://mathworld.wolfram.com/DiscreteUniformDistribution.html
"""
return rv(name, DiscreteUniformDistribution, *items)
class DieDistribution(SingleFiniteDistribution):
_argnames = ('sides',)
@staticmethod
def check(sides):
_value_check((sides.is_positive, sides.is_integer),
"number of sides must be a positive integer.")
@property
def is_symbolic(self):
return not self.sides.is_number
@property
def high(self):
return self.sides
@property
def low(self):
return S.One
@property
def set(self):
if self.is_symbolic:
return Intersection(S.Naturals0, Interval(0, self.sides))
return set(map(Integer, list(range(1, self.sides + 1))))
def pmf(self, x):
x = sympify(x)
if not (x.is_number or x.is_Symbol or is_random(x)):
raise ValueError("'x' expected as an argument of type 'number' or 'Symbol' or , "
"'RandomSymbol' not %s" % (type(x)))
cond = Ge(x, 1) & Le(x, self.sides) & Contains(x, S.Integers)
return Piecewise((S.One/self.sides, cond), (S.Zero, True))
def Die(name, sides=6):
r"""
Create a Finite Random Variable representing a fair die.
Parameters
==========
sides: Integer
Represents the number of sides of the Die, by default is 6
Examples
========
>>> from sympy.stats import Die, density
>>> from sympy import Symbol
>>> D6 = Die('D6', 6) # Six sided Die
>>> density(D6).dict
{1: 1/6, 2: 1/6, 3: 1/6, 4: 1/6, 5: 1/6, 6: 1/6}
>>> D4 = Die('D4', 4) # Four sided Die
>>> density(D4).dict
{1: 1/4, 2: 1/4, 3: 1/4, 4: 1/4}
>>> n = Symbol('n', positive=True, integer=True)
>>> Dn = Die('Dn', n) # n sided Die
>>> density(Dn).dict
Density(DieDistribution(n))
>>> density(Dn).dict.subs(n, 4).doit()
{1: 1/4, 2: 1/4, 3: 1/4, 4: 1/4}
Returns
=======
RandomSymbol
"""
return rv(name, DieDistribution, sides)
class BernoulliDistribution(SingleFiniteDistribution):
_argnames = ('p', 'succ', 'fail')
@staticmethod
def check(p, succ, fail):
_value_check((p >= 0, p <= 1),
"p should be in range [0, 1].")
@property
def set(self):
return {self.succ, self.fail}
def pmf(self, x):
if isinstance(self.succ, Symbol) and isinstance(self.fail, Symbol):
return Piecewise((self.p, x == self.succ),
(1 - self.p, x == self.fail),
(S.Zero, True))
return Piecewise((self.p, Eq(x, self.succ)),
(1 - self.p, Eq(x, self.fail)),
(S.Zero, True))
def Bernoulli(name, p, succ=1, fail=0):
r"""
Create a Finite Random Variable representing a Bernoulli process.
Parameters
==========
p : Rational number between 0 and 1
Represents probability of success
succ : Integer/symbol/string
Represents event of success
fail : Integer/symbol/string
Represents event of failure
Examples
========
>>> from sympy.stats import Bernoulli, density
>>> from sympy import S
>>> X = Bernoulli('X', S(3)/4) # 1-0 Bernoulli variable, probability = 3/4
>>> density(X).dict
{0: 1/4, 1: 3/4}
>>> X = Bernoulli('X', S.Half, 'Heads', 'Tails') # A fair coin toss
>>> density(X).dict
{Heads: 1/2, Tails: 1/2}
Returns
=======
RandomSymbol
References
==========
.. [1] https://en.wikipedia.org/wiki/Bernoulli_distribution
.. [2] http://mathworld.wolfram.com/BernoulliDistribution.html
"""
return rv(name, BernoulliDistribution, p, succ, fail)
def Coin(name, p=S.Half):
r"""
Create a Finite Random Variable representing a Coin toss.
Parameters
==========
    p : Rational number between 0 and 1
Represents probability of getting "Heads", by default is Half
Examples
========
>>> from sympy.stats import Coin, density
>>> from sympy import Rational
>>> C = Coin('C') # A fair coin toss
>>> density(C).dict
{H: 1/2, T: 1/2}
>>> C2 = Coin('C2', Rational(3, 5)) # An unfair coin
>>> density(C2).dict
{H: 3/5, T: 2/5}
Returns
=======
RandomSymbol
See Also
========
sympy.stats.Binomial
References
==========
.. [1] https://en.wikipedia.org/wiki/Coin_flipping
"""
return rv(name, BernoulliDistribution, p, 'H', 'T')
class BinomialDistribution(SingleFiniteDistribution):
_argnames = ('n', 'p', 'succ', 'fail')
@staticmethod
def check(n, p, succ, fail):
_value_check((n.is_integer, n.is_nonnegative),
"'n' must be nonnegative integer.")
_value_check((p <= 1, p >= 0),
"p should be in range [0, 1].")
@property
def high(self):
return self.n
@property
def low(self):
return S.Zero
@property
def is_symbolic(self):
return not self.n.is_number
@property
def set(self):
if self.is_symbolic:
return Intersection(S.Naturals0, Interval(0, self.n))
return set(self.dict.keys())
def pmf(self, x):
n, p = self.n, self.p
x = sympify(x)
if not (x.is_number or x.is_Symbol or is_random(x)):
raise ValueError("'x' expected as an argument of type 'number' or 'Symbol' or , "
"'RandomSymbol' not %s" % (type(x)))
cond = Ge(x, 0) & Le(x, n) & Contains(x, S.Integers)
return Piecewise((binomial(n, x) * p**x * (1 - p)**(n - x), cond), (S.Zero, True))
@property # type: ignore
@cacheit
def dict(self):
if self.is_symbolic:
return Density(self)
return {k*self.succ + (self.n-k)*self.fail: self.pmf(k)
for k in range(0, self.n + 1)}
def Binomial(name, n, p, succ=1, fail=0):
r"""
Create a Finite Random Variable representing a binomial distribution.
Parameters
==========
n : Positive Integer
Represents number of trials
p : Rational Number between 0 and 1
Represents probability of success
succ : Integer/symbol/string
Represents event of success, by default is 1
fail : Integer/symbol/string
Represents event of failure, by default is 0
Examples
========
>>> from sympy.stats import Binomial, density
>>> from sympy import S, Symbol
>>> X = Binomial('X', 4, S.Half) # Four "coin flips"
>>> density(X).dict
{0: 1/16, 1: 1/4, 2: 3/8, 3: 1/4, 4: 1/16}
>>> n = Symbol('n', positive=True, integer=True)
>>> p = Symbol('p', positive=True)
>>> X = Binomial('X', n, S.Half) # n "coin flips"
>>> density(X).dict
Density(BinomialDistribution(n, 1/2, 1, 0))
>>> density(X).dict.subs(n, 4).doit()
{0: 1/16, 1: 1/4, 2: 3/8, 3: 1/4, 4: 1/16}
Returns
=======
RandomSymbol
References
==========
.. [1] https://en.wikipedia.org/wiki/Binomial_distribution
.. [2] http://mathworld.wolfram.com/BinomialDistribution.html
"""
return rv(name, BinomialDistribution, n, p, succ, fail)
#-------------------------------------------------------------------------------
# Beta-binomial distribution ----------------------------------------------------------
class BetaBinomialDistribution(SingleFiniteDistribution):
_argnames = ('n', 'alpha', 'beta')
@staticmethod
def check(n, alpha, beta):
_value_check((n.is_integer, n.is_nonnegative),
"'n' must be nonnegative integer. n = %s." % str(n))
_value_check((alpha > 0),
"'alpha' must be: alpha > 0 . alpha = %s" % str(alpha))
_value_check((beta > 0),
"'beta' must be: beta > 0 . beta = %s" % str(beta))
@property
def high(self):
return self.n
@property
def low(self):
return S.Zero
@property
def is_symbolic(self):
return not self.n.is_number
@property
def set(self):
if self.is_symbolic:
return Intersection(S.Naturals0, Interval(0, self.n))
return set(map(Integer, list(range(0, self.n + 1))))
def pmf(self, k):
n, a, b = self.n, self.alpha, self.beta
return binomial(n, k) * beta_fn(k + a, n - k + b) / beta_fn(a, b)
def BetaBinomial(name, n, alpha, beta):
r"""
Create a Finite Random Variable representing a Beta-binomial distribution.
Parameters
==========
n : Positive Integer
Represents number of trials
alpha : Real positive number
beta : Real positive number
Examples
========
>>> from sympy.stats import BetaBinomial, density
>>> X = BetaBinomial('X', 2, 1, 1)
>>> density(X).dict
{0: 1/3, 1: 2*beta(2, 2), 2: 1/3}
Returns
=======
RandomSymbol
References
==========
.. [1] https://en.wikipedia.org/wiki/Beta-binomial_distribution
.. [2] http://mathworld.wolfram.com/BetaBinomialDistribution.html
"""
return rv(name, BetaBinomialDistribution, n, alpha, beta)
class HypergeometricDistribution(SingleFiniteDistribution):
_argnames = ('N', 'm', 'n')
@staticmethod
def check(n, N, m):
_value_check((N.is_integer, N.is_nonnegative),
"'N' must be nonnegative integer. N = %s." % str(n))
_value_check((n.is_integer, n.is_nonnegative),
"'n' must be nonnegative integer. n = %s." % str(n))
_value_check((m.is_integer, m.is_nonnegative),
"'m' must be nonnegative integer. m = %s." % str(n))
@property
def is_symbolic(self):
return any(not x.is_number for x in (self.N, self.m, self.n))
@property
def high(self):
return Piecewise((self.n, Lt(self.n, self.m) != False), (self.m, True))
@property
def low(self):
return Piecewise((0, Gt(0, self.n + self.m - self.N) != False), (self.n + self.m - self.N, True))
@property
def set(self):
N, m, n = self.N, self.m, self.n
if self.is_symbolic:
return Intersection(S.Naturals0, Interval(self.low, self.high))
return {i for i in range(max(0, n + m - N), min(n, m) + 1)}
def pmf(self, k):
N, m, n = self.N, self.m, self.n
return S(binomial(m, k) * binomial(N - m, n - k))/binomial(N, n)
def Hypergeometric(name, N, m, n):
r"""
Create a Finite Random Variable representing a hypergeometric distribution.
Parameters
==========
N : Positive Integer
Represents finite population of size N.
m : Positive Integer
Represents number of trials with required feature.
n : Positive Integer
Represents numbers of draws.
Examples
========
>>> from sympy.stats import Hypergeometric, density
>>> X = Hypergeometric('X', 10, 5, 3) # 10 marbles, 5 white (success), 3 draws
>>> density(X).dict
{0: 1/12, 1: 5/12, 2: 5/12, 3: 1/12}
Returns
=======
RandomSymbol
References
==========
.. [1] https://en.wikipedia.org/wiki/Hypergeometric_distribution
.. [2] http://mathworld.wolfram.com/HypergeometricDistribution.html
"""
return rv(name, HypergeometricDistribution, N, m, n)
class RademacherDistribution(SingleFiniteDistribution):
@property
def set(self):
return {-1, 1}
@property
def pmf(self):
k = Dummy('k')
return Lambda(k, Piecewise((S.Half, Or(Eq(k, -1), Eq(k, 1))), (S.Zero, True)))
def Rademacher(name):
r"""
Create a Finite Random Variable representing a Rademacher distribution.
Examples
========
>>> from sympy.stats import Rademacher, density
>>> X = Rademacher('X')
>>> density(X).dict
{-1: 1/2, 1: 1/2}
Returns
=======
RandomSymbol
See Also
========
sympy.stats.Bernoulli
References
==========
.. [1] https://en.wikipedia.org/wiki/Rademacher_distribution
"""
return rv(name, RademacherDistribution)
class IdealSolitonDistribution(SingleFiniteDistribution):
_argnames = ('k',)
@staticmethod
def check(k):
_value_check(k.is_integer and k.is_positive,
"'k' must be a positive integer.")
@property
def low(self):
return S.One
@property
def high(self):
return self.k
@property
def set(self):
return set(list(Range(1, self.k+1)))
@property
@cacheit
def dict(self):
if self.k.is_Symbol:
return Density(self)
d = {1: Rational(1, self.k)}
d.update(dict((i, Rational(1, i*(i - 1))) for i in range(2, self.k + 1)))
return d
def pmf(self, x):
x = sympify(x)
if not (x.is_number or x.is_Symbol or is_random(x)):
raise ValueError("'x' expected as an argument of type 'number' or 'Symbol' or , "
"'RandomSymbol' not %s" % (type(x)))
cond1 = Eq(x, 1) & x.is_integer
cond2 = Ge(x, 1) & Le(x, self.k) & x.is_integer
return Piecewise((1/self.k, cond1), (1/(x*(x - 1)), cond2), (S.Zero, True))
def IdealSoliton(name, k):
r"""
Create a Finite Random Variable of Ideal Soliton Distribution
Parameters
==========
k : Positive Integer
Represents the number of input symbols in an LT (Luby Transform) code.
Examples
========
>>> from sympy.stats import IdealSoliton, density, P, E
>>> sol = IdealSoliton('sol', 5)
>>> density(sol).dict
{1: 1/5, 2: 1/2, 3: 1/6, 4: 1/12, 5: 1/20}
>>> density(sol).set
{1, 2, 3, 4, 5}
>>> from sympy import Symbol
>>> k = Symbol('k', positive=True, integer=True)
>>> sol = IdealSoliton('sol', k)
>>> density(sol).dict
Density(IdealSolitonDistribution(k))
>>> density(sol).dict.subs(k, 10).doit()
{1: 1/10, 2: 1/2, 3: 1/6, 4: 1/12, 5: 1/20, 6: 1/30, 7: 1/42, 8: 1/56, 9: 1/72, 10: 1/90}
>>> E(sol.subs(k, 10))
7381/2520
>>> P(sol.subs(k, 4) > 2)
1/4
Returns
=======
RandomSymbol
References
==========
.. [1] https://en.wikipedia.org/wiki/Soliton_distribution#Ideal_distribution
.. [2] http://pages.cs.wisc.edu/~suman/courses/740/papers/luby02lt.pdf
"""
return rv(name, IdealSolitonDistribution, k)
class RobustSolitonDistribution(SingleFiniteDistribution):
_argnames = ('k', 'delta', 'c')
@staticmethod
def check(k, delta, c):
_value_check(k.is_integer and k.is_positive,
"'k' must be a positive integer")
_value_check(Gt(delta, 0) and Le(delta, 1),
"'delta' must be a real number in the interval (0,1)")
_value_check(c.is_positive,
"'c' must be a positive real number.")
@property
def R(self):
return self.c * log(self.k/self.delta) * self.k**0.5
@property
def Z(self):
z = 0
for i in Range(1, round(self.k/self.R)):
z += (1/i)
z += log(self.R/self.delta)
return 1 + z * self.R/self.k
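# Note on R and Z above: they follow Luby's robust soliton construction (see
# reference [3] in the RobustSoliton docstring below). tau adds extra
# probability mass near k/R, and Z = sum_i (rho(i) + tau(i)) renormalizes so
# that pmf = (rho + tau)/Z sums to one.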
@property
def low(self):
return S.One
@property
def high(self):
return self.k
@property
def set(self):
return set(Range(1, self.k + 1))
@property
def is_symbolic(self):
return not all([self.k.is_number, self.c.is_number, self.delta.is_number])
def pmf(self, x):
x = sympify(x)
if not (x.is_number or x.is_Symbol or is_random(x)):
raise ValueError("'x' expected as an argument of type 'number' or 'Symbol' or , "
"'RandomSymbol' not %s" % (type(x)))
cond1 = Eq(x, 1) & x.is_integer
cond2 = Ge(x, 1) & Le(x, self.k) & x.is_integer
rho = Piecewise((Rational(1, self.k), cond1), (Rational(1, x*(x-1)), cond2), (S.Zero, True))
cond1 = Ge(x, 1) & Le(x, round(self.k/self.R)-1)
cond2 = Eq(x, round(self.k/self.R))
tau = Piecewise((self.R/(self.k * x), cond1), (self.R * log(self.R/self.delta)/self.k, cond2), (S.Zero, True))
return (rho + tau)/self.Z
def RobustSoliton(name, k, delta, c):
r'''
Create a Finite Random Variable of Robust Soliton Distribution
Parameters
==========
k : Positive Integer
Represents the number of input symbols in an LT (Luby Transform) code.
delta : Positive Rational Number
Represents the failure probability. Must be in the interval (0,1).
c : Positive Rational Number
Constant of proportionality. Values close to 1 are recommended
Examples
========
>>> from sympy.stats import RobustSoliton, density, P, E
>>> robSol = RobustSoliton('robSol', 5, 0.5, 0.01)
>>> density(robSol).dict
{1: 0.204253668152708, 2: 0.490631107897393, 3: 0.165210624506162, 4: 0.0834387731899302, 5: 0.0505633404760675}
>>> density(robSol).set
{1, 2, 3, 4, 5}
>>> from sympy import Symbol
>>> k = Symbol('k', positive=True, integer=True)
>>> c = Symbol('c', positive=True)
>>> robSol = RobustSoliton('robSol', k, 0.5, c)
>>> density(robSol).dict
Density(RobustSolitonDistribution(k, 0.5, c))
>>> density(robSol).dict.subs(k, 10).subs(c, 0.03).doit()
{1: 0.116641095387194, 2: 0.467045731687165, 3: 0.159984123349381, 4: 0.0821431680681869, 5: 0.0505765646770100,
6: 0.0345781523420719, 7: 0.0253132820710503, 8: 0.0194459129233227, 9: 0.0154831166726115, 10: 0.0126733075238887}
>>> E(robSol.subs(k, 10).subs(c, 0.05))
2.91358846104106
>>> P(robSol.subs(k, 4).subs(c, 0.1) > 2)
0.243650614389834
Returns
=======
RandomSymbol
References
==========
.. [1] https://en.wikipedia.org/wiki/Soliton_distribution#Robust_distribution
.. [2] http://www.inference.org.uk/mackay/itprnn/ps/588.596.pdf
.. [3] http://pages.cs.wisc.edu/~suman/courses/740/papers/luby02lt.pdf
'''
return rv(name, RobustSolitonDistribution, k, delta, c)
| [
"[email protected]"
] | |
a4939c1fd486001c5569097c8d0b69969c4afcca | 06c54acbc3d93601182170eef1c8f69396644003 | /glTools-master/tools/mirrorDeformerWeights.py | fd416031993b524c6ae37273571ed212844d52a9 | [] | no_license | moChen0607/pubTool | bfb05b7ba763c325b871a60d1a690bd67d6ad888 | 16337badb6d1b4266f31008ceb17cfd70fec3623 | refs/heads/master | 2021-05-31T17:59:06.840382 | 2016-06-06T07:11:42 | 2016-06-06T07:11:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,666 | py | import maya.cmds as mc
import maya.OpenMaya as OpenMaya
import maya.OpenMayaAnim as OpenMayaAnim
import glTools.utils.mesh
import glTools.utils.deformer
import glTools.tools.symmetryTable
class UserInputError(Exception): pass
def mirrorWeights(mesh,deformer,axis='x',posToNeg=True,refMesh=''):
'''
Mirror deformer weights
@param mesh: Mesh to mirror weights on
@type mesh: str
@param deformer: Deformer to mirror weights for
@type deformer: str
@param axis: Axis to mirror weights across
@type axis: str
@param posToNeg: Apply weight mirror from positive to negative vertices
@type posToNeg: bool
@param refMesh: Mesh used for symmetry reference
@type refMesh: str
'''
# Check deformers
if not mc.objExists(deformer):
raise UserInputError('Deformer "'+deformer+'" does not exist!!')
# Check refMesh
if not refMesh: refMesh = mesh
# Get symmetry table
axisIndex = {'x':0,'y':1,'z':2}[axis]
sTable = glTools.tools.symmetryTable.SymmetryTable()
symTable = sTable.buildSymTable(refMesh,axisIndex)
# Get current weights
wt = glTools.utils.deformer.getWeights(deformer)
mem = glTools.utils.deformer.getDeformerSetMemberIndices(deformer,mesh)
# Mirror weights
for i in [sTable.negativeIndexList,sTable.positiveIndexList][int(posToNeg)]:
if mem.count(i) and mem.count(symTable[i]):
wt[mem.index(symTable[i])] = wt[mem.index(i)]
# Apply mirrored weights
glTools.utils.deformer.setWeights(deformer,wt,mesh)
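# Usage sketch from the Maya script editor (node names are hypothetical):
# import glTools.tools.mirrorDeformerWeights as mirrorDeformerWeights
# mirrorDeformerWeights.mirrorWeights('body_geo', 'bodyCluster', axis='x', posToNeg=True)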
def flipWeights(mesh,sourceDeformer,targetDeformer='',axis='x',refMesh=''):
'''
Flip deformer weights
@param mesh: Mesh to flip weights for
@type mesh: str
@param sourceDeformer: Deformer to query weights from
@type sourceDeformer: str
@param targetDeformer: Deformer to apply weights to
@type targetDeformer: str
@param axis: Axis to flip weights across
@type axis: str
@param refMesh: Mesh used for symmetry reference
@type refMesh: str
'''
# Check deformers
if not mc.objExists(sourceDeformer):
raise UserInputError('Source deformer '+sourceDeformer+' does not exist!!')
if targetDeformer and not mc.objExists(targetDeformer):
raise UserInputError('Target deformer '+targetDeformer+' does not exist!!')
if not targetDeformer:
targetDeformer = sourceDeformer
# Check refMesh
if not refMesh: refMesh = mesh
# Get mesh shape
meshShape = mesh
if mc.objectType(meshShape) == 'transform':
meshShape = mc.listRelatives(mesh,s=True,ni=True)[0]
# Get symmetry table
axisIndex = {'x':0,'y':1,'z':2}[axis]
symTable = glTools.tools.symmetryTable.SymmetryTable().buildSymTable(refMesh,axisIndex)
# Get current weights
wt = glTools.utils.deformer.getWeights(sourceDeformer,mesh)
sourceMem = glTools.utils.deformer.getDeformerSetMemberIndices(sourceDeformer,meshShape)
targetMem = glTools.utils.deformer.getDeformerSetMemberIndices(targetDeformer,meshShape)
targetWt = [0.0 for i in range(len(targetMem))]
# Mirror weights
for i in sourceMem:
if targetMem.count(symTable[i]):
try: targetWt[targetMem.index(symTable[i])] = wt[sourceMem.index(i)]
except:
print('Error @: '+str(symTable[i]))
pass
else:
print("Can't find sym index for "+str(i))
# Apply mirrored weights
glTools.utils.deformer.setWeights(targetDeformer,targetWt,mesh)
def copyWeights(sourceMesh,targetMesh,sourceDeformer,targetDeformer):
'''
Copy deformer weights from one mesh to another.
Source and Target mesh objects must have matching point order!
@param sourceMesh: Mesh to copy weights from
@type sourceMesh: str
@param targetMesh: Mesh to copy weights to
@type targetMesh: str
@param sourceDeformer: Deformer to query weights from
@type sourceDeformer: str
@param targetDeformer: Deformer to apply weights to
@type targetDeformer: str
'''
# Check source and target mesh
if not mc.objExists(sourceMesh):
raise UserInputError('Source mesh "'+sourceMesh+'" does not exist!!')
if not mc.objExists(targetMesh):
raise UserInputError('Target mesh "'+targetMesh+'" does not exist!!')
# Check deformers
if not mc.objExists(sourceDeformer):
raise UserInputError('Source deformer "'+sourceDeformer+'" does not exist!!')
if targetDeformer and not mc.objExists(targetDeformer):
raise UserInputError('Target deformer "'+targetDeformer+'" does not exist!!')
if not targetDeformer: targetDeformer = sourceDeformer
# Compare vertex count
if mc.polyEvaluate(sourceMesh,v=True) != mc.polyEvaluate(targetMesh,v=True):
raise UserInputError('Source and Target mesh vertex counts do not match!!')
# Copy weights
wtList = glTools.utils.deformer.getWeights(sourceDeformer,sourceMesh)
# Paste weights
glTools.utils.deformer.setWeights(targetDeformer,wtList,targetMesh)
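# Usage sketch (hypothetical node names; remember both meshes must share
# point order): copyWeights('body_hi', 'body_lo', 'cluster_hi', 'cluster_lo')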
| [
"[email protected]"
] | |
73b93a628abb566067c2eb92e65f7271a0f5927b | 3f309b1dd9774ca1eef2c7bb7626447e6c3dbe70 | /peripheral/can_u2003/config/can.py | 4ed9f1ab5079c421fc350bd305ee791c8a5fc871 | [
"LicenseRef-scancode-unknown-license-reference",
"ISC",
"LicenseRef-scancode-public-domain"
] | permissive | Unitek-KL/csp | 30892ddf1375f5191173cafdfba5f098245a0ff7 | 2ac7ba59465f23959e51d2f16a5712b57b79ef5f | refs/heads/master | 2020-12-10T13:42:26.878408 | 2019-10-14T17:55:22 | 2019-10-14T17:56:20 | 233,609,402 | 0 | 0 | NOASSERTION | 2020-01-13T14:04:51 | 2020-01-13T14:04:51 | null | UTF-8 | Python | false | false | 38,515 | py | # coding: utf-8
"""*****************************************************************************
* Copyright (C) 2018 Microchip Technology Inc. and its subsidiaries.
*
* Subject to your compliance with these terms, you may use Microchip software
* and any derivatives exclusively with Microchip products. It is your
* responsibility to comply with third party license terms applicable to your
* use of third party software (including open source software) that may
* accompany Microchip software.
*
* THIS SOFTWARE IS SUPPLIED BY MICROCHIP "AS IS". NO WARRANTIES, WHETHER
* EXPRESS, IMPLIED OR STATUTORY, APPLY TO THIS SOFTWARE, INCLUDING ANY IMPLIED
* WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY, AND FITNESS FOR A
* PARTICULAR PURPOSE.
*
* IN NO EVENT WILL MICROCHIP BE LIABLE FOR ANY INDIRECT, SPECIAL, PUNITIVE,
* INCIDENTAL OR CONSEQUENTIAL LOSS, DAMAGE, COST OR EXPENSE OF ANY KIND
* WHATSOEVER RELATED TO THE SOFTWARE, HOWEVER CAUSED, EVEN IF MICROCHIP HAS
* BEEN ADVISED OF THE POSSIBILITY OR THE DAMAGES ARE FORESEEABLE. TO THE
* FULLEST EXTENT ALLOWED BY LAW, MICROCHIP'S TOTAL LIABILITY ON ALL CLAIMS IN
* ANY WAY RELATED TO THIS SOFTWARE WILL NOT EXCEED THE AMOUNT OF FEES, IF ANY,
* THAT YOU HAVE PAID DIRECTLY TO MICROCHIP FOR THIS SOFTWARE.
*****************************************************************************"""
global interruptVector
global interruptHandler
global interruptHandlerLock
canElementSizes = ["8 bytes", "12 bytes", "16 bytes", "20 bytes", "24 bytes", "32 bytes", "48 bytes", "64 bytes"]
opModeValues = ["NORMAL", "CAN FD"]
stdFilterList = []
extFilterList = []
# if the mode is changed to FD, then show options for more bytes
def showWhenFD(element, event):
if event["value"] == 'CAN FD':
element.setVisible(True)
else:
element.setVisible(False)
# Rx Buffer Element size
def RxBufferElementSize(element, event):
if ((event["id"] == 'CAN_OPMODE' and event["value"] == 'CAN FD' and Database.getSymbolValue(canInstanceName.getValue().lower(), "RXBUF_USE") == True)
or (event["id"] == 'RXBUF_USE' and event["value"] == True and Database.getSymbolValue(canInstanceName.getValue().lower(), "CAN_OPMODE") == 'CAN FD')):
element.setVisible(True)
element.setReadOnly(False)
else:
element.setVisible(False)
element.setReadOnly(True)
# for FD. Expects keyValue symbol. Use for RX and TX
def adornElementSize(fifo):
fifo.addKey("8 bytes", "0", "8 Bytes")
fifo.addKey("12 bytes", "1", "12 Bytes")
fifo.addKey("16 bytes", "2", "16 Bytes")
fifo.addKey("20 bytes", "3", "20 Bytes")
fifo.addKey("24 bytes", "4", "24 Bytes")
fifo.addKey("32 bytes", "5", "32 Bytes")
fifo.addKey("48 bytes", "6", "48 Bytes")
fifo.addKey("64 bytes", "7", "64 Bytes")
fifo.setDefaultValue(0)
fifo.setOutputMode("Value")
fifo.setDisplayMode("Description")
# if mode is changed to NORMAL then set element size to 8 bytes
def updateElementSize(symbol, event):
if event["value"] == 'CAN FD':
symbol.setVisible(True)
symbol.setReadOnly(False)
else:
symbol.setVisible(False)
symbol.setReadOnly(True)
# for extended and standard filters
def adornFilterType(filterType):
filterType.addKey("Range", "0", "Based on Range")
filterType.addKey("Dual", "1", "Based on Dual ID")
filterType.addKey("Classic", "2", "Based on Classic Mask/Value")
filterType.setOutputMode("Value")
filterType.setDisplayMode("Key")
filterType.setDefaultValue(0)
# for extended and standard filter configurations
def adornFilterConfig(config):
config.addKey("Disabled", "0", "Filter is Disabled")
config.addKey("RXF0", "1", "Store in RX FIFO 0")
config.addKey("RXF1", "2", "Store in RX FIFO 1")
config.addKey("Reject", "3", "Reject")
config.addKey("Priority", "4", "Set priority")
config.addKey("Priority0", "5", "Set priority and store in FIFO 0")
config.addKey("Priority1", "6", "Set priority and store in FIFO 1")
config.addKey("RXBUF", "7", "Store into Rx Buffer")
config.setOutputMode("Value")
config.setDisplayMode("Description")
config.setDefaultValue(1)
def standardFilterRangeCheck(symbol, event):
filterNumber = event["id"].split("_")[2].split("FILTER")[1]
if Database.getSymbolValue(canInstanceName.getValue().lower(),
canInstanceName.getValue() + "_STD_FILTER" + str(filterNumber) + "_TYPE") == 0:
id1 = Database.getSymbolValue(canInstanceName.getValue().lower(),
canInstanceName.getValue() + "_STD_FILTER" + str(filterNumber) + "_SFID1")
id2 = Database.getSymbolValue(canInstanceName.getValue().lower(),
canInstanceName.getValue() + "_STD_FILTER" + str(filterNumber) + "_SFID2")
if ((id1 > id2) and (Database.getSymbolValue(canInstanceName.getValue().lower(),
canInstanceName.getValue() + "_STD_FILTER" + str(filterNumber) + "_CONFIG") != 7)):
symbol.setVisible(True)
else:
symbol.setVisible(False)
else:
symbol.setVisible(False)
def canCreateStdFilter(component, menu, filterNumber):
stdFilter = component.createMenuSymbol(canInstanceName.getValue() + "_STD_FILTER"+ str(filterNumber), menu)
stdFilter.setLabel("Standard Filter " + str(filterNumber))
stdFilterType = component.createKeyValueSetSymbol(canInstanceName.getValue() + "_STD_FILTER" + str(filterNumber) + "_TYPE", stdFilter)
stdFilterType.setLabel("Type")
adornFilterType(stdFilterType)
sfid1 = component.createIntegerSymbol(canInstanceName.getValue() + "_STD_FILTER" + str(filterNumber) + "_SFID1", stdFilter)
sfid1.setLabel("ID1")
sfid1.setMin(0)
sfid1.setMax(2047)
sfid2 = component.createIntegerSymbol(canInstanceName.getValue() + "_STD_FILTER" + str(filterNumber) + "_SFID2", stdFilter)
sfid2.setLabel("ID2")
sfid2.setMin(0)
sfid2.setMax(2047)
stdFilterRangeInvalidSym = component.createCommentSymbol(canInstanceName.getValue() + "_STD_FILTER" + str(filterNumber) + "_COMMENT", stdFilter)
stdFilterRangeInvalidSym.setLabel("Warning!!! " + canInstanceName.getValue() + " Standard Filter " + str(filterNumber) + " ID2 must be greater or equal to ID1")
stdFilterRangeInvalidSym.setVisible(False)
config = component.createKeyValueSetSymbol(canInstanceName.getValue() + "_STD_FILTER" + str(filterNumber) + "_CONFIG", stdFilter)
config.setLabel("Element Configuration")
adornFilterConfig(config)
stdFilterRangeInvalidSym.setDependencies(standardFilterRangeCheck, [canInstanceName.getValue() + "_STD_FILTER" + str(filterNumber) + "_TYPE",
canInstanceName.getValue() + "_STD_FILTER" + str(filterNumber) + "_SFID1",
canInstanceName.getValue() + "_STD_FILTER" + str(filterNumber) + "_SFID2",
canInstanceName.getValue() + "_STD_FILTER" + str(filterNumber) + "_CONFIG"])
stdFilter.setVisible(False)
stdFilter.setEnabled(False)
return stdFilter
def extendedFilterRangeCheck(symbol, event):
filterNumber = event["id"].split("_")[2].split("FILTER")[1]
if Database.getSymbolValue(canInstanceName.getValue().lower(),
canInstanceName.getValue() + "_EXT_FILTER" + str(filterNumber) + "_TYPE") == 0:
id1 = Database.getSymbolValue(canInstanceName.getValue().lower(),
canInstanceName.getValue() + "_EXT_FILTER" + str(filterNumber) + "_EFID1")
id2 = Database.getSymbolValue(canInstanceName.getValue().lower(),
canInstanceName.getValue() + "_EXT_FILTER" + str(filterNumber) + "_EFID2")
if ((id1 > id2) and (Database.getSymbolValue(canInstanceName.getValue().lower(),
canInstanceName.getValue() + "_EXT_FILTER" + str(filterNumber) + "_CONFIG") != 7)):
symbol.setVisible(True)
else:
symbol.setVisible(False)
else:
symbol.setVisible(False)
def canCreateExtFilter(component, menu, filterNumber):
extFilter = component.createMenuSymbol(canInstanceName.getValue() + "_EXT_FILTER" + str(filterNumber), menu)
extFilter.setLabel("Extended Filter " + str(filterNumber))
extFilterType = component.createKeyValueSetSymbol(canInstanceName.getValue() + "_EXT_FILTER" + str(filterNumber) + "_TYPE", extFilter)
extFilterType.setLabel("Type")
adornFilterType(extFilterType)
efid1 = component.createIntegerSymbol(canInstanceName.getValue() + "_EXT_FILTER" + str(filterNumber) + "_EFID1", extFilter)
efid1.setLabel("ID1")
efid1.setMin(0)
efid1.setMax(536870911)
efid2 = component.createIntegerSymbol(canInstanceName.getValue() + "_EXT_FILTER" + str(filterNumber) + "_EFID2", extFilter)
efid2.setLabel("ID2")
efid2.setMin(0)
efid2.setMax(536870911)
extFilterRangeInvalidSym = component.createCommentSymbol(canInstanceName.getValue() + "_EXT_FILTER" + str(filterNumber) + "_COMMENT", extFilter)
extFilterRangeInvalidSym.setLabel("Warning!!! " + canInstanceName.getValue() + " Extended Filter " + str(filterNumber) + " ID2 must be greater or equal to ID1")
extFilterRangeInvalidSym.setVisible(False)
config = component.createKeyValueSetSymbol(canInstanceName.getValue() + "_EXT_FILTER" + str(filterNumber) + "_CONFIG", extFilter)
config.setLabel("Element Configuration")
adornFilterConfig(config)
extFilterRangeInvalidSym.setDependencies(extendedFilterRangeCheck, [canInstanceName.getValue() + "_EXT_FILTER" + str(filterNumber) + "_TYPE",
canInstanceName.getValue() + "_EXT_FILTER" + str(filterNumber) + "_EFID1",
canInstanceName.getValue() + "_EXT_FILTER" + str(filterNumber) + "_EFID2",
canInstanceName.getValue() + "_EXT_FILTER" + str(filterNumber) + "_CONFIG"])
extFilter.setVisible(False)
extFilter.setEnabled(False)
return extFilter
# adjust how many standard filters are shown based on number entered
def adjustStdFilters(filterList, event):
for filter in stdFilterList[:event["value"]]:
if filter.getVisible() != True:
filter.setVisible(True)
filter.setEnabled(True)
for filter in stdFilterList[event["value"]:]:
if filter.getVisible() != False:
filter.setVisible(False)
filter.setEnabled(False)
# adjust how many extended filters are shown based on number entered
def adjustExtFilters(filterList, event):
for filter in extFilterList[:event["value"]]:
if filter.getVisible() != True:
filter.setVisible(True)
filter.setEnabled(True)
for filter in extFilterList[event["value"]:]:
if filter.getVisible() != False:
filter.setVisible(False)
filter.setEnabled(False)
def interruptControl(symbol, event):
if event["value"] == True:
Database.setSymbolValue("core", interruptVector, True, 2)
Database.setSymbolValue("core", interruptHandler, canInstanceName.getValue() + "_InterruptHandler", 2)
Database.setSymbolValue("core", interruptHandlerLock, True, 2)
else:
Database.setSymbolValue("core", interruptVector, False, 2)
Database.setSymbolValue("core", interruptHandler, canInstanceName.getValue() + "_Handler", 2)
Database.setSymbolValue("core", interruptHandlerLock, False, 2)
# Dependency Function to show or hide the warning message depending on Interrupt enable/disable status
def InterruptStatusWarning(symbol, event):
if (Database.getSymbolValue(canInstanceName.getValue().lower(), "INTERRUPT_MODE") == True):
symbol.setVisible(event["value"])
def canCoreClockFreq(symbol, event):
symbol.setValue(int(Database.getSymbolValue("core", canInstanceName.getValue() + "_CLOCK_FREQUENCY")), 2)
def bitTimingCalculation(bitTiming, lowTq, highTq):
clk = Database.getSymbolValue("core", canInstanceName.getValue() + "_CLOCK_FREQUENCY")
if (bitTiming == "Data"):
prescaler = Database.getSymbolValue(canInstanceName.getValue().lower(), "DBTP_DBRP")
bitrate = Database.getSymbolValue(canInstanceName.getValue().lower(), "DATA_BITRATE")
samplePoint = Database.getSymbolValue(canInstanceName.getValue().lower(), "DATA_SAMPLE_POINT")
else:
prescaler = Database.getSymbolValue(canInstanceName.getValue().lower(), "NBTP_NBRP")
bitrate = Database.getSymbolValue(canInstanceName.getValue().lower(), "NOMINAL_BITRATE")
samplePoint = Database.getSymbolValue(canInstanceName.getValue().lower(), "NOMINAL_SAMPLE_POINT")
numOfTimeQuanta = clk / ((bitrate * 1000) * (prescaler + 1))
if (numOfTimeQuanta < lowTq):
canTimeQuantaInvalidSym.setLabel("Warning!!! Number of Time Quanta is too low for required " + bitTiming + " Bit Timing")
canTimeQuantaInvalidSym.setVisible(True)
canCoreClockInvalidSym.setLabel("Warning!!! " + canInstanceName.getValue() + " Clock Frequency is too low for required " + bitTiming + " Bit Timing")
canCoreClockInvalidSym.setVisible(True)
elif (numOfTimeQuanta > highTq):
canTimeQuantaInvalidSym.setLabel("Warning!!! Number of Time Quanta is too high for required " + bitTiming + " Bit Timing")
canTimeQuantaInvalidSym.setVisible(True)
canCoreClockInvalidSym.setLabel("Warning!!! " + canInstanceName.getValue() + " Clock Frequency is too high for required " + bitTiming + " Bit Timing")
canCoreClockInvalidSym.setVisible(True)
else:
canTimeQuantaInvalidSym.setVisible(False)
canCoreClockInvalidSym.setVisible(False)
tseg1 = int((numOfTimeQuanta * samplePoint) / 100.0)
tseg2 = numOfTimeQuanta - tseg1 - 1
tseg1 -= 2
return tseg1, tseg2
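# Worked example of the arithmetic above (assumed values, not requirements):
# with clk = 48 MHz, bitrate = 500 Kbps, prescaler = 0 and samplePoint = 75%,
# numOfTimeQuanta = 48e6 / (500e3 * 1) = 96; 72 quanta fall before the sample
# point and 23 after it (96 - 72 - 1, one quantum for sync), so the values
# returned are tseg1 = 72 - 2 = 70 and tseg2 = 23.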
def dataBitTimingCalculation(symbol, event):
if (Database.getSymbolValue(canInstanceName.getValue().lower(), "CAN_OPMODE") == "CAN FD"):
tseg1, tseg2 = bitTimingCalculation("Data", 4, 49)
Database.setSymbolValue(canInstanceName.getValue().lower(), "DBTP_DTSEG1", tseg1, 2)
Database.setSymbolValue(canInstanceName.getValue().lower(), "DBTP_DTSEG2", tseg2, 2)
def nominalBitTimingCalculation(symbol, event):
tseg1, tseg2 = bitTimingCalculation("Nominal", 4, 385)
Database.setSymbolValue(canInstanceName.getValue().lower(), "NBTP_NTSEG1", tseg1, 2)
Database.setSymbolValue(canInstanceName.getValue().lower(), "NBTP_NTSEG2", tseg2, 2)
def instantiateComponent(canComponent):
global canInstanceName
global interruptVector
global interruptHandler
global interruptHandlerLock
global interruptVectorUpdate
global canCoreClockInvalidSym
global canTimeQuantaInvalidSym
canInstanceName = canComponent.createStringSymbol("CAN_INSTANCE_NAME", None)
canInstanceName.setVisible(False)
canInstanceName.setDefaultValue(canComponent.getID().upper())
print("Running " + canInstanceName.getValue())
def hideMenu(menu, event):
menu.setVisible(event["value"])
#either the watermark % changed or the number of elements
def RXF0WatermarkUpdate(watermark, event):
watermark_percentage = Database.getSymbolValue(canInstanceName.getValue().lower(), "RXF0_WP")
number_of_elements = Database.getSymbolValue(canInstanceName.getValue().lower(), "RXF0_ELEMENTS")
watermark.setValue(watermark_percentage * number_of_elements / 100, 0)
def RXF1WatermarkUpdate(watermark, event):
watermark_percentage = Database.getSymbolValue(canInstanceName.getValue().lower(), "RXF1_WP")
number_of_elements = Database.getSymbolValue(canInstanceName.getValue().lower(), "RXF1_ELEMENTS")
watermark.setValue(watermark_percentage * number_of_elements / 100, 0)
def TXWatermarkUpdate(watermark, event):
watermark_percentage = Database.getSymbolValue(canInstanceName.getValue().lower(), "TX_FIFO_WP")
number_of_elements = Database.getSymbolValue(canInstanceName.getValue().lower(), "TX_FIFO_ELEMENTS")
watermark.setValue(watermark_percentage * number_of_elements / 100, 0)
# Initialize peripheral clock
Database.setSymbolValue("core", canInstanceName.getValue()+"_CLOCK_ENABLE", True, 1)
# CAN operation mode - default to NORMAL
canOpMode = canComponent.createComboSymbol("CAN_OPMODE", None, opModeValues)
canOpMode.setLabel("CAN Operation Mode")
canOpMode.setDefaultValue("NORMAL")
canInterruptMode = canComponent.createBooleanSymbol("INTERRUPT_MODE", None)
canInterruptMode.setLabel("Interrupt Mode")
canInterruptMode.setDefaultValue(False)
interruptVector = canInstanceName.getValue() + "_INTERRUPT_ENABLE"
interruptHandler = canInstanceName.getValue() + "_INTERRUPT_HANDLER"
interruptHandlerLock = canInstanceName.getValue() + "_INTERRUPT_HANDLER_LOCK"
interruptVectorUpdate = canInstanceName.getValue() + "_INTERRUPT_ENABLE_UPDATE"
# CAN Bit Timing Calculation
canBitTimingCalculationMenu = canComponent.createMenuSymbol("BIT_TIMING_CALCULATION", None)
canBitTimingCalculationMenu.setLabel("Bit Timing Calculation")
canBitTimingCalculationMenu.setDescription("CAN Bit Timing Calculation for Normal and CAN-FD Operation")
canCoreClockValue = canComponent.createIntegerSymbol("CAN_CORE_CLOCK_FREQ", canBitTimingCalculationMenu)
canCoreClockValue.setLabel("Clock Frequency")
canCoreClockValue.setReadOnly(True)
canCoreClockValue.setDefaultValue(int(Database.getSymbolValue("core", canInstanceName.getValue() + "_CLOCK_FREQUENCY")))
canCoreClockValue.setVisible(True)
canCoreClockValue.setDependencies(canCoreClockFreq, ["core." + canInstanceName.getValue() + "_CLOCK_FREQUENCY"])
canCoreClockInvalidSym = canComponent.createCommentSymbol("CAN_CORE_CLOCK_INVALID_COMMENT", None)
canCoreClockInvalidSym.setLabel("Warning!!! " + canInstanceName.getValue() + " Clock Frequency is too low for required Nominal Bit Timing")
canCoreClockInvalidSym.setVisible((canCoreClockValue.getValue() == 0))
canTimeQuantaInvalidSym = canComponent.createCommentSymbol("CAN_TIME_QUANTA_INVALID_COMMENT", None)
canTimeQuantaInvalidSym.setLabel("Warning!!! Number of Time Quanta is too low for required Nominal Bit Timing")
canTimeQuantaInvalidSym.setVisible(False)
# CAN Nominal Bit Timing Calculation
canNominalBitTimingMenu = canComponent.createMenuSymbol("NOMINAL_BIT_TIMING_CALCULATION", canBitTimingCalculationMenu)
canNominalBitTimingMenu.setLabel("Nominal Bit Timing")
canNominalBitTimingMenu.setDescription("This timing must be less or equal to the CAN-FD Data Bit Timing if used")
canNominalBitrate = canComponent.createIntegerSymbol("NOMINAL_BITRATE", canNominalBitTimingMenu)
canNominalBitrate.setLabel("Bit Rate (Kbps)")
canNominalBitrate.setMin(1)
canNominalBitrate.setMax(1000)
canNominalBitrate.setDefaultValue(500)
canNominalBitrate.setDependencies(nominalBitTimingCalculation, ["NOMINAL_BITRATE", "core." + canInstanceName.getValue() + "_CLOCK_FREQUENCY"])
canNominalSamplePoint = canComponent.createFloatSymbol("NOMINAL_SAMPLE_POINT", canNominalBitTimingMenu)
canNominalSamplePoint.setLabel("Sample Point %")
canNominalSamplePoint.setMin(50.0)
canNominalSamplePoint.setMax(100.0)
canNominalSamplePoint.setDefaultValue(75.0)
canNominalSamplePoint.setDependencies(nominalBitTimingCalculation, ["NOMINAL_SAMPLE_POINT"])
NBTPsyncJump = canComponent.createIntegerSymbol("NBTP_NSJW", canNominalBitTimingMenu)
NBTPsyncJump.setLabel("Synchronization Jump Width")
NBTPsyncJump.setMin(0)
NBTPsyncJump.setMax(127)
NBTPsyncJump.setDefaultValue(3)
NBTPprescale = canComponent.createIntegerSymbol("NBTP_NBRP", canNominalBitTimingMenu)
NBTPprescale.setLabel("Bit Rate Prescaler")
NBTPprescale.setMin(0)
NBTPprescale.setMax(511)
NBTPprescale.setDefaultValue(0)
NBTPprescale.setDependencies(nominalBitTimingCalculation, ["NBTP_NBRP"])
tseg1, tseg2 = bitTimingCalculation("Nominal", 4, 385)
NBTPBeforeSP = canComponent.createIntegerSymbol("NBTP_NTSEG1", canNominalBitTimingMenu)
NBTPBeforeSP.setLabel("Time Segment Before Sample Point")
NBTPBeforeSP.setMin(1)
NBTPBeforeSP.setMax(255)
NBTPBeforeSP.setDefaultValue(tseg1)
NBTPBeforeSP.setReadOnly(True)
NBTPAfterSP = canComponent.createIntegerSymbol("NBTP_NTSEG2", canNominalBitTimingMenu)
NBTPAfterSP.setLabel("Time Segment After Sample Point")
NBTPAfterSP.setMin(0)
NBTPAfterSP.setMax(127)
NBTPAfterSP.setDefaultValue(tseg2)
NBTPAfterSP.setReadOnly(True)
# CAN Data Bit Timing Calculation
canDataBitTimingMenu = canComponent.createMenuSymbol("DATA_BIT_TIMING_CALCULATION", canBitTimingCalculationMenu)
canDataBitTimingMenu.setLabel("Data Bit Timing")
canDataBitTimingMenu.setDescription("This timing must be greater or equal to the Nominal Bit Timing")
canDataBitTimingMenu.setVisible(False)
canDataBitTimingMenu.setDependencies(showWhenFD, ["CAN_OPMODE"])
canDataBitrate = canComponent.createIntegerSymbol("DATA_BITRATE", canDataBitTimingMenu)
canDataBitrate.setLabel("Bit Rate (Kbps)")
canDataBitrate.setMin(1)
canDataBitrate.setDefaultValue(500)
canDataBitrate.setDependencies(dataBitTimingCalculation, ["DATA_BITRATE", "CAN_OPMODE", "core." + canInstanceName.getValue() + "_CLOCK_FREQUENCY"])
canDataSamplePoint = canComponent.createFloatSymbol("DATA_SAMPLE_POINT", canDataBitTimingMenu)
canDataSamplePoint.setLabel("Sample Point %")
canDataSamplePoint.setMin(50.0)
canDataSamplePoint.setMax(100.0)
canDataSamplePoint.setDefaultValue(75.0)
canDataSamplePoint.setDependencies(dataBitTimingCalculation, ["DATA_SAMPLE_POINT"])
DBTPsyncJump = canComponent.createIntegerSymbol("DBTP_DSJW", canDataBitTimingMenu)
DBTPsyncJump.setLabel("Synchronization Jump Width")
DBTPsyncJump.setMin(0)
DBTPsyncJump.setDefaultValue(3)
DBTPsyncJump.setMax(7)
DBTPprescale = canComponent.createIntegerSymbol("DBTP_DBRP", canDataBitTimingMenu)
DBTPprescale.setLabel("Bit Rate Prescaler")
DBTPprescale.setMin(0)
DBTPprescale.setMax(31)
DBTPprescale.setDefaultValue(0)
DBTPprescale.setDependencies(dataBitTimingCalculation, ["DBTP_DBRP"])
DBTPBeforeSP = canComponent.createIntegerSymbol("DBTP_DTSEG1", canDataBitTimingMenu)
DBTPBeforeSP.setLabel("Time Segment Before Sample Point")
DBTPBeforeSP.setMin(1)
DBTPBeforeSP.setMax(31)
DBTPBeforeSP.setDefaultValue(10)
DBTPBeforeSP.setReadOnly(True)
DBTPAfterSP = canComponent.createIntegerSymbol("DBTP_DTSEG2", canDataBitTimingMenu)
DBTPAfterSP.setLabel("Time Segment After Sample Point")
DBTPAfterSP.setMin(0)
DBTPAfterSP.setDefaultValue(3)
DBTPAfterSP.setMax(15)
DBTPAfterSP.setReadOnly(True)
# ----- Rx FIFO 0 -----
canRXF0 = canComponent.createBooleanSymbol("RXF0_USE", None)
canRXF0.setLabel("Use RX FIFO 0")
canRXF0.setDefaultValue(True)
canRXF0.setReadOnly(True)
canRXF0Menu = canComponent.createMenuSymbol("rxf0Menu", canRXF0)
canRXF0Menu.setLabel("RX FIFO 0 Settings")
canRXF0Menu.setDependencies(hideMenu, ["RXF0_USE"])
# number of RX FIFO 0 elements
canRXF0Elements = canComponent.createIntegerSymbol("RXF0_ELEMENTS", canRXF0Menu)
canRXF0Elements.setLabel("Number of Elements")
canRXF0Elements.setDefaultValue(1)
canRXF0Elements.setMin(0)
canRXF0Elements.setMax(64)
canRXF0watermarkP = canComponent.createIntegerSymbol("RXF0_WP", canRXF0Menu)
canRXF0watermarkP.setLabel("Watermark %")
canRXF0watermarkP.setDefaultValue(0)
canRXF0watermarkP.setMin(0)
canRXF0watermarkP.setMax(99)
#This is a computed value
canRXF0watermark = canComponent.createIntegerSymbol("RXF0_WATERMARK", canRXF0Menu)
canRXF0watermark.setLabel("Watermark at element")
canRXF0watermark.setDescription("A value of 0 disables watermark")
canRXF0watermark.setDefaultValue(0)
canRXF0watermark.setReadOnly(True)
canRXF0watermark.setDependencies(RXF0WatermarkUpdate, ["RXF0_ELEMENTS", "RXF0_WP"])
canRXF0elementSize = canComponent.createKeyValueSetSymbol("RXF0_BYTES_CFG", canRXF0Menu)
canRXF0elementSize.setLabel("Element Size")
canRXF0elementSize.setVisible(False)
adornElementSize(canRXF0elementSize)
canRXF0elementSize.setDependencies(updateElementSize, ["CAN_OPMODE"])
canRx0overwrite = canComponent.createBooleanSymbol("RXF0_OVERWRITE", canRXF0Menu)
canRx0overwrite.setLabel("Use Overwrite Mode")
canRx0overwrite.setDescription("Overwrite RX FIFO 0 entries without blocking")
canRx0overwrite.setDefaultValue(True)
# ----- Rx FIFO 1 -----
canRXF1 = canComponent.createBooleanSymbol("RXF1_USE", None)
canRXF1.setLabel("Use RX FIFO 1")
canRXF1.setDefaultValue(True)
canRXF1Menu = canComponent.createMenuSymbol("rxf1menu", canRXF1)
canRXF1Menu.setLabel("RX FIFO 1 Settings")
canRXF1Menu.setDependencies(hideMenu, ["RXF1_USE"])
canRXF1Elements = canComponent.createIntegerSymbol("RXF1_ELEMENTS", canRXF1Menu)
canRXF1Elements.setLabel("Number of Elements")
canRXF1Elements.setDefaultValue(1)
canRXF1Elements.setMin(1)
canRXF1Elements.setMax(64)
canRXF1watermarkP = canComponent.createIntegerSymbol("RXF1_WP", canRXF1Menu)
canRXF1watermarkP.setLabel("Watermark %")
canRXF1watermarkP.setDefaultValue(0)
canRXF1watermarkP.setMin(0)
canRXF1watermarkP.setMax(99)
#This is a computed value for watermark
canRX1watermark = canComponent.createIntegerSymbol("RXF1_WATERMARK", canRXF1Menu)
canRX1watermark.setLabel("Watermark at element")
canRX1watermark.setDescription("A value of 0 disables watermark")
canRX1watermark.setDefaultValue(0)
canRX1watermark.setReadOnly(True)
canRX1watermark.setDependencies(RXF1WatermarkUpdate, ["RXF1_ELEMENTS", "RXF1_WP"])
canRXF1elementSize = canComponent.createKeyValueSetSymbol("RXF1_BYTES_CFG", canRXF1Menu)
canRXF1elementSize.setLabel("Element Size")
canRXF1elementSize.setVisible(False)
adornElementSize(canRXF1elementSize)
canRXF1elementSize.setDependencies(updateElementSize, ["CAN_OPMODE"])
canRXF1overwrite = canComponent.createBooleanSymbol("RXF1_OVERWRITE", canRXF1Menu)
canRXF1overwrite.setLabel("Use Overwrite Mode")
canRXF1overwrite.setDescription("Overwrite RX FIFO 1 entries without blocking")
canRXF1overwrite.setDefaultValue(True)
# ----- Rx Buffer -----
canRXBuf = canComponent.createBooleanSymbol("RXBUF_USE", None)
canRXBuf.setLabel("Use Dedicated Rx Buffer")
canRXBuf.setDefaultValue(False)
canRXBufElements = canComponent.createIntegerSymbol("RX_BUFFER_ELEMENTS", canRXBuf)
canRXBufElements.setLabel("Number of Elements")
canRXBufElements.setDefaultValue(1)
canRXBufElements.setMin(1)
canRXBufElements.setMax(64)
canRXBufElements.setVisible(False)
canRXBufElements.setDependencies(hideMenu, ["RXBUF_USE"])
canRXBufelementSize = canComponent.createKeyValueSetSymbol("RX_BUFFER_BYTES_CFG", canRXBuf)
canRXBufelementSize.setLabel("Element Size")
canRXBufelementSize.setVisible(False)
adornElementSize(canRXBufelementSize)
canRXBufelementSize.setDependencies(RxBufferElementSize, ["CAN_OPMODE", "RXBUF_USE"])
# ------ T X --------------
# ----- Tx FIFO -----
canTX = canComponent.createBooleanSymbol("TX_USE", None)
canTX.setLabel("Use TX FIFO")
canTX.setDefaultValue(True)
canTX.setReadOnly(True)
# make a menu separate for TX so it can be turned off and on at one point
canTXmenu = canComponent.createMenuSymbol("cantx", canTX)
canTXmenu.setLabel("TX FIFO Settings")
canTXmenu.setDependencies(hideMenu, ["TX_USE"])
# number of TX FIFO elements
canTXnumElements = canComponent.createIntegerSymbol("TX_FIFO_ELEMENTS", canTXmenu)
canTXnumElements.setLabel("Number of Elements")
canTXnumElements.setDefaultValue(1)
canTXnumElements.setMin(1)
canTXnumElements.setMax(32)
canTXwatermarkP = canComponent.createIntegerSymbol("TX_FIFO_WP", canTXmenu)
canTXwatermarkP.setLabel("Watermark %")
canTXwatermarkP.setDefaultValue(0)
canTXwatermarkP.setMin(0)
canTXwatermarkP.setMax(99)
#This is a computed value for watermark
canTXwatermark = canComponent.createIntegerSymbol("TX_FIFO_WATERMARK", canTXmenu)
canTXwatermark.setLabel("Watermark at element")
canTXwatermark.setDescription("A value of 0 disables watermark")
canTXwatermark.setDefaultValue(0)
canTXwatermark.setReadOnly(True)
canTXwatermark.setDependencies(TXWatermarkUpdate, ["TX_FIFO_ELEMENTS", "TX_FIFO_WP"])
canTXElementCfg = canComponent.createKeyValueSetSymbol("TX_FIFO_BYTES_CFG", canTXmenu)
canTXElementCfg.setLabel("Element Size")
adornElementSize(canTXElementCfg)
canTXElementCfg.setVisible(False)
canTXElementCfg.setDependencies(updateElementSize, ["CAN_OPMODE"])
canTXpause = canComponent.createBooleanSymbol("TX_PAUSE", None)
canTXpause.setLabel("Enable TX Pause")
canTXpause.setDescription("Pause 2 CAN bit times between transmissions")
canTXpause.setDefaultValue(False)
# ----- Tx Buffer -----
canTXBuf = canComponent.createBooleanSymbol("TXBUF_USE", None)
canTXBuf.setLabel("Use Dedicated Tx Buffer")
canTXBuf.setDefaultValue(False)
# number of TX buffer elements
canTXBufElements = canComponent.createIntegerSymbol("TX_BUFFER_ELEMENTS", canTXBuf)
canTXBufElements.setLabel("Number of TX Buffer Elements")
canTXBufElements.setDefaultValue(1)
canTXBufElements.setMin(1)
canTXBufElements.setMax(32)
canTXBufElements.setVisible(False)
canTXBufElements.setDependencies(hideMenu, ["TXBUF_USE"])
# up to 128 standard filters
canStdFilterMenu = canComponent.createMenuSymbol("stdFilterMenu", None)
canStdFilterMenu.setLabel("Standard Filters (up to 128)")
canStdFilterMenu.setDependencies(adjustStdFilters, ["FILTERS_STD"])
canStdFilterNumber = canComponent.createIntegerSymbol("FILTERS_STD", canStdFilterMenu)
canStdFilterNumber.setLabel("Number of Standard Filters:")
canStdFilterNumber.setDefaultValue(0)
canStdFilterNumber.setMin(0)
canStdFilterNumber.setMax(128)
#Create all of the standard filters in a disabled state
for filter in range (128):
stdFilterList.append(canCreateStdFilter(canComponent, canStdFilterMenu, filter + 1))
#What to do when a NO-MATCH is detected on a standard packet
canNoMatchStandard = canComponent.createKeyValueSetSymbol("FILTERS_STD_NOMATCH", None)
canNoMatchStandard.setLabel("Standard message No-Match disposition:")
canNoMatchStandard.addKey("CAN_GFC_ANFS_RXF0", "0", "Move to RX FIFO 0")
canNoMatchStandard.addKey("CAN_GFC_ANFS_RXF1", "1", "Move to RX FIFO 1")
canNoMatchStandard.addKey("CAN_GFC_ANFS_REJECT", "2", "Reject")
canNoMatchStandard.setOutputMode("Key")
canNoMatchStandard.setDisplayMode("Description")
canNoMatchStandard.setDefaultValue(2)
# Reject all standard IDs?
canStdReject = canComponent.createBooleanSymbol("FILTERS_STD_REJECT", None)
canStdReject.setLabel("Reject Standard Remote Frames")
canStdReject.setDescription("Reject all remote frames with 11-bit standard IDs")
canStdReject.setDefaultValue(False)
# 64 extended filters
canExtFilterMenu = canComponent.createMenuSymbol("extFilterMenu", None)
canExtFilterMenu.setLabel("Extended Filters (up to 64)")
canExtFilterMenu.setDependencies(adjustExtFilters, ["FILTERS_EXT"])
#How many extended filters
canExtFilterNumber = canComponent.createIntegerSymbol("FILTERS_EXT", canExtFilterMenu)
canExtFilterNumber.setLabel("Number of Extended Filters:")
canExtFilterNumber.setDefaultValue(0)
canExtFilterNumber.setMin(0)
canExtFilterNumber.setMax(64)
#Create all of the 64 extended filters in a disabled state
for filter in range (64):
extFilterList.append(canCreateExtFilter(canComponent, canExtFilterMenu, filter + 1))
#What to do when a NO-MATCH is detected on an extended message
canNoMatchExtended = canComponent.createKeyValueSetSymbol("FILTERS_EXT_NOMATCH", None)
canNoMatchExtended.setLabel("Extended message No-Match disposition:")
canNoMatchExtended.addKey("CAN_GFC_ANFE_RXF0", "0", "Move to RX FIFO 0")
canNoMatchExtended.addKey("CAN_GFC_ANFE_RXF1", "1", "Move to RX FIFO 1")
canNoMatchExtended.addKey("CAN_GFC_ANFE_REJECT", "2", "Reject")
canNoMatchExtended.setOutputMode("Key")
canNoMatchExtended.setDisplayMode("Description")
canNoMatchExtended.setDefaultValue(2)
# Reject all extended IDs?
canExtReject = canComponent.createBooleanSymbol("FILTERS_EXT_REJECT", None)
canExtReject.setLabel("Reject Extended Remote Frames")
canExtReject.setDescription("Reject all remote frames with 29-bit extended IDs")
canExtReject.setDefaultValue(False)
#use timeout?
canUseTimeout = canComponent.createBooleanSymbol("CAN_TIMEOUT", None)
canUseTimeout.setLabel("Use Timeout Counter")
canUseTimeout.setDescription("Enables Timeout Counter")
canUseTimeout.setDefaultValue(False)
#timeout count
canTimeoutCount = canComponent.createIntegerSymbol("TIMEOUT_COUNT", canUseTimeout)
canTimeoutCount.setLabel("Timeout Countdown from: ")
canTimeoutCount.setDefaultValue(40000)
canTimeoutCount.setMin(10)
canTimeoutCount.setMax(65535)
canTimeoutCount.setVisible(False)
canTimeoutCount.setDependencies(hideMenu, ["CAN_TIMEOUT"])
#timeout mode
canTimeoutMode = canComponent.createKeyValueSetSymbol("TIMEOUT_SELECT", canUseTimeout)
canTimeoutMode.setLabel("Timeout mode:")
canTimeoutMode.addKey("CAN_TOCC_TOS_CONT", "0", "CONTINUOUS")
canTimeoutMode.addKey("CAN_TOCC_TOS_TXEF", "1", "TX EVENT")
canTimeoutMode.addKey("CAN_TOCC_TOS_RXF0", "2", "RX0 EVENT")
canTimeoutMode.addKey("CAN_TOCC_TOS_RXF1", "3", "RX1 EVENT")
canTimeoutMode.setOutputMode("Key")
canTimeoutMode.setDisplayMode("Description")
canTimeoutMode.setVisible(False)
canTimeoutMode.setDependencies(hideMenu, ["CAN_TIMEOUT"])
canTimeoutMode.setDefaultValue(1)
#timestamp Modes
canTimestampMode = canComponent.createKeyValueSetSymbol("TIMESTAMP_MODE", None)
canTimestampMode.setLabel("Timestamp mode")
canTimestampMode.setDescription("EXT TIMESTAMP: external counter (needed for FD). ZERO: timestamp is always 0x0000. TCP INC: incremented according to TCP.")
canTimestampMode.addKey("CAN_TSCC_TSS_ZERO", "0", "ZERO")
canTimestampMode.addKey("CAN_TSCC_TSS_INC", "1", "TCP INC")
canTimestampMode.addKey("CAN_TSCC_TSS_EXT", "2", "EXT TIMESTAMP")
canTimestampMode.setOutputMode("Key")
canTimestampMode.setDisplayMode("Description")
canTimestampMode.setDefaultValue(1)
#timestamp/timeout Counter Prescaler
canTCP = canComponent.createIntegerSymbol("TIMESTAMP_PRESCALER", None)
canTCP.setLabel("Timestamp/Timeout Counter Prescaler (TCP):")
canTCP.setDescription("Configures Timestamp & Timeout counter prescaler in multiples of CAN bit times.")
canTCP.setDefaultValue(0)
canTCP.setMin(0)
canTCP.setMax(15)
# Interrupt Dynamic settings
caninterruptControl = canComponent.createBooleanSymbol("CAN_INTERRUPT_ENABLE", None)
caninterruptControl.setVisible(False)
caninterruptControl.setDependencies(interruptControl, ["INTERRUPT_MODE"])
# Dependency Status for interrupt
canIntEnComment = canComponent.createCommentSymbol("CAN_INTERRUPT_ENABLE_COMMENT", None)
canIntEnComment.setVisible(False)
canIntEnComment.setLabel("Warning!!! " + canInstanceName.getValue() + " Interrupt is Disabled in Interrupt Manager")
canIntEnComment.setDependencies(InterruptStatusWarning, ["core." + interruptVectorUpdate])
REG_MODULE_CAN = Register.getRegisterModule("CAN")
configName = Variables.get("__CONFIGURATION_NAME")
#Master Header
canMasterHeaderFile = canComponent.createFileSymbol("headerFile", None)
canMasterHeaderFile.setSourcePath("../peripheral/can_u2003/templates/plib_can_common.h")
canMasterHeaderFile.setOutputName("plib_can_common.h")
canMasterHeaderFile.setDestPath("/peripheral/can/")
canMasterHeaderFile.setProjectPath("config/" + configName + "/peripheral/can/")
canMasterHeaderFile.setType("HEADER")
#Instance Source File
canMainSourceFile = canComponent.createFileSymbol("sourceFile", None)
canMainSourceFile.setSourcePath("../peripheral/can_u2003/templates/plib_can.c.ftl")
canMainSourceFile.setOutputName("plib_"+canInstanceName.getValue().lower()+".c")
canMainSourceFile.setDestPath("/peripheral/can/")
canMainSourceFile.setProjectPath("config/" + configName + "/peripheral/can/")
canMainSourceFile.setType("SOURCE")
canMainSourceFile.setMarkup(True)
#Instance Header File
canInstHeaderFile = canComponent.createFileSymbol("instHeaderFile", None)
canInstHeaderFile.setSourcePath("../peripheral/can_u2003/templates/plib_can.h.ftl")
canInstHeaderFile.setOutputName("plib_"+canInstanceName.getValue().lower()+".h")
canInstHeaderFile.setDestPath("/peripheral/can/")
canInstHeaderFile.setProjectPath("config/" + configName + "/peripheral/can/")
canInstHeaderFile.setType("HEADER")
canInstHeaderFile.setMarkup(True)
#CAN Initialize
canSystemInitFile = canComponent.createFileSymbol("initFile", None)
canSystemInitFile.setType("STRING")
canSystemInitFile.setOutputName("core.LIST_SYSTEM_INIT_C_SYS_INITIALIZE_PERIPHERALS")
canSystemInitFile.setSourcePath("../peripheral/can_u2003/templates/system/initialization.c.ftl")
canSystemInitFile.setMarkup(True)
#CAN definitions header
canSystemDefFile = canComponent.createFileSymbol("defFile", None)
canSystemDefFile.setType("STRING")
canSystemDefFile.setOutputName("core.LIST_SYSTEM_DEFINITIONS_H_INCLUDES")
canSystemDefFile.setSourcePath("../peripheral/can_u2003/templates/system/definitions.h.ftl")
canSystemDefFile.setMarkup(True)
| [
"http://support.microchip.com"
] | http://support.microchip.com |
abc855529ce069a7208dd306d3988daf851774db | 2cfeb115b0ea14c52c3bf99abb53e935fa3d01b7 | /examples/vanilla/settings_quickstart.py | a8a2eec01f21006986c5b8f512f375b6eaf87a00 | [
"BSD-2-Clause"
] | permissive | aykut/django-oscar | 796fbc2f62d3dd7877833610f7bead2b006b9739 | ca3629e74ea1e0affc55d3de4e97f523e352d267 | refs/heads/master | 2021-01-22T07:27:59.359441 | 2011-06-30T19:36:01 | 2011-06-30T19:36:01 | 14,263,668 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,527 | py | import os
PROJECT_DIR = os.path.dirname(__file__)
location = lambda x: os.path.join(os.path.dirname(os.path.realpath(__file__)), x)
DEBUG = True
TEMPLATE_DEBUG = True
SQL_DEBUG = True
ADMINS = (
# ('Your Name', '[email protected]'),
)
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/tmp/oscar_vanilla',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/London'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = location("assets")
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/admin/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '$)a7n&o80u!6y5t-+jrd3)3!%vh&shg$wqpjpxc!ar&p#!)n1a'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.request",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.contrib.messages.context_processors.messages",
# Oscar specific
'oscar.apps.search.context_processors.search_form',
'oscar.apps.promotions.context_processors.promotions',
'oscar.apps.promotions.context_processors.merchandising_blocks',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.transaction.TransactionMiddleware',
'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
# Oscar specific
'oscar.apps.basket.middleware.BasketMiddleware'
)
INTERNAL_IPS = ('127.0.0.1',)
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
},
'handlers': {
'null': {
'level':'DEBUG',
'class':'django.utils.log.NullHandler',
},
'console':{
'level':'DEBUG',
'class':'logging.StreamHandler',
'formatter': 'verbose'
},
'file': {
'level': 'INFO',
'class': 'logging.FileHandler',
'filename': '/tmp/oscar.log',
'formatter': 'verbose'
},
'mail_admins': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler',
},
},
'loggers': {
'django': {
'handlers':['null'],
'propagate': True,
'level':'INFO',
},
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': False,
},
'oscar.checkout': {
'handlers': ['console'],
'propagate': True,
'level':'INFO',
},
'django.db.backends': {
'handlers':['null'],
'propagate': False,
'level':'DEBUG',
},
}
}
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
'django.contrib.flatpages',
# External dependencies
'haystack',
'sorl.thumbnail',
# Apps from oscar
'oscar',
'oscar.apps.analytics',
'oscar.apps.discount',
'oscar.apps.order',
'oscar.apps.checkout',
'oscar.apps.shipping',
'oscar.apps.order_management',
'oscar.apps.product',
'oscar.apps.basket',
'oscar.apps.payment',
'oscar.apps.offer',
'oscar.apps.address',
'oscar.apps.partner',
'oscar.apps.image',
'oscar.apps.customer',
'oscar.apps.promotions',
'oscar.apps.reports',
'oscar.apps.search',
'oscar.apps.product.reviews',
'oscar.apps.payment.datacash',
)
LOGIN_REDIRECT_URL = '/accounts/profile/'
APPEND_SLASH = True
# Oscar settings
from oscar.defaults import *
# Haystack settings
HAYSTACK_SITECONF = 'oscar.search_sites'
HAYSTACK_SEARCH_ENGINE = 'dummy'
| [
"[email protected]"
] | |
4ca1a63aba6d81d8131ea2d1874236b45ee14bb9 | 283f85409e4aa92444fc865c802d2babd8629f88 | /app/errors/__init__.py | fab5ff4aceb9732c788992026b7de33c99e5c66b | [
"MIT"
] | permissive | tomwright01/EyeReport | df52a77b3cc6396ba51721421cc5616649286c8b | ab227190e7efe9af18125d175efd271ee11dbff4 | refs/heads/master | 2021-05-16T04:30:05.374448 | 2019-08-08T15:41:15 | 2019-08-08T15:41:15 | 106,033,903 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 172 | py | # -*- coding: utf-8 -*-
from flask import Blueprint
bp = Blueprint('errors', __name__,
template_folder='templates')
from app.errors import handlers | [
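# Registration sketch (assumes the usual Flask application-factory pattern
# elsewhere in this app; names are illustrative):
# from app.errors import bp as errors_bp
# app.register_blueprint(errors_bp)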
"[email protected]"
] | |
81df5fb4cda7e31f1ab5cd5b884be42f24cade5e | 137ded4225a84d1f5f46099ef6e5545b26cc5fb2 | /Configuration/GenProduction/python/Pythia8_TuneCP5_5TeV_D0_PiK_prompt_pt1p2_y2p4_cfi.py | 6281438c1a2a5fb8c8501629827135ab0b1fc8e0 | [] | no_license | davidlw/2017FlowMCRequest | 8a27f04d5a70c3f34d003d6ea25888a691e73bb6 | c9cd086db18ec3a661482cc457a1fdb5949d3b88 | refs/heads/master | 2022-08-28T21:42:32.093605 | 2022-08-02T18:00:06 | 2022-08-02T18:00:06 | 148,789,077 | 0 | 2 | null | 2021-01-06T21:45:03 | 2018-09-14T13:01:38 | Python | UTF-8 | Python | false | false | 2,674 | py | import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.MCTunes2017.PythiaCP5Settings_cfi import *
from GeneratorInterface.EvtGenInterface.EvtGenSetting_cff import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
pythiaPylistVerbosity = cms.untracked.int32(0),
pythiaHepMCVerbosity = cms.untracked.bool(False),
comEnergy = cms.double(5020.0),
maxEventsToPrint = cms.untracked.int32(0),
ExternalDecays = cms.PSet(
EvtGen130 = cms.untracked.PSet(
decay_table = cms.string('GeneratorInterface/EvtGenInterface/data/DECAY_2010.DEC'),
operates_on_particles = cms.vint32(),
particle_property_file = cms.FileInPath('GeneratorInterface/EvtGenInterface/data/evt.pdl'),
user_decay_file = cms.vstring('GeneratorInterface/ExternalDecays/data/D0_Kpi.dec'),
list_forced_decays = cms.vstring('myD0', 'myanti-D0')
),
parameterSets = cms.vstring('EvtGen130')
),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CP5SettingsBlock,
processParameters = cms.vstring(
'HardQCD:all = on',
'PhaseSpace:pTHatMin = 0.', #min pthat
),
parameterSets = cms.vstring(
'pythia8CommonSettings',
'pythia8CP5Settings',
'processParameters',
)
)
)
generator.PythiaParameters.processParameters.extend(EvtGenExtraParticles)
partonfilter = cms.EDFilter("PythiaFilter",
ParticleID = cms.untracked.int32(4) # 4 for c and 5 for b quark
)
D0Daufilter = cms.EDFilter("PythiaMomDauFilter",
ParticleID = cms.untracked.int32(421),
MomMinPt = cms.untracked.double(0.0),
MomMinEta = cms.untracked.double(-10.0),
MomMaxEta = cms.untracked.double(10.0),
DaughterIDs = cms.untracked.vint32(211, -321),
NumberDaughters = cms.untracked.int32(2),
NumberDescendants = cms.untracked.int32(0),
BetaBoost = cms.untracked.double(0.0),
)
D0rapidityfilter = cms.EDFilter("PythiaFilter",
ParticleID = cms.untracked.int32(421),
MinPt = cms.untracked.double(1.2),
MaxPt = cms.untracked.double(1000.),
MinRapidity = cms.untracked.double(-2.5),
MaxRapidity = cms.untracked.double(2.5),
)
ProductionFilterSequence = cms.Sequence(generator*partonfilter*D0Daufilter*D0rapidityfilter)
| [
"[email protected]"
] | |
b916f740e286b9f3ef5c7acddf84b90d8541aa80 | 452f3354c04f887103d0c7c8b4a07dd29a72eed7 | /A2/app/form.py | 3d2b4af3bc9bb7c09fc3646a81f113d6fb7cda66 | [] | no_license | wmuf/ECE1779_Cloud_Computing | 2d8b4420a26ea6169a5ad8ea13f8dd7997190f71 | 1e385a0a54d4bd8b0c3689ccb4e4064f02efb670 | refs/heads/master | 2023-07-24T16:40:45.875193 | 2021-04-20T02:41:54 | 2021-04-20T02:41:54 | 404,382,723 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 950 | py | from flask_wtf import FlaskForm
from wtforms import FloatField, IntegerField, SubmitField, validators
class AutoScalarForm(FlaskForm):
cpu_threshold_grow = IntegerField('Cpu_Threshold_Grow', [validators.optional(), validators.NumberRange(min=0, max=100, message="Please specify range from 0 to 100")], filters=[lambda x: x or None])
cpu_threshold_shrink = IntegerField('Cpu_Threshold_Shrink', [validators.optional(), validators.NumberRange(min=0, max=100, message="Please specify range from 0 to 100")], filters=[lambda x: x or None])
expand_ratio = IntegerField('Expand_Ratio', [validators.optional(), validators.NumberRange(min=1, max=8, message="Please specify range from 1 to 8")], filters=[lambda x: x or None])
shrink_ratio = FloatField('Shrink_Ratio', [validators.optional(), validators.NumberRange(min=0, max=1, message="Please specify range from 0 to 1")], filters=[lambda x: x or None])
submit = SubmitField('Submit')
| [
"[email protected]"
] | |
279d8933e8c2057be7901387644e7ccbc5494a53 | a39ecd4dce4b14f5d17416233fa16c76d2d3f165 | /RepositoryBootstrap/Impl/Utilities.py | 305320a07ef4e303ff131cad0be6735155fe6662 | [
"BSL-1.0",
"Python-2.0",
"OpenSSL",
"LicenseRef-scancode-unknown-license-reference",
"GPL-2.0-only"
] | permissive | davidbrownell/Common_Environment_v3 | 8e6bbed15004a38a4c6e6f337d78eb2339484d64 | 2981ad1566e6d3c00fd390a67dbc1277ef40aaba | refs/heads/master | 2022-09-03T19:04:57.270890 | 2022-06-28T01:33:31 | 2022-06-28T01:33:31 | 132,171,665 | 0 | 0 | BSL-1.0 | 2021-08-13T21:19:48 | 2018-05-04T17:47:30 | Python | UTF-8 | Python | false | false | 4,909 | py | # ----------------------------------------------------------------------
# |
# | Utilities.py
# |
# | David Brownell <[email protected]>
# | 2018-05-02 15:57:42
# |
# ----------------------------------------------------------------------
# |
# | Copyright David Brownell 2018-22.
# | Distributed under the Boost Software License, Version 1.0.
# | (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
# |
# ----------------------------------------------------------------------
"""Utilities used by multiple files within this module."""
import hashlib
import importlib
import os
import re
import sys
from contextlib import contextmanager
import six
from RepositoryBootstrap import Constants
from RepositoryBootstrap.Impl import CommonEnvironmentImports
# ----------------------------------------------------------------------
_script_fullpath = CommonEnvironmentImports.CommonEnvironment.ThisFullpath()
_script_dir, _script_name = os.path.split(_script_fullpath)
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
def GenerateCommands( functor, # def Func() -> []
is_debug,
):
"""
Generates shell-specific commands as returned by the provided functor.
Returns:
(result, generated_commands)
"""
assert functor
commands = []
try:
result = functor()
if isinstance(result, int):
commands = []
elif isinstance(result, tuple):
result, commands = result
else:
commands = result
result = 0
except Exception as ex:
if is_debug:
import traceback
error = traceback.format_exc()
else:
error = str(ex)
commands = [ CommonEnvironmentImports.CurrentShell.Commands.Message("\n\nERROR: {}".format(CommonEnvironmentImports.StringHelpers.LeftJustify(error, len("ERROR: ")))),
CommonEnvironmentImports.CurrentShell.Commands.Exit(return_code=-1),
]
result = -1
if is_debug and commands:
commands = [ CommonEnvironmentImports.CurrentShell.Commands.Message("{}\n".format(CommonEnvironmentImports.StringHelpers.Prepend( "Debug: ",
CommonEnvironmentImports.CurrentShell.GenerateCommands(commands),
skip_first_line=False,
))),
] + commands
return result, commands
# ----------------------------------------------------------------------
def CalculateFingerprint(repo_dirs, relative_root=None):
"""
Returns a value that can be used to determine if any configuration info
has changed for a repo and its dependencies.
"""
results = {}
for repo_dir in repo_dirs:
md5 = hashlib.md5()
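        # md5 serves as a cheap change-detection fingerprint here, not as a
        # security measure.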
filename = os.path.join(repo_dir, Constants.SETUP_ENVIRONMENT_CUSTOMIZATION_FILENAME)
if not os.path.isfile(filename):
continue
with open(filename, 'rb') as f:
# Skip the file header, as it has no impact on the file's actual contents.
in_file_header = True
for line in f:
if in_file_header and line.lstrip().startswith(b'#'):
continue
in_file_header = False
md5.update(line)
if relative_root:
repo_dir = CommonEnvironmentImports.FileSystem.GetRelativePath(relative_root, repo_dir)
results[repo_dir] = md5.hexdigest()
return results
# ----------------------------------------------------------------------
@contextmanager
def CustomMethodManager(customization_filename, method_name):
"""Attempts to load a customization filename and extract the given method."""
if not os.path.isfile(customization_filename):
yield None
return
customization_path, customization_name = os.path.split(customization_filename)
customization_name = os.path.splitext(customization_name)[0]
sys.path.insert(0, customization_path)
with CommonEnvironmentImports.CallOnExit(lambda: sys.path.pop(0)):
mod = importlib.import_module(customization_name)
with CommonEnvironmentImports.CallOnExit(lambda: sys.modules.pop(customization_name)):
yield getattr(mod, method_name, None)
| [
"[email protected]"
] | |
6512eaa1731b6c91c774540047b19a5886180e3b | 080c13cd91a073457bd9eddc2a3d13fc2e0e56ae | /GIT-USERS/TOM2/CS32_Architecture_GP/day4/simple.py | b368bec2f1d9590d966617b2ce072a8e347ffd3e | [] | no_license | Portfolio-Projects42/UsefulResourceRepo2.0 | 1dccc8961a09347f124d3ed7c27c6d73b9806189 | 75b1e23c757845b5f1894ebe53551a1cf759c6a3 | refs/heads/master | 2023-08-04T12:23:48.862451 | 2021-09-15T12:51:35 | 2021-09-15T12:51:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,238 | py | # Let's build a data driven machine!
import sys
# What do we need to have our machine working?
"""
- Some sort of memory
- Some way of stopping operation
- Some way of keeping the CPU running
- Some sort of storage for local variables seperate from main RAM (memory) eg; Registers
- Some sort of operations that can be performed such as (printing something, saving data to a variable[register] )
- Some FETCH, DECODE, EXECUTE CYCLE
"""
# Operations that we can perform
HALT = 0b00000001
PRINT_VLAD = 2
PRINT_NUM = 3
SAVE = 0b10000010
PRINT_REG = 5
ADD = 6
# PUSH and POP
PUSH = 7
POP = 8
# TODO: CALL and RET
CALL = 0b01001001
RET = 0b00001010
SUB = 23
PRN = 0b01000111
SHL = 0b10101100
SHR = 0b10101101
# some sort of memory (lets refactor this to load in opcodes from a file)
def load_memory(filename):
# TODO do some logic here
try:
address = 0
with open(filename) as f:
for line in f:
comment_split = line.split("#")
n = comment_split[0].strip()
if n == '':
continue
val = int(n, 2)
# store val in memory
memory[address] = val
address += 1
print(f"{val:08b}: {val:d}")
except FileNotFoundError:
print(f"{sys.argv[0]}: {filename} not found")
sys.exit(2)
memory = [0] * 256
# keep track of running?
running = True
# some sort of counter
pc = 0
# Some local var holders [registers]
registers = [0] * 10
# Stack Pointer (R7) as per specs
# index of the registers list
# SP
SP = 7
# to use to store where the top of the stack is
# 0xF4 (244)
registers[SP] = 244
# size of opcode
op_size = 1
# grab any args
if len(sys.argv) != 2:
print("usage: simple.py filename")
sys.exit(1)
# load opcodes in to memory
load_memory(sys.argv[1])
# REPL to run once per cycle of CPU
# inside this we will have our FETCH, DECODE, EXECUTE CYCLE
while running:
# FETCH
cmd = memory[pc]
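    # For the binary-encoded opcodes, the top two bits hold the operand
    # count, so the total instruction length is that count plus one.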
op_size = ((cmd >> 6) & 0b11) + 1
# DECODE
if cmd == PRINT_VLAD:
# EXECUTE
print("Vlad")
elif cmd == HALT:
running = False
elif cmd == PRINT_NUM:
num = memory[pc + 1]
print(num)
elif cmd == PRINT_REG:
index_of_reg = memory[pc + 1]
num_at_reg = registers[index_of_reg]
print(num_at_reg)
elif cmd == SAVE:
num_to_save = memory[pc + 1] # 300
reg_index = memory[pc + 2]
registers[reg_index] = num_to_save
elif cmd == ADD:
reg_index_a = memory[pc + 1]
reg_index_b = memory[pc + 2]
registers[reg_index_a] += registers[reg_index_b]
elif cmd == SUB:
reg_index_a = memory[pc + 1]
reg_index_b = memory[pc + 2]
registers[reg_index_a] -= registers[reg_index_b]
elif cmd == SHL:
reg_index_a = memory[pc + 1]
reg_index_b = memory[pc + 2]
registers[reg_index_a] <<= registers[reg_index_b]
elif cmd == SHR:
reg_index_a = memory[pc + 1]
reg_index_b = memory[pc + 2]
registers[reg_index_a] >>= registers[reg_index_b]
# PUSH
elif cmd == PUSH:
# setup
reg_index = memory[pc + 1]
val = registers[reg_index]
# decrememt Stack Pointer
registers[SP] -= 1
# insert val on to the stack
memory[registers[SP]] = val
# POP
elif cmd == POP:
# setup
reg_index = memory[pc + 1]
val = memory[registers[SP]]
# take value from stack and put it in reg
registers[reg_index] = val
# increment Stack Pointer
registers[SP] += 1
# CALL
elif cmd == CALL:
# push the return address on to the stack
registers[SP] -= 1
memory[registers[SP]] = pc + 2
# Set the PC to the subroutines address
reg = memory[pc + 1]
pc = registers[reg]
op_size = 0
# RET
elif cmd == RET:
# POP return address from stack to store in pc
pc = memory[registers[SP]]
registers[SP] += 1
op_size = 0
else:
print(f"Invalid Instruction: {cmd}")
running = False
pc += op_size | [
"[email protected]"
] | |
0dd564c9ec118b6ab6323eccabc8304d63041320 | 0f481498bba97a7bb9f38bc2b9a1dc5b9ebf50a5 | /Pacote-download/Exercicios/ex045.py | d818f1b6795a40fce1325086d8ba0bb24fd50a3f | [
"MIT"
] | permissive | agnaka/CEV-Python-Exercicios | d7e8efd6426d60d6920ba3cfddbd049a80e7d6da | a4299abd5da283b1b15ed2436965db162f42885f | refs/heads/master | 2022-10-23T11:45:56.298286 | 2020-06-10T21:13:15 | 2020-06-10T21:13:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,486 | py | from random import randint
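# Rock-paper-scissors ("Jankenpô") exercise; prompts and results are printed
# in Portuguese.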
print('-=' * 20)
print('\033[1;34mVAMOS A JOGAR JANKENPÔ!!!\033[m')
print('-=' * 20)
print('''SUAS OPÇÕES:
[1] Pedra
[2] Papel
[3] Tesoura''')
choice = int(input('Qual a sua escolha? '))
print('JAN')
print('KEN')
print('PO!!!')
itens = ('Pedra', 'Papel', 'Tesoura')
compu = randint(1, 3)
# print(compu)
if compu == 1 and choice == 1:
print('O computador escolheu {} e você {} - EMPATOU! Jogar novamente'.format('PEDRA', 'PEDRA'))
elif compu == 1 and choice == 2:
print('O computador escolheu {} e você {} - VOCÊ GANHOU!!!'.format('PEDRA', 'PAPEL'))
elif compu == 1 and choice == 3:
print('O computador escolheu {} e você {} - VOCÊ PERDEU'.format('PEDRA', 'TESOURA'))
elif compu == 2 and choice == 2:
print('O computador escolheu {} e você {} - EMPATOU! Jogar novamente'.format('PAPEL', 'PAPEL'))
elif compu == 2 and choice == 1:
print('O computador escolheu {} e você {} - VOCÊ PERDEU'.format('PAPEL', 'PEDRA'))
elif compu == 2 and choice == 3:
print('O computador escolheu {} e você {} - VOCÊ GANHOU!!!'.format('PAPEL', 'TESOURA'))
elif compu == 3 and choice == 3:
print('O computador escolheu {} e você {} - EMPATOU! Jogar novamente'.format('TESOURA', 'TESOURA'))
elif compu == 3 and choice == 1:
print('O computador escolheu {} e você {} - VOCÊ GANHOU!!!'.format('TESOURA', 'PEDRA'))
elif compu == 3 and choice == 2:
print('O computador escolheu {} e você {} - VOCÊ PERDEU'.format('TESOURA', 'PAPEL'))
| [
"[email protected]"
] | |
cf50250d8ef3adadc370a28b4e97588d22adf4a9 | 8898273f9811fab29eb5621734bafcdf204d8229 | /scipy-stubs/special/_precompute/expn_asy.pyi | 61ecaf6d73b6d529e4f36b9d6019a65c5721a799 | [] | no_license | tyrion/scipy-stubs | 628ad6321a7e1502683a2b55a759777508ab4b67 | bf49a91313523c4f635bc3e5d14444c1361caf64 | refs/heads/master | 2020-05-30T21:59:43.001510 | 2019-06-03T10:30:54 | 2019-06-03T10:30:54 | 189,984,340 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 238 | pyi | # Stubs for scipy.special._precompute.expn_asy (Python 3.6)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
from typing import Any
x: Any
def generate_A(K: Any): ...
WARNING: str
def main() -> None: ...
| [
"[email protected]"
] | |
7557b31d5f98ea2c2c7f9df591d067658163f0a1 | 3035e6a2b4e5b5662670c188785ed9fad0e1a315 | /Chapter07/example/python/permissions/can_get_all_acc_txs.py | 18c41042f78d6a276b339039ec7df00cbc8a5bdd | [
"MIT"
] | permissive | mahen92/Hyperledger-Cookbook | 52491da47ea7e4b3d988b1303ad4641d89bd3c0e | c2aaf9f9fd58757110a2a6b3ab7498da11fba254 | refs/heads/master | 2021-01-09T15:36:10.368893 | 2020-04-10T18:17:41 | 2020-04-10T18:17:41 | 242,358,174 | 0 | 0 | MIT | 2020-02-22T14:46:54 | 2020-02-22T14:46:53 | null | UTF-8 | Python | false | false | 1,308 | py | #
# Copyright Soramitsu Co., Ltd. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
#
import iroha
import commons
admin = commons.new_user('admin@first')
alice = commons.new_user('alice@second')
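# The genesis transaction below sets up two domains and grants test_role (and
# so alice) only the kGetAllAccTxs permission that this example exercises.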
@commons.hex
def genesis_tx():
test_permissions = iroha.RolePermissionSet([iroha.Role_kGetAllAccTxs])
tx = iroha.ModelTransactionBuilder() \
.createdTime(commons.now()) \
.creatorAccountId(admin['id']) \
.addPeer('0.0.0.0:50541', admin['key'].publicKey()) \
.createRole('admin_role', commons.all_permissions()) \
.createRole('test_role', test_permissions) \
.createDomain('first', 'admin_role') \
.createDomain('second', 'test_role') \
.createAccount('admin', 'first', admin['key'].publicKey()) \
.createAccount('alice', 'second', alice['key'].publicKey()) \
.build()
return iroha.ModelProtoTransaction(tx) \
.signAndAddSignature(admin['key']).finish()
@commons.hex
def account_transactions_query():
tx = iroha.ModelQueryBuilder() \
.createdTime(commons.now()) \
.queryCounter(1) \
.creatorAccountId(alice['id']) \
.getAccountTransactions(admin['id']) \
.build()
return iroha.ModelProtoQuery(tx) \
.signAndAddSignature(alice['key']).finish()
| [
"[email protected]"
] | |
ef464d2028beaa30b26f3bd7318554f2e18e9109 | 7142c3941481e661075154d714a29d5e283a3074 | /Decorator1.py | d4a71bacf012ffb8e07545dfa66863b19ccd5332 | [] | no_license | nirajan5/Demo | 5642a9669fedcca47b0304ac423c0b3e6333b8e2 | 2451875bf5698cd38af69baa117c14099951bc9f | refs/heads/master | 2023-07-27T17:04:03.689673 | 2021-09-15T11:14:25 | 2021-09-15T11:14:25 | 406,732,005 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 237 | py | def make_pretty(func):
def inner():
print("I got decorated")
func()
return inner
def simple():
print("I am simple")
simple()
# let's decorate this ordinary function
pretty = make_pretty(simple)
pretty()
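# Equivalent decorator syntax (a sketch): writing @make_pretty above
# "def simple():" rebinds the name to the wrapped function automatically.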
| [
"[email protected]"
] | |
f03d78865aa943a2184426a1b2b98fbe6b5b1c44 | 2d4545d2295ff88c425205af1ea593b85cf3b3c7 | /backend/manage.py | efb841ad080b09e4d08ac5b7802dcf086c4584af | [] | no_license | crowdbotics-apps/square-cloud-27361 | 202cf34b8d05497692ae16087a17dcd214526f36 | fb6441cbbf08d174c39e3d3969ce417fa44caf59 | refs/heads/master | 2023-05-04T12:43:40.597182 | 2021-05-24T23:24:56 | 2021-05-24T23:24:56 | 370,509,537 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 638 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'square_cloud_27361.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
1f26110f249939ecb0f0260b32cca636fdea2aee | 0c534f461e9c1e8b9ef442c1bac1d7a1dea851b1 | /new_plotter.py | 0dab04a15268c6cbe758b8500943bf32a14cc5ad | [] | no_license | paulgowdy/nle | bb77e07a02e319775266091e34ad6f669d1034cd | 27c62f443b7ff6fcd3822596b86152ef2f320804 | refs/heads/main | 2023-08-03T16:44:00.607002 | 2021-09-03T04:33:12 | 2021-09-03T04:33:12 | 390,802,676 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,721 | py | import matplotlib.pyplot as plt
import pandas as pd
runs = [
#'2021-08-05/09-53-24',
'2021-08-08/17-38-17',
'2021-08-09/17-54-30',
]
colors = ['navy','darkred','green','navy','navy','red','aqua','cyan','red','red','darkred']
prefix = "//wsl$/Ubuntu-20.04/home/paulgamble/neurips-2021-the-nethack-challenge/nethack_baselines/torchbeast/outputs/"
#prefix = "//wsl$/Ubuntu-20.04/home/paulgamble/hackbot_transformer/nethack_baselines/torchbeast/outputs/"
suffix = "/logs.csv"
roll_window = 100
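# Smooth the noisy per-episode curves with a rolling mean before plotting.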
plt.figure()
ax = plt.gca()
for r, c in zip(runs, colors):
log_fn = prefix + r + suffix
df = pd.read_csv(log_fn)
df['rolling_score'] = df['mean_episode_return'].rolling(roll_window).mean()
#df['score_std_low'] = df['rolling_score'] - df['mean_episode_return'].rolling(roll_window).std()
#df['score_std_high'] = df['rolling_score'] + df['mean_episode_return'].rolling(roll_window).std()
#ax.fill_between(df['step'], df['score_std_low'], df['score_std_high'], color=c, alpha=0.3)
df.plot(x='step',y='rolling_score',ax=ax, color=c)
labels = [x.split('/')[-1] for x in runs]
plt.legend(labels)
plt.title("Mean Episode Score")
#plt.ylim(-200,0)
plt.figure()
ax = plt.gca()
for r, c in zip(runs, colors):
log_fn = prefix + r + suffix
df = pd.read_csv(log_fn)
df['rolling_score'] = df['mean_episode_step'].rolling(roll_window).mean()
#df['rolling_score'] = df['mean_episode_return'].rolling(roll_window).mean()
#df['rolling_score'].plot(x='step')
#df['mean_episode_return'].plot()
df.plot(x='step',y='rolling_score',ax=ax, color=c)
plt.legend(runs)
#plt.ylim(-200,0)
plt.title("Mean Episode Steps")
plt.show()
| [
"[email protected]"
] | |
64ff0b3da04db2adfecb58e8771034b3ad7b2520 | 859093a06bb7b8ff2c00f21d4d3052b9d6b3a580 | /schedule/widgets.py | b7aa89545511619cbebd2f923c9a003ca96d629d | [
"MIT"
] | permissive | fitahol/fitahol | bbf71b695fbacad2d3a1f99a034c041ea6069529 | ce84dc909aa98f2dc7594ef26568e015cbfe0e94 | refs/heads/master | 2021-01-19T20:18:11.677674 | 2017-02-20T14:05:39 | 2017-02-20T14:05:39 | 82,561,065 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,540 | py | from __future__ import unicode_literals
from django.forms.widgets import TextInput
from django.utils.safestring import mark_safe
class SpectrumColorPicker(TextInput):
"""
Based on Brian Grinstead's Spectrum - http://bgrins.github.com/spectrum/
"""
class Media:
css = {'all': ("//cdnjs.cloudflare.com/ajax/libs/spectrum/1.7.1/spectrum.css",)}
js = ('//cdnjs.cloudflare.com/ajax/libs/jquery/1.8.3/jquery.min.js',
'//cdnjs.cloudflare.com/ajax/libs/spectrum/1.7.1/spectrum.js',)
def _render_js(self, _id, value):
js = u"""
<script type="text/javascript">
$(document).ready(function(){
$('#%s').spectrum({
color: "",
allowEmpty: true,
showAlpha: true,
showInput: true,
className: "full-spectrum",
showInitial: true,
showPalette: true,
showSelectionPalette: true,
maxSelectionSize: 10,
preferredFormat: "hex",
localStorageKey: "spectrum.demo",
palette: [
["rgb(0, 0, 0)", "rgb(67, 67, 67)", "rgb(102, 102, 102)",
"rgb(204, 204, 204)", "rgb(217, 217, 217)","rgb(255, 255, 255)"],
["rgb(152, 0, 0)", "rgb(255, 0, 0)", "rgb(255, 153, 0)", "rgb(255, 255, 0)", "rgb(0, 255, 0)",
"rgb(0, 255, 255)", "rgb(74, 134, 232)", "rgb(0, 0, 255)", "rgb(153, 0, 255)", "rgb(255, 0, 255)"],
["rgb(230, 184, 175)", "rgb(244, 204, 204)", "rgb(252, 229, 205)", "rgb(255, 242, 204)", "rgb(217, 234, 211)",
"rgb(208, 224, 227)", "rgb(201, 218, 248)", "rgb(207, 226, 243)", "rgb(217, 210, 233)", "rgb(234, 209, 220)",
"rgb(221, 126, 107)", "rgb(234, 153, 153)", "rgb(249, 203, 156)", "rgb(255, 229, 153)", "rgb(182, 215, 168)",
"rgb(162, 196, 201)", "rgb(164, 194, 244)", "rgb(159, 197, 232)", "rgb(180, 167, 214)", "rgb(213, 166, 189)",
"rgb(204, 65, 37)", "rgb(224, 102, 102)", "rgb(246, 178, 107)", "rgb(255, 217, 102)", "rgb(147, 196, 125)",
"rgb(118, 165, 175)", "rgb(109, 158, 235)", "rgb(111, 168, 220)", "rgb(142, 124, 195)", "rgb(194, 123, 160)",
"rgb(166, 28, 0)", "rgb(204, 0, 0)", "rgb(230, 145, 56)", "rgb(241, 194, 50)", "rgb(106, 168, 79)",
"rgb(69, 129, 142)", "rgb(60, 120, 216)", "rgb(61, 133, 198)", "rgb(103, 78, 167)", "rgb(166, 77, 121)",
"rgb(91, 15, 0)", "rgb(102, 0, 0)", "rgb(120, 63, 4)", "rgb(127, 96, 0)", "rgb(39, 78, 19)",
"rgb(12, 52, 61)", "rgb(28, 69, 135)", "rgb(7, 55, 99)", "rgb(32, 18, 77)", "rgb(76, 17, 48)"]
]
});
});
</script>""" % (_id)
return js
    def render(self, name, value, attrs=None):
        if attrs is None:
            attrs = {}
        if 'id' not in attrs:
attrs['id'] = "id_%s" % name
rendered = super(SpectrumColorPicker, self).render(name, value, attrs)
return mark_safe(rendered + self._render_js(attrs['id'], value)) | [
"[email protected]"
] | |
2ddcf7148c7696de359ace2ede7a271758df3cfc | 2118f244be2e09508e3c89dee432d4a75343b430 | /Twitter Projects/twitter_sentiment_basic_with_function_RJ_Keys.py | 981ff629e356b9bde7b1e8186617e488aaf965f0 | [] | no_license | RamiJaloudi/Python-Scripts | 91d139093a95f9498a77b1df8ec2f790c4f4dd4c | 37e740a618ae543a02c38dc04a32ef95202ff613 | refs/heads/master | 2020-04-29T14:55:41.108332 | 2019-03-18T05:42:06 | 2019-03-18T05:42:06 | 176,212,014 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,027 | py | from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
#consumer key, consumer secret, access token, access secret.
ckey="r2I3FdcFB3WRKpKoxhpb9pkra"
csecret="Snt0LzxPyKIUQphTQmbsf0DKPALKPfCAy4Jjr3g9O3A93AGdHM"
atoken="18894514-JsJsbjRkWF4jgA7nrMyNYfLR3RccNSUlTzrYO5shJ"
asecret="BhFpvR3ZJe46wmA3sEUJ1eStz8y83WtgIlw91jJBU01z6"
##def sentimentAnalysis(text):
##    encoded_text = urllib.parse.quote(text)
class listener(StreamListener):
def on_data(self, data):
print(data)
        return True
## tweet = data.split(',"text:"')[1].split('","source')[0]
##
## saveMe = tweet+'::'+sentimentRating+'\n'
## output = open('output.txt','a')
##        output.write(saveMe)
## output.close()
## return True
def on_error(self, status):
        print(status)
auth = OAuthHandler(ckey, csecret)
auth.set_access_token(atoken, asecret)
twitterStream = Stream(auth, listener())
twitterStream.filter(track=["#target"])
| [
"[email protected]"
] | |
65398257cd8f44323e9a0e99c7ed1824e8f632ba | 974c5a4f101d0e6f4dfa5fc2f7c641c9d2bd8184 | /sdk/resources/azure-mgmt-resource/azure/mgmt/resource/policy/v2019_09_01/aio/_configuration.py | 38ab1c393bb97968a488f5f477a42303c3b73493 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | gaoyp830/azure-sdk-for-python | 4816f04c554dcffb7510a6b7044b0c86a2dd32e1 | 1c66defa502b754abcc9e5afa444ca03c609342f | refs/heads/master | 2022-10-20T21:33:44.281041 | 2022-09-29T17:03:13 | 2022-09-29T17:03:13 | 250,355,505 | 0 | 0 | MIT | 2020-03-26T19:42:13 | 2020-03-26T19:42:12 | null | UTF-8 | Python | false | false | 3,523 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class PolicyClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
"""Configuration for PolicyClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The ID of the target subscription.
:type subscription_id: str
:keyword api_version: Api Version. Default value is "2019-09-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
**kwargs: Any
) -> None:
super(PolicyClientConfiguration, self).__init__(**kwargs)
api_version = kwargs.pop('api_version', "2019-09-01") # type: str
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
self.credential = credential
self.subscription_id = subscription_id
self.api_version = api_version
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-resource/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(self.credential, *self.credential_scopes, **kwargs)
| [
"[email protected]"
] | |
3e9404e4d11e541baa608263d9150e061d42f754 | 7343ece3b82ac87a594865c4074623b45b0297b4 | /tests/push/test_bulk_push_rule_evaluator.py | 7c23b77e0a11be202bca81598c6613e73eb667d8 | [
"Apache-2.0"
] | permissive | matrix-org/synapse | a00111f83310783b78e2996557f8bbae4d9fb229 | d35bed8369514fe727b4fe1afb68f48cc8b2655a | refs/heads/develop | 2023-09-05T05:24:20.808942 | 2023-09-04T16:14:09 | 2023-09-04T16:14:09 | 22,844,864 | 12,215 | 2,869 | Apache-2.0 | 2023-09-14T15:20:48 | 2014-08-11T15:51:42 | Python | UTF-8 | Python | false | false | 16,762 | py | # Copyright 2022 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Optional
from unittest.mock import AsyncMock, patch
from parameterized import parameterized
from twisted.test.proto_helpers import MemoryReactor
from synapse.api.constants import EventContentFields, RelationTypes
from synapse.api.room_versions import RoomVersions
from synapse.push.bulk_push_rule_evaluator import BulkPushRuleEvaluator
from synapse.rest import admin
from synapse.rest.client import login, register, room
from synapse.server import HomeServer
from synapse.types import JsonDict, create_requester
from synapse.util import Clock
from tests.unittest import HomeserverTestCase, override_config
class TestBulkPushRuleEvaluator(HomeserverTestCase):
servlets = [
admin.register_servlets_for_client_rest_resource,
room.register_servlets,
login.register_servlets,
register.register_servlets,
]
def prepare(
self, reactor: MemoryReactor, clock: Clock, homeserver: HomeServer
) -> None:
# Create a new user and room.
self.alice = self.register_user("alice", "pass")
self.token = self.login(self.alice, "pass")
self.requester = create_requester(self.alice)
self.room_id = self.helper.create_room_as(
# This is deliberately set to V9, because we want to test the logic which
# handles stringy power levels. Stringy power levels were outlawed in V10.
self.alice,
room_version=RoomVersions.V9.identifier,
tok=self.token,
)
self.event_creation_handler = self.hs.get_event_creation_handler()
@parameterized.expand(
[
# The historically-permitted bad values. Alice's notification should be
# allowed if this threshold is at or below her power level (60)
("100", False),
("0", True),
(12.34, True),
(60.0, True),
(67.89, False),
# Values that int(...) would not successfully cast should be ignored.
# The room notification level should then default to 50, per the spec, so
# Alice's notification is allowed.
(None, True),
# We haven't seen `"room": []` or `"room": {}` in the wild (yet), but
# let's check them for paranoia's sake.
([], True),
({}, True),
]
)
def test_action_for_event_by_user_handles_noninteger_room_power_levels(
self, bad_room_level: object, should_permit: bool
) -> None:
"""We should convert strings in `room` to integers before passing to Rust.
Test this as follows:
- Create a room as Alice and invite two other users Bob and Charlie.
- Set PLs so that Alice has PL 60 and `notifications.room` is set to a bad value.
- Have Alice create a message notifying @room.
- Evaluate notification actions for that message. This should not raise.
- Look in the DB to see if that message triggered a highlight for Bob.
The test is parameterised with two arguments:
        - the bad power level value for "room", before JSON serialisation
- whether Bob should expect the message to be highlighted
Reproduces #14060.
A lack of validation: the gift that keeps on giving.
"""
# Join another user to the room, so that there is someone to see Alice's
# @room notification.
bob = self.register_user("bob", "pass")
bob_token = self.login(bob, "pass")
self.helper.join(self.room_id, bob, tok=bob_token)
# Alter the power levels in that room to include the bad @room notification
# level. We need to suppress
#
# - canonicaljson validation, because canonicaljson forbids floats;
# - the event jsonschema validation, because it will forbid bad values; and
# - the auth rules checks, because they stop us from creating power levels
# with `"room": null`. (We want to test this case, because we have seen it
# in the wild.)
#
# We have seen stringy and null values for "room" in the wild, so presumably
# some of this validation was missing in the past.
with patch("synapse.events.validator.validate_canonicaljson"), patch(
"synapse.events.validator.jsonschema.validate"
), patch("synapse.handlers.event_auth.check_state_dependent_auth_rules"):
pl_event_id = self.helper.send_state(
self.room_id,
"m.room.power_levels",
{
"users": {self.alice: 60},
"notifications": {"room": bad_room_level},
},
self.token,
state_key="",
)["event_id"]
# Create a new message event, and try to evaluate it under the dodgy
# power level event.
event, unpersisted_context = self.get_success(
self.event_creation_handler.create_event(
self.requester,
{
"type": "m.room.message",
"room_id": self.room_id,
"content": {
"msgtype": "m.text",
"body": "helo @room",
},
"sender": self.alice,
},
prev_event_ids=[pl_event_id],
)
)
context = self.get_success(unpersisted_context.persist(event))
bulk_evaluator = BulkPushRuleEvaluator(self.hs)
# should not raise
self.get_success(bulk_evaluator.action_for_events_by_user([(event, context)]))
# Did Bob see Alice's @room notification?
highlighted_actions = self.get_success(
self.hs.get_datastores().main.db_pool.simple_select_list(
table="event_push_actions_staging",
keyvalues={
"event_id": event.event_id,
"user_id": bob,
"highlight": 1,
},
retcols=("*",),
desc="get_event_push_actions_staging",
)
)
self.assertEqual(len(highlighted_actions), int(should_permit))
@override_config({"push": {"enabled": False}})
def test_action_for_event_by_user_disabled_by_config(self) -> None:
"""Ensure that push rules are not calculated when disabled in the config"""
# Create a new message event which should cause a notification.
event, unpersisted_context = self.get_success(
self.event_creation_handler.create_event(
self.requester,
{
"type": "m.room.message",
"room_id": self.room_id,
"content": {
"msgtype": "m.text",
"body": "helo",
},
"sender": self.alice,
},
)
)
context = self.get_success(unpersisted_context.persist(event))
bulk_evaluator = BulkPushRuleEvaluator(self.hs)
# Mock the method which calculates push rules -- we do this instead of
# e.g. checking the results in the database because we want to ensure
# that code isn't even running.
bulk_evaluator._action_for_event_by_user = AsyncMock() # type: ignore[method-assign]
# Ensure no actions are generated!
self.get_success(bulk_evaluator.action_for_events_by_user([(event, context)]))
bulk_evaluator._action_for_event_by_user.assert_not_called()
def _create_and_process(
self, bulk_evaluator: BulkPushRuleEvaluator, content: Optional[JsonDict] = None
) -> bool:
"""Returns true iff the `mentions` trigger an event push action."""
# Create a new message event which should cause a notification.
event, unpersisted_context = self.get_success(
self.event_creation_handler.create_event(
self.requester,
{
"type": "test",
"room_id": self.room_id,
"content": content or {},
"sender": f"@bob:{self.hs.hostname}",
},
)
)
context = self.get_success(unpersisted_context.persist(event))
# Execute the push rule machinery.
self.get_success(bulk_evaluator.action_for_events_by_user([(event, context)]))
# If any actions are generated for this event, return true.
result = self.get_success(
self.hs.get_datastores().main.db_pool.simple_select_list(
table="event_push_actions_staging",
keyvalues={"event_id": event.event_id},
retcols=("*",),
desc="get_event_push_actions_staging",
)
)
return len(result) > 0
def test_user_mentions(self) -> None:
"""Test the behavior of an event which includes invalid user mentions."""
bulk_evaluator = BulkPushRuleEvaluator(self.hs)
# Not including the mentions field should not notify.
self.assertFalse(self._create_and_process(bulk_evaluator))
# An empty mentions field should not notify.
self.assertFalse(
self._create_and_process(bulk_evaluator, {EventContentFields.MENTIONS: {}})
)
# Non-dict mentions should be ignored.
#
# Avoid C-S validation as these aren't expected.
with patch(
"synapse.events.validator.EventValidator.validate_new",
new=lambda s, event, config: True,
):
mentions: Any
for mentions in (None, True, False, 1, "foo", []):
self.assertFalse(
self._create_and_process(
bulk_evaluator, {EventContentFields.MENTIONS: mentions}
)
)
# A non-list should be ignored.
for mentions in (None, True, False, 1, "foo", {}):
self.assertFalse(
self._create_and_process(
bulk_evaluator,
{EventContentFields.MENTIONS: {"user_ids": mentions}},
)
)
# The Matrix ID appearing anywhere in the list should notify.
self.assertTrue(
self._create_and_process(
bulk_evaluator,
{EventContentFields.MENTIONS: {"user_ids": [self.alice]}},
)
)
self.assertTrue(
self._create_and_process(
bulk_evaluator,
{
EventContentFields.MENTIONS: {
"user_ids": ["@another:test", self.alice]
}
},
)
)
# Duplicate user IDs should notify.
self.assertTrue(
self._create_and_process(
bulk_evaluator,
{EventContentFields.MENTIONS: {"user_ids": [self.alice, self.alice]}},
)
)
# Invalid entries in the list are ignored.
#
# Avoid C-S validation as these aren't expected.
with patch(
"synapse.events.validator.EventValidator.validate_new",
new=lambda s, event, config: True,
):
self.assertFalse(
self._create_and_process(
bulk_evaluator,
{
EventContentFields.MENTIONS: {
"user_ids": [None, True, False, {}, []]
}
},
)
)
self.assertTrue(
self._create_and_process(
bulk_evaluator,
{
EventContentFields.MENTIONS: {
"user_ids": [None, True, False, {}, [], self.alice]
}
},
)
)
# The legacy push rule should not mention if the mentions field exists.
self.assertFalse(
self._create_and_process(
bulk_evaluator,
{
"body": self.alice,
"msgtype": "m.text",
EventContentFields.MENTIONS: {},
},
)
)
def test_room_mentions(self) -> None:
"""Test the behavior of an event which includes invalid room mentions."""
bulk_evaluator = BulkPushRuleEvaluator(self.hs)
# Room mentions from those without power should not notify.
self.assertFalse(
self._create_and_process(
bulk_evaluator, {EventContentFields.MENTIONS: {"room": True}}
)
)
# Room mentions from those with power should notify.
self.helper.send_state(
self.room_id,
"m.room.power_levels",
{"notifications": {"room": 0}},
self.token,
state_key="",
)
self.assertTrue(
self._create_and_process(
bulk_evaluator, {EventContentFields.MENTIONS: {"room": True}}
)
)
# Invalid data should not notify.
#
# Avoid C-S validation as these aren't expected.
with patch(
"synapse.events.validator.EventValidator.validate_new",
new=lambda s, event, config: True,
):
mentions: Any
for mentions in (None, False, 1, "foo", [], {}):
self.assertFalse(
self._create_and_process(
bulk_evaluator,
{EventContentFields.MENTIONS: {"room": mentions}},
)
)
# The legacy push rule should not mention if the mentions field exists.
self.assertFalse(
self._create_and_process(
bulk_evaluator,
{
"body": "@room",
"msgtype": "m.text",
EventContentFields.MENTIONS: {},
},
)
)
def test_suppress_edits(self) -> None:
"""Under the default push rules, event edits should not generate notifications."""
bulk_evaluator = BulkPushRuleEvaluator(self.hs)
# Create & persist an event to use as the parent of the relation.
event, unpersisted_context = self.get_success(
self.event_creation_handler.create_event(
self.requester,
{
"type": "m.room.message",
"room_id": self.room_id,
"content": {
"msgtype": "m.text",
"body": "helo",
},
"sender": self.alice,
},
)
)
context = self.get_success(unpersisted_context.persist(event))
self.get_success(
self.event_creation_handler.handle_new_client_event(
self.requester, events_and_context=[(event, context)]
)
)
# The edit should not cause a notification.
self.assertFalse(
self._create_and_process(
bulk_evaluator,
{
"body": "Test message",
"m.relates_to": {
"rel_type": RelationTypes.REPLACE,
"event_id": event.event_id,
},
},
)
)
# An edit which is a mention will cause a notification.
self.assertTrue(
self._create_and_process(
bulk_evaluator,
{
"body": "Test message",
"m.relates_to": {
"rel_type": RelationTypes.REPLACE,
"event_id": event.event_id,
},
"m.mentions": {
"user_ids": [self.alice],
},
},
)
)
| [
"[email protected]"
] | |
248c9152bbb8623c6fc0909ddc639ffa604c646b | 99e4d9226e124215aaf66945cfaa5c42d18cc19f | /questionbot/matchableSentence.py | 08fc5ea5a19bf209ddf7989190890511301aaabe | [
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] | permissive | mathieucaroff/oxowlbot | d826423a1a4cca8a38c90383d0a71dbb40052f35 | a10c12b7c94b3e7030cef2f57c567bbd3034c8c9 | refs/heads/master | 2022-04-18T14:06:29.049957 | 2020-04-22T14:44:57 | 2020-04-22T14:44:57 | 255,177,595 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,442 | py | import logging
from typing import List
from .stanza.pword import PWord
symbolMap = {
-6: "<=-",
-5: "<-=",
-4: "<--",
-3: "<=",
-2: "<-",
-1: "<",
0: "==",
1: ">",
2: "->",
3: "=>",
4: "-->",
5: "=->",
6: "-=>",
}
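# Each arrow string encodes the signed distance from a token to its
# dependency head (head - id), for offsets between -6 and 6.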
def matchableSentence(wordList: List[PWord]) -> str:
matchableWordList = []
for word in wordList:
matchableWordList.append(matchableWord(word))
return " ".join(matchableWordList)
def matchableWord(word: PWord) -> str:
diff = word.head - int(word.id)
if word.head == 0:
diff = 0
symbol = symbolMap.get(diff)
number = "x"
hintString = ""
pastHint = 0
for piece in word.feats.split("|"):
if piece == "Number=Plur":
number = "2"
if piece == "Number=Sing":
number = "1"
if piece == "VerbForm=Part":
pastHint += 1
if piece == "Tense=Past":
pastHint += 1
if pastHint >= 2:
hintString += "_Hpast"
w = word
upos = w.upos.lower()
feats = w.feats.replace("|", "_F").replace(":", "+")
deprel = w.deprel.replace(':', '+')
result = f":{w.id}_L{w.lemma}_U{upos}_N{number}_R{deprel}{hintString}_F{feats}_{symbol}."
if "." in result[1:-1] or ":" in result[1:-1]:
logging.error(f"bad (:.) matchableWord: {result}")
result = ":" + result.replace(":", "").replace(".", "") + "."
return result
| [
"[email protected]"
] | |
9e367421bb74b17511012b38e47f3fc511540a62 | f98347c036a98c32a0c72c49bf1e298588d48bab | /MyProjectRest/MyProjectRest/settings.py | bdbb19e0337188845c243b6cae3526de63938721 | [] | no_license | ikki2530/django_isfun | 2de26ceb1e3e2a76063dcd602f8c3afa627713cb | 91615c96b2297005ca3a21edc123466ca7d4ae18 | refs/heads/master | 2022-12-26T21:14:29.824341 | 2020-10-16T16:21:43 | 2020-10-16T16:21:43 | 288,185,540 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,181 | py | """
Django settings for MyProjectRest project.
Generated by 'django-admin startproject' using Django 2.2.11.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'nt!@6n=-m_klbe=fg7)g0j2hqefw-pcj9t8vb(yl!g8^h*_(d^'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'api_basic',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'MyProjectRest.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'MyProjectRest.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
] | |
10f1a2beeb2f92dd6f7c12c073707ab12b23578b | 176497ba1cea7233f249a5f439a65f7c472b267f | /11_polls_part_4/02_refactor_using_django_form/feed/forms.py | b6e2d79cd5ed81d818be9a91bfe921cbf89f9fc8 | [] | no_license | CodingNomads/django-web-dev | 79a3a94707489ca0d5f0bf49193b7ffdf6270f4a | e03b8ed130f100afb0296c0d76a84206fbbf789d | refs/heads/master | 2023-05-02T05:12:21.427462 | 2022-11-06T17:56:14 | 2022-11-06T17:56:14 | 235,174,521 | 1 | 7 | null | 2023-04-21T20:54:10 | 2020-01-20T18:53:31 | Python | UTF-8 | Python | false | false | 489 | py | from django import forms
from django.utils.html import strip_tags
from .models import Post
class PostForm(forms.ModelForm):
body = forms.CharField(required=True,
widget=forms.widgets.Textarea(
attrs={
'placeholder': 'Post',
'class': 'form-control'
}))
class Meta:
model = Post
exclude = ('user', )
| [
"[email protected]"
] | |
ed2b24be9e79cc47a29adef832946f1b9008a54f | 3a298c93b67386392d3dee243671f2c101decf01 | /hackerrank/interview-preparation-kit/string-manipulation/02_alternating_characters.py | 4ed83c47964e820fee050e52be5a67ab600cced2 | [] | no_license | Zahidsqldba07/coding-problems-2 | ffbc8408e4408fc846c828af2ec50a9d72e799bc | 020bffbd14ca9993f1e678181ee7df761f1533de | refs/heads/master | 2023-06-26T11:05:34.089697 | 2021-07-21T15:16:10 | 2021-07-21T15:16:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 169 | py | def alternatingCharacters(s):
min_dels = 0
for i in range(1, len(s)):
if s[i] == s[i-1]:
            # Each adjacent equal pair costs exactly one deletion.
min_dels += 1
return min_dels | [
"[email protected]"
] | |
06987b844ae674541272c3184dcb10864d851190 | 1498148e5d0af365cd7fd16197174174a7fa9800 | /t001125.py | fbab733d238bfb3ac8c2b42ba0affa9097b2b6e9 | [] | no_license | feiyanshiren/myAcm | 59a2b80fe7e02787defcb152eee3eae26135322a | 00c7082d5143ddf87aeeafbdb6ce29da46dc8a12 | refs/heads/master | 2023-09-01T12:12:19.866447 | 2023-09-01T09:09:56 | 2023-09-01T09:09:56 | 148,560,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 464 | py | def iToR(n):
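    # Table-driven integer-to-Roman conversion, valid for 1..3999 (the
    # thousands digit is capped at "MMM").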
c = [["","I","II","III","IV","V","VI","VII","VIII","IX"],
["","X","XX","XXX","XL","L","LX","LXX","LXXX","XC"],
["","C","CC","CCC","CD","D","DC","DCC","DCCC","CM"],
["","M","MM","MMM"]]
s = ""
s += c[3][n // 1000 % 10]
s += c[2][n // 100 % 10]
s += c[1][n // 10 % 10]
s += c[0][n % 10]
return s
try:
while 1:
n = int(input())
print(iToR(n))
except:
pass
| [
"[email protected]"
] | |
16e2f39d93d44121207666aaed39b10a375cc842 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /bBExn57vLEsXgHC5m_18.py | 3f968eb28deda866f7bf09e4165adcac6ad9b42e | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 651 | py | """
Create a function that returns `True` if three points belong to the same line,
and `False` otherwise. Each point is represented by a list consisting of an x-
and y-coordinate.
### Examples
same_line([[0, 0], [1, 1], [3, 3]]) ➞ True
same_line([[-2, -1], [2, 1], [0, 0]]) ➞ True
same_line([[-2, 0], [-10, 0], [-8, 0]]) ➞ True
same_line([[0, 0], [1, 1], [1, 2]]) ➞ False
same_line([[3, 4], [3, 5], [6, 6]]) ➞ False
### Notes
Note the special case of a vertical line.
"""
def same_line(lst):
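    # Cross-product collinearity test: (p1 - p0) x (p2 - p0) == 0; avoiding
    # division means vertical lines need no special casing.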
return (lst[1][0]-lst[0][0])*(lst[2][1]-lst[0][1])==(lst[2][0]-lst[0][0])*(lst[1][1]-lst[0][1])
| [
"[email protected]"
] | |
77a7abbd67fc0f5d958444057b77e1fa3518e3fa | eb9c3dac0dca0ecd184df14b1fda62e61cc8c7d7 | /google/monitoring/dashboard/v1/monitoring-dashboard-v1-py/google/monitoring/dashboard_v1/types/xychart.py | d94061b1defd3543e734bee18c04bf35a39253da | [
"Apache-2.0"
] | permissive | Tryweirder/googleapis-gen | 2e5daf46574c3af3d448f1177eaebe809100c346 | 45d8e9377379f9d1d4e166e80415a8c1737f284d | refs/heads/master | 2023-04-05T06:30:04.726589 | 2021-04-13T23:35:20 | 2021-04-13T23:35:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,725 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.monitoring.dashboard_v1.types import metrics
from google.protobuf import duration_pb2 as duration # type: ignore
__protobuf__ = proto.module(
package='google.monitoring.dashboard.v1',
manifest={
'XyChart',
'ChartOptions',
},
)
class XyChart(proto.Message):
r"""A chart that displays data on a 2D (X and Y axes) plane.
Attributes:
data_sets (Sequence[google.monitoring.dashboard_v1.types.XyChart.DataSet]):
Required. The data displayed in this chart.
timeshift_duration (google.protobuf.duration_pb2.Duration):
The duration used to display a comparison
chart. A comparison chart simultaneously shows
values from two similar-length time periods
(e.g., week-over-week metrics).
The duration must be positive, and it can only
be applied to charts with data sets of LINE plot
type.
thresholds (Sequence[google.monitoring.dashboard_v1.types.Threshold]):
Threshold lines drawn horizontally across the
chart.
x_axis (google.monitoring.dashboard_v1.types.XyChart.Axis):
The properties applied to the X axis.
y_axis (google.monitoring.dashboard_v1.types.XyChart.Axis):
The properties applied to the Y axis.
chart_options (google.monitoring.dashboard_v1.types.ChartOptions):
Display options for the chart.
"""
class DataSet(proto.Message):
r"""Groups a time series query definition with charting options.
Attributes:
time_series_query (google.monitoring.dashboard_v1.types.TimeSeriesQuery):
Required. Fields for querying time series
data from the Stackdriver metrics API.
plot_type (google.monitoring.dashboard_v1.types.XyChart.DataSet.PlotType):
How this data should be plotted on the chart.
legend_template (str):
A template string for naming ``TimeSeries`` in the resulting
data set. This should be a string with interpolations of the
form ``${label_name}``, which will resolve to the label's
value.
min_alignment_period (google.protobuf.duration_pb2.Duration):
Optional. The lower bound on data point frequency for this
data set, implemented by specifying the minimum alignment
period to use in a time series query For example, if the
data is published once every 10 minutes, the
``min_alignment_period`` should be at least 10 minutes. It
would not make sense to fetch and align data at one minute
intervals.
"""
class PlotType(proto.Enum):
r"""The types of plotting strategies for data sets."""
PLOT_TYPE_UNSPECIFIED = 0
LINE = 1
STACKED_AREA = 2
STACKED_BAR = 3
HEATMAP = 4
time_series_query = proto.Field(proto.MESSAGE, number=1,
message=metrics.TimeSeriesQuery,
)
plot_type = proto.Field(proto.ENUM, number=2,
enum='XyChart.DataSet.PlotType',
)
legend_template = proto.Field(proto.STRING, number=3)
min_alignment_period = proto.Field(proto.MESSAGE, number=4,
message=duration.Duration,
)
class Axis(proto.Message):
r"""A chart axis.
Attributes:
label (str):
The label of the axis.
scale (google.monitoring.dashboard_v1.types.XyChart.Axis.Scale):
The axis scale. By default, a linear scale is
used.
"""
class Scale(proto.Enum):
r"""Types of scales used in axes."""
SCALE_UNSPECIFIED = 0
LINEAR = 1
LOG10 = 2
label = proto.Field(proto.STRING, number=1)
scale = proto.Field(proto.ENUM, number=2,
enum='XyChart.Axis.Scale',
)
data_sets = proto.RepeatedField(proto.MESSAGE, number=1,
message=DataSet,
)
timeshift_duration = proto.Field(proto.MESSAGE, number=4,
message=duration.Duration,
)
thresholds = proto.RepeatedField(proto.MESSAGE, number=5,
message=metrics.Threshold,
)
x_axis = proto.Field(proto.MESSAGE, number=6,
message=Axis,
)
y_axis = proto.Field(proto.MESSAGE, number=7,
message=Axis,
)
chart_options = proto.Field(proto.MESSAGE, number=8,
message='ChartOptions',
)
class ChartOptions(proto.Message):
r"""Options to control visual rendering of a chart.
Attributes:
mode (google.monitoring.dashboard_v1.types.ChartOptions.Mode):
The chart mode.
"""
class Mode(proto.Enum):
r"""Chart mode options."""
MODE_UNSPECIFIED = 0
COLOR = 1
X_RAY = 2
STATS = 3
mode = proto.Field(proto.ENUM, number=1,
enum=Mode,
)
__all__ = tuple(sorted(__protobuf__.manifest))
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
a132f26dbd5f323274528482518a0db067ccaee7 | 24cce1ec7737f9ebb6df3e317a36c0a0329ec664 | /HZMX/amazon_api/wizard/__init__.py | ec88e0a602a1466bc35254ab81c4e14fb0155649 | [] | no_license | tate11/HangZhouMinXing | ab261cb347f317f9bc4a77a145797745e2531029 | 14b7d34af635db015bd3f2c139be1ae6562792f9 | refs/heads/master | 2021-04-12T04:23:20.165503 | 2018-03-14T05:02:05 | 2018-03-14T05:02:05 | 125,855,729 | 1 | 0 | null | 2018-03-19T12:42:07 | 2018-03-19T12:42:07 | null | UTF-8 | Python | false | false | 183 | py | # -*- coding: utf-8 -*-
from . import amazon_wizard
from . import shop_template_wizard
from . import sync_sale_order
from . import stock_adjust
from . import stock_immediate_transfer | [
"1121403085"
] | 1121403085 |
e9f2bec9246fd4ed09f7d97c23f46e5fcf455a55 | 7dd1738268b4ebb721592b02080626799f3c6eed | /tests/test_textparser.py | d32dae4bb5571e0834d0263cf72ec0b74ea70c68 | [
"MIT"
] | permissive | Afsaan/textparser | f721dad1aa8fd36d21274ea4cf5ec5722561fe8c | cc4a85f8b7e6d6be83f5072f45af4a7baf6c35df | refs/heads/master | 2022-04-06T03:27:03.962419 | 2020-01-02T14:51:05 | 2020-01-02T14:51:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 30,626 | py | import unittest
from collections import namedtuple
import textparser
from textparser import Grammar
from textparser import Sequence
from textparser import Choice
from textparser import choice
from textparser import ChoiceDict
from textparser import ZeroOrMore
from textparser import ZeroOrMoreDict
from textparser import OneOrMore
from textparser import OneOrMoreDict
from textparser import DelimitedList
from textparser import Token
from textparser import TokenizeError
from textparser import tokenize_init
from textparser import Any
from textparser import AnyUntil
from textparser import Optional
from textparser import Tag
from textparser import Forward
from textparser import NoMatch
from textparser import Not
from textparser import And
from textparser import markup_line
from textparser import replace_blocks
def tokenize(items, add_eof_token=True):
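    # Build Token objects from (kind, value[, offset]) tuples and append the
    # __EOF__ sentinel that the grammar expects at the end of input.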
tokens = []
for item in items:
if len(item) == 2:
token = Token(*item, offset=1)
else:
token = Token(*item)
tokens.append(token)
if add_eof_token:
tokens.append(Token('__EOF__', None, -1))
return tokens
class TextParserTest(unittest.TestCase):
def parse_and_assert_tree(self, grammar, datas):
for tokens, expected_tree in datas:
tree = grammar.parse(tokenize(tokens))
self.assertEqual(tree, expected_tree)
def parse_and_assert_mismatch(self, grammar, datas):
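        # An expected offset of -1 refers to the __EOF__ sentinel token.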
for tokens, line in datas:
tokens = tokenize(tokens)
with self.assertRaises(textparser.GrammarError) as cm:
grammar.parse(tokens)
self.assertEqual(cm.exception.offset, line)
def test_grammar_sequence(self):
grammar = Grammar(Sequence('NUMBER', 'WORD'))
tokens = tokenize([
('NUMBER', '1.45'),
('WORD', 'm')
])
tree = grammar.parse(tokens)
self.assertEqual(tree, ['1.45', 'm'])
def test_grammar_sequence_mismatch(self):
grammar = Grammar(Sequence('NUMBER', 'WORD'))
tokens = tokenize([('NUMBER', '1.45')])
with self.assertRaises(textparser.GrammarError) as cm:
grammar.parse(tokens)
self.assertEqual(cm.exception.offset, -1)
def test_grammar_choice(self):
grammar = Grammar(Choice('NUMBER', 'WORD'))
datas = [
(
[('WORD', 'm')],
'm'
),
(
[('NUMBER', '5')],
'5'
)
]
self.parse_and_assert_tree(grammar, datas)
def test_grammar_choice_mismatch(self):
grammar = Grammar(Choice(Sequence('NUMBER', 'WORD'),
'WORD'))
datas = [
([('NUMBER', '1', 5)], -1),
([('NUMBER', '1', 5), ('NUMBER', '2', 7)], 7)
]
self.parse_and_assert_mismatch(grammar, datas)
def test_grammar_choice_dict(self):
number = Forward()
number <<= Sequence('NUMBER')
grammar = Grammar(ChoiceDict(number,
Tag('foo', Sequence('WORD')),
ChoiceDict('BAR'),
'FIE'))
datas = [
(
[('WORD', 'm')],
('foo', ['m'])
),
(
[('NUMBER', '5')],
['5']
),
(
[('BAR', 'foo')],
'foo'
),
(
[('FIE', 'fum')],
'fum'
)
]
self.parse_and_assert_tree(grammar, datas)
def test_grammar_choice_dict_mismatch(self):
grammar = Grammar(ChoiceDict(Sequence('NUMBER'),
Sequence('WORD')))
tokens = tokenize([(',', ',', 3)])
with self.assertRaises(textparser.Error) as cm:
grammar.parse(tokens)
self.assertEqual(cm.exception.offset, 3)
def test_grammar_choice_dict_init(self):
datas = [
(
('WORD', 'WORD'),
"First token kind must be unique, but WORD isn't."
),
(
('WORD', Sequence('WORD')),
"First token kind must be unique, but WORD isn't."
),
(
(Sequence(Sequence(Optional('WORD'))), ),
"Unsupported pattern type <class 'textparser.Optional'>."
)
]
for grammar, message in datas:
with self.assertRaises(textparser.Error) as cm:
ChoiceDict(*grammar)
self.assertEqual(str(cm.exception), message)
def test_grammar_delimited_list(self):
grammar = Grammar(Sequence(DelimitedList('WORD'), Optional('.')))
datas = [
(
[('WORD', 'foo')],
[['foo'], []]
),
(
[('WORD', 'foo'), (',', ','), ('WORD', 'bar')],
[['foo', 'bar'], []]
),
(
[('WORD', 'foo'), (',', ','), ('WORD', 'bar'), ('.', '.')],
[['foo', 'bar'], ['.']]
)
]
self.parse_and_assert_tree(grammar, datas)
def test_grammar_delimited_list_mismatch(self):
grammar = Grammar(Sequence(DelimitedList('WORD'), Optional('.')))
datas = [
(
[
('WORD', 'foo', 1),
(',', ',', 2)
],
2
),
(
[
('WORD', 'foo', 1),
(',', ',', 2),
('WORD', 'foo', 3),
(',', ',', 4),
('.', '.', 5)
],
4
)
]
self.parse_and_assert_mismatch(grammar, datas)
def test_grammar_zero_or_more(self):
grammar = Grammar(ZeroOrMore('WORD'))
datas = [
(
[],
[]
),
(
[('WORD', 'foo')],
['foo']
),
(
[('WORD', 'foo'), ('WORD', 'bar')],
['foo', 'bar']
)
]
self.parse_and_assert_tree(grammar, datas)
def test_grammar_zero_or_more_partial_element_match(self):
grammar = Grammar(Sequence(
ZeroOrMore(Sequence('WORD', 'NUMBER')), 'WORD'))
datas = [
(
[
('WORD', 'foo'),
('NUMBER', '1'),
('WORD', 'bar'),
('NUMBER', '2'),
('WORD', 'fie')],
[[['foo', '1'], ['bar', '2']], 'fie']
)
]
self.parse_and_assert_tree(grammar, datas)
def test_grammar_zero_or_more_dict(self):
grammar = Grammar(ZeroOrMoreDict(Sequence('WORD', 'NUMBER')))
datas = [
(
[],
{}
),
(
[('WORD', 'foo'), ('NUMBER', '1'),
('WORD', 'bar'), ('NUMBER', '2'),
('WORD', 'foo'), ('NUMBER', '3')],
{
'foo': [['foo', '1'], ['foo', '3']],
'bar': [['bar', '2']]
}
)
]
self.parse_and_assert_tree(grammar, datas)
def test_grammar_one_or_more(self):
grammar = Grammar(OneOrMore('WORD'))
datas = [
(
[('WORD', 'foo')],
['foo']
),
(
[('WORD', 'foo'), ('WORD', 'bar')],
['foo', 'bar']
)
]
self.parse_and_assert_tree(grammar, datas)
def test_grammar_one_or_more_mismatch(self):
grammar = Grammar(OneOrMore('WORD'))
datas = [
(
[]
, -1
),
(
[('NUMBER', 'foo', 2)],
2
)
]
self.parse_and_assert_mismatch(grammar, datas)
def test_grammar_one_or_more_dict(self):
grammar = Grammar(OneOrMoreDict(Sequence('WORD', 'NUMBER')))
datas = [
(
[('WORD', 'foo'), ('NUMBER', '1')],
{
'foo': [['foo', '1']]
}
),
(
[('WORD', 'foo'), ('NUMBER', '1'),
('WORD', 'bar'), ('NUMBER', '2'),
('WORD', 'foo'), ('NUMBER', '3')],
{
'foo': [['foo', '1'], ['foo', '3']],
'bar': [['bar', '2']]
}
)
]
self.parse_and_assert_tree(grammar, datas)
def test_grammar_one_or_more_dict_mismatch(self):
grammar = Grammar(OneOrMoreDict(Sequence('WORD', 'NUMBER')))
datas = [
(
[('WORD', 'foo', 5)],
-1
),
(
[
('WORD', 'foo', 5),
('WORD', 'bar', 6)
],
6
),
(
[
('WORD', 'foo', 5),
('NUMBER', '4', 6),
('WORD', 'bar', 7),
('WORD', 'fie', 8)
],
8
)
]
self.parse_and_assert_mismatch(grammar, datas)
def test_grammar_any(self):
grammar = Grammar(Any())
datas = [
(
[('A', r'a')],
'a'
),
(
[('B', r'b')],
'b'
)
]
self.parse_and_assert_tree(grammar, datas)

    def test_grammar_any_until(self):
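        # AnyUntil consumes arbitrary tokens until the given pattern
        # matches, leaving the pattern itself for the next element.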
grammar = Grammar(Sequence(AnyUntil('STRING'), 'STRING'))
datas = [
(
[('NUMBER', '1'),
('WORD', 'a'),
('STRING', '"b"')],
[['1', 'a'], '"b"']
)
]
self.parse_and_assert_tree(grammar, datas)

    def test_grammar_any_until_sequence(self):
grammar = Grammar(Sequence(AnyUntil(Sequence('WORD', 'STRING')),
'WORD',
'STRING'))
datas = [
(
[('NUMBER', '1'),
('WORD', 'a'),
('WORD', 'b'),
('STRING', '"b"')],
[['1', 'a'], 'b', '"b"']
)
]
self.parse_and_assert_tree(grammar, datas)

    def test_grammar_1(self):
grammar = Grammar(Sequence(
'IF',
choice(Sequence(choice('A', 'B'), 'STRING'),
'STRING'),
'WORD',
choice(
Sequence(
choice(DelimitedList('STRING'), ZeroOrMore('NUMBER')), '.'),
'.')))
datas = [
(
[
('IF', 'IF'),
('STRING', 'foo'),
('WORD', 'bar'),
('.', '.')
],
['IF', 'foo', 'bar', [[], '.']]
),
(
[
('IF', 'IF'),
('STRING', 'foo'),
('WORD', 'bar'),
('NUMBER', '0'),
('NUMBER', '100'),
('.', '.')
],
['IF', 'foo', 'bar', [['0', '100'], '.']]
)
]
self.parse_and_assert_tree(grammar, datas)

    def test_grammar_1_mismatch(self):
grammar = Grammar(Sequence(
'IF',
choice(Sequence(choice('A', 'B'), 'STRING'),
'STRING'),
'WORD',
choice(
Sequence(
choice(DelimitedList('STRING'), ZeroOrMore('NUMBER')), '.'),
'.')))
datas = [
(
[
('IF', 'IF', 1),
('STRING', 'foo', 2),
('WORD', 'bar', 3),
(',', ',', 4)
],
4
),
(
[
('IF', 'IF', 1),
('STRING', 'foo', 2),
('.', '.', 3)
],
3
),
(
[
('IF', 'IF', 1),
('NUMBER', '1', 2)
],
2
),
(
[
('IF', 'IF', 1),
('STRING', 'foo', 2),
('WORD', 'bar', 3),
('.', '.', 4),
('.', '.', 5)
],
5
)
]
self.parse_and_assert_mismatch(grammar, datas)

    def test_grammar_forward(self):
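        # Forward lets a pattern be referenced before it is defined;
        # the actual pattern is attached later with <<=.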
foo = Forward()
foo <<= Sequence('FOO')
grammar = Grammar(foo)
datas = [
(
[('FOO', 'foo')],
['foo']
)
]
self.parse_and_assert_tree(grammar, datas)

    def test_grammar_forward_text(self):
foo = Forward()
foo <<= 'FOO'
grammar = Grammar(foo)
datas = [
(
[('FOO', 'foo')],
'foo'
)
]
self.parse_and_assert_tree(grammar, datas)

    def test_grammar_optional(self):
grammar = Grammar(Sequence(Optional('WORD'),
Optional('WORD'),
Optional('NUMBER')))
datas = [
(
[],
[[], [], []]
),
(
[('WORD', 'a')],
[['a'], [], []]
),
(
[('NUMBER', 'c')],
[[], [], ['c']]
),
(
[('WORD', 'a'), ('NUMBER', 'c')],
[['a'], [], ['c']]
),
(
[('WORD', 'a'), ('WORD', 'b'), ('NUMBER', 'c')],
[['a'], ['b'], ['c']]
)
]
self.parse_and_assert_tree(grammar, datas)

    def test_grammar_tag(self):
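        # Tag wraps the inner match in a (name, match) tuple; tags nest.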
grammar = Grammar(Tag('a',
Tag('b',
choice(Tag('c', 'WORD'),
Tag('d', Optional('NUMBER'))))))
datas = [
(
[('WORD', 'bar')],
('a', ('b', ('c', 'bar')))
),
(
[('NUMBER', '1')],
('a', ('b', ('d', ['1'])))
),
(
[],
('a', ('b', ('d', [])))
)
]
self.parse_and_assert_tree(grammar, datas)

    def test_grammar_tag_mismatch(self):
grammar = Grammar(Tag('a', 'WORD'))
datas = [
(
[('NUMBER', 'bar')],
1
)
]
self.parse_and_assert_mismatch(grammar, datas)

    def test_grammar_and(self):
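        # And is a positive lookahead: it must match, but consumes no
        # tokens and contributes [] to the parse tree.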
grammar = Grammar(Sequence(And('NUMBER'), 'NUMBER'))
datas = [
(
[('NUMBER', '1')],
[[], '1']
)
]
self.parse_and_assert_tree(grammar, datas)

    def test_grammar_and_mismatch(self):
grammar = Grammar(Sequence(And('NUMBER'), 'NUMBER'))
datas = [
(
[('WORD', 'foo', 3), ('NUMBER', '1', 4)],
3
)
]
self.parse_and_assert_mismatch(grammar, datas)

    def test_grammar_not(self):
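        # Not is a negative lookahead: it succeeds when the pattern does
        # not match, consuming nothing.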
grammar = Grammar(Sequence(Not('WORD'), 'NUMBER'))
datas = [
(
[('NUMBER', '1')],
[[], '1']
)
]
self.parse_and_assert_tree(grammar, datas)

    def test_grammar_not_mismatch(self):
grammar = Grammar(Sequence(Not('WORD'), 'NUMBER'))
datas = [
(
[('WORD', 'foo', 3), ('NUMBER', '1', 4)],
3
)
]
self.parse_and_assert_mismatch(grammar, datas)

    def test_grammar_no_match(self):
grammar = Grammar(NoMatch())
datas = [
(
[('NUMBER', '1', 3)],
3
),
(
[('WORD', 'foo', 3)],
3
)
]
self.parse_and_assert_mismatch(grammar, datas)

    def test_parse_start_and_end_of_file(self):
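        # With match_sof=True a __SOF__ token is prepended and must be
        # matched explicitly; __EOF__ is always appended.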
class Parser(textparser.Parser):
def grammar(self):
return Sequence('__SOF__', '__EOF__')
self.assertEqual(Parser().parse('', match_sof=True),
['__SOF__', '__EOF__'])

    def test_parse_start_of_file_mismatch(self):
class Parser(textparser.Parser):
def grammar(self):
return Sequence('__EOF__')
with self.assertRaises(textparser.ParseError) as cm:
Parser().parse('123', match_sof=True)
self.assertEqual(str(cm.exception),
'Invalid syntax at line 1, column 1: ">>!<<123"')

    def test_parse_end_of_file(self):
class Parser(textparser.Parser):
def grammar(self):
return '__EOF__'
self.assertEqual(Parser().parse('', match_sof=False), '__EOF__')

    def test_grammar_none(self):
class AnyAsNone(textparser.Pattern):
def match(self, tokens):
tokens.get_value()
return None
grammar = Grammar(AnyAsNone())
datas = [
(
[('NUMBER', '1')],
None
)
]
self.parse_and_assert_tree(grammar, datas)

    def test_grammar_error(self):
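        # Parsing pre-tokenized input through Grammar directly raises
        # GrammarError with an offset only; Parser.parse() would wrap it
        # in a ParseError with line and column information.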
grammar = Grammar(NoMatch())
datas = [
[('NUMBER', '1', 3)],
[('WORD', 'foo', 3)]
]
for tokens in datas:
tokens = tokenize(tokens)
with self.assertRaises(textparser.GrammarError) as cm:
grammar.parse(tokens)
self.assertEqual(cm.exception.offset, 3)
self.assertEqual(str(cm.exception),
'Invalid syntax at offset 3.')

    def test_tokenize_error(self):
datas = [
(2, 'hej', 'Invalid syntax at line 1, column 3: "he>>!<<j"'),
(0, 'a\nb\n', 'Invalid syntax at line 1, column 1: ">>!<<a"'),
(1, 'a\nb\n', 'Invalid syntax at line 1, column 2: "a>>!<<"'),
(2, 'a\nb\n', 'Invalid syntax at line 2, column 1: ">>!<<b"')
]
for offset, text, message in datas:
with self.assertRaises(TokenizeError) as cm:
raise TokenizeError(text, offset)
self.assertEqual(cm.exception.text, text)
self.assertEqual(cm.exception.offset, offset)
self.assertEqual(str(cm.exception), message)

    def test_create_token_re(self):
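        # tokenize_init() seeds the token list with a __SOF__ token and
        # builds one alternation regex with a named group per token kind.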
datas = [
(
[('A', r'a')],
'(?P<A>a)'
),
(
[('A', r'b'), ('C', r'd')],
'(?P<A>b)|(?P<C>d)'
)
]
for spec, expected_re_token in datas:
tokens, re_token = tokenize_init(spec)
self.assertEqual(tokens,
[Token(kind='__SOF__', value='__SOF__', offset=0)])
self.assertEqual(re_token, expected_re_token)

    def test_parser(self):
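        # End-to-end Parser subclass: keywords() promotes matching WORD
        # tokens to their own kinds, token_specs() defines the lexer, and
        # token_tree=True returns Token namedtuples instead of values.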
class Parser(textparser.Parser):
def keywords(self):
return set([
'IF',
'A',
'B'
])
def token_specs(self):
return [
('SKIP', r'[ \r\n\t]+'),
('NUMBER', r'-?\d+(\.\d+)?([eE][+-]?\d+)?'),
('DOT', '.', r'\.'),
('WORD', r'[A-Za-z0-9_]+'),
('ESCAPED_STRING', r'"(\\"|[^"])*?"'),
('MISMATCH', r'.')
]
def grammar(self):
return Sequence(
'IF',
Optional(choice('A', 'B')),
'ESCAPED_STRING',
'WORD',
Optional(choice(DelimitedList('ESCAPED_STRING'),
ZeroOrMore('NUMBER'))),
'.')
datas = [
(
'IF "foo" bar .',
['IF', [], '"foo"', 'bar', [[]], '.'],
[
Token(kind='IF', value='IF', offset=0),
[],
Token(kind='ESCAPED_STRING', value='"foo"', offset=3),
Token(kind='WORD', value='bar', offset=9),
[[]],
Token(kind='.', value='.', offset=13)
]
),
(
'IF B "" b 1 2 .',
['IF', ['B'], '""', 'b', [['1', '2']], '.'],
[
Token(kind='IF', value='IF', offset=0),
[
Token(kind='B', value='B', offset=3)
],
Token(kind='ESCAPED_STRING', value='""', offset=5),
Token(kind='WORD', value='b', offset=8),
[
[
Token(kind='NUMBER', value='1', offset=10),
Token(kind='NUMBER', value='2', offset=12)
]
],
Token(kind='.', value='.', offset=14)
]
)
]
for text, expected_tree, expected_token_tree in datas:
tree = Parser().parse(text)
self.assertEqual(tree, expected_tree)
tree = Parser().parse(text, token_tree=True)
self.assertEqual(tree, expected_token_tree)

    def test_parser_default_keywords(self):
class Parser(textparser.Parser):
def token_specs(self):
return [
('SKIP', r'[ \r\n\t]+'),
('NUMBER', r'-?\d+(\.\d+)?([eE][+-]?\d+)?'),
('DOT', '.', r'\.'),
('WORD', r'[A-Za-z0-9_]+'),
('ESCAPED_STRING', r'"(\\"|[^"])*?"'),
('MISMATCH', r'.')
]
def grammar(self):
return Sequence(
'WORD',
Optional('WORD'),
'ESCAPED_STRING',
'WORD',
Optional(choice(DelimitedList('ESCAPED_STRING'),
ZeroOrMore('NUMBER'))),
'.')
datas = [
(
'IF "foo" bar .',
['IF', [], '"foo"', 'bar', [[]], '.'],
[
Token(kind='WORD', value='IF', offset=0),
[],
Token(kind='ESCAPED_STRING', value='"foo"', offset=3),
Token(kind='WORD', value='bar', offset=9),
[[]],
Token(kind='.', value='.', offset=13)
]
),
(
'IF B "" b 1 2 .',
['IF', ['B'], '""', 'b', [['1', '2']], '.'],
[
Token(kind='WORD', value='IF', offset=0),
[
Token(kind='WORD', value='B', offset=3)
],
Token(kind='ESCAPED_STRING', value='""', offset=5),
Token(kind='WORD', value='b', offset=8),
[
[
Token(kind='NUMBER', value='1', offset=10),
Token(kind='NUMBER', value='2', offset=12)
]
],
Token(kind='.', value='.', offset=14)
]
)
]
for text, expected_tree, expected_token_tree in datas:
tree = Parser().parse(text)
self.assertEqual(tree, expected_tree)
tree = Parser().parse(text, token_tree=True)
self.assertEqual(tree, expected_token_tree)

    def test_parser_bare(self):
class Parser(textparser.Parser):
pass
with self.assertRaises(NotImplementedError) as cm:
Parser().parse('foo')
self.assertEqual(str(cm.exception), 'No grammar defined.')

    def test_parser_default_token_specs(self):
class Parser(textparser.Parser):
def grammar(self):
return 'WORD'
tree = Parser().parse('foo')
self.assertEqual(tree, 'foo')

    def test_parser_tokenize_mismatch(self):
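        # A token matching the MISMATCH kind aborts tokenization; the
        # ParseError reports the offset translated to line and column.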
class Parser(textparser.Parser):
def token_specs(self):
return [
('SKIP', r'[ \r\n\t]+'),
('NUMBER', r'-?\d+(\.\d+)?([eE][+-]?\d+)?'),
('MISMATCH', r'.')
]
def grammar(self):
return Grammar('NUMBER')
with self.assertRaises(textparser.ParseError) as cm:
Parser().parse('12\n34foo\n789')
self.assertEqual(cm.exception.offset, 5)
self.assertEqual(cm.exception.line, 2)
self.assertEqual(cm.exception.column, 3)
self.assertEqual(str(cm.exception),
'Invalid syntax at line 2, column 3: "34>>!<<foo"')

    def test_parser_grammar_mismatch(self):
class Parser(textparser.Parser):
def tokenize(self, _text):
return tokenize([
('NUMBER', '1.45', 0),
('NUMBER', '2', 5)
])
def grammar(self):
return Sequence('NUMBER', 'WORD')
with self.assertRaises(textparser.ParseError) as cm:
Parser().parse('1.45 2')
self.assertEqual(cm.exception.offset, 5)
self.assertEqual(cm.exception.line, 1)
self.assertEqual(cm.exception.column, 6)
self.assertEqual(str(cm.exception),
'Invalid syntax at line 1, column 6: "1.45 >>!<<2"')

    def test_parser_grammar_mismatch_choice_max(self):
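        # When all alternatives fail, Choice reports the mismatch of the
        # alternative that advanced furthest through the token stream.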
class Parser(textparser.Parser):
def __init__(self, tokens):
self._tokens = tokens
def tokenize(self, _text):
return tokenize(self._tokens, add_eof_token=False)
def grammar(self):
return Choice(Sequence('NUMBER', 'WORD'),
'WORD')
Data = namedtuple('Data',
[
'text',
'tokens',
'offset',
'line',
'column',
'message',
])
datas = [
Data(
text='1.45',
tokens=[
('NUMBER', '1.45', 0)
],
offset=4,
line=1,
column=5,
message='Invalid syntax at line 1, column 5: "1.45>>!<<"'
),
Data(
text='1.45 2',
tokens=[
('NUMBER', '1.45', 0),
('NUMBER', '2', 5)
],
offset=5,
line=1,
column=6,
message='Invalid syntax at line 1, column 6: "1.45 >>!<<2"'
)
]
for text, tokens, offset, line, column, message in datas:
with self.assertRaises(textparser.ParseError) as cm:
Parser(tokens).parse(text)
self.assertEqual(cm.exception.offset, offset)
self.assertEqual(cm.exception.line, line)
self.assertEqual(cm.exception.column, column)
self.assertEqual(str(cm.exception), message)

    def test_parse_error(self):
class Parser(textparser.Parser):
def tokenize(self, text):
raise TokenizeError(text, 5)
def grammar(self):
return Grammar(Sequence('NUMBER', 'WORD'))
with self.assertRaises(textparser.ParseError) as cm:
Parser().parse('12\n3456\n789')
self.assertEqual(cm.exception.text, '12\n3456\n789')
self.assertEqual(cm.exception.offset, 5)
self.assertEqual(cm.exception.line, 2)
self.assertEqual(cm.exception.column, 3)
self.assertEqual(str(cm.exception),
'Invalid syntax at line 2, column 3: "34>>!<<56"')

    def test_markup_line(self):
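        # markup_line() inserts the marker (default '>>!<<') into the
        # line that contains the given offset.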
datas = [
(0, '>>!<<0', None),
(1, '0>>!<<', None),
(2, '>>!<<1234', None),
(4, '12>>!<<34', None),
(6, '1234>>!<<', None),
(7, '>>!<<56', None),
(8, '5>>!<<6', None),
(9, '56>>!<<', None),
(3, '1x234', 'x')
]
for offset, line, marker in datas:
if marker is None:
text = markup_line('0\n1234\n56', offset)
else:
text = markup_line('0\n1234\n56',
offset,
marker=marker)
self.assertEqual(text, line)

    def test_replace_blocks(self):
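        # replace_blocks() replaces block contents with whitespace while
        # preserving newlines, so offsets and line numbers stay valid.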
datas = [
('{}', '{}'),
('{{}}', '{ }'),
('{{\n} xxx {}}', '{ \n }'),
('1{a\n}2{b}3', '1{ \n}2{ }3')
]
for old, expected in datas:
new = replace_blocks(old)
self.assertEqual(new, expected)

    def test_replace_blocks_start_end(self):
datas = [
('1[a]2[b]3', '1[ ]2[ ]3', '[', ']'),
('1{a}2{b}3', '1{ }2{ }3', '{', '}'),
('1(a)2(b)3', '1( )2( )3', '(', ')'),
('1((a))2((b))3', '1(( ))2(( ))3', '((', '))')
]
for old, expected, start, end in datas:
new = replace_blocks(old, start, end)
self.assertEqual(new, expected)


if __name__ == '__main__':
unittest.main()