blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
281
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
57
| license_type
stringclasses 2
values | repo_name
stringlengths 6
116
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 313
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 18.2k
668M
⌀ | star_events_count
int64 0
102k
| fork_events_count
int64 0
38.2k
| gha_license_id
stringclasses 17
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 107
values | src_encoding
stringclasses 20
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 4
6.02M
| extension
stringclasses 78
values | content
stringlengths 2
6.02M
| authors
listlengths 1
1
| author
stringlengths 0
175
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4813c7d7b4d892a34cc126da8ba1f3ffa6323048 | 43137cc9b223b0ca13c547f8557e3cbc50de55f2 | /day07_函数基础/05-全局变量和局部变量.py | cd7428d318ddb32e29f73435f7a4576e42eb34a1 | []
| no_license | WeiPromise/study | fe60e0e89d03f640d21d7cc19d96f12677c6c6ed | 0b61bcdb98e869e6cba0c942ff9ee69868d8bf12 | refs/heads/master | 2023-01-08T17:23:43.554984 | 2020-10-23T07:08:32 | 2020-10-23T07:08:32 | 286,971,568 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 854 | py | #!/usr/bin/env python3.5
# encoding: utf-8
# Created by leiwei on 2020/8/18 15:39
a = 100 # Global variable: visible throughout this whole Python file.
word = 'hello'
def my_test():
    x = 'hello' # Local variable: only usable inside this function body.
    print('x = {}'.format(x))
    a = 10 # Same name as the global: this creates a NEW local variable instead of changing the global one.
    print('函数内部a = {}'.format(a))
    # Declaring a name with `global` lets the function modify the module-level
    # variable; it can also create a brand-new global (see `b` below).
    global word
    word = 'ok'
    global b
    b = 'hello'
    print('locals = {},globals = {}'.format(locals(),globals()))
my_test()
print('函数外部a = {},word = {},b = {}'.format(a,word,b))
# Only functions create a new scope in Python (if/for blocks do not).
# Creating globals from inside a function (like `b` above) is best avoided.
# if 3 > 2:
#     m = 'hi'
# print(m) | [
"[email protected]"
]
| |
278be94dc86a4923595fc1db156514e63a55f1c3 | 9abc2f4fbf1b31b5a56507437b4a8d9c3f3db7e6 | /deals/migrations/0001_initial.py | 4a8460e46f0e4b39cc2b66694382c60ac4a670ac | []
| no_license | odbalogun/ticketr | e9fe8461d66dabe395f0e1af8fbecc67dbb16e97 | 94f24c82f407f861f1614a151feb3fdd62b283e5 | refs/heads/master | 2022-11-30T22:40:30.931160 | 2019-08-09T14:34:38 | 2019-08-09T14:34:38 | 188,833,600 | 0 | 0 | null | 2022-11-22T03:50:30 | 2019-05-27T11:50:07 | Python | UTF-8 | Python | false | false | 3,263 | py | # Generated by Django 2.2.1 on 2019-05-06 23:17
import deals.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial migration for the deals app: creates the Categories,
    # DealCategories and Deals tables, then wires the DealCategories -> Deals
    # FK and the (category, deal) uniqueness constraint.
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Categories',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('deleted', models.DateTimeField(editable=False, null=True)),
                ('name', models.CharField(max_length=100, unique=True, verbose_name='name')),
                ('slug', models.SlugField(max_length=100, unique=True, verbose_name='slug')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='DealCategories',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('deleted', models.DateTimeField(editable=False, null=True)),
                ('price', models.FloatField(verbose_name='price')),
                ('description', models.TextField(verbose_name='description')),
                ('image', models.ImageField(upload_to=deals.models.deals_image_path, verbose_name='image')),
                ('quantity', models.IntegerField(null=True, verbose_name='quantity')),
                ('available_quantity', models.IntegerField(null=True, verbose_name='available quantity')),
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='deals.Categories')),
            ],
        ),
        migrations.CreateModel(
            name='Deals',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('deleted', models.DateTimeField(editable=False, null=True)),
                ('name', models.CharField(max_length=100, unique=True, verbose_name='name')),
                ('slug', models.SlugField(max_length=100, unique=True, verbose_name='slug')),
                ('description', models.TextField(verbose_name='description')),
                ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='created at')),
                ('expiry_date', models.DateField(null=True, verbose_name='expiry date')),
                ('is_active', models.BooleanField(default=True, verbose_name='is active')),
                ('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('options', models.ManyToManyField(to='deals.DealCategories')),
            ],
            options={
                'abstract': False,
            },
        ),
        # deal FK added separately because Deals is created after DealCategories.
        migrations.AddField(
            model_name='dealcategories',
            name='deal',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='deals.Deals'),
        ),
        migrations.AlterUniqueTogether(
            name='dealcategories',
            unique_together={('category', 'deal')},
        ),
    ]
| [
"[email protected]"
]
| |
04a2fa5b79e53367d1fa702e2c9297adc459942f | 16f9faf6665f5189a8561534bb4bd8b0951ba1aa | /codes/metrics/__init__.py | d2cda599af5afa1f5e55bab4d4b114afd37eab3e | []
| no_license | azuryl/LPTN | 4b36dba2a7f5b2bcc7dc35ac3734839054069ca2 | a1b2db50117a842abc1f44d805291032651014ab | refs/heads/main | 2023-07-01T02:59:17.916730 | 2021-08-12T19:49:46 | 2021-08-12T19:49:46 | 395,425,328 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 102 | py | from .psnr_ssim import calculate_psnr, calculate_ssim
# Public API of the metrics package: the two image-quality measures re-exported above.
__all__ = ['calculate_psnr', 'calculate_ssim']
| [
"[email protected]"
]
| |
62de88d56a77477d8991a96a5087929d2d3d2770 | 55d6de252e61c4b60688ebd8b1f637807acc1e7c | /usl_recived_forigin_purchased/models/inharitstockpicking.py | 0eebab826a9be7c89947980bd5f2d26cbf056f25 | []
| no_license | mosadiqit/eerna_erp_uslbd | b707a1d49a4fce7c1543b63e0120e8f9b77b26ce | 73e3994a9e32df7809d244eb6592513162ab7853 | refs/heads/main | 2023-06-30T14:53:04.837197 | 2021-08-04T11:30:46 | 2021-08-04T11:30:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,242 | py | from odoo import models, fields, api, _
from odoo.exceptions import UserError, ValidationError
from odoo.osv.osv import osv
from odoo.tools.float_utils import float_compare, float_is_zero, float_round
class StockPickingInharit(models.Model):
    """Extensions to stock.picking: build move lines from a commercial
    invoice, route LIM (non-saleable warehouse) transfers, and tighten
    validation/reservation behaviour."""
    _inherit = 'stock.picking'
    @api.onchange('commercial_invoice')
    def onchange_commercial_invoice(self):
        # Rebuild the picking's move lines from the selected commercial
        # invoice: one stock move per non-payable invoice line.
        if self.commercial_invoice:
            move_id = self.env['account.move'].search([('id','=',self.commercial_invoice.id)])
            move_line_id = self.env['account.move.line'].search([('move_id','=',move_id.id),('account_internal_type','=','other')])
            for rec in self:
                lines = list()
                for line in move_line_id:
                    # (0, 0, vals) one2many command: create a new move line.
                    vals = {
                        'product_id':line.product_id.id,
                        'branch_id':self.env.user.branch_id.id,
                        'product_uom_qty':line.quantity,
                        'reserved_availability':0,
                        'quantity_done':0,
                        'name':line.name,
                        'product_uom':line.product_id.uom_id.id
                    }
                    lines.append((0,0,vals))
                rec.move_ids_without_package = lines
            print('Hello')
    def button_validate(self):
        """Validate the transfer; largely mirrors the standard Odoo
        implementation (immediate-transfer / overprocessed / backorder
        wizards) with lot and quantity checks."""
        self.ensure_one()
        if not self.move_lines and not self.move_line_ids:
            raise UserError(_('Please add some items to move.'))
        # Clean-up the context key at validation to avoid forcing the creation of immediate
        # transfers.
        # for rec in self.move_line_ids_without_package.lot_id:
        #     stock_reserved_check = self.env['stock.quant'].search([('lot_id','=',rec.id),('location_id','=',self.location_id.id)])
        #     if stock_reserved_check.reserved_quantity == 0:
        #         print(rec)
        ctx = dict(self.env.context)
        ctx.pop('default_immediate_transfer', None)
        self = self.with_context(ctx)
        # add user as a follower
        self.message_subscribe([self.env.user.partner_id.id])
        # If no lots when needed, raise error
        picking_type = self.picking_type_id
        precision_digits = self.env['decimal.precision'].precision_get('Product Unit of Measure')
        no_quantities_done = all(float_is_zero(move_line.qty_done, precision_digits=precision_digits) for move_line in self.move_line_ids.filtered(lambda m: m.state not in ('done', 'cancel')))
        no_reserved_quantities = all(float_is_zero(move_line.product_qty, precision_rounding=move_line.product_uom_id.rounding) for move_line in self.move_line_ids)
        if no_reserved_quantities and no_quantities_done:
            raise UserError(_('You cannot validate a transfer if no quantites are reserved nor done. To force the transfer, switch in edit more and encode the done quantities.'))
        if picking_type.use_create_lots or picking_type.use_existing_lots:
            lines_to_check = self.move_line_ids
            if not no_quantities_done:
                # Only lines with a positive done quantity need a lot/serial.
                lines_to_check = lines_to_check.filtered(
                    lambda line: float_compare(line.qty_done, 0,
                                               precision_rounding=line.product_uom_id.rounding)
                )
            for line in lines_to_check:
                product = line.product_id
                if product and product.tracking != 'none':
                    if not line.lot_name and not line.lot_id:
                        raise UserError(_('You need to supply a Lot/Serial number for product %s.') % product.display_name)
        # Propose to use the sms mechanism the first time a delivery
        # picking is validated. Whatever the user's decision (use it or not),
        # the method button_validate is called again (except if it's cancel),
        # so the checks are made twice in that case, but the flow is not broken
        sms_confirmation = self._check_sms_confirmation_popup()
        if sms_confirmation:
            return sms_confirmation
        if no_quantities_done:
            # Nothing encoded as done: offer the immediate-transfer wizard.
            view = self.env.ref('stock.view_immediate_transfer')
            wiz = self.env['stock.immediate.transfer'].create({'pick_ids': [(4, self.id)]})
            return {
                'name': _('Immediate Transfer?'),
                'type': 'ir.actions.act_window',
                'view_mode': 'form',
                'res_model': 'stock.immediate.transfer',
                'views': [(view.id, 'form')],
                'view_id': view.id,
                'target': 'new',
                'res_id': wiz.id,
                'context': self.env.context,
            }
        if self._get_overprocessed_stock_moves() and not self._context.get('skip_overprocessed_check'):
            view = self.env.ref('stock.view_overprocessed_transfer')
            wiz = self.env['stock.overprocessed.transfer'].create({'picking_id': self.id})
            return {
                'type': 'ir.actions.act_window',
                'view_mode': 'form',
                'res_model': 'stock.overprocessed.transfer',
                'views': [(view.id, 'form')],
                'view_id': view.id,
                'target': 'new',
                'res_id': wiz.id,
                'context': self.env.context,
            }
        # Check backorder should check for other barcodes
        if self._check_backorder():
            return self.action_generate_backorder_wizard()
        self.action_done()
        return
    @api.onchange('is_nonsalealewarehouse_transfar')
    def select_nonsale_ale_stock(self):
        """
        Onchange for the LIM-transfer flag: when set, force the operation
        type to the internal transfer of the company's non-saleable
        warehouse and restrict the selectable picking types accordingly.
        :return: a dict with a domain for picking_type_id
        """
        self.branch_id = self.env.user.branch_id
        if self.is_nonsalealewarehouse_transfar:
            self.is_nonsalealewarehouse_transfar = True
            print('come to condition is_nonsalealewarehouse_transfar')
            warehouse = self.env['stock.warehouse'].sudo().search([('is_non_saleable_warehouse', '=', True),('company_id', '=',self.env.user.company_id.id)], limit=1)
            print(warehouse.id)
            picking_type = self.env['stock.picking.type'].sudo().search(
                [('warehouse_id', '=', warehouse.id), ('sequence_code', '=', 'INT')])
            print(picking_type)
            print(picking_type.warehouse_id.name)
            self.picking_type_id = picking_type.id
            return {
                'domain': {
                    'picking_type_id': [('warehouse_id', '=', warehouse.id), ('sequence_code', '=', 'INT')]
                },
                # 'default_picking_type_id': [('warehouse_id', '=', warehouse.id), ('sequence_code', '=', 'INT')]
                # lambda self: self.env['stock.picking.type'].browse(self._context.get('default_picking_type_id')).default_location_src_id
            }
        else:
            return {
                'domain': {
                    'picking_type_id': []
                }
            }
    # def _do_partial_func_unreserved(self):
    #     print('_do_partial_unreserved')
    # @api.onchange('fpo_order_id')
    # def fpo_fall_into(self):
    #     print('work')
    # Flag marking this picking as a LIM (non-saleable warehouse) transfer.
    is_nonsalealewarehouse_transfar = fields.Boolean(string='Lim transfar ', default=False)
    # Vendor bill whose lines seed the move lines (see onchange above).
    commercial_invoice = fields.Many2one('account.move',domain=[('type','=','in_invoice')],string="Commercial Invoice")
    def action_assign(self):
        """ Check availability of picking moves.
        This has the effect of changing the state and reserve quants on available moves, and may
        also impact the state of the picking as it is computed based on move's states.
        Raises ValidationError when reservation reports unavailable product.
        @return: True
        """
        res = {}
        self.filtered(lambda picking: picking.state == 'draft').action_confirm()
        moves = self.mapped('move_lines').filtered(lambda move: move.state not in ('draft', 'cancel', 'done'))
        if not moves:
            raise UserError(_('Nothing to check the availability for.'))
        # If a package level is done when confirmed its location can be different than where it will be reserved.
        # So we remove the move lines created when confirmed to set quantity done to the new reserved ones.
        package_level_done = self.mapped('package_level_ids').filtered(
            lambda pl: pl.is_done and pl.state == 'confirmed')
        package_level_done.write({'is_done': False})
        is_raise_validation_error = moves._action_assign()
        package_level_done.write({'is_done': True})
        if is_raise_validation_error:
            # message = 'product is no available '
            # raise osv.except_osv(_('warning'), _(message))
            # res['warning'] = {'title': _('Warning'), 'message': message}
            # raise ValueError('product not available')
            raise ValidationError('product is no available ')
        return True
    # fpo_order_id = fields.Many2one('foreign.purchase.order', string= 'Foreign purchase order ')
    # @api.onchange('move_ids_without_package.product_uom_qty')
    # # def test(self):
    # #     print('***********************')
    # #     print('***********************')
    # #     print('***********************')
| [
"[email protected]"
]
| |
7349470c11f971143cef89c6ff43f9c2f26917ed | faf9e9119deece79c97587935518965d036ab889 | /mysql2sqlite.py | d9ad1f186af6825883182b2cb0ca3ba167571138 | []
| no_license | hahanonym/mysql2sqlite | 5092ceb99a2c7d71559caa5d385e4af5322c1eff | b37f1e2ff0d96ddf2d4623679c785328171eb6a1 | refs/heads/master | 2021-05-27T11:23:23.631364 | 2009-08-03T02:04:51 | 2009-08-03T02:04:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,492 | py |
from optparse import OptionParser
import re
import os

__doc__ = """
This is a script that will convert a MySQL schema generated by say MySQLWorkbench into a schema that can be used for sqlite.
"""
__author__ = 'Jimmy John'
__version__ = '1.0'

# Statements that have no sqlite equivalent; matching lines are dropped.
ignore_list = [
    r'SET @.*?\=@@(?P<idv>.*?), (?P=idv)\=.*?\;',
    r'CREATE SCHEMA IF NOT EXISTS.*?DEFAULT CHARACTER SET.*?\;',
    r'USE \`.*?\`;',
    r'SET .*?@.*?\;',
    r'INDEX .*?',
    r'ON DELETE NO ACTION,',
    r'ON DELETE NO ACTION,',
]

# (pattern, replacement) pairs: group(1) of the pattern is the MySQL-only
# fragment to strip from the line (schema prefix, ENGINE clause, ...).
modify_list = [
    (r'CREATE TABLE IF NOT EXISTS (?P<idv>.*?\.).*?\(', ''),
    (r'REFERENCES (?P<schema>.*?\.).*?', ''),
    (r'(?P<engine>ENGINE = .*?)\;', ''),
    (r'(?P<idv>ON DELETE NO ACTION)', ''),
    (r'(?P<idv>ON UPDATE NO ACTION)',''),
]

# Pre-compile everything once (case-insensitive, as MySQL keywords may vary).
ignore_list_compiled = [re.compile(pattern, re.I) for pattern in ignore_list]
modify_list_compiled = [(re.compile(pattern, re.I), replacement)
                        for pattern, replacement in modify_list]
# ------------------------------
def translate(line, handle_c):
    """Translate one line of MySQL DDL into its sqlite equivalent.

    Returns '' (and logs IGNORED to *handle_c*) for statements sqlite does
    not need; otherwise returns the (possibly MODIFIED) line prefixed with
    a newline, logging any modification to *handle_c*.
    """
    # Drop statements with no sqlite counterpart.
    for regexobj in ignore_list_compiled:
        if regexobj.match(line):
            handle_c.write('IGNORED:\n\t %s\n' % line)
            return ''
    # Strip MySQL-only fragments (schema prefixes, ENGINE clauses, ...).
    for regexobj, replacement in modify_list_compiled:
        match = regexobj.match(line)
        if match is None:
            continue
        fragment = match.group(1)
        # re.escape: the captured fragment (e.g. "mydb.") is literal text,
        # not a regex.  The original passed it to re.sub unescaped, so a
        # '.' inside it could match any character and over-replace.
        new_line = re.sub(re.escape(fragment), replacement, line)
        handle_c.write('MODIFIED:\n\t %s => %s\n' % (line, new_line))
        return '\n%s' % new_line
    return '\n%s' % line
# -------------------
def process(options):
    """
    Run every line of the source schema through translate(), writing the
    sqlite-compatible output to the destination file and the change log to
    the changes file.
    """
    # Context managers guarantee the handles are closed even on error;
    # exit order (changes, destination, source) matches the original closes.
    with open(options.source, 'r') as handle_s, \
         open(options.destination, 'w') as handle_d, \
         open(options.changes, 'w') as handle_c:
        for raw_line in handle_s:
            handle_d.write('%s' % translate(raw_line.strip(), handle_c))
# ----------
def main():
    """
    Accept the cmd line args, do necessary error checking and run the
    conversion (see process()).
    """
    #read in args
    usage = 'usage: %prog [options]'
    parser = OptionParser(usage)
    parser.add_option("-s", "--source", dest="source", default="mysql_schema_mysql2sqlite.txt",
                      help="name of file containing the MySQL schema as exported by MySQLWorkbench [default: %default]")
    parser.add_option("-d", "--dest", dest="destination", default="sqlite_schema_mysql2sqlite.txt",
                      help="name of target file to contain the sql schema that can be used in Sqlite [default: %default]")
    parser.add_option("-c", "--change", dest="changes", default="changes_schema_mysql2sqlite.txt",
                      help="differences b/w the two schemas. Lines from source can be IGNORED or MODIFIED [default: %default]")
    (options, args) = parser.parse_args()
    #check if i/p file exists
    if not os.path.exists(options.source):
        # parser.error prints the message and exits with status 2.
        parser.error('input filename %s not found' % options.source)
    process(options)
# --------------------------
if __name__ == '__main__':
    # Script entry point.
    main()
| [
"[email protected]"
]
| |
9866afdf799b0295565428d97fc68db05e20462b | c5631592cf0d4512f0c6807d1c65613decc8e9de | /python/assign25.py | 1d7c01df5c58be33fc625b507df0d7e41e0d24d5 | []
| no_license | Iswaria-A/ishu | f676b5926a087721519cf5f5a7ac62a28b050a7c | b6139b060c046dd7df66e34403d4dd9889c9aa63 | refs/heads/master | 2022-10-28T20:40:48.745638 | 2019-12-13T13:25:01 | 2019-12-13T13:25:01 | 227,839,123 | 0 | 2 | null | 2022-10-11T22:49:12 | 2019-12-13T12:48:43 | JavaScript | UTF-8 | Python | false | false | 72 | py | f=open("text.txt","r")
# Show the first two lines of text.txt (f is the handle opened just above).
print(f.readline())
print(f.readline())
f.close() | [
"[email protected]"
]
| |
75f85c94fa15463111f270dbb6aaaac6ab4a7186 | 257564cbf0f0482428e029c9129b1fb3688aabab | /personal/views.py | 1c21b7e7f6eea830ff4e12c8b18c508be2462b4e | []
| no_license | ash018/mysite | d3c1516c66a27057b90911ec641ad0344edf25cd | 635872b7870baf6ac70415d0607eecbfe20c0fdf | refs/heads/master | 2020-09-15T22:41:26.750365 | 2016-09-23T08:48:15 | 2016-09-23T08:48:15 | 67,899,564 | 0 | 0 | null | 2016-09-10T23:36:16 | 2016-09-10T23:28:08 | Python | UTF-8 | Python | false | false | 311 | py | from django.shortcuts import render
from django.http import HttpResponse
def index(request):
    """Render the personal site's home page."""
    template_name = 'personal/home.html'
    return render(request, template_name)
def contact(request):
    """Render the contact page with the owner's contact details."""
    context = {'content': ['If you like to contact call me @ 01681355216 Or mail me @ [email protected]']}
    return render(request, 'personal/basic.html', context)
| [
"[email protected]"
]
| |
ea8907a72872f8e0b774d3c2247d83298774e8e9 | 239ac24a9ae640e7fc103c06995c84259b1f175a | /BACKEND/flask_server/app.py | 162b02688d5e7ddeaac178d8b207abb17501d6ad | []
| no_license | Rahul140799/SIH2020 | 268838feb3708ba561a0d141e6326afb83e2aaeb | ce8c56b1813fcc7b0db152638147d02c052d0630 | refs/heads/master | 2023-04-15T20:19:18.611375 | 2021-04-11T13:36:00 | 2021-04-11T13:36:00 | 356,876,943 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,303 | py | from __future__ import print_function
from flask import Flask, request, jsonify
import random
import string
from flask_socketio import SocketIO
import sys
import spacy
from spacy.matcher import PhraseMatcher
import en_core_med7_lg
async_mode = None
# Med7 clinical NER model: extracts DOSAGE/STRENGTH/FORM/ROUTE/FREQUENCY/DURATION entities.
med7 = en_core_med7_lg.load()
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'  # NOTE(review): hard-coded secret -- move to env/config for production.
socketio = SocketIO(app, async_mode=async_mode, ping_timeout=10000)
# Custom spaCy pipeline loaded from ./medOP -- presumably medicine-name NER; confirm model contents.
nlp = spacy.load('./medOP')
#helper functions
def randomString(stringLength=8):
    """Return a random string of *stringLength* lowercase ASCII letters."""
    alphabet = string.ascii_lowercase
    return ''.join(random.choices(alphabet, k=stringLength))
def handle_disconnect():
    # Socket.IO 'disconnect' callback (registered inside handle_message).
    print('disconnected', file=sys.stderr)
@socketio.on('connect')
def handle_message():
    # Fired on every client connect: greet the client and (re)register the
    # disconnect handler.  Registering here runs once per connection.
    print('received socket', file=sys.stderr)
    socketio.emit('message', {'data': '12'})
    socketio.on_event('disconnect', handle_disconnect)
    return "Connected!!!"
def get_medicine_details(text):
    """Run the med7 NER model over *text*; map entity label -> entity text.

    If a label occurs more than once, the last occurrence wins (same as the
    original assignment loop).
    """
    doc = med7(text)
    return {ent.label_: ent.text for ent in doc.ents}
def get_final_prescription(response,transcript):
    # response: medicine NER spans as (text, label, start_char, end_char)
    # tuples; transcript: the full dictated text.  Each medicine's details
    # are parsed from the transcript slice between its mention and the next
    # medicine mention (or the end of the transcript).
    final_response={'medicines':[]}
    for index,item in enumerate(response):
        detailed_medicine={}
        if ((index+1)>=len(response)):
            end=len(transcript)
        else:
            end=response[index+1][2]
        start=item[2]
        medicine=item[0]
        print("Test 3",transcript[start:end])
        result=get_medicine_details(transcript[start:end])
        # Missing entity labels default to an empty string.
        detailed_medicine['medicine']=medicine
        detailed_medicine['dosage']=result['DOSAGE'] if (result.get('DOSAGE')!=None) else ''
        detailed_medicine['strength']=result['STRENGTH'] if (result.get('STRENGTH')!=None) else ''
        detailed_medicine['form']=result['FORM'] if (result.get('FORM')!=None) else ''
        detailed_medicine['route']=result['ROUTE'] if (result.get('ROUTE')!=None) else ''
        detailed_medicine['frequency']=result['FREQUENCY'] if (result.get('FREQUENCY')!=None) else ''
        detailed_medicine['duration']=result['DURATION'] if (result.get('DURATION')!=None) else ''
        # 'onone' = morning/afternoon/night flags derived from frequency text.
        detailed_medicine['onone'] = onoone(detailed_medicine['frequency'])
        print("Test 4",final_response)
        print("Test 5",detailed_medicine)
        print("Test 6",onoone(detailed_medicine['frequency']))
        final_response['medicines'].append(detailed_medicine)
    return final_response
def onoone(text):
    """Encode presence of morning/afternoon/night in *text* as e.g. '1-0-1'."""
    flags = ["1" if text.find(period) >= 0 else "0"
             for period in ("morning", "afternoon", "night")]
    return "-".join(flags)
#API routes
@app.route('/')
def home():
    # Health-check / landing route.
    print('BASE URL API ', file=sys.stderr)
    return jsonify({"message": "BASE FLASK URL"})
@app.route('/api/test/<message>')
def model(message):
    # Debug route: broadcast an event named after the URL segment.
    socketio.emit(message, {'data': 42})
    return jsonify({"message": "running socket to emit message"})
@app.route('/api/model/process', methods=['POST'])
def modelProcess():
    # Main pipeline endpoint: run the custom NER over the posted transcript,
    # build the structured prescription and push it to the client's socket.
    data = request.json
    print("DATA", data)
    # Socket room id is the uploaded filename minus its 5-char extension.
    socketId = data['doctor']['filename'][:-5]
    print(socketId, file=sys.stderr)
    socketio.emit('message', data)
    data = request.json
    transcript = data['doctor']['doc']['text']
    doc=nlp(transcript)
    # Medicine mentions as (text, label, start_char, end_char) spans.
    preReady = [(ent.text, ent.label_,ent.start_char, ent.end_char) for ent in doc.ents]
    print("Test",preReady)
    socketio.emit('message', preReady)
    resp = get_final_prescription(preReady,transcript)
    print("Test2",resp)
    socketio.emit('message', resp)
    socketio.emit(socketId, resp)
    return jsonify({
        "message": "running socket to emit message",
        "sockId": socketId
    })
# def find_symptoms(data):
# print("Entered FIND SYMPTOMS")
# doc = nlp(data)
# response = {
# 'message': "This is your response"
# }
# symptoms = " "
# intensity = " "
# entities = [(ent.text, ent.label_) for ent in doc.ents]
# for item in entities:
# if item[1] == 'SYMPTOMS':
# symptoms += item[0] + ', '
# if item[1] == 'INTENSITY':
# intensity += item[0] + ', '
# if symptoms.strip:
# response['symptoms'] = symptoms[:-2].strip()
# if intensity.strip():
# response['intensity'] = intensity[:-2].strip()
# return response
if __name__ == "__main__":
    # Serve the Flask app through the Socket.IO server on all interfaces.
    socketio.run(app, host='0.0.0.0', port=5000)
| [
"[email protected]"
]
| |
39382fd79e7090be05bd74fab26cc30b09251711 | fbaf479c2ebddeee35f548d516a7adade35f64d5 | /csp/finite_set.py | caf92867d0bea8e3448b42584307c58f363d593d | []
| no_license | modelcheckutp/CSP-Z3 | 7b93b30c4525acd3cbdbf1b628ef44990a3d1015 | 94fd3735c239209f54ab8ad7af6b57f0e5c66b56 | refs/heads/master | 2020-03-20T14:33:23.908921 | 2018-06-28T22:10:39 | 2018-06-28T22:10:39 | 137,487,969 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,439 | py | ##################################################################
# The finite set theory based on BitVec
# Kun Wei 17/05/2017
##################################################################
from z3 import *
class FSetDecl():
    # Finite sets over a fixed alphabet encoded as z3 bit-vectors:
    # bit i is 1  <=>  self.alphabet[i] is a member, so the set algebra
    # maps directly onto bitwise operations on BitVec terms.
    def __init__(self, l):
        self.alphabet = l  # ordered universe of elements
        self.size = len(l)  # one bit per element
    def declare(self, name):
        # Fresh symbolic set variable.
        return BitVec(name, self.size)
    def union(self, s1, s2):
        assert (s1.sort() == s2.sort())
        return s1|s2
    def intersection(self, s1, s2):
        assert (s1.sort() == s2.sort())
        return s1&s2
    def complement(self, s):
        return ~s
    def difference(self, s1, s2):
        # s1 \ s2 == s1 & ~s2
        assert (s1.sort() == s2.sort())
        return self.intersection(s1, self.complement(s2))
    def member(self, e, s):
        # Membership test: isolate e's bit.  Raises ValueError if e is not
        # in the alphabet.
        index = self.alphabet.index(e)
        be = BitVecVal(1, self.size)<<index
        #print(be)
        return (be & s)!= 0
    def add(self, e, s):
        # Return s with e's bit switched on.
        index = self.alphabet.index(e)
        be = BitVecVal(1, self.size) << index
        #print(be)
        return (be | s)
    def emptyset(self):
        return BitVecVal(0, self.size)
    def fullset(self):
        return ~BitVecVal(0, self.size)
    def toElements(self, b):
        # Decode a concrete bit-vector value back into a list of elements.
        s = []
        be = BitVecVal(1,self.size)
        for i in range(self.size):
            t = simplify(b&(be<<i))
            if not (t == 0):
                s.append(self.alphabet[i])
        return s
    def toSet(self,l):
        # Build a set value from a list of elements.
        s = self.emptyset()
        for i in range(len(l)):
            s = self.add(l[i], s)
        return s
# define a finite set sort
def FSetSort(l): # l is a list of all elements in the finite set
    # A set over n elements is represented by the n-bit bit-vector sort.
    return BitVecSort(len(l))
### for testing
#Channel, (a,b,c,d) = EnumSort('Channel', ('a','b','c','d'))
#FSet = FSetDecl([a,b,c,d])
#print(simplify(FSet.toSet([a,b,c])))
#s1 = FSet.declare('s1')
#s2 = FSet.declare('s2')
#s = Solver()
#s.add(s1== FSet.add(b,FSet.add(a,FSet.emptyset())))
#s.add(s2== FSet.add(c,FSet.add(a,FSet.emptyset())))
#print(FSet.toElements(BitVecVal(14,4)))
#s.add(FSet.union(s1,s2) == FSet.add(c, FSet.add(b,FSet.add(a,FSet.emptyset()))))
#s.add(FSet.intersection(s1,s2) == FSet.add(a,FSet.emptyset()) )
#s.add(FSet.complement(s1) == FSet.add(c, FSet.add(d, FSet.emptyset())))
#s.add(FSet.difference(s1,s2) == FSet.add(b, FSet.emptyset()))
#print(s.check()) | [
"[email protected]"
]
| |
82e143ab368a2df624d5ba0dd94ba697a8484a59 | 59e87634c67508bf7eba8c8b9845354aefa57bc7 | /DL/yolo/YOLOV1/py_cpu_nms.py | 2ddfa1b49d7699c559417a89c11064070d65ca39 | []
| no_license | Caohengrui/MLAndDL | 48729b94b2232e628b699cf8d0d4a6c6e81a36f5 | d0637f58f45e9c091cd90bbfe9c207223d0994f3 | refs/heads/master | 2023-03-16T01:06:03.316463 | 2020-04-14T07:44:15 | 2020-04-14T07:44:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,060 | py | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
import numpy as np
def py_cpu_nms(dets, scores, thresh):
    """Pure Python NMS baseline.

    dets:   (N, 4) array of boxes as [x1, y1, x2, y2]
    scores: (N,) confidence per box
    Returns the indices of the boxes kept after suppressing any box whose
    IoU with a higher-scoring kept box exceeds *thresh*.
    """
    x1, y1 = dets[:, 0], dets[:, 1]
    x2, y2 = dets[:, 2], dets[:, 3]

    # Box areas, with the legacy +1 pixel convention.
    areas = (x2 - x1 + 1) * (y2 - y1 + 1)
    # Candidate indices, highest score first.
    order = scores.argsort()[::-1]

    keep = []
    while order.size > 0:
        best = order[0]
        keep.append(best)
        # Intersection of the best box with every remaining candidate.
        ix1 = np.maximum(x1[best], x1[order[1:]])
        iy1 = np.maximum(y1[best], y1[order[1:]])
        ix2 = np.minimum(x2[best], x2[order[1:]])
        iy2 = np.minimum(y2[best], y2[order[1:]])

        iw = np.maximum(0.0, ix2 - ix1 + 1)
        ih = np.maximum(0.0, iy2 - iy1 + 1)
        inter = iw * ih

        iou = inter / (areas[best] + areas[order[1:]] - inter)

        # Keep only candidates that overlap the best box at most *thresh*;
        # +1 re-aligns indices since order[0] was consumed.
        survivors = np.where(iou <= thresh)[0]
        order = order[survivors + 1]
    return keep
| [
"[email protected]"
]
| |
219affeadc09cfc97eeb506de2fe49c9cd484bc3 | e977a6175fb72ee47abb1b8087d059817460f9fb | /control/urls.py | 6713fd31c07c18bb54ae9d598d04364782dc2248 | []
| no_license | sanbond/youtube | b0234e81456607e020aeb02256e831dd34dffc52 | b6a542c2f061659de536c16c9b806cd0e599c21b | refs/heads/master | 2021-01-12T06:40:41.782399 | 2013-01-24T08:30:11 | 2013-01-24T08:30:11 | 77,410,830 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 122 | py | from django.conf.urls.defaults import *
urlpatterns = patterns('control.views',
url(r'^$', 'index', name='index'),
)
| [
"[email protected]"
]
| |
95ea3a56c120bb0d2c831b76684d982b54b6c5aa | 68bad4b3d92872bb5b77b4ee503e588d20511a27 | /python/core/test_scripts_MPI/my_population_collect_spikes_mpi.py | ff1caea364466e952b5219ea999cbb2671552f87 | []
| no_license | mickelindahl/bgmodel | 647be626a7311a8f08f3dfc897c6dd4466fc0a92 | 78e6f2b73bbcbecd0dba25caf99f835313c914ee | refs/heads/master | 2023-08-29T13:57:04.122115 | 2022-02-11T14:28:23 | 2022-02-11T14:28:23 | 17,148,386 | 7 | 3 | null | null | null | null | UTF-8 | Python | false | false | 677 | py | '''
Created on Sep 22, 2014
@author: mikael
'''
import numpy
import pickle
import sys
from toolbox.data_to_disk import mkdir
from toolbox.my_nest import collect_spikes_mpi
from toolbox.parallelization import comm
print sys.argv
# Output file path is the single CLI argument, with 'data' appended.
fileName, =sys.argv[1:]
fileName+='data'
# Per-rank dummy spike data: start/end arrays derived from the MPI rank.
s,e=numpy.ones(2)*comm.rank(),numpy.ones(2)*comm.rank()+1
# Gather spike arrays from all MPI ranks.
s, e= collect_spikes_mpi(s, e)
mkdir('/'.join(fileName.split('/')[0:-1]))
# Only the root rank writes the pickled result.
if comm.rank()==0:
    print 'File name'
    print fileName
    if 4<len(fileName) and fileName[-4:]!='.pkl':
        fileName=fileName+'.pkl'
    f=open(fileName, 'wb') #open in binary mode
    pickle.dump([s,e], f, -1)
    f.close()
| [
"[email protected]"
]
| |
c64bb122fa1b142b05e8315ac85b8ea4cec85786 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /gaussiana/ch3_2019_03_08_14_00_41_432668.py | 4bdc1e00e92765b8d5b29e95dceff6a7256f3781 | []
| no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 292 | py | import math
def calcula_gaussiana(x, mi, sigma):
    """Return the Gaussian pdf value at x for mean mi and std deviation sigma.

    Keeps the exercise's special guards: returns 0 at the standard-normal
    point (x=0, mi=0, sigma=1) and for the degenerate/forbidden sigma values.
    """
    if (sigma == 1 and x == 0 and mi == 0):
        return 0
    if (sigma == 0 or sigma == - math.sqrt(2*math.pi) or sigma == 1/math.sqrt(2*math.pi)):
        return 0
    # Gaussian pdf: (1 / (sigma * sqrt(2*pi))) * exp(-((x - mi)/sigma)**2 / 2).
    # The original used ** instead of exp, grouped 1/sigma*sqrt(2*pi) wrongly,
    # and was missing a '*' (so -0.5(...) raised TypeError at runtime).
    return (1/(sigma*math.sqrt(2*math.pi))) * math.exp(-0.5*((x - mi)/sigma)**2)
"[email protected]"
]
| |
34c06dc74f45348f0075ae426c9ad58a2c008486 | 9bdc2e9f0382bd96ef3af4f9eca94fa58c5a4dc1 | /keras/mnist-privacy/model/pipeline_train.py | 0687d543d7075f6d1210e6bc5a96b7c003608086 | [
"Apache-2.0"
]
| permissive | shicongisme/models | 90cf9a84b47c8d2a4de51fdfb7f6c4b9f796e317 | d8df07877aa8b10ce9b84983bb440af75e84dca7 | refs/heads/master | 2022-02-01T12:01:11.443827 | 2019-05-26T22:25:04 | 2019-05-26T22:25:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,112 | py | # Copyright 2018, The TensorFlow Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Training a CNN on MNIST with differentially private SGD optimizer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from privacy.analysis.rdp_accountant import compute_rdp
from privacy.analysis.rdp_accountant import get_privacy_spent
from privacy.optimizers import dp_optimizer
# Command-line flags controlling DP-SGD vs vanilla SGD training and the
# differential-privacy hyperparameters (noise, clipping, microbatching).
tf.flags.DEFINE_boolean('dpsgd', True, 'If True, train with DP-SGD. If False,'
                        'train with vanilla SGD.')
tf.flags.DEFINE_float('learning_rate', 0.08, 'Learning rate for training')
tf.flags.DEFINE_float('noise_multiplier', 1.12,
                      'Ratio of the standard deviation to the clipping norm')
tf.flags.DEFINE_float('l2_norm_clip', 1.0, 'Clipping norm')
tf.flags.DEFINE_integer('batch_size', 32, 'Batch size')
tf.flags.DEFINE_integer('epochs', 1, 'Number of epochs')
tf.flags.DEFINE_integer('microbatches', 32,
                        'Number of microbatches (must evenly divide batch_size')
tf.flags.DEFINE_string('model_dir', None, 'Model directory')
tf.flags.DEFINE_string('export_dir', './pipeline_tfserving/0', 'Export dir')
FLAGS = tf.flags.FLAGS
def cnn_model_fn(features, labels, mode):
"""Model function for a CNN."""
# Define CNN architecture using tf.keras.layers.
input_layer = tf.reshape(features['x'], [-1, 28, 28, 1])
y = tf.keras.layers.Conv2D(16, 8,
strides=2,
padding='same',
kernel_initializer='he_normal').apply(input_layer)
y = tf.keras.layers.MaxPool2D(2, 1).apply(y)
y = tf.keras.layers.Conv2D(32, 4,
strides=2,
padding='valid',
kernel_initializer='he_normal').apply(y)
y = tf.keras.layers.MaxPool2D(2, 1).apply(y)
y = tf.keras.layers.Flatten().apply(y)
y = tf.keras.layers.Dense(32, kernel_initializer='he_normal').apply(y)
logits = tf.keras.layers.Dense(10, kernel_initializer='he_normal').apply(y)
# Calculate loss as a vector (to support microbatches in DP-SGD).
vector_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
labels=labels, logits=logits)
# Define mean of loss across minibatch (for reporting through tf.Estimator).
scalar_loss = tf.reduce_mean(vector_loss)
# Configure the training op (for TRAIN mode).
if mode == tf.estimator.ModeKeys.TRAIN:
if FLAGS.dpsgd:
# Use DP version of GradientDescentOptimizer. For illustration purposes,
# we do that here by calling make_optimizer_class() explicitly, though DP
# versions of standard optimizers are available in dp_optimizer.
dp_optimizer_class = dp_optimizer.make_optimizer_class(
tf.train.GradientDescentOptimizer)
optimizer = dp_optimizer_class(
learning_rate=FLAGS.learning_rate,
noise_multiplier=FLAGS.noise_multiplier,
l2_norm_clip=FLAGS.l2_norm_clip,
num_microbatches=FLAGS.microbatches)
opt_loss = vector_loss
else:
optimizer = tf.train.GradientDescentOptimizer(
learning_rate=FLAGS.learning_rate)
opt_loss = scalar_loss
global_step = tf.train.get_global_step()
train_op = optimizer.minimize(loss=opt_loss, global_step=global_step)
# In the following, we pass the mean of the loss (scalar_loss) rather than
# the vector_loss because tf.estimator requires a scalar loss. This is only
# used for evaluation and debugging by tf.estimator. The actual loss being
# minimized is opt_loss defined above and passed to optimizer.minimize().
return tf.estimator.EstimatorSpec(mode=mode,
loss=scalar_loss,
train_op=train_op)
# Add evaluation metrics (for EVAL mode).
elif mode == tf.estimator.ModeKeys.EVAL:
eval_metric_ops = {
'accuracy':
tf.metrics.accuracy(
labels=labels,
predictions=tf.argmax(input=logits, axis=1))
}
return tf.estimator.EstimatorSpec(mode=mode,
loss=scalar_loss,
eval_metric_ops=eval_metric_ops)
def load_mnist():
  """Loads MNIST and preprocesses to combine training and validation data.

  Returns:
    Tuple (train_data, train_labels, test_data, test_labels) where the image
    arrays are float32 scaled into [0, 1] and the labels are 1-D int32 arrays.
  """
  train, test = tf.keras.datasets.mnist.load_data()
  train_data, train_labels = train
  test_data, test_labels = test
  # Scale pixel intensities from [0, 255] down to [0, 1].
  train_data = np.array(train_data, dtype=np.float32) / 255
  test_data = np.array(test_data, dtype=np.float32) / 255
  train_labels = np.array(train_labels, dtype=np.int32)
  test_labels = np.array(test_labels, dtype=np.int32)
  # Sanity-check normalisation and label shapes before training starts.
  assert train_data.min() == 0.
  assert train_data.max() == 1.
  assert test_data.min() == 0.
  assert test_data.max() == 1.
  assert len(train_labels.shape) == 1
  assert len(test_labels.shape) == 1
  return train_data, train_labels, test_data, test_labels
def main(unused_argv):
tf.logging.set_verbosity(tf.logging.INFO)
if FLAGS.batch_size % FLAGS.microbatches != 0:
raise ValueError('Number of microbatches should divide evenly batch_size')
# Load training and test data.
train_data, train_labels, test_data, test_labels = load_mnist()
# Instantiate the tf.Estimator.
mnist_classifier = tf.estimator.Estimator(model_fn=cnn_model_fn,
model_dir=FLAGS.model_dir)
# Create tf.Estimator input functions for the training and test data.
train_input_fn = tf.estimator.inputs.numpy_input_fn(
x={'x': train_data},
y=train_labels,
batch_size=FLAGS.batch_size,
num_epochs=FLAGS.epochs,
shuffle=True)
eval_input_fn = tf.estimator.inputs.numpy_input_fn(
x={'x': test_data},
y=test_labels,
num_epochs=1,
shuffle=False)
# Define a function that computes privacy budget expended so far.
def compute_epsilon(steps):
"""Computes epsilon value for given hyperparameters."""
if FLAGS.noise_multiplier == 0.0:
return float('inf')
orders = [1 + x / 10. for x in range(1, 100)] + list(range(12, 64))
sampling_probability = FLAGS.batch_size / 60000
rdp = compute_rdp(q=sampling_probability,
noise_multiplier=FLAGS.noise_multiplier,
steps=steps,
orders=orders)
# Delta is set to 1e-5 because MNIST has 60000 training points.
return get_privacy_spent(orders, rdp, target_delta=1e-5)[0]
# Training loop.
steps_per_epoch = 60000 // FLAGS.batch_size
for epoch in range(1, FLAGS.epochs + 1):
# Train the model for one epoch.
mnist_classifier.train(input_fn=train_input_fn, steps=steps_per_epoch)
# Evaluate the model and print results
eval_results = mnist_classifier.evaluate(input_fn=eval_input_fn)
test_accuracy = eval_results['accuracy']
print('Test accuracy after %d epochs is: %.3f' % (epoch, test_accuracy))
# Compute the privacy budget expended so far.
if FLAGS.dpsgd:
eps = compute_epsilon(epoch * steps_per_epoch)
print('For delta=1e-5, the current epsilon is: %.2f' % eps)
else:
print('Trained with vanilla non-private SGD optimizer')
# Export the model
if FLAGS.export_dir is not None:
# [-1, 28, 28, 1]
image = tf.placeholder(tf.float32, [None, 28, 28])
input_fn = tf.estimator.export.build_raw_serving_input_receiver_fn({
'x': image,
})
mnist_classifier.export_savedmodel(FLAGS.export_dir, input_fn)
if __name__ == '__main__':
tf.app.run()
| [
"[email protected]"
]
| |
9eeb1275039275399a55eee9a0ae55d20a3de792 | 61a02aba5dde7c29ec65a87eb8a20af12d6c2b47 | /python basic/3118_최단경로2.py | ccce1ec174e94a7de53db843f1c74aeaad387cdd | []
| no_license | hksoftcorn/OfficialDocs | 0b4d0e2a71707e06ba7516e34ad176ee02726587 | cfd87d26efad484657f9493dead350cf0611a3e8 | refs/heads/master | 2023-06-30T07:09:33.641869 | 2021-07-31T14:35:28 | 2021-07-31T14:35:28 | 374,389,403 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 573 | py | V, E = map(int, input().split())
# Single-source shortest path on a directed weighted graph read from stdin
# (V and E are parsed on the line above).  This is Dijkstra's algorithm with
# an O(V) linear scan instead of a heap, i.e. O(V^2 + E) overall; vertices
# are numbered 1..V and the answer printed is dist(1 -> V).
G = [[] for _ in range(V + 1)]  # adjacency list: G[u] -> list of (v, weight)
for _ in range(E):
    u, v, w = map(int, input().split())
    G[u].append((v, w))
visited = [False] * (V + 1)  # True once a vertex's distance is finalised
dist = [0xfffffff] * (V + 1)  # 0xfffffff acts as "infinity"
dist[1] = 0  # vertex 1 is the source
p = [0] * (V + 1)  # predecessor of each vertex on its shortest path (unused output)
for _ in range(V):
    # Select the unvisited vertex with the smallest tentative distance.
    u, min_key = 1, 0xfffffff
    for i in range(1, V+1):
        if not visited[i] and min_key > dist[i]:
            u, min_key = i, dist[i]
    visited[u] = True
    # Relax every outgoing edge of the selected vertex.
    for v, w in G[u]:
        if not visited[v] and dist[v] > dist[u] + w:
            dist[v] = dist[u] + w
            p[v] = u
print(dist[V])
| [
"[email protected]"
]
| |
0e895adcd0cb0e4e2c255048f116fcc2ff22f71d | a7984f7540977663ed1f4adf0495ed7f1079494e | /leetcode/python/deleteNode/deleteNode.py | fef0c105112a287cdf8e0280f4b69a10a46280c8 | []
| no_license | gddh/practice | 79947e6e6d8b42b811797df0ad4197a4fd46a45b | 31329c671425aca8cb3209fab063c1cb71c79104 | refs/heads/master | 2020-03-11T06:21:39.055854 | 2019-10-27T01:32:36 | 2019-10-27T01:32:36 | 129,827,720 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 664 | py | class ListNode(object):
    def __init__(self, x):
        # Store the payload; a new node starts with no successor.
        self.val = x
        self.next = None
    def add_to_end(self, x):
        # Walk to the current tail and append a new node holding x.
        # Rebinding `self` only changes the local name; the caller's
        # reference to the head is untouched.
        while self.next:
            self = self.next
        self.next = ListNode(x)
    def print_lst(self):
        # Python 2 print statements: emits "val --> val --> ..." on one
        # line, then the trailing bare `print` ends the line.
        while self:
            print self.val, "-->",
            self = self.next
        print
def deleteNode(self, node):
while self.next and self.next.val != node:
self = self.next
self.next = self.next.next
if __name__ == "__main__":
    # Demo (Python 2 script): build 1 -> 2 -> 3 -> 4, print it, delete the
    # node holding 3, then print the remaining list.
    node = ListNode(1)
    for i in range(2, 5):
        node.add_to_end(i)
    node.print_lst()
    node.deleteNode(3)
    node.print_lst()
| [
"[email protected]"
]
| |
6f4373a988fbcd023ca39c1755c9d361c3e7daff | 2fd14347b7f43864d8153bd1c6d79198302d21ea | /ex.002 root finding/nr_problem_case.py | 3d33bede021e71d689a6e8c5cd4a3b1edf781a2e | []
| no_license | family9od/ECAre | 0fe27ff290eaa702c754fedef8953260a67592fc | ea875ea14be9d99a5e4f2191382e6eedc702b557 | refs/heads/master | 2020-06-17T02:33:30.651909 | 2016-11-15T07:45:31 | 2016-11-15T07:45:31 | 75,047,845 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 791 | py | # -*- coding: utf8 -*-
# 2010112033 이상형 9/20
"""
1변수 방정식의 근을 찾느 방법 중 Newton-Raphson method 를 사용하여
어떤 함수 g(x) 의 근을 찾고자 함
아래 예는 newton_raphson method 를 사용하기 곤란한 경우임
"""
# 1 변수 방정식의 근을 찾는 함수를 모아둔 rootfinding 모듈을 불러들임
import rootfinding as rf
def g(x):
    """Target function whose root we search for: g(x) = x**3 - 2*x + 2."""
    cubic_term = x ** 3
    linear_term = 2 * x
    return cubic_term - linear_term + 2
def dgdx(x):
    """Analytic derivative of g with respect to x: g'(x) = 3*x**2 - 2."""
    return (x ** 2.0) * 3.0 - 2.0
if "__main__" == __name__:
    # Starting from the given initial guess, search for x with g(x) = 0.
    # (This can take much longer than expected for this function.)
    x_nr = rf.newton(g, dgdx, 0)
    print('x = %g, f(%g) = %g' % (x_nr, x_nr, g(x_nr)))
| [
"CAD Client"
]
| CAD Client |
68bef6e78561193cc324512fb5675481762ef02e | 5146e0cd99e28d6a0d96a3680783bf194d8c656d | /lesson_016/weathermaker.py | 4ee89f802ae9f2744c1ebab132620e5455da6898 | []
| no_license | Yuri-prog/python_base1 | 78e2e7cc07ecf4d0e58f265b3970189a02b278ee | 3c69d24b87839e726dc35faa52401ce2704fb05b | refs/heads/master | 2023-02-26T16:09:14.758918 | 2021-01-11T10:37:35 | 2021-01-11T10:37:35 | 332,156,808 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,734 | py | import datetime
import requests
from bs4 import BeautifulSoup
today = datetime.datetime.today()
class WeatherMaker:
    """Scrapes the short Moscow forecast table from meteoinfo.ru.

    ``take_list`` downloads the page and caches the table cells;
    ``take_weather`` decodes the day/night readings for a date relative to
    the module-level ``today``.
    """
    def __init__(self):
        self.table_date = None
        self.list_of_values = []
    def take_list(self):
        """Fetch the forecast page and cache every short-table <td> cell."""
        page = requests.get('https://meteoinfo.ru/forecasts/russia/moscow-area/moscow')
        if page.status_code != 200:
            return
        soup = BeautifulSoup(page.text, features='html.parser')
        self.list_of_values = soup.find_all('td', {'class': "td_short_gr"})
    def take_weather(self, date):
        """Return ([date, 4 day readings, 4 night readings], day reading #2)."""
        offset = (date - today).days
        cells = self.list_of_values
        # Cell offset+1 holds the column header for the requested date; the
        # fixed strides below are row offsets within the scraped table.
        header = cells[offset + 1].text
        day_readings = [cells[base + offset].text for base in (17, 25, 33, 41)]
        night_readings = [cells[base + offset].text for base in (58, 66, 74, 82)]
        row = [date] + day_readings + night_readings
        return row, day_readings[1]
def test():
    # Manual smoke test: scrape the live site and print today's readings.
    # Requires network access, so the call at module bottom is commented out.
    weathermaker = WeatherMaker()
    weathermaker.take_list()
    print(weathermaker.take_weather(today))
# test()
| [
"[email protected]"
]
| |
993c86a8e53dcca1a61c43c777025674007a7e5a | 2f646730fee2338c96ec832cb4c8c2b0cf38d57d | /proctor/filter_set.py | 6f611fdac9d1a7cc31ac52d2a654a47a7c61abad | [
"MIT"
]
| permissive | jtallieu/proctor | 0d63927b64d0cfe59e863e5e1986bf25dc30e7d4 | 664e5faf8d2883cf6ca4d9ec2150c2e90a8f4468 | refs/heads/master | 2020-03-21T02:48:13.099928 | 2019-03-09T10:23:00 | 2019-03-09T10:23:00 | 138,020,255 | 0 | 0 | MIT | 2019-02-17T10:29:21 | 2018-06-20T10:57:31 | Python | UTF-8 | Python | false | false | 3,414 | py | import operator
class Filter(object):
    """
    Filter class to filter on properties of a dict.
    Arguments: property, op, value
    Will type the existing value of the dictionary
    property and attempt to coerce the compare value
    to the property's value type.

    Note: Python 2 module (uses ``basestring`` below).
    """
    # Comparison functions that are not attributes of the `operator` module.
    COMP_FUNCS = {
        'like': lambda x, y: y in x, # Sub string
        'in': lambda x, y: x in y # mostly for item in list
    }
    def __init__(self, prop, op, value):
        self.property = prop
        self.op = op
        self.value = value
        # __func has the signature f(item_value, compare_value) -> bool.
        self.__func = self.generate_function(self.op)
    def generate_function(self, op):
        """
        Makes the function that tests for equality.
        If it's not in the predefined functions, use the
        'op' as the function

        Any op not in COMP_FUNCS is resolved as an attribute of the
        `operator` module (e.g. 'eq', 'lt', 'gt') at call time.
        """
        return Filter.COMP_FUNCS.get(
            op,
            lambda x, y: getattr(operator, op)(x, y)
        )
    def __call__(self, item):
        """Tests the value of the property against the compare function"""
        item_value = item.get(self.property)
        typed_value = self.value
        # Coerce boolean compare value
        if isinstance(item_value, bool):
            if self.value not in ['False', 'True', False, True]:
                raise TypeError("Wrong type - expected bool for {}".format(self.property))
            typed_value = False if self.value in ['False', False] else True
        # Coerce strings - leave everything else alone
        elif isinstance(self.value, basestring):
            # Cast the string compare value to the item value's type,
            # e.g. "30" -> 30 when the dict holds an int.
            typed_value = type(item_value)(self.value)
        return self.__func(item_value, typed_value)
class FilterSet(object):
    """
    A set of filters to apply to a dictionary.
    Initialized from a dict of:
    {<key>__<op>: <value>}
    ex: { "name__like": "Error", "level__lt": 20}
    to create a set of Filters
    Where op is a function defined in either the operator
    module or by Filter.COMP_FUNCS

    Note: Python 2 module (uses ``dict.iteritems`` below).
    """
    def __init__(self, filter_spec):
        self.__filters = []
        # NOTE(review): never assigned again; presumably meant to cache
        # filter_spec -- confirm before relying on it.
        self.__filter_spec = None
        self.init(filter_spec)
    def init(self, filter_spec):
        # (Re)build the Filter list from the spec.  "key__op" splits into
        # (property, operator); a bare key defaults to the "eq" operator.
        # Keys with more than one "__" are treated as plain property names.
        self.__filters = []
        for key, value in filter_spec.iteritems():
            parts = key.split("__")
            prop_name = key
            _func = "eq"
            if len(parts) == 2:
                _func = parts[1]
                prop_name = parts[0]
            self.__filters.append(Filter(prop_name, _func, value))
    def filter(self, item):
        # Logical AND across all filters; an empty spec accepts everything.
        for check in self.__filters:
            if not check(item):
                return False
        return True
if __name__ == "__main__":
    # Self-demonstration (Python 2: bare `print` statements, and the
    # builtin `filter` returns a list that pprint can display directly).
    from pprint import pprint
    family = [
        dict(age=44, first_name="Joey", last_name="Tallieu III", male=True),
        dict(age=13, first_name="Vincent", last_name="Tallieu", male=True),
        dict(age=34, first_name="Andrea", last_name="Mueller", male=False),
        dict(age=14, first_name="Kirstin", last_name="Mueller", male=False)
    ]
    print
    print "The Tallieu's"
    # Substring match on last_name via the custom 'like' operator.
    filter_set = FilterSet({
        'last_name__like': "Tallieu"
    })
    tallieus = filter(filter_set.filter, family)
    pprint(tallieus)
    print
    print "Girls:"
    # Boolean compare values may be given as the string "False".
    girls = filter(FilterSet({'male': "False"}).filter, family)
    pprint(girls)
    print
    print "Women over 30"
    # Multiple spec entries are ANDed together.
    fil = FilterSet({'age__gt': 30, 'male': False})
    pprint(filter(fil.filter, family))
| [
"[email protected]"
]
| |
60683c2d38937f8deb20ebb916a8f5c41457bf7a | 1a597ec7f4a295e98aa231ad615dc5b03a17ef26 | /Regression/Random_Forest_Regression.py | ae87949dc47ed487ce7af60f70ba40ea46ca0218 | []
| no_license | GrismPatel/Machine_Learning_Python | 9039fdf946e2a24d6194f21b4308c38e381c2ec1 | f6e22600b052cffd00101a01f69127042005ef40 | refs/heads/master | 2021-01-20T15:54:31.055806 | 2018-01-30T01:47:40 | 2018-01-30T01:47:40 | 90,802,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 664 | py | import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Fit a Random Forest regressor on the Position_Salaries data set and plot
# the fitted curve on a fine grid alongside the raw points.
dataset = pd.read_csv('Position_Salaries.csv')
x = dataset.iloc[:, 1:2].values  # position level, kept 2-D for scikit-learn
y = dataset.iloc[:, 2].values    # salary
from sklearn.ensemble import RandomForestRegressor
a = RandomForestRegressor(n_estimators=300, random_state=0)
a.fit(x, y)
# predict() requires a 2-D array of shape (n_samples, n_features); passing
# the bare scalar 6.5 raises a ValueError on scikit-learn >= 0.19.
y_predict = a.predict([[6.5]])
# Dense grid of position levels for a smooth-looking step curve.
x_grid = np.arange(min(x), max(x), 0.01)
x_grid = x_grid.reshape((len(x_grid), 1))
plt.scatter(x, y, color='red')
plt.plot(x_grid, a.predict(x_grid), color='black')
plt.title('Position vs Salaries')
plt.xlabel('Position')
plt.ylabel('Salaries')
plt.show()
# [email protected]
# [email protected] | [
"[email protected]"
]
| |
91cd1d09cb66c6229b75a0e0c63d9b431a60af9b | ad76d4e3dc9b66108e0b0190c62496dbb67db6ab | /protos/raft_pb2_grpc.py | 5b843b74e966ddb7e7704e2b0fd772778910408c | []
| no_license | mgurdal/aioraft | 5a298f4ba81e73b5d8edfbe3e076d5c197dbb42a | 26d60eb2299d1e949c40092361d87da529bac3fb | refs/heads/master | 2023-07-26T09:21:36.331754 | 2020-04-17T12:02:08 | 2020-04-17T12:02:08 | 250,050,432 | 0 | 0 | null | 2023-07-05T21:01:00 | 2020-03-25T17:49:55 | Python | UTF-8 | Python | false | false | 2,252 | py | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
import protos.raft_pb2 as raft__pb2
class RaftServiceStub(object):
  """RaftService implements the raft pipelining receiver.
  """
  # Generated client stub (gRPC Python protoc plugin).  Regenerate from the
  # raft .proto definition rather than hand-editing.
  def __init__(self, channel):
    """Constructor.
    Args:
      channel: A grpc.Channel.
    """
    # Each attribute is a callable unary-unary RPC bound to the channel,
    # serialising requests / deserialising responses via raft_pb2 messages.
    self.AppendEntries = channel.unary_unary(
        '/RaftService/AppendEntries',
        request_serializer=raft__pb2.AppendEntriesRequest.SerializeToString,
        response_deserializer=raft__pb2.AppendEntriesResponse.FromString,
        )
    self.RequestVote = channel.unary_unary(
        '/RaftService/RequestVote',
        request_serializer=raft__pb2.RequestVoteRequest.SerializeToString,
        response_deserializer=raft__pb2.RequestVoteResponse.FromString,
        )
class RaftServiceServicer(object):
  """RaftService implements the raft pipelining receiver.
  """
  # Generated server base class: subclass and override these methods in
  # application code.  Regenerate (do not hand-edit) when the proto changes.
  def AppendEntries(self, request, context):
    """AppendEntries performs a single append entries request / response.
    """
    # Default stub behaviour: report UNIMPLEMENTED to the client and raise.
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
  def RequestVote(self, request, context):
    """RequestVote is the command used by a candidate to ask a Raft peer for a vote in an election.
    """
    # Default stub behaviour: report UNIMPLEMENTED to the client and raise.
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
def add_RaftServiceServicer_to_server(servicer, server):
  # Registers the servicer's handlers with a grpc.Server under the
  # 'RaftService' service name (generated code -- do not hand-edit).
  rpc_method_handlers = {
      'AppendEntries': grpc.unary_unary_rpc_method_handler(
          servicer.AppendEntries,
          request_deserializer=raft__pb2.AppendEntriesRequest.FromString,
          response_serializer=raft__pb2.AppendEntriesResponse.SerializeToString,
      ),
      'RequestVote': grpc.unary_unary_rpc_method_handler(
          servicer.RequestVote,
          request_deserializer=raft__pb2.RequestVoteRequest.FromString,
          response_serializer=raft__pb2.RequestVoteResponse.SerializeToString,
      ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'RaftService', rpc_method_handlers)
  server.add_generic_rpc_handlers((generic_handler,))
| [
"[email protected]"
]
| |
7d5b09b7c1f6d62b3cf5a4410be34cf296b3c832 | d3f559c122f2c0fea41d26a558859ef5ede8799c | /model_7_copy.py | 3c6feab6fff4320d1ebf9455b698d5934d060197 | []
| no_license | yifengyiye/PythonModels | df05c47e2f9085ee5c3a45f18da3b5c976ed8876 | 086212b2ef9f58830816dd8313de39c974bfcb3e | refs/heads/master | 2020-12-11T07:25:49.430579 | 2016-08-16T09:28:22 | 2016-08-16T09:28:22 | 48,640,691 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 189 | py | # coding: utf-8
"""
Task: copy the contents of one list into another list.
Approach: use a full-slice copy, ``a[:]``.
"""
a = [1,3,4,5,67,7,8,5,23,2,24542,2]  # source list
b = a[:]  # shallow copy -- b is a new list holding the same elements
print b | [
"[email protected]"
]
| |
59bc931f4d9c594bffb4ae94dd353787b17253da | 260e5b8cf73e588004aa91465ca27c4640b2fefa | /home/apps.py | e8cb57208a4be72a0b90e48f75f3308efd2b76ea | []
| no_license | pankajganjale/ev_monitoring | 049ce52542d23f0f96d9bcf6830aa4a7dc0d57c4 | fabb0474b13fdcafb9d5c988c4183ea512b82e7e | refs/heads/master | 2023-08-11T02:06:59.051463 | 2021-09-18T21:10:48 | 2021-09-18T21:10:48 | 407,947,158 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 195 | py | from django.apps import AppConfig
class HomeConfig(AppConfig):
    """Django application configuration for the ``home`` app."""
    # Use 64-bit auto-incrementing primary keys for models in this app.
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'home'
    # Human-readable name shown in the Django admin.
    verbose_name = 'Vehicle Monitoring Parameters'
| [
"[email protected]"
]
| |
20f92f2a34229092dedc98eed07107c4c1f9838e | 02ac11f6e39459e3295751b3b0823f1325206603 | /compiler/compiler_benchmark.py | eb895b337c0ae39f46ca3d5a127a3277d076413c | []
| no_license | rum-yasuhiro/experiments_crosstalk_multitasking | c04ece332063daa55e9ceb1fa8e7f031f897cac8 | 387a8c2563058999d704924722ba9e99f44efdad | refs/heads/master | 2023-07-13T00:08:22.354670 | 2021-08-23T06:45:35 | 2021-08-23T06:45:35 | 290,391,781 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,754 | py | from qiskit import IBMQ, Aer
from experiments.compiler.execute import run_experiments
from experiments.error_info_converter import value_to_ratio
from experiments.utils import pickle_dump, pickle_load
class CompilerBenchmark:
    """Runs multi-circuit benchmarking jobs against an IBM Q backend.

    On construction it loads the stored IBMQ account, resolves the named
    hardware backend and the hosted QASM simulator from the Keio hub
    provider, and keeps an optional fake device for offline compilation.
    """
    def __init__(self, backend_name=None, fake_device=None, reservations=False):
        IBMQ.load_account()
        # Two provider projects exist under the same hub/group; the
        # 'reservations' project is used for reserved backend time.
        if reservations:
            provider = IBMQ.get_provider(
                hub="ibm-q-keio", group="keio-internal", project="reservations"
            )
        else:
            provider = IBMQ.get_provider(
                hub="ibm-q-keio", group="keio-internal", project="keio-students"
            )
        self.backend = provider.get_backend(backend_name)
        self.simulator = provider.get_backend("ibmq_qasm_simulator")
        self.fake_device = fake_device
    def run(
        self,
        multi_circuit_components,
        path_to_save_jofile,
        xtalk_prop=None,
        xtalk_file_path=None,
    ):
        """
        Args:
            multi_circuit_components : benchmarking circuits
            path_to_save_jofile : path handed through to run_experiments
            xtalk_prop : in-memory crosstalk property (takes precedence)
            xtalk_file_path : pickle file to load the crosstalk property from
        Raises:
            XtalkNotDefinedError: if neither xtalk_prop nor xtalk_file_path
                is supplied.
        """
        # crosstalk prop
        if xtalk_prop is None and xtalk_file_path is None:
            raise XtalkNotDefinedError("Xtalk property is not defined!")
        if xtalk_prop:
            crosstalk_prop = xtalk_prop
        else:
            crosstalk_prop = pickle_load(xtalk_file_path)
        circ = run_experiments(
            path_to_save_jofile,
            multi_circuit_components=multi_circuit_components,
            backend=self.backend,
            simulator=self.simulator,
            crosstalk_prop=crosstalk_prop,
            shots=8192,
            fake_device=self.fake_device,
        )
        return circ
class XtalkNotDefinedError(Exception):
    """Raised when neither xtalk_prop nor xtalk_file_path is supplied to run()."""
    pass
| [
"[email protected]"
]
| |
f5b78e2ea8e7564fb714ce45ca0de4086e5ae6c9 | 95ce1fc4a3432230603ddbdfad73346d270a47b1 | /command_manager.py | ec80dbcbdf7e595ef31c4fb848e370deb6e3a262 | []
| no_license | mfontanini/shaplim-gui | e45620594a8139e95625629b0f1084a7fab73798 | d1663b3d8dcad4dbb860021cc09611b5c1255ece | refs/heads/master | 2021-01-19T13:00:02.808429 | 2014-05-31T13:57:03 | 2014-05-31T13:57:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,595 | py | # This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import threading
import copy
import Queue
import gobject
class Command:
    """Value object describing one deferred call for the command queue.

    Holds the callable, its positional parameters, whether the current
    timestamp must be appended before invocation, and an optional callback
    to run with the result.
    """
    def __init__(self, function, params=None, requires_timestamp=False, callback=None):
        # Normalise None to a fresh list so instances never share state.
        if params is None:
            params = []
        self.function = function
        self.params = params
        self.requires_timestamp = requires_timestamp
        self.callback = callback
class CommandManager:
    """Background worker that serialises API commands onto one thread.

    Python 2 code (``Queue`` module, builtin ``apply``) for a PyGTK app:
    results and new-event notifications are marshalled back to the GUI
    thread with ``gobject.idle_add``.
    """
    def __init__(self, new_events_callback, api):
        self.queue = Queue.Queue()
        self.callback = new_events_callback
        self.api = api
        self.running = True
    def run(self, last_timestamp):
        # Start the worker thread; last_timestamp seeds the event polling.
        self.last_timestamp = last_timestamp
        self.thread = threading.Thread(target=self.run_loop)
        self.thread.start()
    def add(self, cmd):
        # Enqueue a Command for execution on the worker thread.
        self.queue.put(cmd)
    def stop(self):
        # Signal shutdown; the sentinel None wakes the worker promptly.
        self.running = False
        self.add(None)
        self.thread.join()
    def reload_state(self):
        # A None sentinel makes the worker re-poll events immediately.
        self.queue.put(None)
    def get_new_events(self):
        # Poll the API for events newer than last_timestamp and hand them
        # to the GUI callback on the main loop.
        events_data = self.api.new_events(self.last_timestamp)
        self.last_timestamp = events_data["timestamp"]
        gobject.idle_add(self.callback, events_data["events"])
    def run_loop(self):
        # Worker loop: execute queued commands; on queue timeout (0.2s)
        # either exit (if stopped) or poll for new events.
        while True:
            try:
                cmd = self.queue.get(timeout=0.2)
                if cmd is None:
                    self.get_new_events()
                else:
                    params = cmd.params
                    # NOTE(review): this appends to cmd.params in place, so a
                    # re-used Command accumulates timestamps -- confirm intent.
                    if cmd.requires_timestamp:
                        params.append(self.last_timestamp)
                    result = apply(cmd.function, params)
                    if cmd.callback:
                        gobject.idle_add(cmd.callback, result)
                    self.queue.task_done()
            except Queue.Empty as ex:
                if not self.running:
                    return
                else:
                    self.get_new_events()
| [
"[email protected]"
]
| |
cf1a7a73139f8b463f88a8519060ed3c679bf328 | 8a320a21c9e672cdc9ae778e62435a6d8531baaf | /lib/core.py | 820d5d2ac54d693906aeb9184c5de2dc3dff0710 | [
"MIT"
]
| permissive | stuz32/aws-waf | d90f3ebeb27936e6b9c7f92c86a498d054f6c1d0 | 5628b6c44216f562174864a22df191759bbde24a | refs/heads/master | 2021-01-18T12:15:37.978816 | 2016-07-06T15:18:45 | 2016-07-06T15:18:45 | 62,712,889 | 0 | 0 | null | 2016-07-06T10:31:32 | 2016-07-06T10:31:32 | null | UTF-8 | Python | false | false | 11,416 | py | # standard libraries
import argparse
import os
import urllib2
# 3rd party libraries
import boto3
import boto3.session
# project libraries
import lib.deepsecurity as deepsecurity
def get_arg_parser(prog='ds-to-aws-waf.py', description=None, add_help=False):
    """
    Create a standardized argument parser shared by the ds-to-aws-waf scripts.

    Three groups of options are registered: Deep Security connection
    settings, AWS credential/region settings, and general behaviour flags.
    """
    if not description:
        description = """
    Create and update AWS WAF WACL rules based on information from a Deep Security installation
    """
    parser = argparse.ArgumentParser(prog=prog, description=description, add_help=add_help)
    # Deep Security arguments
    parser.add_argument(
        '-d', '--dsm',
        action='store',
        default='app.deepsecurity.trendmicro.com',
        required=False,
        help='The address of the Deep Security Manager. Defaults to Deep Security as a Service')
    parser.add_argument(
        '--dsm-port',
        action='store',
        default='4119',
        dest='dsm_port',
        required=False,
        help='The address of the Deep Security Manager. Defaults to an AWS Marketplace/software install (:4119). Automatically configured for Deep Security as a Service')
    parser.add_argument(
        '-u', '--dsm-username',
        action='store',
        dest='dsm_username',
        required=True,
        help='The Deep Security username to access the IP Lists with. Should only have read-only rights to IP lists and API access')
    parser.add_argument(
        '-p', '--dsm-password',
        action='store',
        dest='dsm_password',
        required=True,
        help='The password for the specified Deep Security username. Should only have read-only rights to IP lists and API access')
    parser.add_argument(
        '-t', '--dsm-tenant',
        action='store',
        dest='dsm_tenant',
        required=False,
        default=None,
        help='The name of the Deep Security tenant/account')
    # AWS arguments
    parser.add_argument(
        '-a', '--aws-access-key',
        action='store',
        dest='aws_access_key',
        required=False,
        help='The access key for an IAM identity in the AWS account to connect to')
    parser.add_argument(
        '-s', '--aws-secret-key',
        action='store',
        dest='aws_secret_key',
        required=False,
        help='The secret key for an IAM identity in the AWS account to connect to')
    parser.add_argument(
        '-r', '--aws-region',
        action='store',
        dest='aws_region',
        required=False,
        default='us-east-1',
        help='The name of AWS region to connect to')
    # general structure arguments
    parser.add_argument(
        '--ignore-ssl-validation',
        action='store_true',
        dest='ignore_ssl_validation',
        required=False,
        help='Ignore SSL certification validation. Be careful when you use this as it disables a recommended security check. Required for Deep Security Managers using a self-signed SSL certificate')
    parser.add_argument(
        '--dryrun',
        action='store_true',
        required=False,
        help='Do a dry run of the command. This will not make any changes to your AWS WAF service')
    parser.add_argument(
        '--verbose',
        action='store_true',
        required=False,
        help='Enabled verbose output for the script. Useful for debugging')
    return parser
class StoreNameValuePairOnEquals(argparse.Action):
    """
    argparse action that stores repeated "name=value" tokens as a dict.

    "name=value" becomes {name: value}; a bare "name" becomes {name: ''}.
    Repeated occurrences of the option merge into the dict already stored
    on the namespace (later values win).
    """
    def __init__(self, option_strings, dest, nargs=None, const=None, default=None, type=None, choices=None, required=False, help=None, metavar=None):
        self.dest = dest
        argparse.Action.__init__(self, option_strings, dest, nargs=nargs, const=const, default=default, type=type, choices=choices, required=required, help=help, metavar=metavar)
    # cribbed from http://stackoverflow.com/questions/5154716/using-argparse-to-parse-arguments-of-form-arg-val
    # response by @chepner (http://stackoverflow.com/users/1126841/chepner)
    def __call__(self, parser, namespace, values, option_string=None):
        # argparse invokes actions as __call__(parser, namespace, values,
        # option_string).  The previous signature declared a spurious
        # positional `dest` parameter, which silently swallowed the option
        # string; fixed to the documented signature.
        pairs = {}
        for val in values:
            if '=' in val:
                # Split only on the first '=' so values may contain '='
                # themselves (previously "a=b=c" raised ValueError).
                name, value = val.split('=', 1)
                pairs[name] = value  # matches key:pair
            else:
                # BUG FIX: this previously did `pairs[v] = ''`, referencing
                # the stale (or undefined) loop variable from the '=' branch
                # instead of the current token.
                pairs[val] = ''  # matches bare key
        # Prefer the explicit dest; fall back to the option string
        # (e.g. "--tag" -> "tag") as the original intended.
        attr_key = self.dest if self.dest else (option_string.strip('-') if option_string else "")
        current_pairs = getattr(namespace, attr_key, None)
        if current_pairs is not None:
            # Merge with pairs stored by an earlier occurrence of the option.
            new_pairs = current_pairs.copy()
            new_pairs.update(pairs)
            setattr(namespace, attr_key, new_pairs)
        else:
            setattr(namespace, attr_key, pairs)
class ScriptContext():
"""
Context for a command line script.
Using an object makes is easy to avoid any globals and clarifies
the intention of the script
"""
def __init__(self, args, parser):
self.parser = parser
self._passed_args = args
self.args = parser.parse_args(self._passed_args)
self.dsm = None
def __del__(self): self.clean_up() # clean up on object destruction
  def clean_up(self):
    """
    Gracefully dispose of the script's context

    Best effort: if a Deep Security Manager session was opened, try to
    sign out of it; any failure during sign-out is deliberately swallowed
    (Python 2 ``except Exception, err`` syntax).
    """
    if 'dsm' in dir(self) and self.dsm:
      try:
        self.dsm.finish_session()
      except Exception, err: pass
def update_user(self, message):
"""
Update the update
"""
print(message)
def _log(self, msg, err=None, priority=False):
"""
Create a log entry for the specified event
"""
# @TODO add actual logging :-)
if priority or self.args.verbose or err:
if err:
print("{}. Threw an exception:\n{}".format(msg, err))
else:
print(msg)
def print_help(self):
"""
Print the command line syntax available to the user
"""
self.parser.print_help()
def _get_aws_credentials(self):
"""
Get a set of AWS credentials from a pre-configured AWS CLI installation
"""
credentials = None
# were credentials directly passed?
if (self.args.aws_access_key and not self.args.aws_secret_key) or (self.args.aws_secret_key and not self.args.aws_access_key):
self._log("When specifying AWS credentials via command line arguments both an access key and a secret key are required", priority=True)
elif self.args.aws_access_key and self.args.aws_secret_key:
self._log("Using AWS credentials specified via command line arguments")
credentials = {
'aws_access_key_id': self.args.aws_access_key,
'aws_secret_access_key': self.args.aws_secret_key,
}
else:
# check locally for an AWS CLI installation
aws_credentials_path = [ '{}/.aws/credentials'.format(os.environ['HOME']), "{}\.aws\credentials".format(os.environ['HOME']) ]
for path in aws_credentials_path:
if os.path.exists(path) and not credentials:
self._log("Reading AWS credentials from {}".format(path))
with open(path) as fh:
for line in fh:
if line.startswith('aws_access_key_id'):
credentials = { 'aws_access_key_id': line.split('=')[-1].strip() }
elif line.startswith('aws_secret_access_key'):
credentials['aws_secret_access_key'] = line.split('=')[-1].strip()
return credentials
def _get_aws_region_from_config(self):
"""
Get the default region from a pre-configured AWS CLI installation
"""
region = None
aws_config_path = [ '{}/.aws/config'.format(os.environ['HOME']), "{}\.aws\config".format(os.environ['HOME']) ]
for path in aws_config_path:
if os.path.exists(path):
self._log("Reading AWS config from {}".format(path))
with open(path) as fh:
for line in fh:
if line.startswith('region'):
region = line.split('=')[-1].strip()
return region
def _connect_to_deep_security(self):
dsm = None
if self.args.ignore_ssl_validation:
self._log("""************************************************************************
* IGNORING SSL CERTIFICATE VALIDATION
* ===================================
* You have requested to ignore SSL certificate validation. This is a
* less secure method of connecting to a Deep Security Manager (DSM).
* Please ensure that you have other mitigations and security controls
* in place (like restricting IP space that can access the DSM,
* implementing least privilege for the Deep Security user/role
* accessing the API, etc).
*
* During script execution, you'll see a number of
* "InsecureRequestWarning" messages. These are to be expected when
* operating without validation.
************************************************************************""", priority=True)
try:
dsm_port = self.args.dsm_port if not self.args.dsm == 'app.deepsecurity.trendmicro.com' else 443
self._log("Attempting to connect to Deep Security at {}:{}".format(self.args.dsm, dsm_port))
dsm = deepsecurity.dsm.Manager(hostname=self.args.dsm, port=dsm_port, username=self.args.dsm_username, password=self.args.dsm_password, tenant=self.args.dsm_tenant, ignore_ssl_validation=self.args.ignore_ssl_validation)
dsm.sign_in()
except Exception, err:
self._log("Could not connect to the Deep Security", err=err)
if not dsm._sessions['REST'] and not dsm._sessions['SOAP']:
self._log("Unable to connect to the Deep Security Manager. Please check your settings")
if not self.args.ignore_ssl_validation:
self._log("You did not ask to ignore SSL certification validation. This is a common error when connect to a Deep Security Manager that was installed via software or the AWS Marketplace. Please set the flag (--ignore-ssl-validation), check your other settings, and try again")
else:
self._log("Connected to the Deep Security Manager at {}".format(self.args.dsm))
return dsm
def _connect_to_aws_service(self, service_name):
"""
Connect to the specified AWS service via explicit credentials
(shared by the AWS CLI) or an instance role
"""
service = None
region = self.args.aws_region # prefer explicit region vs. CLI config
if not region: region = self._get_aws_region_from_config()
try:
aws = boto3.session.Session(aws_access_key_id=self.aws_credentials['aws_access_key_id'], aws_secret_access_key=self.aws_credentials['aws_secret_access_key'], region_name=region)
service = aws.client(service_name)
self._log("Connected to AWS {}".format(service_name))
except Exception, err:
self._log("Could not connect to AWS {} using local CLI credentials".format(service_name), err=err)
try:
service = boto3.client(service_name)
self._log("Connected to AWS {}".format(service_name))
except Exception, err:
self._log("Could not connect to AWS {} using an instance role".format(service_name), err=err)
return service
def _connect_to_aws_waf(self): return self._connect_to_aws_service('waf')
def _connect_to_aws_ec2(self): return self._connect_to_aws_service('ec2')
def _connect_to_aws_elb(self): return self._connect_to_aws_service('elb')
def _connect_to_aws_cloudfront(self): return self._connect_to_aws_service('cloudfront')
def get_available_aws_sets(self):
"""
Get a list of the available IP Sets in AWS WAF
"""
ip_sets = []
if self.waf:
response = self.waf.list_ip_sets(Limit=100)
if response and response.has_key('IPSets'):
for ip_set in response['IPSets']:
ip_sets.append(ip_set)
self.ip_sets = ip_sets
return ip_sets
def _get_aws_waf_change_token(self):
"""
Get a new AWS WAF change token (required for any changes)
"""
response = self.waf.get_change_token()
change_token = None
if response and response.has_key('ChangeToken'):
change_token = response['ChangeToken']
self._log("New AWS WAF change token [{}]".format(change_token))
return change_token | [
"[email protected]"
]
| |
2dc557642b309a0f561939be02ff6f63ec04e811 | b591df88857448bba648e5b714bcf24dcccfd4c2 | /fauxcaml/tests/test_type_duplication.py | 6cb05495cd033d9baec74a437198830cbac6e34a | [
"MIT"
]
| permissive | eignnx/fauxcaml | 09192cd5944e3d3cfb596f40b2d977679224800c | 082625f5803d6f676c0d63b6ce45b03a6069d720 | refs/heads/master | 2021-06-10T19:53:43.026369 | 2021-04-20T18:25:56 | 2021-04-20T18:25:56 | 170,390,725 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,320 | py | import typing
from fauxcaml.semantics import check, typ
def test_generic_var_duplication():
checker = check.Checker()
T = checker.fresh_var()
assert checker.duplicate_type(T) != T
def test_non_generic_var_duplication():
checker = check.Checker()
T = checker.fresh_var(non_generic=True)
assert checker.duplicate_type(T) == T
def test_simple_compound_type_duplication():
checker = check.Checker()
fn = typ.Fn(typ.Int, typ.Bool)
assert checker.duplicate_type(fn) == fn
def test_compound_type_with_type_vars():
checker = check.Checker()
generic = checker.fresh_var()
non_generic = checker.fresh_var(non_generic=True)
tup = typ.Tuple(non_generic, non_generic, generic, generic)
actual = checker.duplicate_type(tup)
assert type(actual) is typ.Tuple
actual = typing.cast(typ.Tuple, actual)
a, b, c, d = actual.vals
assert a == non_generic
assert b == non_generic
assert c != non_generic
assert c != generic
assert d != non_generic
assert d != generic
def test_deep_type_duplication():
checker = check.Checker()
G1 = checker.fresh_var()
G2 = checker.fresh_var()
N1 = checker.fresh_var(non_generic=True)
N2 = checker.fresh_var(non_generic=True)
orig = typ.Tuple(G1, N1, N1, N2, typ.Tuple(G1, N1, N2, G2, typ.Int), typ.Int)
duplicated = checker.duplicate_type(orig)
assert type(duplicated) is typ.Tuple
duplicated = typing.cast(typ.Tuple, duplicated)
g1_outer, n1_outer_1, n1_outer_2, n2_outer, tup, i_outer = duplicated.vals
assert g1_outer != G1
assert n1_outer_1 == N1 == n1_outer_2
assert n2_outer == N2
assert i_outer == typ.Int
assert type(tup) is typ.Tuple
g1_inner, n1_inner, n2_inner, g2_inner, i_inner = tup.vals
assert g1_inner == g1_outer
assert n1_inner == n1_outer_1 == N1
assert n2_inner == n2_outer == N2
assert g2_inner != G2
assert i_inner == typ.Int
def test_making_compound_type_non_generic():
checker = check.Checker()
T, U = checker.fresh_var(), checker.fresh_var()
assert checker.is_generic(T)
assert checker.is_generic(U)
tup = typ.Tuple(T, typ.Int, typ.Tuple(U))
checker.unifiers.make_non_generic(tup)
assert checker.is_non_generic(T)
assert checker.is_non_generic(U)
| [
"[email protected]"
]
| |
356dd57758e9dcb98c6585c8808c5cad720dc373 | 403d8ad07b39b785397c846ab7bb72a8edae9c3f | /Address.py | 68648c25c2d2a80dbab121c1a5c0325c75d48f40 | []
| no_license | AoiKujira/CNproject | cf9019c2b0eb2ebda180eb13830df9be80732ad2 | 3623c6bf89f803128cc17ec0d5a945f8580a90a7 | refs/heads/master | 2023-06-20T15:24:16.267782 | 2021-07-20T03:38:58 | 2021-07-20T03:38:58 | 386,186,470 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 158 | py | class Address:
    def __init__(self, host: str, port: int, identifier: int):
        """Bundle a network endpoint (host, port) with a peer identifier."""
        self.host = host
        self.port = port
        # Stored under the attribute name `id` (shadows nothing here, but
        # note it is the peer identifier, not Python's builtin id()).
        self.id = identifier
| [
"[email protected]"
]
| |
0e235e6cd34ad3ee7438817040fdec91048ac6ba | 7ea6c77b6135368438647a591ac975bc292cc6d1 | /benchmark/helper.py | b6fd85951fb1ba6ea2c8bee050fa8779ee39392d | []
| no_license | bunjj/Advanced-Systems-Lab | a8c3f7135fd9932c3842b956c3cb5c30f113db41 | 322897a6c52b9882444facf703811b1ed994171a | refs/heads/master | 2023-08-16T11:40:18.112675 | 2021-10-01T14:23:49 | 2021-10-01T14:23:49 | 412,495,195 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,360 | py | def vector(vec = [0,0,0]):
v = {}
v["x"] = vec[0]
v["y"] = vec[1]
v["z"] = vec[2]
return v
def camera(fov= 30, position = [0,0,0], rotation=[0,0,0]):
cam = {}
cam["fov"] = fov
cam["position"] = vector(position)
cam["rotation"] = vector(rotation)
return cam
def pointlight(position = [0,100,0], emission = [200,200,200]):
light = {}
light["position"] = vector(position)
light["emission"] = vector(emission)
return light
#returns parameters which all objects have in common
def object(kind, position =[0,0,0], rotation = [0,0,0], reflection = 0, shininess = 15, color =[1,1,1]):
o = {}
o["kind"] = kind
o["position"] = vector(position)
o["rotation"] = vector(rotation)
o["reflection"] = reflection
o["shininess"] = shininess
o["color"] = vector(color)
return o
#the following functions return the individual params for a kind of objects
def plane(normal =[0,1,0], displacement = -3):
p = {}
p["normal"] = vector(normal)
p["displacement"] = displacement
return p
def box(extents = [0.25, 0.5, 1]):
return {"extents" : vector(extents)}
def sphere(radius=3):
    """Sphere parameters: its radius."""
    return {"radius": radius}


def cone(params=None):
    """Cone parameters: the raw parameter triple (default [1, 0.5, 1]).

    The original signature was `params=[1, 0.5, 1]` and returned that
    default list object itself, so a caller mutating the result corrupted
    every later default call; a fresh list is now built per call.
    """
    return [1, 0.5, 1] if params is None else params


def torus(r1=1, r2=0.5):
    """Torus parameters: its two radii r1 and r2."""
    return {"r1": r1, "r2": r2}


def octahedron(s=1):
    """Octahedron parameters: its size parameter s."""
    return {"s": s}
| [
"[email protected]"
]
| |
eab651e6fb48a6e0b2d57a0c83266c24ae522a42 | aaca13b7e6640fe190eae8686193326afcb425df | /Game/Pieces/King.py | 09b48b3a5fc88a21e2af593e9c0daa13c9248b09 | [
"MIT"
]
| permissive | AhmedAlzubairi1/Chess | c497811b689d15095f087fffb8595fc218637ec1 | c56c2549bf13538d89b001357f658ae04e5f3f8d | refs/heads/master | 2023-02-07T06:05:52.796482 | 2020-12-23T04:24:37 | 2020-12-23T04:24:37 | 294,804,119 | 2 | 2 | MIT | 2020-12-04T02:57:06 | 2020-09-11T20:17:22 | Python | UTF-8 | Python | false | false | 1,829 | py | from .piece import Piece
class King(Piece):
    """The king chess piece: it moves one square in any of the 8 directions."""
    def __init__(self, board, color, group, row, col, game):
        """Create a king piece.

        :param board: A 2d list representing a board state
        :type board: [list]
        :param color: A string that is either 'BLACK' or 'WHITE'
        :type color: str
        :param group: A set representing all the pieces of the player
        :type group: set
        :param row: a number from 1 to 8 representing the row
        :type row: int
        :param col: a letter from 'A' to 'H' representing the col of the piece
        :type col: str
        """
        super().__init__(board, color, group, "King", row, col, game)
    def availableMoves(self):
        """Return the set of squares the king can move to.

        :return: a set of (row, col) tuples in 0-based board indices
        :rtype: {(int, int)}
        """
        moves = set()
        tempRow = self.row - 1  # convert the 1-8 row into a 0-7 board index
        tempCol = self.possibleCol[self.col]  # letter column -> 0-7 index (mapping presumably defined on Piece -- confirm)
        # The king steps exactly one square in any of the eight directions.
        direction = [(1, 0), (-1, 0), (0, -1), (0, 1),
                     (1, 1), (1, -1), (-1, 1), (-1, -1)]
        for (i, k) in direction:
            # Keep the target on the board, and accept it if the square is
            # empty or validMove() allows it (defined on Piece -- confirm).
            if 0 <= tempRow + i <= 7 and 0 <= tempCol + \
                    k <= 7 and (self.board[tempRow + i][tempCol + k] is None or self.validMove(tempRow + i, tempCol + k)):
                moves.add((tempRow + i, tempCol + k))
        return moves
    def possibleCapturesCheck(self):
        """Return the king's capture squares used by check detection.

        Always the empty set here -- presumably the check computation
        deliberately excludes the king's own captures; confirm in callers.

        :rtype: set
        """
        return set()
| [
"[email protected]"
]
| |
9ae009652986c6b459f9d867a41a6f768070ebda | e28ce5cca66c56ee7446a46e18375430d0d404eb | /toys/12_go/gopy/go/location.py | 3b4f56c0d82834ba26f9afa924ca8d0bbcdfb3a8 | [
"MIT"
]
| permissive | git4robot/PyKids | 4fb60c5b107527336d9e686a98988ba7a8354f31 | 866e45e13171322ad1892d604508cfee9f8086c8 | refs/heads/master | 2020-04-17T20:45:26.741363 | 2020-04-14T02:21:55 | 2020-04-14T02:21:55 | 166,919,523 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 665 | py | from go.utils import bold
class LocationError(Exception):
    """Raised when a Location is constructed with an unknown type string."""
    pass
class Location(object):
    """State of a board intersection: 'black', 'white', or 'empty'.

    Value object: equality and hashing are based solely on the type
    string; str() yields the display glyph, repr() the title-cased name.
    """
    TYPES = {
        'black': bold('*'),
        'white': bold('o'),
        'empty': '.',
    }

    def __init__(self, type):
        if type not in self.TYPES:
            raise LocationError('Type must be one of the following: {0}'.format(
                self.TYPES.keys(),
            ))
        self._type = type

    def __eq__(self, other):
        # Comparing against a non-Location previously raised AttributeError;
        # returning NotImplemented lets Python fall back to its default
        # (unequal) handling and keeps == usable in mixed collections.
        if not isinstance(other, Location):
            return NotImplemented
        return self._type == other._type

    def __hash__(self):
        return hash(self._type)

    def __str__(self):
        return self.TYPES[self._type]

    def __repr__(self):
        return self._type.title()
| [
"[email protected]"
]
| |
eed3459302eac577610e47a166c838188d062b0d | 005a061a37e90b1ae644461fffcaef86a5072314 | /class/Collaborateur.py | b5714b3c703a347637239732fbedbd9d32634f25 | []
| no_license | BadzeGit/pythonDEV | 26b79675bfb292911d594114688ddcc1cabcd94c | 2a911e2622d8d8b73866c028738883ebbfe2f6e7 | refs/heads/master | 2020-03-21T22:08:20.418923 | 2018-07-02T05:56:25 | 2018-07-02T05:56:25 | 138,686,115 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,724 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from Entreprise import *
class Collaborateur(Entreprise):
    """A collaborator (contact person) belonging to an Entreprise.

    Inherits the company-level fields (raisonSociale, informationEntrep)
    from Entreprise and adds personal contact details, all exposed through
    read/write properties backed by name-mangled private attributes.
    """
    def __init__(self, nom, prenom, poste, mail, telephone, informationCollab, raisonSociale, informationEntrep):
        # Initialize the parent class Entreprise (company-level fields)
        Entreprise.__init__(self, raisonSociale, informationEntrep)
        self.__nom = nom
        self.__prenom = prenom
        self.__poste = poste
        self.__mail = mail
        self.__telephone = telephone
        self.__informationCollab = informationCollab
    # Property accessors (getter/setter pairs) for each private field
    ####################################
    @property
    def nom(self):
        return self.__nom
    @nom.setter
    def nom(self, v):
        self.__nom = v
    @property
    def prenom(self):
        return self.__prenom
    @prenom.setter
    def prenom(self, v):
        self.__prenom = v
    @property
    def poste(self):
        return self.__poste
    @poste.setter
    def poste(self, v):
        self.__poste = v
    @property
    def mail(self):
        return self.__mail
    @mail.setter
    def mail(self, v):
        self.__mail = v
    @property
    def telephone(self):
        return self.__telephone
    @telephone.setter
    def telephone(self, v):
        self.__telephone = v
    @property
    def informationCollab(self):
        return self.__informationCollab
    @informationCollab.setter
    def informationCollab(self, v):
        self.__informationCollab = v
    #####################################################
    def voirCollaborateur(self):
        """Print a human-readable (French) summary of this collaborator."""
        print("Nom : {}\nPrénom : {}\nPoste : {}\nMail : {}\nTéléphone : {}\nInformations Collaborateur : {}\nRaison Sociale : {}\nInformation Entreprise : {}".format(self.nom, self.prenom, self.poste, self.mail, self.telephone, self.informationCollab, self.raisonSociale, self.informationEntrep))
| [
"[email protected]"
]
| |
40df1841df3d2850f8c6be1e8b440414dad3967a | 5f439ed55b85615d9ded137b0b9f76d0ddfa7190 | /project6/s1.py | 8c413a6615259fc86cf0ba494cd77d60c3ec8838 | []
| no_license | OleksandrBieliakov/miw | 48e8c253e927cff2910d00c0837007cb50d2869a | a180d7341e7e41a563e22342098f492c939e0e94 | refs/heads/main | 2023-05-14T01:34:58.298264 | 2021-06-06T19:01:20 | 2021-06-06T19:01:20 | 347,735,555 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,187 | py | from keras import layers
from keras import models
model = models.Sequential()
model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(28, 28, 1)))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.Flatten())
model.add(layers.Dense(64, activation='relu'))
model.add(layers.Dense(10, activation='softmax'))
model.summary()
from keras.datasets import mnist
from keras.utils import to_categorical
(train_images, train_labels), (test_images, test_labels) = mnist.load_data()
train_images = train_images.reshape((60000, 28, 28, 1))
train_images = train_images.astype('float32') / 255
test_images = test_images.reshape((10000, 28, 28, 1))
test_images = test_images.astype('float32') / 255
train_labels = to_categorical(train_labels)
test_labels = to_categorical(test_labels)
model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])
model.fit(train_images, train_labels, epochs=5, batch_size=64)
test_loss, test_acc = model.evaluate(test_images, test_labels)
print('test_acc = ', test_acc)
| [
"[email protected]"
]
| |
e782b2c1446761dd990e12e9c4a8b49355234b7d | 4d2a2f58640d62c32515fc6c345c28e2b3c47b14 | /app/server.py | 7071ad0723596265f5c2021cfa16031b17c57e08 | []
| no_license | teamhide/fastapi-boilerplate | 3ae3912e6704d06d7a1188588b1186ee02ac85a4 | 21ecdc5267dfdbe52720834b84b74c12a67997e8 | refs/heads/master | 2023-05-02T20:54:41.676027 | 2022-12-21T11:31:29 | 2022-12-21T11:31:29 | 248,507,413 | 686 | 123 | null | 2023-04-24T09:21:46 | 2020-03-19T13:15:06 | Python | UTF-8 | Python | false | false | 2,506 | py | from typing import List
from fastapi import FastAPI, Request, Depends
from fastapi.middleware import Middleware
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
from api import router
from api.home.home import home_router
from core.config import config
from core.exceptions import CustomException
from core.fastapi.dependencies import Logging
from core.fastapi.middlewares import (
AuthenticationMiddleware,
AuthBackend,
SQLAlchemyMiddleware,
ResponseLogMiddleware,
)
from core.helpers.cache import Cache, RedisBackend, CustomKeyMaker
def init_routers(app_: FastAPI) -> None:
    """Attach the application's routers (home + API) to the FastAPI app."""
    app_.include_router(home_router)
    app_.include_router(router)
def init_listeners(app_: FastAPI) -> None:
    """Register application-level exception handlers on the app."""
    # Exception handler: CustomException carries its own HTTP status code
    # and application error code.
    @app_.exception_handler(CustomException)
    async def custom_exception_handler(request: Request, exc: CustomException):
        return JSONResponse(
            status_code=exc.code,
            content={"error_code": exc.error_code, "message": exc.message},
        )


def on_auth_error(request: Request, exc: Exception):
    """Convert authentication-middleware failures into a JSON error response.

    BUG FIX: this function was previously nested inside init_listeners(),
    so the module-level reference from make_middleware() raised NameError
    when create_app() ran; it must be defined at module level.
    Defaults to 401 unless the exception is a CustomException, which
    supplies its own status/error codes.
    """
    status_code, error_code, message = 401, None, str(exc)
    if isinstance(exc, CustomException):
        status_code = int(exc.code)
        error_code = exc.error_code
        message = exc.message

    return JSONResponse(
        status_code=status_code,
        content={"error_code": error_code, "message": message},
    )


def make_middleware() -> List[Middleware]:
    """Build the middleware stack: CORS, authentication, DB session, response logging."""
    middleware = [
        Middleware(
            CORSMiddleware,
            allow_origins=["*"],
            allow_credentials=True,
            allow_methods=["*"],
            allow_headers=["*"],
        ),
        Middleware(
            AuthenticationMiddleware,
            backend=AuthBackend(),
            on_error=on_auth_error,
        ),
        Middleware(SQLAlchemyMiddleware),
        Middleware(ResponseLogMiddleware),
    ]
    return middleware
def init_cache() -> None:
    """Initialize the global cache with a Redis backend and custom key maker."""
    Cache.init(backend=RedisBackend(), key_maker=CustomKeyMaker())
def create_app() -> FastAPI:
    """Build and fully configure the FastAPI application instance.

    API docs (/docs, /redoc) are disabled when config.ENV == "production".
    """
    app_ = FastAPI(
        title="Hide",
        description="Hide API",
        version="1.0.0",
        docs_url=None if config.ENV == "production" else "/docs",
        redoc_url=None if config.ENV == "production" else "/redoc",
        dependencies=[Depends(Logging)],
        middleware=make_middleware(),
    )
    init_routers(app_=app_)
    init_listeners(app_=app_)
    init_cache()
    return app_
app = create_app()
| [
"[email protected]"
]
| |
922d5e1fc8f30798c9af755861ec09cd9b964b77 | 14cc1ab99bc426bd7eff8bd4bea86f7ece176978 | /problems/longest_common_prefix_014/solution_2.py | b65080c14de621f90f034b158a388d21d63cdfea | []
| no_license | x1He/leetcode_practice | 95fc450dd1cff614866811fd0c3ce723241a19c8 | 40d541255196fcfe79ada00d08a91e416138f9b0 | refs/heads/master | 2020-04-09T06:39:51.456091 | 2019-07-15T00:48:36 | 2019-07-15T00:48:36 | 160,121,967 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 383 | py | class Solution:
def longestCommonPrefix(self, strs):
"""
:type strs: List[str]
:rtype: str
"""
if not strs:
return ""
else:
count = 0
for i in zip(*strs):
if len(set(i)) > 1:
return strs[0][:count]
count += 1
return strs[0][:count] | [
"[email protected]"
]
| |
d4003466ce2310a0e55df241b52060605e731742 | 95707c6481c8aa7be8326bd763132ab7d67c7229 | /solution/producers/models/producer.py | 5a74c19bff4c45dc5e8db26e3c8b970d82f54c8e | []
| no_license | yogesh17/Optimizing-Public-Transportation | 8a08e0005d7b107e66e5c3e7d32efb024ef7e81d | eeb4db4a9b7271d297e002222956accfa44fa593 | refs/heads/master | 2023-08-21T14:30:47.432196 | 2020-08-02T13:53:23 | 2020-08-02T13:53:23 | 284,468,813 | 0 | 0 | null | 2023-08-14T22:10:25 | 2020-08-02T13:38:48 | Python | UTF-8 | Python | false | false | 3,907 | py | """Producer base-class providing common utilites and functionality"""
import logging
import time
from confluent_kafka import avro
from confluent_kafka.admin import AdminClient, NewTopic
from confluent_kafka.avro import AvroProducer
logger = logging.getLogger(__name__)
SCHEMA_REGISTRY_URL = "http://localhost:8081"
BROKER_URL = "PLAINTEXT://localhost:9092"
class Producer:
    """Defines and provides common functionality amongst Producers."""

    # Tracks existing topics across all Producer instances
    existing_topics = set([])

    def __init__(
        self,
        topic_name,
        key_schema,
        value_schema=None,
        num_partitions=1,
        num_replicas=1,
    ):
        """Initializes a Producer object with basic settings.

        Creates the Kafka topic on first use of topic_name and builds the
        underlying AvroProducer.
        """
        self.topic_name = topic_name
        self.key_schema = key_schema
        self.value_schema = value_schema
        self.num_partitions = num_partitions
        self.num_replicas = num_replicas

        # Broker and schema-registry endpoints shared by every producer
        self.broker_properties = {
            "bootstrap.servers": BROKER_URL,
            "schema.registry.url": SCHEMA_REGISTRY_URL
        }

        # If the topic does not already exist, try to create it
        if self.topic_name not in Producer.existing_topics:
            self.create_topic(self.topic_name)
            Producer.existing_topics.add(self.topic_name)

        self.producer = AvroProducer(
            self.broker_properties,
            default_key_schema=self.key_schema,
            default_value_schema=self.value_schema
        )

    @staticmethod
    def topic_exists(client, topic_name):
        """Checks if the given topic exists on the broker `client` points at."""
        topic_metadata = client.list_topics(timeout=5)
        return topic_name in set(t.topic for t in iter(topic_metadata.topics.values()))

    def create_topic(self, topic_name):
        """Creates the producer topic if it does not already exist."""
        client = AdminClient({"bootstrap.servers": self.broker_properties["bootstrap.servers"]})

        # BUG FIX: topic_exists was previously invoked as a bare name
        # (NameError at runtime) and defined without a self/static marker;
        # it is now a staticmethod called through self.
        if self.topic_exists(client, topic_name):
            logger.info(f"Topic already exists: {topic_name}")
            return

        new_topic = NewTopic(
            topic=topic_name,
            num_partitions=self.num_partitions,
            replication_factor=self.num_replicas,
            config={
                "cleanup.policy": "compact",
                "compression.type": "lz4",
                "delete.retention.ms": 100,
                "file.delete.delay.ms": 100
            },
        )

        # create_topics() is async on the broker side; wait on each future
        # so a failure surfaces here rather than silently later.
        futures = client.create_topics([new_topic])
        for topic, future in futures.items():
            try:
                future.result()
                logger.info("topic created")
            except Exception as e:
                logger.debug(f"failed to create topic {topic_name}: {e}")
                raise

    def close(self):
        """Prepares the producer for exit by cleaning up the producer."""
        if self.producer is not None:
            logger.info("flushing producer")
            self.producer.flush()

    # NOTE: the original class defined time_millis twice with identical
    # bodies; the duplicate has been removed.
    def time_millis(self):
        """Use this function to get the key for Kafka Events."""
        return int(round(time.time() * 1000))
| [
"[email protected]"
]
| |
3652f4d252652605a8f6ef2c32218b505955d203 | 627094b5e463bd113f626450eaceb01dfa4ff5d5 | /test/client/test_link_control.py | 0e4014b88ba456056e0e54eef493cfb4f701e752 | [
"MIT"
]
| permissive | DaleChen0351/python-udsoncan | 49eefcb299e2a4fabe0bf168905cc86ef43d6f62 | c495e872c69c4ea05e3b477d2a1088cb83167a17 | refs/heads/master | 2020-04-20T06:10:25.252315 | 2019-03-28T07:38:17 | 2019-03-28T07:38:17 | 168,675,483 | 0 | 0 | MIT | 2019-03-28T07:38:19 | 2019-02-01T09:42:02 | Python | UTF-8 | Python | false | false | 6,890 | py | from udsoncan.client import Client
from udsoncan import services, Baudrate
from udsoncan.exceptions import *
from test.ClientServerTest import ClientServerTest
class TestLinkContorl(ClientServerTest):
    """Tests for Client.link_control() (UDS LinkControl service, SID 0x87).

    Per the ClientServerTest convention, each test_* method plays the
    server/ECU side (reads the raw request bytes off the queue and queues
    the raw response) while its _test_* twin drives the client API --
    presumably the two halves run concurrently; confirm in ClientServerTest.

    NOTE(review): the class name keeps the historical typo "Contorl";
    renaming it would change the discovered test-suite name.
    """
    def __init__(self, *args, **kwargs):
        ClientServerTest.__init__(self, *args, **kwargs)
    def test_linkcontrol_verify_fixed(self):
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x87\x01\x11")
        self.conn.fromuserqueue.put(b"\xC7\x01")	# Positive response
    def _test_linkcontrol_verify_fixed(self):
        baudrate = Baudrate(250000, baudtype=Baudrate.Type.Fixed)
        response = self.udsclient.link_control(control_type=1, baudrate=baudrate)
        self.assertTrue(response.valid)
        self.assertTrue(response.positive)
        self.assertEqual(response.service_data.control_type_echo, 1)
    def test_linkcontrol_verify_fixed_spr(self):
        # 0x81 = control type 1 with the suppress-positive-response bit set
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x87\x81\x11")
        self.conn.fromuserqueue.put("wait")	# Synchronize
    def _test_linkcontrol_verify_fixed_spr(self):
        baudrate = Baudrate(250000, baudtype=Baudrate.Type.Fixed)
        with self.udsclient.suppress_positive_response:
            response = self.udsclient.link_control(control_type=1, baudrate=baudrate)
            self.assertEqual(response, None)
        self.conn.fromuserqueue.get(timeout=0.2)	#Avoid closing connection prematurely
    def test_linkcontrol_verify_fixed_from_specific(self):
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x87\x01\x11")
        self.conn.fromuserqueue.put(b"\xC7\x01")	# Positive response
    def _test_linkcontrol_verify_fixed_from_specific(self):
        # 250000 bps exists as a fixed baudrate, so control type 1 is usable
        # even though the Baudrate object was built as Specific.
        baudrate = Baudrate(250000, baudtype=Baudrate.Type.Specific)
        response = self.udsclient.link_control(control_type=1, baudrate=baudrate)
        self.assertTrue(response.valid)
        self.assertTrue(response.positive)
        self.assertEqual(response.service_data.control_type_echo, 1)
    def test_linkcontrol_verify_specific(self):
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x87\x02\x12\x34\x56")
        self.conn.fromuserqueue.put(b"\xC7\x02")	# Positive response
    def _test_linkcontrol_verify_specific(self):
        baudrate = Baudrate(0x123456, baudtype=Baudrate.Type.Specific)
        response = self.udsclient.link_control(control_type=2, baudrate=baudrate)
        self.assertTrue(response.valid)
        self.assertTrue(response.positive)
        self.assertEqual(response.service_data.control_type_echo, 2)
    def test_linkcontrol_verify_specific_from_fixed(self):
        # 500000 bps encoded as a 3-byte specific value: 0x07A120
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x87\x02\x07\xA1\x20")
        self.conn.fromuserqueue.put(b"\xC7\x02")	# Positive response
    def _test_linkcontrol_verify_specific_from_fixed(self):
        baudrate = Baudrate(500000, baudtype=Baudrate.Type.Fixed)
        response = self.udsclient.link_control(control_type=2, baudrate=baudrate)
        self.assertTrue(response.valid)
        self.assertTrue(response.positive)
    def test_linkcontrol_custom_control_type(self):
        request = self.conn.touserqueue.get(timeout=0.2)
        self.assertEqual(request, b"\x87\x55")
        self.conn.fromuserqueue.put(b"\xC7\x55")	# Positive response
    def _test_linkcontrol_custom_control_type(self):
        response = self.udsclient.link_control(control_type=0x55)
        self.assertTrue(response.valid)
        self.assertTrue(response.positive)
    def test_linkcontrol_negative_response_exception(self):
        self.wait_request_and_respond(b"\x7F\x87\x31")	# Request Out Of Range
    def _test_linkcontrol_negative_response_exception(self):
        with self.assertRaises(NegativeResponseException):
            self.udsclient.link_control(control_type=0x55)
    def test_linkcontrol_negative_response_no_exception(self):
        self.wait_request_and_respond(b"\x7F\x87\x31")	# Request Out Of Range
    def _test_linkcontrol_negative_response_no_exception(self):
        self.udsclient.config['exception_on_negative_response'] = False
        response = self.udsclient.link_control(control_type=0x55)
        self.assertTrue(response.valid)
        self.assertFalse(response.positive)
    def test_linkcontrol_invalidservice_exception(self):
        self.wait_request_and_respond(b"\x00\x22")	# Request Out Of Range
    def _test_linkcontrol_invalidservice_exception(self):
        with self.assertRaises(InvalidResponseException):
            self.udsclient.link_control(control_type=0x55)
    def test_linkcontrol_invalidservice_no_exception(self):
        self.wait_request_and_respond(b"\x00\x22")	# Request Out Of Range
    def _test_linkcontrol_invalidservice_no_exception(self):
        self.udsclient.config['exception_on_invalid_response'] = False
        response = self.udsclient.link_control(control_type=0x55)
        self.assertFalse(response.valid)
    def test_linkcontrol_wrongservice_exception(self):
        self.wait_request_and_respond(b"\x7E\x22")	# Valid but wrong service (Tester Present)
    def _test_linkcontrol_wrongservice_exception(self):
        with self.assertRaises(UnexpectedResponseException):
            self.udsclient.link_control(control_type=0x55)
    def test_linkcontrol_wrongservice_no_exception(self):
        self.wait_request_and_respond(b"\x7E\x22")	# Valid but wrong service (Tester Present)
    def _test_linkcontrol_wrongservice_no_exception(self):
        self.udsclient.config['exception_on_unexpected_response'] = False
        response = self.udsclient.link_control(control_type=0x55)
        self.assertTrue(response.valid)
        self.assertTrue(response.unexpected)
    def test_linkcontrol_bad_control_type_exception(self):
        self.wait_request_and_respond(b"\xC7\x08")	# Valid but bad control type
    def _test_linkcontrol_bad_control_type_exception(self):
        with self.assertRaises(UnexpectedResponseException):
            self.udsclient.link_control(control_type=0x55)
    def test_linkcontrol_bad_control_type_no_exception(self):
        self.wait_request_and_respond(b"\xC7\x08")	# Valid but bad control type
    def _test_linkcontrol_bad_control_type_no_exception(self):
        self.udsclient.config['exception_on_unexpected_response'] = False
        response = self.udsclient.link_control(control_type=0x55)
        self.assertTrue(response.valid)
        self.assertTrue(response.unexpected)
    def test_bad_param(self):
        # No server-side interaction: all failures happen client-side.
        pass
    def _test_bad_param(self):
        with self.assertRaises(ValueError):
            self.udsclient.link_control(control_type='x')
        with self.assertRaises(ValueError):
            self.udsclient.link_control(control_type=0x80)
        with self.assertRaises(ValueError):
            self.udsclient.link_control(control_type=1)	# Missing Baudrate
        with self.assertRaises(ValueError):
            self.udsclient.link_control(control_type=2)	# Missing Baudrate
        with self.assertRaises(ValueError):
            self.udsclient.link_control(control_type=0, baudrate=Baudrate(500000))	# Baudrate is not needed
        with self.assertRaises(ValueError):
            self.udsclient.link_control(control_type=1, baudrate=1)	# Baudrate should be Baudrate instance
        with self.assertRaises(ValueError):
            self.udsclient.link_control(control_type=1, baudrate='x')	# Baudrate should be Baudrate instance
| [
"[email protected]"
]
| |
b812740b37900b34544b586bf0a3e7b1acb08cd5 | e88db6bb162aa9273b06c9a4359ed954c5a76964 | /spider_selenium/views/MyWin.py | b29a4e19aaafd6efe1699aaee7c7380e1963f642 | []
| no_license | konghirt/spider_mys | 38544396fb5c0197fa047bd300e2b8d77fd5c188 | 2b4595173271bfce214b22e2af69b6bf1e8bb744 | refs/heads/master | 2023-02-04T00:38:28.512146 | 2020-12-19T10:06:36 | 2020-12-19T10:06:36 | 317,728,675 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,738 | py | import sys
import os
from PyQt5.QtWidgets import QMainWindow, QApplication, QFileDialog
from PyQt5.QtCore import pyqtSlot
from datetime import datetime
import time
import requests
import re
from views.MainWindow import Ui_MainWindow
from utils.mihoyoEnum import *
from script.spider_mihoyo import SpiderMihoyo
class MainWin(QMainWindow, Ui_MainWindow):
# 初始化界面
def __init__(self):
self.spider_plate = '' # 板块
self.spider_type = '' # 类型
self.start_num = 1 # 开始张数
self.scroll_count = 0 # 滚动次数
self.cwd = os.getcwd() # 获取当前程序文件位置
self.spider = SpiderMihoyo() # 爬取类实例
super(MainWin, self).__init__()
self.setupUi(self)
self.init_data()
self.addEventListener()
self.show()
# 事件监听
def addEventListener(self):
self.plateBtn1.clicked.connect(self.plate_checked)
self.plateBtn2.clicked.connect(self.plate_checked)
self.plateBtn3.clicked.connect(self.plate_checked)
self.plateBtn4.clicked.connect(self.plate_checked)
self.typeBtn1.clicked.connect(self.type_checked)
self.typeBtn2.clicked.connect(self.type_checked)
self.typeBtn3.clicked.connect(self.type_checked)
self.typeBtn4.clicked.connect(self.type_checked)
self.startSpinBox.valueChanged.connect(self.spinBox_start)
self.countSpinBox.valueChanged.connect(self.spinBox_count)
self.pushButton.clicked.connect(self.confirm)
self.chooseFileBtn.clicked.connect(self.choose_dir)
# 执行
def confirm(self):
# 启动网页
self.log('打开浏览器...')
self.spider.start()
self.log('开始爬取...')
data_list = self.spider.data_spider(self.spider_plate, self.spider_type, self.scroll_count)
self.log('爬取完成,开始下载...')
desk = self.create_dir()
self.download(data_list, desk)
# 批量下载
def download(self, data_list, desk):
count = 0
for i, data in enumerate(data_list):
if i < self.start_num - 1:
continue
try:
img = data['src'].partition('?')[0]
suffix = re.findall(r'(.jpg|.jpeg|.png|.gif)$', img)[-1]
file = f'{desk}/{count + 1}{suffix}'
with open(file, 'wb') as f:
self.log(f'下载 {img} ...')
f.write(requests.get(img).content)
count = count + 1
time.sleep(0.5)
except Exception:
self.log(f'下载失败, {img}')
self.log(f'下载完成, 共下载{count}张图片')
# 选择目录
def choose_dir(self):
dir_choose = QFileDialog.getExistingDirectory(self, "选取文件夹", self.cwd) # 起始路径
if dir_choose == "":
return
self.saveEditText.setText(dir_choose)
# 创建目录
def create_dir(self):
desk = self.saveEditText.text()
if not os.path.isdir(desk):
os.makedirs(desk)
return desk
# 初始化数据
def init_data(self):
self.spinBox_start()
self.spinBox_count()
self.plate_checked()
self.type_checked()
# 开始位置
def spinBox_start(self):
self.start_num = self.startSpinBox.value()
# 滚动次数
def spinBox_count(self):
self.scroll_count = self.countSpinBox.value()
if self.scroll_count == '':
self.scroll_count = 0
# 板块事件
def plate_checked(self):
if self.plateBtn1.isChecked():
self.spider_plate = GameType.BH2.value
elif self.plateBtn2.isChecked():
self.spider_plate = GameType.BH3.value
elif self.plateBtn3.isChecked():
self.spider_plate = GameType.YS.value
elif self.plateBtn4.isChecked():
self.spider_plate = GameType.DBY.value
# 类型事件
def type_checked(self):
if self.typeBtn1.isChecked():
self.spider_type = SearchType.LATEST_REPLY.value
elif self.typeBtn2.isChecked():
self.spider_type = SearchType.LATEST_RELEASE.value
elif self.typeBtn3.isChecked():
self.spider_type = SearchType.HOT.value
elif self.typeBtn4.isChecked():
self.spider_type = SearchType.GOOD.value
# 在文本框打印
def log(self, str):
self.textEdit.append(str)
QApplication.processEvents() # 刷新界面
# 窗口关闭事件
    def closeEvent(self,e):
        """Qt close hook: shut down the spider before the window closes.

        NOTE(review): ``spider.quit()`` presumably closes the underlying
        browser/driver — confirm in the spider class.
        """
        self.spider.quit()
"[email protected]"
]
| |
ac07a53e15aef0bb493402d8d4c3712a747239bb | 3a6cbe6940b657ac6b608ce93d8d41ffeb6b9e65 | /rocon_python_comms/src/rocon_python_comms/service_pair_server.py | 8263080327cd5dc872bcd1d3fefb91715b3bd6bf | []
| no_license | robotics-in-concert/rocon_tools | cdfc4ccfc04b79262fb151640966a33bd0b5f498 | 1f182537b26e8622eefaf6737d3b3d18b1741ca6 | refs/heads/devel | 2021-01-17T01:58:12.163878 | 2018-02-06T15:20:29 | 2018-02-06T15:20:29 | 15,774,638 | 7 | 22 | null | 2017-08-16T06:39:47 | 2014-01-09T18:02:42 | Python | UTF-8 | Python | false | false | 6,195 | py | #
# License: BSD
# https://raw.github.com/robotics-in-concert/rocon_tools/license/LICENSE
#
##############################################################################
# Description
##############################################################################
"""
.. module:: service_pair_server
:platform: Unix
:synopsis: Server side api for communicating across a rocon service pair.
This module contains the server side api for communicating across a rocon
service pair. A `facade pattern`_ is used here to simplify the interaction with
the server side publisher and subscriber.
.. include:: weblinks.rst
----
"""
##############################################################################
# Imports
##############################################################################
import rospy
import threading
# Local imports
from .exceptions import ServicePairException
##############################################################################
# Server Class
##############################################################################
class ServicePairServer(object):
'''
The server side of a pubsub service pair. This class provides a simplified
api for handling requests/responses on the pubsub pair. There are two
modes of operation - 1) blocking and 2) threaded.
**Non-Threaded**
In the first, the users' callback function directly runs whenever an
incoming request is received. In this case, your callbacks should be
very minimal so that incoming requests don't get blocked and queued up.
.. code-block:: python
#!/usr/bin/env python
import rospy
from chatter.msg import ChatterRequest, ChatterResponse, ChatterPair
from rocon_python_comms import ServicePairServer
class ChatterServer(object):
def __init__(self):
self.server = ServicePairServer('chatter', self.callback, ChatterPair)
def callback(self, request_id, msg):
rospy.loginfo("Server : I heard %s" % msg.babble)
response = ChatterResponse()
response.reply = "I heard %s" % msg.babble
self.server.reply(request_id, response)
if __name__ == '__main__':
rospy.init_node('chatter_server', anonymous=True)
chatter_server = ChatterServer()
rospy.spin()
**Threaded**
In the second, we spawn a background thread and shunt the callback into this thread.
Just toggle the ``use_threads`` flag when constructing the server:
.. code-block:: python
self.server = ServicePairServer('chatter', self.callback, ChatterPair, use_threads=True)
'''
__slots__ = [
'_publisher',
'_subscriber',
'_callback',
'_use_threads',
#'_request_handlers', # initiate, track and execute requests with these { hex string ids : dic of RequestHandler objects (Blocking/NonBlocking) }
'ServicePairSpec',
'ServicePairRequest',
'ServicePairResponse',
]
##########################################################################
# Initialisation
##########################################################################
def __init__(self, name, callback, ServicePairSpec, use_threads=False, queue_size=5):
'''
:param str name: resource name of service pair (e.g. testies for pair topics testies/request, testies/response)
:param callback: function invoked when a request arrives
:param ServicePairSpec: the pair type (e.g. rocon_service_pair_msgs.msg.TestiesPair)
:param bool use_threads: put the callback function into a fresh background thread when a request arrives.
:param int queue_size: size of the queue to configure the publisher with.
'''
self._callback = callback
self._use_threads = use_threads
try:
p = ServicePairSpec()
self.ServicePairSpec = ServicePairSpec
"""Base message type for this pair."""
self.ServicePairRequest = type(p.pair_request)
"""Request msg type for this pair <ServicePairSpec>Request."""
self.ServicePairResponse = type(p.pair_response)
"""Response msg type for this pair <ServicePairSpec>Response."""
except AttributeError:
raise ServicePairException("Type is not an pair spec: %s" % str(ServicePairSpec))
self._subscriber = rospy.Subscriber(name + "/request", self.ServicePairRequest, self._internal_callback)
self._publisher = rospy.Publisher(name + "/response", self.ServicePairResponse, queue_size=queue_size)
##########################################################################
# Public Methods
##########################################################################
def reply(self, request_id, msg):
'''
Send a reply to a previously received request (identified by request_id). Use this
instead of writing directly to the publisher - just pass the content of the
response data and the id that was issued with the request.
:param uuid_msgs.UniqueID request_id: the request id to associate with this response.
:param ServiceResponse msg: the response
'''
pair_response = self.ServicePairResponse()
pair_response.id = request_id
pair_response.response = msg
self._publisher.publish(pair_response)
##########################################################################
# Callbacks
##########################################################################
def _internal_callback(self, msg):
'''
:param ServicePairRequest msg: message returned from the server (with pair id etc)
'''
# Check if it is a blocking call that has requested it.
if self._use_threads:
thread = threading.Thread(target=self._callback, args=(msg.id, msg.request))
thread.start()
else:
self._callback(msg.id, msg.request)
| [
"[email protected]"
]
| |
6e912f44afec75e11cbc6a6f8420b0786814f200 | 4e0f96da38a9b08164c1798e1f959287a67a8e76 | /Graph_Testing/plot_noise_data.py | a4a99c0f4ac78e03fbb15e0337154149af64e61a | []
| no_license | dlevenstein/SOSpikingModel_Nest | 899c090e6ee64df6d7e269191b00df038a5ab4ae | abeed3f70b87249acb0fa250a90112d0b933eb25 | refs/heads/master | 2021-06-13T19:57:32.250912 | 2017-03-09T23:42:49 | 2017-03-09T23:42:49 | 67,539,355 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 939 | py | import csv
import matplotlib.pyplot as plt
import numpy
# Load the simulated mean-spike-rate grid (rows = synaptic weight, columns =
# noise standard deviation) produced by the noise sweep.
#
# Fixes vs. the original: the file handle was never closed, and it was opened
# in "rb" (the Python 2 csv convention) which breaks Python 3's csv module —
# csv.reader needs a text-mode file opened with newline="".
with open("Noise_Data.csv", newline="") as csv_file:
    data = csv.reader(csv_file, delimiter=",")
    mean_spike_list = [[numpy.float64(value) for value in line] for line in data]

print(mean_spike_list)

analysis_list = numpy.asarray(mean_spike_list)
#numpy.fliplr(analysis_list)

# Heat map: colour encodes mean spikes/sec per (weight, noise) cell.
plt.figure("Mean Spike Rate for Noise")
heatmap = plt.imshow(analysis_list, cmap='plasma', interpolation='nearest', aspect='auto')
# Relabel the cell indices with physical units (pA on x, weight on y).
plt.xticks(range(0, 60, 10), range(0, 600, 100))
plt.yticks(range(0, 12, 2), range(0, 120, 20))
plt.xlabel("Noise Standard Deviation (pA)")
plt.ylabel("Synaptic Weight")
cbr = plt.colorbar(heatmap)
cbr.set_label("Neuron Spikes per sec")
plt.title("Mean Spike Rate for Noise")
#plt.gca().invert_xaxis()
plt.gca().invert_yaxis()
plt.savefig("noise_figure.png")
# NOTE(review): set before savefig if the larger font should affect the PNG;
# kept after it here to preserve the original output exactly.
plt.rcParams.update({'font.size': 30})
plt.show()
| [
"[email protected]"
]
| |
29fd9997d18f422931eee8bd66197d7ff7608426 | 4ce7dffa293abec5127a1674df266f003fc92677 | /program-files/hauptfenster.py | f303236b96596989cb2e9e41533c75322443d1d5 | []
| no_license | kleintk/MeinTestRepo | ac2c723b97f6bf276ca051846c5ae31138fbe47c | 3df80f9cb446aa9420c8f160e43fe719f18375de | refs/heads/master | 2021-01-23T06:39:02.506774 | 2014-01-17T17:38:49 | 2014-01-17T17:38:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,187 | py | # -*- coding: utf-8 -*-
#gehoert zum programmAdressbuch.py
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import (QAction, QApplication, QFileDialog, QMainWindow, QMessageBox, QTextEdit) #hinzufuegen!--------------------
class Ui_MainWindow(object):
    """Qt Designer generated layout for the address-book main window.

    Mostly mechanical pyuic5 output; the ``triggered=...`` keyword arguments on
    the QAction constructors were added by hand to wire the toolbar actions to
    handler methods (klickAufNeu, klickAufLoeschen, listeSpeichern, listeLaden)
    that the consuming subclass is expected to provide.
    """

    def setupUi(self, MainWindow):
        """Build all widgets, layouts and toolbar actions onto *MainWindow*."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(808, 621)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(MainWindow.sizePolicy().hasHeightForWidth())
        MainWindow.setSizePolicy(sizePolicy)
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap("images/adressbook-32.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        MainWindow.setWindowIcon(icon)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # Right-hand group box: contact detail entry form.
        self.groupBox = QtWidgets.QGroupBox(self.centralwidget)
        self.groupBox.setGeometry(QtCore.QRect(230, 30, 541, 521))
        self.groupBox.setObjectName("groupBox")
        self.layoutWidget = QtWidgets.QWidget(self.groupBox)
        self.layoutWidget.setGeometry(QtCore.QRect(30, 30, 481, 441))
        self.layoutWidget.setObjectName("layoutWidget")
        self.gridLayout = QtWidgets.QGridLayout(self.layoutWidget)
        self.gridLayout.setContentsMargins(0, 0, 0, 0)
        self.gridLayout.setObjectName("gridLayout")
        self.label = QtWidgets.QLabel(self.layoutWidget)
        self.label.setObjectName("label")
        self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
        self.vornamelineEdit = QtWidgets.QLineEdit(self.layoutWidget)
        self.vornamelineEdit.setObjectName("vornamelineEdit")
        self.gridLayout.addWidget(self.vornamelineEdit, 0, 1, 1, 2)
        self.label_2 = QtWidgets.QLabel(self.layoutWidget)
        self.label_2.setObjectName("label_2")
        self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
        self.nachnamelineEdit = QtWidgets.QLineEdit(self.layoutWidget)
        self.nachnamelineEdit.setObjectName("nachnamelineEdit")
        self.gridLayout.addWidget(self.nachnamelineEdit, 1, 1, 1, 2)
        self.label_4 = QtWidgets.QLabel(self.layoutWidget)
        self.label_4.setObjectName("label_4")
        self.gridLayout.addWidget(self.label_4, 2, 0, 1, 1)
        self.emaillineEdit = QtWidgets.QLineEdit(self.layoutWidget)
        self.emaillineEdit.setObjectName("emaillineEdit")
        self.gridLayout.addWidget(self.emaillineEdit, 2, 1, 1, 2)
        self.label_6 = QtWidgets.QLabel(self.layoutWidget)
        self.label_6.setObjectName("label_6")
        self.gridLayout.addWidget(self.label_6, 3, 0, 1, 1)
        self.handylineEdit = QtWidgets.QLineEdit(self.layoutWidget)
        self.handylineEdit.setObjectName("handylineEdit")
        self.gridLayout.addWidget(self.handylineEdit, 3, 1, 1, 2)
        self.label_5 = QtWidgets.QLabel(self.layoutWidget)
        self.label_5.setObjectName("label_5")
        self.gridLayout.addWidget(self.label_5, 4, 0, 1, 1)
        self.tellineEdit = QtWidgets.QLineEdit(self.layoutWidget)
        self.tellineEdit.setObjectName("tellineEdit")
        self.gridLayout.addWidget(self.tellineEdit, 4, 1, 1, 2)
        self.label_8 = QtWidgets.QLabel(self.layoutWidget)
        self.label_8.setObjectName("label_8")
        self.gridLayout.addWidget(self.label_8, 5, 0, 1, 1)
        self.geburtstagdateEdit = QtWidgets.QDateEdit(self.layoutWidget)
        self.geburtstagdateEdit.setObjectName("geburtstagdateEdit")
        self.gridLayout.addWidget(self.geburtstagdateEdit, 5, 1, 1, 1)
        spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.gridLayout.addItem(spacerItem, 5, 2, 1, 1)
        self.label_3 = QtWidgets.QLabel(self.layoutWidget)
        self.label_3.setObjectName("label_3")
        self.gridLayout.addWidget(self.label_3, 6, 0, 1, 1)
        self.adressetextEdit = QtWidgets.QTextEdit(self.layoutWidget)
        self.adressetextEdit.setObjectName("adressetextEdit")
        self.gridLayout.addWidget(self.adressetextEdit, 6, 1, 1, 2)
        self.label_7 = QtWidgets.QLabel(self.layoutWidget)
        self.label_7.setObjectName("label_7")
        self.gridLayout.addWidget(self.label_7, 7, 0, 1, 1)
        self.notizentextEdit = QtWidgets.QTextEdit(self.layoutWidget)
        self.notizentextEdit.setObjectName("notizentextEdit")
        self.gridLayout.addWidget(self.notizentextEdit, 7, 1, 1, 2)
        # Button row under the form: cancel / save changes / add.
        self.layoutWidget1 = QtWidgets.QWidget(self.groupBox)
        self.layoutWidget1.setGeometry(QtCore.QRect(220, 480, 294, 25))
        self.layoutWidget1.setObjectName("layoutWidget1")
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.layoutWidget1)
        self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.hinzufuegenAbbrechenpushButton = QtWidgets.QPushButton(self.layoutWidget1)
        self.hinzufuegenAbbrechenpushButton.setObjectName("hinzufuegenAbbrechenpushButton")
        self.horizontalLayout_2.addWidget(self.hinzufuegenAbbrechenpushButton)
        self.aenderungenSpeichernpushButton = QtWidgets.QPushButton(self.layoutWidget1)
        self.aenderungenSpeichernpushButton.setObjectName("aenderungenSpeichernpushButton")
        self.horizontalLayout_2.addWidget(self.aenderungenSpeichernpushButton)
        self.hinzufuegenpushButton = QtWidgets.QPushButton(self.layoutWidget1)
        self.hinzufuegenpushButton.setObjectName("hinzufuegenpushButton")
        self.horizontalLayout_2.addWidget(self.hinzufuegenpushButton)
        # Top-left selector: contact combo box plus "show" button.
        self.layoutWidget2 = QtWidgets.QWidget(self.centralwidget)
        self.layoutWidget2.setGeometry(QtCore.QRect(20, 30, 171, 25))
        self.layoutWidget2.setObjectName("layoutWidget2")
        self.horizontalLayout = QtWidgets.QHBoxLayout(self.layoutWidget2)
        self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.auswahlcomboBox = QtWidgets.QComboBox(self.layoutWidget2)
        self.auswahlcomboBox.setObjectName("auswahlcomboBox")
        self.horizontalLayout.addWidget(self.auswahlcomboBox)
        self.anzeigenpushButton = QtWidgets.QPushButton(self.layoutWidget2)
        self.anzeigenpushButton.setObjectName("anzeigenpushButton")
        self.horizontalLayout.addWidget(self.anzeigenpushButton)
        MainWindow.setCentralWidget(self.centralwidget)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.toolBar = QtWidgets.QToolBar(MainWindow)
        self.toolBar.setObjectName("toolBar")
        MainWindow.addToolBar(QtCore.Qt.TopToolBarArea, self.toolBar)
        self.actionNeu = QtWidgets.QAction(MainWindow, triggered=self.klickAufNeu)  # hand-added: wire "new entry" to klickAufNeu
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap("images/new.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionNeu.setIcon(icon1)
        self.actionNeu.setObjectName("actionNeu")
        self.actionLoeschen = QtWidgets.QAction(MainWindow, triggered=self.klickAufLoeschen)  # hand-added: wire "delete" to klickAufLoeschen
        icon2 = QtGui.QIcon()
        icon2.addPixmap(QtGui.QPixmap("images/cut.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionLoeschen.setIcon(icon2)
        self.actionLoeschen.setObjectName("actionLoeschen")
        self.actionHinzufuegen = QtWidgets.QAction(MainWindow, triggered=self.listeSpeichern)  # hand-added: wire "save" to listeSpeichern
        icon3 = QtGui.QIcon()
        icon3.addPixmap(QtGui.QPixmap("images/save.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionHinzufuegen.setIcon(icon3)
        self.actionHinzufuegen.setObjectName("actionHinzufuegen")
        self.actionLaden = QtWidgets.QAction(MainWindow, triggered=self.listeLaden)  # hand-added: wire "load" to listeLaden
        icon4 = QtGui.QIcon()
        icon4.addPixmap(QtGui.QPixmap("images/open.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionLaden.setIcon(icon4)
        self.actionLaden.setObjectName("actionLaden")
        self.toolBar.addAction(self.actionNeu)
        self.toolBar.addSeparator()
        self.toolBar.addAction(self.actionLaden)
        self.toolBar.addAction(self.actionHinzufuegen)
        self.toolBar.addSeparator()
        self.toolBar.addAction(self.actionLoeschen)

        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Apply all user-visible (German) strings; standard pyuic5 i18n hook."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "Adressbuch"))
        self.groupBox.setTitle(_translate("MainWindow", "Angaben"))
        self.label.setText(_translate("MainWindow", "Vorname"))
        self.label_2.setText(_translate("MainWindow", "Nachname"))
        self.label_4.setText(_translate("MainWindow", "E-Mail"))
        self.label_6.setText(_translate("MainWindow", "Handynummer"))
        self.label_5.setText(_translate("MainWindow", "Telefonnummer"))
        self.label_8.setText(_translate("MainWindow", "Geburtstag"))
        self.label_3.setText(_translate("MainWindow", "Adresse"))
        self.label_7.setText(_translate("MainWindow", "Notizen"))
        self.hinzufuegenAbbrechenpushButton.setText(_translate("MainWindow", "Abbrechen"))
        self.aenderungenSpeichernpushButton.setText(_translate("MainWindow", "Änderungen übernehmen"))
        self.hinzufuegenpushButton.setText(_translate("MainWindow", "Hinzufügen"))
        self.anzeigenpushButton.setText(_translate("MainWindow", "Anzeigen"))
        self.toolBar.setWindowTitle(_translate("MainWindow", "toolBar"))
        self.actionNeu.setText(_translate("MainWindow", "Neuer Eintrag"))
        self.actionLoeschen.setText(_translate("MainWindow", "Loeschen"))
        self.actionHinzufuegen.setText(_translate("MainWindow", "Speichern"))
        self.actionLaden.setText(_translate("MainWindow", "Laden"))
| [
"[email protected]"
]
| |
75204bbfc5d050883078af710ce97469e69c1335 | a089fab4b0e363ba48bff57b3948c32172570e8f | /home_connect_sdk/models/__init__.py | 311a2dad6bac50ae69888c78797c9a6745803fa0 | []
| no_license | jeroenvdwaal/home-connect-sdk | ed2e44a01b72d64d17d41af8400eb2e42792232c | 3c0ab6791bb0e9df95154f8f177d889ebef0c749 | refs/heads/master | 2022-04-23T01:20:32.621570 | 2020-04-26T09:40:16 | 2020-04-26T09:40:16 | 255,988,008 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,060 | py | # coding: utf-8
# flake8: noqa
"""
Home Connect API
This API provides access to home appliances enabled by Home Connect (https://home-connect.com). Through the API programs can be started and stopped, or home appliances configured and monitored. For instance, you can start a cotton program on a washer and get a notification when the cycle is complete. To get started with this web client, visit https://developer.home-connect.com and register an account. An application with a client ID for this API client will be automatically generated for you. In order to use this API in your own client, you need an OAuth 2 client implementing the authorization code grant flow (https://developer.home-connect.com/docs/authorization/flow). More details can be found here: https://www.rfc-editor.org/rfc/rfc6749.txt Authorization URL: https://api.home-connect.com/security/oauth/authorize Token URL: https://api.home-connect.com/security/oauth/token # noqa: E501
The version of the OpenAPI document: 1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
# import models into model package
from home_connect_sdk.models.active_program_not_set_error import ActiveProgramNotSetError
from home_connect_sdk.models.array_of_available_programs import ArrayOfAvailablePrograms
from home_connect_sdk.models.array_of_available_programs_data import ArrayOfAvailableProgramsData
from home_connect_sdk.models.array_of_available_programs_data_constraints import ArrayOfAvailableProgramsDataConstraints
from home_connect_sdk.models.array_of_available_programs_data_programs import ArrayOfAvailableProgramsDataPrograms
from home_connect_sdk.models.array_of_events import ArrayOfEvents
from home_connect_sdk.models.array_of_events_items import ArrayOfEventsItems
from home_connect_sdk.models.array_of_home_appliances import ArrayOfHomeAppliances
from home_connect_sdk.models.array_of_home_appliances_data import ArrayOfHomeAppliancesData
from home_connect_sdk.models.array_of_home_appliances_data_homeappliances import ArrayOfHomeAppliancesDataHomeappliances
from home_connect_sdk.models.array_of_images import ArrayOfImages
from home_connect_sdk.models.array_of_images_data import ArrayOfImagesData
from home_connect_sdk.models.array_of_images_data_images import ArrayOfImagesDataImages
from home_connect_sdk.models.array_of_options import ArrayOfOptions
from home_connect_sdk.models.array_of_options_data import ArrayOfOptionsData
from home_connect_sdk.models.array_of_programs import ArrayOfPrograms
from home_connect_sdk.models.array_of_programs_data import ArrayOfProgramsData
from home_connect_sdk.models.array_of_programs_data_constraints import ArrayOfProgramsDataConstraints
from home_connect_sdk.models.array_of_programs_data_programs import ArrayOfProgramsDataPrograms
from home_connect_sdk.models.array_of_settings import ArrayOfSettings
from home_connect_sdk.models.array_of_settings_data import ArrayOfSettingsData
from home_connect_sdk.models.array_of_settings_data_settings import ArrayOfSettingsDataSettings
from home_connect_sdk.models.array_of_status import ArrayOfStatus
from home_connect_sdk.models.array_of_status_data import ArrayOfStatusData
from home_connect_sdk.models.command import Command
from home_connect_sdk.models.command_data import CommandData
from home_connect_sdk.models.command_definition import CommandDefinition
from home_connect_sdk.models.command_definition_data import CommandDefinitionData
from home_connect_sdk.models.conflict import Conflict
from home_connect_sdk.models.conflict_error import ConflictError
from home_connect_sdk.models.forbidden_error import ForbiddenError
from home_connect_sdk.models.get_setting import GetSetting
from home_connect_sdk.models.get_setting_data import GetSettingData
from home_connect_sdk.models.get_setting_data_constraints import GetSettingDataConstraints
from home_connect_sdk.models.home_appliance import HomeAppliance
from home_connect_sdk.models.home_appliance_data import HomeApplianceData
from home_connect_sdk.models.interal_server_error import InteralServerError
from home_connect_sdk.models.no_program_active_error import NoProgramActiveError
from home_connect_sdk.models.no_program_selected_error import NoProgramSelectedError
from home_connect_sdk.models.not_acceptable_error import NotAcceptableError
from home_connect_sdk.models.not_found_error import NotFoundError
from home_connect_sdk.models.option import Option
from home_connect_sdk.models.option_data import OptionData
from home_connect_sdk.models.program import Program
from home_connect_sdk.models.program_data import ProgramData
from home_connect_sdk.models.program_data_options import ProgramDataOptions
from home_connect_sdk.models.program_definition import ProgramDefinition
from home_connect_sdk.models.program_definition_data import ProgramDefinitionData
from home_connect_sdk.models.program_definition_data_constraints import ProgramDefinitionDataConstraints
from home_connect_sdk.models.program_definition_data_options import ProgramDefinitionDataOptions
from home_connect_sdk.models.program_not_available_error import ProgramNotAvailableError
from home_connect_sdk.models.put_setting import PutSetting
from home_connect_sdk.models.put_setting_data import PutSettingData
from home_connect_sdk.models.put_setting_data_constraints import PutSettingDataConstraints
from home_connect_sdk.models.request_timeout_error import RequestTimeoutError
from home_connect_sdk.models.selected_program_not_set_error import SelectedProgramNotSetError
from home_connect_sdk.models.status import Status
from home_connect_sdk.models.status_data import StatusData
from home_connect_sdk.models.too_many_requests_error import TooManyRequestsError
from home_connect_sdk.models.unauthorized_error import UnauthorizedError
from home_connect_sdk.models.unauthorized_error_error import UnauthorizedErrorError
from home_connect_sdk.models.unsupported_media_type_error import UnsupportedMediaTypeError
from home_connect_sdk.models.wrong_operation_state_error import WrongOperationStateError
| [
"[email protected]"
]
| |
25dd2d1ba4f36e6b14892f071c138d1675b53d69 | 241cfe341c3f661d95e78b628cb186f6d224401c | /models/order.py | 5e22ba0865fbe9ba3c0996c98f27f9ec5eb97984 | []
| no_license | gin0i/TradeServices | 8b559318eedacf71dbd665e917ea98442512fd15 | 9ef4bc4a1cb74b65f9dde1c70c7d80e903b4a719 | refs/heads/master | 2020-04-16T13:03:08.664071 | 2019-01-14T06:39:09 | 2019-01-14T06:39:09 | 165,608,566 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,043 | py | from sqlalchemy import Column, Integer, String, DateTime, Float, TIMESTAMP
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from config import postgre
engine = create_engine('postgresql+psycopg2://'+postgre.user+':'+postgre.password+'@'+postgre.db, pool_recycle=3600)
Base = declarative_base()
class Order(Base):
__tablename__ = 'orders'
id = Column(Integer, primary_key=True)
exchange = Column(String)
symbol = Column(String)
fee = Column(Float)
side = Column(String)
price = Column(String)
datetime = Column(DateTime)
type = Column(String)
cost = Column(Float)
amount = Column(Float)
filled = Column(Float)
average = Column(Float)
remaining = Column(Float)
status = Column(String)
timestamp = Column(TIMESTAMP)
def __repr__(self):
return "<Position(symbol='%s', amount='%s', price='%s', datetime='%s', type='%s')>" % (self.symbol, self.amount, self.price, self.datetime, self.type)
# Import-time side effect: create the orders table if it does not exist yet.
Base.metadata.create_all(engine)
"[email protected]"
]
| |
a58d95ff10c64a986aefc8c3385661c66f156099 | 217817a8a701131ba911a2b27ef134f6fa90bbd1 | /routes.py | f57d5b050953c12771227c43dbb5f9824fcf5544 | []
| no_license | anemitz/sent | da17c4af6b2104f0042e9eeb14f0321c5c1d7d3a | 9cbfe07187bc2287177c8ed3ff57f9785892317e | refs/heads/master | 2020-05-20T19:31:38.109379 | 2015-04-10T06:39:10 | 2015-04-10T06:39:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,721 | py | import os
from flask import Flask, render_template, redirect, request, g, jsonify, url_for, Response
from flask.ext.sqlalchemy import SQLAlchemy
from sqlalchemy import and_, update, between
import model
from model import session
from datetime import datetime, date, timedelta
import json
import pytz
app = Flask(__name__)

#PAGE ROUTES
@app.route("/")
def index():
    """Render the landing page."""
    return render_template("index.html")
@app.route('/inbox/', defaults={'label':'all'})
@app.route('/inbox/<label>')
def show_inbox(label):
    """Render the inbox page filtered to *label* ('all' when none given)."""
    return render_template("inbox.html", label = label)
# SERVER API ENDPOINTS
@app.route('/sent/api/tickets/', methods=['POST'])
def update_ticket_sentiment():
    """Bulk-apply a new sentiment label to the tickets selected in the UI.

    Form fields:
        newSentiment  -- target label for all selected tickets
        selections[]  -- repeated field holding the ticket ids to relabel
    Returns a JSON ``{"status": "success"}`` payload.
    """
    data = request.form
    target_sentiment = data['newSentiment']
    changing_tickets = request.form.getlist('selections[]')
    if changing_tickets:
        # One UPDATE ... WHERE ticket_id IN (...) instead of a round trip per
        # ticket. synchronize_session=False is safe: the session is committed
        # (and the request ends) immediately afterwards.
        session.query(model.Ticket).filter(
            model.Ticket.ticket_id.in_(changing_tickets)
        ).update(
            {
                "sentiment_label": target_sentiment,
                "update_date": datetime.utcnow(),
            },
            synchronize_session=False,
        )
    session.commit()
    js = json.dumps({"status": "success"})
    response = Response(js, mimetype='application/json', status=200)
    return response
@app.route('/sent/api/tickets/<label>/', methods=['GET'])
def tickets(label):
    """Return one page (20 rows) of tickets for *label*, plus sentiment counts.

    Query string:
        page -- 1-based page number.
    *label* is a sentiment name ("positive"/"neutral"/"upset") or "all".
    Responds with the page items, pagination flags, per-sentiment totals
    (ordered upset/neutral/positive) and the count for the requested label.
    """
    if request.method == 'GET':  # always true: the route only accepts GET
        page = int(request.args.get('page'))
        display_qty = 20
        query_qty = 21  # fetch one extra row to detect whether a next page exists
        if label == "all":
            ticket_results = model.Ticket.query.order_by(model.Ticket.priority).order_by(model.Ticket.timestamp.desc()).offset((page - 1)*display_qty).limit(query_qty).all()
        else:
            ticket_results = model.Ticket.query.filter(model.Ticket.sentiment_label == label).order_by(model.Ticket.priority).order_by(model.Ticket.timestamp.desc()).offset((page - 1)*display_qty).limit(query_qty).all()
        #to check to see if we will need another paginated page after this page
        if len(ticket_results) > 20:
            next_page = True
        else:
            next_page = False
        json_results = []
        cursor = page  # echoed back to the client unchanged
        for result in ticket_results[0:20]:
            # NOTE(review): the -7h shift looks like a hard-coded UTC->Pacific
            # offset used to decide "is this ticket from today" — confirm the
            # intended timezone handling.
            ticket_day = (result.timestamp - timedelta(hours = 7)).replace(hour = 0, minute = 0, second = 0, microsecond = 0)
            today_day = datetime.now().replace(hour = 0, minute = 0, second = 0, microsecond = 0)
            if ticket_day == today_day:
                today = True
            else:
                today = False
            d = {
                'ticket_id': result.ticket_id,
                'user_id': result.user_id,
                'user_name': result.user.name,
                'user_organization': result.user.organization_name,
                'date': result.timestamp,
                'subject': result.subject,
                'content': result.content,
                'status': result.status,
                'source': result.source,
                'sentiment': result.sentiment_label,
                'today': today
            }
            json_results.append(d)
        # Full-table pass to compute the per-sentiment totals for the sidebar.
        all_tickets = model.Ticket.list_all_tickets()
        positive_tickets = []
        upset_tickets = []
        neutral_tickets = []
        total_message_count = []
        for ticket in all_tickets:
            if ticket.sentiment_label == "positive":
                positive_tickets.append(ticket)
            elif ticket.sentiment_label == "upset":
                upset_tickets.append(ticket)
            elif ticket.sentiment_label == "neutral":
                neutral_tickets.append(ticket)
        if label == "positive":
            sentiment_message_count = len(positive_tickets)
        elif label == "upset":
            sentiment_message_count = len(upset_tickets)
        elif label == "neutral":
            sentiment_message_count = len(neutral_tickets)
        else:
            sentiment_message_count = len(positive_tickets) + len(upset_tickets) + len(neutral_tickets)
        # Order matters: the client expects [upset, neutral, positive].
        total_message_count.append(len(upset_tickets))
        total_message_count.append(len(neutral_tickets))
        total_message_count.append(len(positive_tickets))
        return jsonify(items=json_results, cursor = page, next_page = next_page, total_count = total_message_count, sentiment_count = sentiment_message_count)
@app.route('/sent/api/data/', methods=['GET'])
def counts():
    """Build the chart payload for the dashboard over a chosen time window.

    Query string:
        time -- "today" (hourly buckets, last 24h), "week" (daily, last 7
                days) or "month" (daily, last 30 days).
    Returns per-sentiment totals, c3-style time-series columns (x axis plus
    upset/neutral/positive series) and per-source pie-chart breakdowns.
    NOTE(review): the +/-7h shifts look like a hard-coded UTC<->Pacific
    conversion between display times and the UTC timestamps queried — confirm.
    """
    time_period = request.args.get('time')
    today = datetime.now()
    today_for_query = datetime.utcnow()
    # Query and collect data for each sentiment for the given time range
    json_count_results = []
    columns = []
    source_data = {}
    labels = ["upset", "neutral", "positive", "total"]
    source_options = ["api", "twitter", "facebook"]
    if time_period == "today":
        datetime_threshold = (today_for_query - timedelta(hours = 24)).replace(minute = 0, second = 0, microsecond = 0)
        #create a list of all hours in 24 to query for
        datetime_points = []
        hour = datetime_threshold - timedelta(hours=7)
        this_hour = today.hour
        for each_hour in range(24):
            hour = hour + timedelta(hours = 1)
            datetime_points.append(hour)
        # Display axis is local; the query buckets are shifted back to UTC.
        query_datetime_points = [(time + timedelta(hours=7)) for time in datetime_points]
        x_axis = ['x'] + [d.strftime("%Y-%m-%d %H:%M:%S") for d in datetime_points]
        columns.append(x_axis)
    if time_period == "week":
        datetime_threshold = (today_for_query - timedelta(days = 7)).replace(hour=0, minute = 0, second = 0, microsecond = 0)
        #create a list of all days of a week to query for and display
        datetime_points = []
        day = (today - timedelta(days = 7)).replace(hour=0, minute = 0, second = 0, microsecond = 0)
        for each_day in range(7):
            day = day + timedelta(days = 1)
            datetime_points.append(day)
        query_datetime_points = datetime_points
        x_axis = ['x'] + [d.strftime("%Y-%m-%d %H:%M:%S") for d in datetime_points]
        columns.append(x_axis)
    if time_period == "month":
        datetime_threshold = (today_for_query - timedelta(days = 30)).replace(hour=0, minute = 0, second = 0, microsecond = 0)
        #create a list of all days of a month to query for
        datetime_points = []
        day = (today - timedelta(days = 30)).replace(hour=0, minute = 0, second = 0, microsecond = 0)
        for each_day in range(30):
            day = day + timedelta(days = 1)
            datetime_points.append(day)
        query_datetime_points = datetime_points
        x_axis = ['x'] + [d.strftime("%Y-%m-%d %H:%M:%S") for d in datetime_points]
        columns.append(x_axis)
    #create data points according to timeframe
    all_tickets = model.Ticket.list_tickets(datetime_threshold)
    positive_tickets = []
    positive_sources = []
    upset_tickets = []
    upset_sources = []
    neutral_tickets = []
    neutral_sources = []
    for ticket in all_tickets:
        # Truncate each ticket's timestamp to its bucket (hour or day).
        if time_period == "today":
            date_cleaned = ticket.timestamp.replace(minute = 0, second = 0, microsecond = 0)
        else:
            date_cleaned = ticket.timestamp.replace(hour=0, minute = 0, second = 0, microsecond = 0)
        if ticket.sentiment_label == "positive":
            positive_tickets.append(date_cleaned)
            positive_sources.append(ticket.source)
        elif ticket.sentiment_label == "upset":
            upset_tickets.append(date_cleaned)
            upset_sources.append(ticket.source)
        elif ticket.sentiment_label == "neutral":
            neutral_tickets.append(date_cleaned)
            neutral_sources.append(ticket.source)
    #counts by label
    upset_count = {'label':'upset', 'count':len(upset_tickets)}
    json_count_results.append(upset_count)
    neutral_count = {'label':'neutral', 'count':len(neutral_tickets)}
    json_count_results.append(neutral_count)
    pos_count = {'label':'positive', 'count':len(positive_tickets)}
    json_count_results.append(pos_count)
    #initialize data headers
    positive_data_points = ["positive"]
    upset_data_points = ["upset"]
    neutral_data_points = ["neutral"]
    #populate data points
    for date_and_time in query_datetime_points:
        count = positive_tickets.count(date_and_time)
        positive_data_points.append(count)
    for date_and_time in query_datetime_points:
        count = upset_tickets.count(date_and_time)
        upset_data_points.append(count)
    for date_and_time in query_datetime_points:
        count = neutral_tickets.count(date_and_time)
        neutral_data_points.append(count)
    columns.append(upset_data_points)
    columns.append(neutral_data_points)
    columns.append(positive_data_points)
    #function to get ticket source breakdowns for pie charts
    for label in labels:
        all_source_data = []
        for source in source_options:
            if source == "api":
                single_source_data = ["email"]  # "api" tickets are shown as "email" in the UI
            else:
                single_source_data = [source]
            if label == "positive":
                single_source_data.append(positive_sources.count(source))
                all_source_data.append(single_source_data)
            elif label == "upset":
                single_source_data.append(upset_sources.count(source))
                all_source_data.append(single_source_data)
            elif label == "neutral":
                single_source_data.append(neutral_sources.count(source))
                all_source_data.append(single_source_data)
            elif label =="total":
                single_source_data.append(positive_sources.count(source) + neutral_sources.count(source) + upset_sources.count(source))
                all_source_data.append(single_source_data)
        source_data[label] = all_source_data
    return jsonify(time_period = time_period, counts=json_count_results, columns = columns, source_data = source_data)
if __name__ == "__main__":
app.run(port = 10000, debug = True)
| [
"[email protected]"
]
| |
4be1a74484158c614d1bc32607a891d1931f5e2c | 10f1620a694ba283ce64e16d40b77cf4b51e90a8 | /dataProcessor.py | 06d86d6c73f79801fecf2fad314c6a88d7c57db8 | []
| no_license | pandeyGCt/Reliablity-over-UDP | 429653b57a047c081f962b7639cbba0b3ebcaa7e | 1ab95ec21ccdc40c528a11ed7f587cbaf9dd4909 | refs/heads/main | 2023-06-07T19:29:06.340277 | 2021-06-28T16:37:33 | 2021-06-28T16:37:33 | 381,097,741 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,683 | py | '''
Saksham Pandey 2018A7PS0259H
Vanshaj Aggarwal 2018A7PS0309H
Arpit Adlakha 2018A7PS0250H
Surinder Singh Virk 2018A7PS0234H
Aditya Sharma 2018A7PS0315H
'''
import struct
import socket
from array import array
def myCheckSum(data):
    """Compute the 16-bit ones'-complement (Internet) checksum of *data*."""
    # Pad odd-length payloads with a zero byte so they split into 16-bit words.
    padded = data + b'\x00' if len(data) % 2 else data
    total = sum(array('H', padded))
    # Fold the carry bits back into the low 16 bits (two folds suffice here).
    total = (total & 0xffff) + (total >> 16)
    total += total >> 16
    # Complement, then normalise byte order so sender and receiver agree.
    return socket.ntohs(~total & 0xffff)
def getFileData(name):
    """Read file *name* and split its bytes into chunks of at most 1000 bytes.

    Returns a list of byte chunks, or None when the file cannot be opened
    (an error message is printed in that case, keeping the original
    best-effort behaviour).  "EXIT" is printed on every call, as before.
    """
    pack_size = 1000  # payload size carried by each packet
    try:
        # BUG FIX: the original opened the file without ever closing it;
        # `with` guarantees the handle is released.
        with open(name, "rb") as f:
            file_data = f.read()
        # Slicing past the end of `bytes` is safe, so the tail chunk needs
        # no special-casing (the original if/else was redundant).
        return [file_data[i:i + pack_size]
                for i in range(0, len(file_data), pack_size)]
    except IOError:
        # Typo fixed: "Filen" -> "File".
        print("File not found or incorrect path")
    finally:
        print("EXIT")  # preserved: existing console output relied on this marker
def makePacketArr(name):
    """Build the list of packets (sequence number, checksum, payload) for file *name*."""
    packets = []
    for seq, chunk in enumerate(getFileData(name)):
        layout = struct.Struct('I I {}s'.format(len(chunk)))
        # NOTE(review): bytes(seq) yields `seq` zero bytes, not the number
        # itself, so the checksum covers the payload plus zero padding —
        # confirm the receiver computes it the same way.
        checksum = myCheckSum(chunk + bytes(seq))
        packets.append(layout.pack(seq, checksum, chunk))
    return packets
def convertString(seq, string):
    """Pack *seq*, a checksum and the UTF-8 bytes of *string* into one packet."""
    payload = string.encode('UTF-8')
    layout = struct.Struct('I I {}s'.format(len(payload)))
    return layout.pack(seq, myCheckSum(payload), payload)
def convertFilename(string):
    """Pack a checksum and the UTF-8 bytes of a file name into one packet."""
    payload = string.encode('UTF-8')
    layout = struct.Struct('I {}s'.format(len(payload)))
    return layout.pack(myCheckSum(payload), payload)
| [
"[email protected]"
]
| |
54b9938a700d226aff9ff94b59ef75f55ad138e6 | 97dfcdff827fb2dd1d4cf60ebbc65bbffb0f3104 | /np2img.py | 16d9510521c141ed31e665e182b85404fad99150 | []
| no_license | erynet/NpyTool | 25e4fb8292b819979661780a9087c1bc3ab49135 | 255ba11a7a8a65418daa13aebb426e491b0ece73 | refs/heads/master | 2020-12-31T07:33:12.306949 | 2016-05-02T09:32:47 | 2016-05-02T09:32:47 | 56,485,952 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,401 | py | # -*- coding:utf-8 -*-
import sys, os
import multiprocessing
import imp
imp.reload(sys)
sys.setdefaultencoding("utf-8")
import time
import argparse
try:
import ujson as json
except ImportError:
import pip
pip.main(["install", "ujson==1.35"])
import ujson as json
from lib import CatParser, Log
class Np2Img(object):
    """Convert catalogued numpy archives back into image files.

    The .cat catalog is parsed, the output directory tree is re-created,
    and the catalog entries are split across ``worker_process_count``
    processes, each running ``lib.Np2ImgWorker`` over its own slice
    ("luggage") of the work.
    """

    def __init__(self, input_cat, output_path, worker_process_count, io_thread_per_worker,
                 output_image_format, output_image_colorspace, buffer_size, logger):
        # Parse the catalog first; abort loudly if it is unreadable.
        try:
            self.c = CatParser(input_cat).export()
        except Exception as e:
            print(str(e))
            raise
        self.source_path, _ = os.path.split(input_cat)
        self.np_files = self.c["np_files"]
        self.output_path = output_path
        # A non-positive count means "use one worker per CPU core".
        if worker_process_count > 0:
            self.worker_process_count = worker_process_count
        else:
            self.worker_process_count = multiprocessing.cpu_count()
        self.io_thread_per_worker = io_thread_per_worker
        self.output_image_format = output_image_format
        self.output_image_colorspace = output_image_colorspace
        self.buffer_size = buffer_size
        self.l = logger
        self.worker_pool = []

    def _stand_by(self):
        # Pre-create every target directory so the workers never race on
        # directory creation.
        self.l.i("[Np2Img] Stand by ... !!")
        self.target_paths = []
        for rel_path in self.c["dict_src_paths"]:
            abs_path = os.path.join(self.output_path, rel_path)
            self.target_paths.append(abs_path)
            try:
                if os.path.exists(abs_path):
                    continue
                # BUG FIX: `mode=755` (decimal) set bogus permission bits;
                # the intended rwxr-xr-x is octal 0o755.
                os.makedirs(abs_path, mode=0o755)
                self.l.i("[Np2Img] Directory Created, %s" % (abs_path,))
            except Exception as e:
                # str(e) instead of the deprecated e.message, which is
                # missing on many exception types and would itself raise
                # inside this handler.
                self.l.i("[Np2Img] Directory Creation failed, reason : %s" % (str(e),))
                continue

    def _clean_up(self):
        # Block until every worker process has finished.
        for p in self.worker_pool:
            p.join()
        self.l.i("[Np2Img] Cleaning Ok")

    def export(self):
        """Spawn the worker processes and block until they all complete."""
        def make_chunk(l, n):
            # Split list `l` into `n` nearly equal contiguous slices.
            k, m = len(l) // n, len(l) % n
            return (l[i * k + min(i, m):(i + 1) * k + min(i + 1, m)] for i in range(n))
        from lib import Np2ImgWorker
        self._stand_by()
        luggage = []
        for i in range(len(self.c["catalog"])):
            luggage.append((i, self.c["catalog"][i]))
        worker_kwargs = {"source_path": self.source_path,
                         "np_files": self.np_files,
                         "base_path": self.output_path,
                         "rel_paths": self.target_paths,
                         "filenames": self.c["dict_filenames"],
                         "augmentaion_flags": self.c["dict_augmentation_flags"],
                         "io_thread_count": self.io_thread_per_worker,
                         "logger": self.l,
                         "image_format": self.output_image_format,
                         "colorspace": self.output_image_colorspace,
                         "luggage": None,
                         "entry_per_file": self.c["entry_per_file"],
                         "instance_index": 0}
        for idx, chunk in enumerate(make_chunk(luggage, self.worker_process_count)):
            # BUG FIX: each Process must receive its OWN kwargs dict.  The
            # original mutated one shared dict; since Process only pickles
            # its kwargs at start() time, every worker ended up with the
            # LAST chunk and instance index.
            kwargs = dict(worker_kwargs, luggage=chunk, instance_index=idx)
            self.worker_pool.append(multiprocessing.Process(target=Np2ImgWorker, kwargs=kwargs))
        for p in self.worker_pool:
            p.start()
        self._clean_up()
if __name__ == "__main__":
    # Command-line front end: parse options, validate, then run the export.
    parser = argparse.ArgumentParser(prog="img2np")
    essential = parser.add_argument_group("Essential")
    essential.add_argument("-i", "--input_cat", type=str, default=None)
    essential.add_argument("-o", "--output_path", type=str, default=None)
    parser.add_argument("-w", "--worker_process_count", type=int, default=0)
    parser.add_argument("-t", "--io_thread_per_worker", type=int, default=2)
    parser.add_argument("-f", "--output_image_format", type=str, default="PNG")
    parser.add_argument("-c", "--output_image_colorspace", type=str, default="L")
    parser.add_argument("-b", "--buffer_size", type=int, default=65536)
    parser.add_argument("-l", "--logto", type=str, default="np2img.log")
    parser.add_argument("-L", "--loglevel", type=str, default="INFO")
    args = parser.parse_args()

    # Both the input catalog and the output directory are mandatory.
    if not args.input_cat or not args.output_path:
        parser.print_help()
        sys.exit(0)
    # Only grayscale, RGB and CMYK outputs are supported.
    if args.output_image_colorspace not in ("L", "RGB", "CMYK"):
        print("colorspace is must one of `L, RGB, CMYK`")
        sys.exit(0)

    try:
        with Log(args.logto, True, args.loglevel) as L:
            converter = Np2Img(args.input_cat, args.output_path,
                               args.worker_process_count, args.io_thread_per_worker,
                               args.output_image_format, args.output_image_colorspace,
                               args.buffer_size, L)
            converter.export()
    except KeyboardInterrupt:
        # Ctrl-C / kill signal: exit quietly.
        pass
# np2img.py --input_cat g:\\t01.cat --output_path g:\\outimg
| [
"[email protected]"
]
| |
3e865ff8ba54efeccf0945858bdb43e9be54a743 | 837762524db70b805fbf46f62a14be32e32dabd9 | /scripts/train.py | df35cdfb4e3c068ebba443e31d700f6c49358b2b | [
"Apache-2.0"
]
| permissive | jordancaraballo/nga-deep-learning | 832e54afb978a84875d1c09a7c00055e698f2a7b | 752266ccc06efacdef2423214998ecfced7eafb7 | refs/heads/master | 2023-06-30T14:39:49.448265 | 2021-07-27T20:00:52 | 2021-07-27T20:00:52 | 343,627,410 | 23 | 4 | null | null | null | null | UTF-8 | Python | false | false | 5,992 | py | # --------------------------------------------------------------------------
# Preprocessing and dataset creation from NGA data. This assumes you provide
# a configuration file with required parameters and files.
# --------------------------------------------------------------------------
import os # system modifications
import sys # system modifications
import time # tracking time
import numpy as np # for arrays modifications
import cupy as cp # for arrays modifications
import tensorflow as tf # deep learning framework
from core.unet import unet_batchnorm # unet network to work with
from core.utils import get_training_dataset # getting training dataset
from core.utils import get_tensorslices # getting tensor slices
from core.utils import gen_callbacks # generate callbacks
# tensorflow imports
# from tensorflow.keras.mixed_precision import experimental as mixed_precision
from tensorflow.keras import mixed_precision
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.optimizers import Adadelta
# define configuration object
from config import Config
config = Config.Configuration()
__author__ = "Jordan A Caraballo-Vega, Science Data Processing Branch"
__email__ = "[email protected]"
__status__ = "Development"
# Define some environment variables to help refining randomness.
# Note: there might still be some randomness since most of the code
# is ran on GPU and sometimes parallelization brings changes.
# Seed every RNG in play (NumPy, TensorFlow, CuPy) with the same value so
# runs are as reproducible as GPU parallelism allows.
np.random.seed(config.SEED)
tf.random.set_seed(config.SEED)
cp.random.seed(config.SEED)
print(f"Tensorflow ver. {tf.__version__}")
# verify GPU devices are available and ready
os.environ['CUDA_VISIBLE_DEVICES'] = config.CUDA
devices = tf.config.list_physical_devices('GPU')
# NOTE(review): `assert` is stripped under `python -O`; an explicit raise
# would make this check unconditional.
assert len(devices) != 0, "No GPU devices found."
# ------------------------------------------------------------------
# System Configurations
# ------------------------------------------------------------------
if config.MIRROR_STRATEGY:
    strategy = tf.distribute.MirroredStrategy()
    print('Multi-GPU enabled')
else:
    # BUG FIX: `strategy` is used unconditionally in main(); without this
    # fallback, disabling MIRROR_STRATEGY raised NameError.  The default
    # strategy runs everything on the single visible device.
    strategy = tf.distribute.get_strategy()

if config.MIXED_PRECISION:
    # float16 compute with float32 variables; the LossScaleOptimizer wrap
    # in main() guards against gradient underflow.
    policy = mixed_precision.Policy('mixed_float16')
    mixed_precision.set_global_policy(policy)
    print('Mixed precision enabled')

if config.XLA_ACCELERATE:
    tf.config.optimizer.set_jit(True)
    print('Accelerated Linear Algebra enabled')

# Disable AutoShard: the datasets live entirely in memory, so there is
# nothing for tf.data to shard across workers.
options = tf.data.Options()
options.experimental_distribute.auto_shard_policy = \
    tf.data.experimental.AutoShardPolicy.OFF
# ---------------------------------------------------------------------------
# script train.py
# ---------------------------------------------------------------------------
def main():
    """Train the UNet on pre-generated tensors and save checkpoints.

    Reads training/validation tensors from the directories named in the
    module-level `config`, builds the model inside the distribution
    strategy scope, and runs `model.fit` with the configured callbacks.
    """
    # Main function to collect configuration file and run the script
    print(f'GPU REPLICAS: {strategy.num_replicas_in_sync}')
    t0 = time.time()
    print(f'Train dir: {config.TRAIN_DATADIR}')
    print(f'Validation dir: {config.VAL_DATADIR}')
    # Initialize Callbacks
    callbacks = gen_callbacks(config, config.CALLBACKS_METADATA)
    # open files and get dataset tensor slices
    train_images, train_labels = get_tensorslices(
        data_dir=config.TRAIN_DATADIR, img_id='x', label_id='y'
    )
    # open files and get dataset tensor slices
    val_images, val_labels = get_tensorslices(
        data_dir=config.VAL_DATADIR, img_id='x', label_id='y'
    )
    # extract values for training
    # NOTE: floor division drops the final partial batch of each epoch.
    NUM_TRAINING_IMAGES = train_images.shape[0]
    NUM_VALIDATION_IMAGES = val_images.shape[0]
    STEPS_PER_EPOCH = NUM_TRAINING_IMAGES // config.BATCH_SIZE
    print(f'{NUM_TRAINING_IMAGES} training images')
    print(f'{NUM_VALIDATION_IMAGES} validation images')
    # generate training dataset
    train_dataset = \
        tf.data.Dataset.from_tensor_slices((train_images, train_labels))
    # generate validation dataset
    val_dataset = tf.data.Dataset.from_tensor_slices((val_images, val_labels))
    val_dataset = val_dataset.batch(config.VAL_BATCH_SIZE)
    # Create model output directory
    # NOTE(review): shells out; os.makedirs(..., exist_ok=True) would avoid
    # the subprocess — confirm nothing depends on the shell call.
    os.system(f'mkdir -p {config.MODEL_SAVEDIR}')
    # Initialize and compile model
    with strategy.scope():
        # initialize UNet model
        model = unet_batchnorm(
            nclass=config.N_CLASSES, input_size=config.INPUT_SIZE,
            maps=config.MODEL_METADATA['network_maps']
        )
        # initialize optimizer, exit of not valid optimizer
        # NOTE(review): the `lr` keyword is deprecated in newer Keras in
        # favour of `learning_rate` — confirm against the pinned TF version.
        if config.MODEL_METADATA['optimizer_name'] == 'Adadelta':
            optimizer = Adadelta(lr=config.MODEL_METADATA['lr'])
        elif config.MODEL_METADATA['optimizer_name'] == 'Adam':
            optimizer = Adam(lr=config.MODEL_METADATA['lr'])
        else:
            sys.exit('Optimizer provided is not supported.')
        # enabling mixed precision to avoid underflow
        optimizer = mixed_precision.LossScaleOptimizer(optimizer)
        # compile model to start training
        model.compile(
            optimizer,
            loss=config.MODEL_METADATA['loss'],
            metrics=config.MODEL_METADATA['metrics']
        )
        model.summary()
    # Disable AutoShard, data lives in memory, use in memory options
    train_dataset = train_dataset.with_options(options)
    val_dataset = val_dataset.with_options(options)
    # Train the model and save to disk
    model.fit(
        get_training_dataset(
            train_dataset,
            config,
            do_aug=config.MODEL_METADATA['do_aug']
        ),
        initial_epoch=config.START_EPOCH,
        epochs=config.N_EPOCHS,
        steps_per_epoch=STEPS_PER_EPOCH,
        validation_data=val_dataset,
        callbacks=callbacks,
        verbose=2
    )
    print(f'Execution time: {time.time() - t0}')
# -------------------------------------------------------------------------------
# main
# -------------------------------------------------------------------------------
# Run training only when executed as a script (not on import).
if __name__ == "__main__":
    main()
| [
"[email protected]"
]
| |
b67056872a7437bd215bbd55010776a5e3c4c513 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/DECHUB900-HRIP-MIB-V3-0.py | 4affb4dd03a0dfee8d6e74ef3a888a878b9e33bf | [
"Apache-2.0",
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 11,491 | py | #
# PySNMP MIB module DECHUB900-HRIP-MIB-V3-0 (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/DECHUB900-HRIP-MIB-V3-0
# Produced by pysmi-0.3.4 at Wed May 1 12:37:38 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsIntersection, SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsUnion")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, enterprises, Counter32, IpAddress, NotificationType, Counter64, TimeTicks, ModuleIdentity, Unsigned32, Integer32, Gauge32, MibIdentifier, ObjectIdentity, iso, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "enterprises", "Counter32", "IpAddress", "NotificationType", "Counter64", "TimeTicks", "ModuleIdentity", "Unsigned32", "Integer32", "Gauge32", "MibIdentifier", "ObjectIdentity", "iso", "Bits")
TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString")
# OID registration tree for this MIB:
# enterprises.dec(36).ema(2).decMIBextension(18).decHub900(11)
#   .mgmtAgent(1).mgmtAgentVersion1(1).hrip(2)
dec = MibIdentifier((1, 3, 6, 1, 4, 1, 36))
ema = MibIdentifier((1, 3, 6, 1, 4, 1, 36, 2))
decMIBextension = MibIdentifier((1, 3, 6, 1, 4, 1, 36, 2, 18))
decHub900 = MibIdentifier((1, 3, 6, 1, 4, 1, 36, 2, 18, 11))
mgmtAgent = MibIdentifier((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1))
mgmtAgentVersion1 = MibIdentifier((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1))
hrip = MibIdentifier((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2))
hripPubRingCfgTable = MibTable((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 1), )
if mibBuilder.loadTexts: hripPubRingCfgTable.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubRingCfgTable.setDescription('Defines a table for ring speeds. The table has 2 rows. Row 1 defines ring speed for ring A and row 2 defines the ring speed for ring B.')
hripPubRingCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 1, 1), ).setIndexNames((0, "DECHUB900-HRIP-MIB-V3-0", "hripRingCfgIndex"))
if mibBuilder.loadTexts: hripPubRingCfgEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubRingCfgEntry.setDescription('An entry in the hripPubRingCfgTable.')
hripRingCfgIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ringA", 1), ("ringB", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripRingCfgIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hripRingCfgIndex.setDescription('Identifies the ring being accessed ie the row of the table being referred to.')
hripRingCfgSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2, 3))).clone(namedValues=NamedValues(("speed4", 2), ("speed16", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hripRingCfgSpeed.setStatus('mandatory')
if mibBuilder.loadTexts: hripRingCfgSpeed.setDescription('The speed of each of the token rings on the backplane. speed4(1) indicates a speed of 4 Mbits per second while speed16(2) indicates 16 Mbits per second. The value of this object is maintained across power cycles and resets.')
hripPubSlotCfgTable = MibTable((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 2), )
if mibBuilder.loadTexts: hripPubSlotCfgTable.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubSlotCfgTable.setDescription('Defines a table for Slot Configurations. Each row in the table corresponds to a backplane slot (hripSlotIndex).')
hripPubSlotCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 2, 1), ).setIndexNames((0, "DECHUB900-HRIP-MIB-V3-0", "hripSlotCfgIndex"))
if mibBuilder.loadTexts: hripPubSlotCfgEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubSlotCfgEntry.setDescription('An entry in the hripPubSlotCfgTable.')
hripSlotCfgIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripSlotCfgIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotCfgIndex.setDescription('Index into the table of slot configurations.')
hripSlotCfgDisable = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 2, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("enabled-1", 1), ("disabled-1", 2), ("enabled-2", 3), ("disabled-4", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hripSlotCfgDisable.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotCfgDisable.setDescription('Locks out the corresponding backplane port in that slot. -2 is used for linecards like the MIPPY that have multiple physical token ring backplane ports. The default setting is enable (for ports 1 & 2) The value of this object is maintained across power cycles and resets.')
hripSlotCfgForce = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 2, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("noForce-1", 1), ("forceRingA-1", 2), ("forceRingB-1", 3), ("noForce-2", 4), ("forceRingA-2", 5), ("forceRingB-2", 6)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: hripSlotCfgForce.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotCfgForce.setDescription('Describes a slot/ring pairing. -2 is used for linecards like the MIPPY that have multiple physical token ring backplane ports. The value of this object is maintained across power cycles and resets.')
hripPubRingStatTable = MibTable((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 3), )
if mibBuilder.loadTexts: hripPubRingStatTable.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubRingStatTable.setDescription('A table describing the number of modules on each ring.')
hripPubRingStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 3, 1), ).setIndexNames((0, "DECHUB900-HRIP-MIB-V3-0", "hripRingStatIndex"))
if mibBuilder.loadTexts: hripPubRingStatEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubRingStatEntry.setDescription('An entry describing the number of modules on each ring.')
hripRingStatIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ringA", 1), ("ringB", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripRingStatIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hripRingStatIndex.setDescription('An index into the hripPubRingStatTable.')
hripRingStatNumModInserted = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripRingStatNumModInserted.setStatus('mandatory')
if mibBuilder.loadTexts: hripRingStatNumModInserted.setDescription('The number of modules inserted onto the ring.')
hripPubSlotStatTable = MibTable((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 4), )
if mibBuilder.loadTexts: hripPubSlotStatTable.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubSlotStatTable.setDescription('The status of modules inserted on each slot of backplane.')
hripPubSlotStatEntry = MibTableRow((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 4, 1), ).setIndexNames((0, "DECHUB900-HRIP-MIB-V3-0", "hripSlotStatIndex"))
if mibBuilder.loadTexts: hripPubSlotStatEntry.setStatus('mandatory')
if mibBuilder.loadTexts: hripPubSlotStatEntry.setDescription('An entry in the hripPubSlotStatTable.')
hripSlotStatIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 8))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripSlotStatIndex.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotStatIndex.setDescription('The index into slot status table.')
hripSlotStatRingAInsertCount = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 4, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripSlotStatRingAInsertCount.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotStatRingAInsertCount.setDescription('The number of times that the module has transitioned between inserted/wrapped states on backplane ring A, since the module was last reset/power-cycled.')
hripSlotStatRingBInsertCount = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 4, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripSlotStatRingBInsertCount.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotStatRingBInsertCount.setDescription('The number of times that the module has transitioned between inserted/wrapped states on backplane ring B, since the module was last reset/power-cycled.')
hripSlotStatTcuA = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 4, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inserted", 1), ("wrapped", 2), ("notTR", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripSlotStatTcuA.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotStatTcuA.setDescription('Status of the TCU on ring A. If there is a non Token Ring linecard plugged into the hub, the value reported should be nonTR(3). For a Token Ring line-card the value is inserted or wrapped')
hripSlotStatTcuB = MibTableColumn((1, 3, 6, 1, 4, 1, 36, 2, 18, 11, 1, 1, 2, 4, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inserted", 1), ("wrapped", 2), ("notTR", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: hripSlotStatTcuB.setStatus('mandatory')
if mibBuilder.loadTexts: hripSlotStatTcuB.setDescription('Status of the TCU on ring B. If there is a non Token Ring linecard plugged into the hub, the value reported should be nonTR(3). For a Token Ring line-card the value is inserted or wrapped ')
mibBuilder.exportSymbols("DECHUB900-HRIP-MIB-V3-0", hripRingStatIndex=hripRingStatIndex, hripRingCfgIndex=hripRingCfgIndex, hripPubSlotStatTable=hripPubSlotStatTable, decMIBextension=decMIBextension, hripPubSlotStatEntry=hripPubSlotStatEntry, mgmtAgentVersion1=mgmtAgentVersion1, hripRingStatNumModInserted=hripRingStatNumModInserted, dec=dec, hripPubRingStatTable=hripPubRingStatTable, hrip=hrip, hripSlotStatRingAInsertCount=hripSlotStatRingAInsertCount, hripSlotStatTcuB=hripSlotStatTcuB, mgmtAgent=mgmtAgent, hripSlotStatIndex=hripSlotStatIndex, ema=ema, hripSlotCfgDisable=hripSlotCfgDisable, hripRingCfgSpeed=hripRingCfgSpeed, hripSlotStatRingBInsertCount=hripSlotStatRingBInsertCount, hripPubSlotCfgEntry=hripPubSlotCfgEntry, hripSlotCfgForce=hripSlotCfgForce, hripPubRingStatEntry=hripPubRingStatEntry, decHub900=decHub900, hripPubRingCfgEntry=hripPubRingCfgEntry, hripSlotStatTcuA=hripSlotStatTcuA, hripPubSlotCfgTable=hripPubSlotCfgTable, hripSlotCfgIndex=hripSlotCfgIndex, hripPubRingCfgTable=hripPubRingCfgTable)
| [
"[email protected]"
]
| |
e22da16a3630862721200de043c23202f838489d | e906fe8237e5b55b7bef1f7a87884c5924ccd8b1 | /contactmps/migrations/0024_committee.py | b8cb77c7198df94f7b6f8955173bff4743b0fb99 | [
"MIT"
]
| permissive | OpenUpSA/contact-mps | ac9a88ef166769d6305e213f3d77191f385c962a | 63d7f86e1b6c9319a4d0344a6125cd22770f34c7 | refs/heads/master | 2022-12-11T07:22:20.942567 | 2020-01-15T13:11:59 | 2020-01-15T13:11:59 | 93,042,651 | 0 | 2 | MIT | 2022-12-08T02:08:08 | 2017-06-01T09:52:56 | JavaScript | UTF-8 | Python | false | false | 894 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2018-05-14 15:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: create the ``Committee`` model.

    A committee has a unique name and slug, a contact e-mail address and
    created/updated audit timestamps.
    """

    # Must be applied after the campaign e-mail-link migration.
    dependencies = [
        ('contactmps', '0023_campaign_include_link_in_email'),
    ]

    operations = [
        migrations.CreateModel(
            name='Committee',
            fields=[
                # Standard auto-incrementing primary key.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=300, unique=True)),
                ('slug', models.CharField(max_length=300, unique=True)),
                ('email_address', models.CharField(max_length=255)),
                # Audit timestamps maintained automatically by Django.
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
    ]
| [
"[email protected]"
]
| |
3732f226a3b4b5005ef4762aeffa21e675e5ee05 | 7443848b72277645c3c0cc9e88e404961a41daf5 | /stream/rtsp_player.py | 0042aecddc4d0e7774e2ae43a0459c63f57def82 | []
| no_license | alborzjafari/RTSP_Player | f57e2329c0b5d183e5dcd346d0e7084da1dc8a01 | e3c5525c8562d544b47254c53c5a116f9f13d9a5 | refs/heads/main | 2023-03-05T10:21:16.098756 | 2021-02-13T21:25:33 | 2021-02-13T21:25:33 | 338,671,725 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,664 | py | import gi
gi.require_version('Gst', '1.0')
gi.require_version('GstVideo', '1.0')
from gi.repository import Gst, GstVideo, GstBase
Gst.init(None)
from . import utils
class RtspPlayer:
    """
    Streams media from an RTSP source.
    This class can display media from the source and take snapshots.
    """

    def __init__(self, window_id, url):
        """
        window_id: native window handle the video sink renders into.
        url: RTSP source URL.
        """
        self.window_id = window_id
        self.url = url
        self.buffer_map = None
        self.player = None
        self.info_handler = None
        self.setup_pipeline()

    def setup_pipeline(self):
        """
        Create pipeline string and launch it.
        """
        # One rtspsrc feeding two branches: audio -> pulsesink, and
        # video -> tee -> (jpegenc/appsink for snapshots, autovideosink
        # for on-screen display).
        pipeline = "rtspsrc protocols=tcp location={} name=src src. ! "\
                   "application/x-rtp, media=(string)audio"" !"\
                   "decodebin ! audioconvert ! pulsesink src. !"\
                   "application/x-rtp, media=(string)video"" !"\
                   "decodebin ! tee name=t ! queue ! videoconvert !"\
                   "videoscale ! queue ! jpegenc ! "\
                   "appsink name=sink t.! queue ! "\
                   "autovideosink".format(self.url)
        self.player = Gst.parse_launch(pipeline)
        bus = self.player.get_bus()
        # Sync messages arrive on the streaming thread (window handle
        # request); watch messages arrive on the main loop (tags).
        bus.enable_sync_message_emission()
        bus.connect('sync-message::element', self.on_sync_message)
        bus.add_signal_watch()
        bus.connect('message', self.on_watch_message)

    def set_info_event_handler(self, handler):
        """
        Sets handler to notify media information (called as handler(name, value)).
        """
        self.info_handler = handler

    def on_watch_message(self, bus, msg):
        """
        This method is called when streaming has started; it forwards codec
        tags (and, for video, the resolution) to the info handler.
        """
        msg_struct = msg.get_structure()
        if msg_struct:
            if msg_struct.get_name() == 'GstMessageTag':
                codec_name = ((msg_struct["taglist"].nth_tag_name(0)))
                codec_value = msg_struct["taglist"].get_string(codec_name)
                info_name = codec_name
                c_result, info_value = codec_value
                if c_result:
                    self.info_handler(info_name, info_value)
                if codec_name == "video-codec":
                    self.info_handler(codec_name, info_value)
                    r_result, width, height = self.get_resolution()
                    if r_result:
                        info_name = "resolution"
                        info_value = "[{}x{}]".format(width, height)
                        self.info_handler(info_name, info_value)
                    # Video tag processed: no further bus messages needed.
                    bus.remove_signal_watch()

    def on_sync_message(self, _, msg):
        """
        Called on sync messages; prepares the window handle so video is
        rendered into the GUI widget instead of a new window.
        """
        message_name = msg.get_structure().get_name()
        print(message_name)  # NOTE(review): debug print kept to preserve output
        if message_name == "prepare-window-handle":
            win_id = self.window_id
            assert win_id
            imagesink = msg.src
            imagesink.set_window_handle(win_id)

    def stop(self):
        """
        Stops the player and destroys the player object.
        """
        self.set_state_null()
        self.player = None

    def play(self):
        """
        Plays the video.
        """
        self.player.set_state(Gst.State.PLAYING)

    def pause(self):
        """
        Pauses the video.
        """
        self.player.set_state(Gst.State.PAUSED)

    def get_resolution(self):
        """
        Used for getting media resolution from the last appsink sample.
        Returns (ok, width, height); width/height are 0 when unavailable.
        """
        ret_val = False
        width = 0
        height = 0
        try:
            sink = self.player.get_by_name('sink')
            sample = GstBase.BaseSink.get_last_sample(sink)
            caps = Gst.Sample.get_caps(sample)
            struct = Gst.Caps.get_structure(caps, 0)
            h_result, height = Gst.Structure.get_int(struct, "height")
            w_result, width = Gst.Structure.get_int(struct, "width")
            if h_result and w_result:
                ret_val = True
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed.
            ret_val = False
        return ret_val, width, height

    def take_snapshot(self, path, file_name):
        """
        Take snapshot from last sample.
        path: path for storing image file.
        file_name: image file name (".jpeg" is appended).
        """
        pipeline_state = self.player.get_state(1)
        p_state = pipeline_state.state
        if p_state not in (Gst.State.PLAYING, Gst.State.PAUSED):
            print("Stream is not ready")
        else:
            image_buffer = None
            buffer_map = None
            try:
                sink = self.player.get_by_name('sink')
                sample = GstBase.BaseSink.get_last_sample(sink)
                image_buffer = Gst.Sample.get_buffer(sample)
                buffer_map = Gst.Buffer.map(image_buffer, Gst.MapFlags.READ)
                image_binary_data = bytearray(buffer_map.info.data)
                utils.store_image(image_binary_data, path, file_name + ".jpeg")
            except Exception:
                print("Capturing image failed.")
            finally:
                # BUG FIX: every gst_buffer_map must be paired with
                # gst_buffer_unmap, otherwise the mapped memory stays pinned
                # after each snapshot.
                if image_buffer is not None and buffer_map is not None:
                    Gst.Buffer.unmap(image_buffer, buffer_map.info)

    def set_state_null(self):
        """Put the pipeline into the NULL (fully stopped) state."""
        self.player.set_state(Gst.State.NULL)

    def get_url(self):
        """
        Gets current URL.
        """
        return self.url

    def set_url(self, source_url):
        """
        Set streaming source URL and restart the pipeline on it.
        source_url: The URL of the streaming source, e.g.
        "rtsp://wowzaec2demo.streamlock.net/vod/mp4:BigBuckBunny_115k.mov"
        """
        if utils.validate_url(source_url, "rtsp"):
            self.url = source_url
            self.set_state_null()
            self.setup_pipeline()
            self.play()
        else:
            print("Invalid URL")
| [
"[email protected]"
]
| |
09076400f64c690beed02e2cada86f7b9df12e19 | 4efe1c3417294a1b47da0827e1b7d071622874ef | /day12/solution.py | 33cb61ba888dfb4f694197efd20057f8df73dd16 | []
| no_license | kmamykin/adventofcode2019 | e73017d94904d8dccdd99f45a9ed37b04071cfc2 | 9815a62212df4885a2a7185abef298101eaa29f1 | refs/heads/master | 2020-09-25T03:45:11.854490 | 2020-01-05T08:27:24 | 2020-01-05T08:27:24 | 225,910,212 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,843 | py | import math
from dataclasses import dataclass
from enum import Enum
from itertools import combinations
from functools import reduce
from datetime import datetime
@dataclass
class Point3D:
    """Integer 3-vector with component-wise arithmetic."""
    x: int
    y: int
    z: int

    def __add__(self, other):
        """Component-wise sum."""
        return Point3D(self.x + other.x, self.y + other.y, self.z + other.z)

    def __sub__(self, other):
        """Component-wise difference."""
        return Point3D(self.x - other.x, self.y - other.y, self.z - other.z)

    def __mul__(self, other: int):
        """Scale every component by the integer `other`."""
        return Point3D(self.x * other, self.y * other, self.z * other)

    def __repr__(self):
        # Right-align each component in a 3-char field, e.g. <x=  1, y= -2, z= 10>
        return "<x={0: >3}, y={1: >3}, z={2: >3}>".format(self.x, self.y, self.z)

    def __hash__(self):
        # Explicit hash keeps Point3D usable in sets/dicts despite dataclass eq.
        return hash((self.x, self.y, self.z))
def P(x, y, z) -> Point3D:
    """Shorthand constructor for a Point3D."""
    return Point3D(x, y, z)
class Moons(Enum):
    """Index of each moon into the SystemState.moons tuple."""
    Io = 0
    Europa = 1
    Ganymede = 2
    Callisto = 3
@dataclass
class MoonState:
    """Position/velocity pair for a single moon."""
    position: Point3D
    velocity: Point3D

    def __repr__(self):
        return "pos={}, vel={}".format(self.position, self.velocity)

    def __hash__(self):
        return hash((self.position, self.velocity))

    def potential_energy(self):
        """Sum of the absolute position components."""
        pos = self.position
        return abs(pos.x) + abs(pos.y) + abs(pos.z)

    def kinetic_energy(self):
        """Sum of the absolute velocity components."""
        vel = self.velocity
        return abs(vel.x) + abs(vel.y) + abs(vel.z)

    def total_energy(self):
        return self.potential_energy() * self.kinetic_energy()

    def apply_forces(self, forces: [Point3D]):
        """New MoonState after one Euler step under the given gravity pulls."""
        net_force = P(0, 0, 0)
        for force in forces:
            net_force = net_force + force
        new_velocity = self.velocity + net_force
        return MoonState(self.position + new_velocity, new_velocity)
@dataclass
class SystemState:
    """Snapshot of all four moons' states, indexed by Moons.value."""
    moons: (MoonState, MoonState, MoonState, MoonState)

    @classmethod
    def from_positions(cls, positions: [Point3D]):
        """Build a state with every moon at rest at the given positions."""
        return SystemState(tuple([MoonState(p, P(0,0,0)) for p in positions]))

    def __repr__(self):
        return " ".join([f"{m}" for m in self.moons])

    def __hash__(self):
        # Hashable so states can be compared/stored while searching for cycles.
        return hash(self.moons)

    def potential_energy(self):
        return sum([m.potential_energy() for m in self.moons])

    def kenetic_energy(self):
        # NOTE: keeps the original "kenetic" spelling because callers use it.
        return sum([m.kinetic_energy() for m in self.moons])

    def total_energy(self):
        return sum([m.total_energy() for m in self.moons])
def simulate_step(state: SystemState):
    """One time step: accumulate pairwise gravity pulls, then move every moon."""
    forces = {m: [] for m in Moons}
    for m1, m2 in combinations(Moons, 2):
        # Each unordered pair contributes equal-and-opposite unit pulls.
        g1, g2 = gravity(state.moons[m1.value].position, state.moons[m2.value].position)
        forces[m1].append(g1)
        forces[m2].append(g2)
    # debug here
    # force = lambda fs: reduce(lambda s, e: s + e, fs, P(0, 0, 0))
    # print([force(forces[m]) for m in Moons])
    return SystemState(tuple([state.moons[m.value].apply_forces(forces[m]) for m in Moons]))
def gravity(pos1: Point3D, pos2: Point3D) -> (Point3D, Point3D):
    """Unit gravity pulls for a pair of moons: each component moves one
    step toward the other moon (+1, -1, or 0 per axis)."""
    def pull(a, b):
        # Sign of (b - a): +1 when a < b, -1 when a > b, 0 when equal.
        return (a < b) - (a > b)
    g1 = P(pull(pos1.x, pos2.x), pull(pos1.y, pos2.y), pull(pos1.z, pos2.z))
    # The second moon feels exactly the opposite pull.
    return g1, g1 * -1
def simulate_steps(state: SystemState, number_of_stemps: int, debug = False):
    """Advance `state` by the given number of steps and return the result.
    (Parameter name keeps the original 'stemps' typo: callers may use it
    as a keyword argument.)"""
    for i in range(number_of_stemps):
        if debug:
            print(f"After {i} stop")
            print(state)
        state = simulate_step(state)
    if debug:
        print(f"After {number_of_stemps} stop")
        print(state)
    return state
def system_period(state: SystemState, max_steps = None) -> int:
    """Steps until the system revisits its starting state (or max_steps is hit).
    NOTE: prints the energies on every step, which is very slow for long periods.
    """
    step = 1
    starting_state = state
    print(f"pot={state.potential_energy()}, kin={state.kenetic_energy()}, total={state.total_energy()}")
    state = simulate_step(state)
    while (state != starting_state) and (max_steps is None or step <= max_steps):
        print(f"pot={state.potential_energy()}, kin={state.kenetic_energy()}, total={state.total_energy()}")
        state = simulate_step(state)
        step += 1
        if step % 1000000 == 0:
            # Progress heartbeat for very long searches.
            print(f"Step {step} at {datetime.now()}")
    print(f"pot={state.potential_energy()}, kin={state.kenetic_energy()}, total={state.total_energy()}")
    return step
# # EXAMPLE 1
# # <x=-1, y=0, z=2>
# # <x=2, y=-10, z=-7>
# # <x=4, y=-8, z=8>
# # <x=3, y=5, z=-1>
# example1 = SystemState.from_positions([
# P(-1, 0, 2),
# P(2, -10, -7),
# P(4, -8, 8),
# P(3, 5, -1)
# ])
# example1_copy = SystemState.from_positions([
# P(-1, 0, 2),
# P(2, -10, -7),
# P(4, -8, 8),
# P(3, 5, -1)
# ])
# print(hash(example1))
# assert hash(example1) == hash(example1_copy)
# assert example1 == example1_copy
# assert example1 in {example1_copy}
# assert example1_copy in {example1}
# assert example1 in {example1, example1_copy}
# assert len({example1, example1_copy}) == 1
# example1_2772 = simulate_steps(example1, 2772)
# # print(example1)
# # print(example1_2772)
# assert example1 == example1_2772
# assert hash(example1) == hash(example1_2772)
# # assert simulate_steps(example1, 10, debug = False).total_energy() == 179
# print(system_period(example1))
# # assert system_period(example1) == 2772
#
# # EXAMPLE 2
# # <x=-8, y=-10, z=0>
# # <x=5, y=5, z=10>
# # <x=2, y=-7, z=3>
# # <x=9, y=-8, z=-3>
# example2 = SystemState.from_positions([
# P(-8, -10, 0),
# P(5, 5, 10),
# P(2, -7, 3),
# P(9, -8, -3)
# ])
# # assert simulate_steps(example2, 100, debug = False).total_energy() == 1940
# # assert system_period(example2) == 4686774924
#
# # INPUT 1
# # <x=-3, y=10, z=-1>
# # <x=-12, y=-10, z=-5>
# # <x=-9, y=0, z=10>
# # <x=7, y=-5, z=-3>
#
# problem1 = SystemState.from_positions([
# P(-3, 10, -1),
# P(-12, -10, -5),
# P(-9, 0, 10),
# P(7, -5, -3)
# ])
#
# # print(simulate_steps(problem1, 1000, debug = False).total_energy())
#
# # INPUT 2
# # <x=-3, y=10, z=-1>
# # <x=-12, y=-10, z=-5>
# # <x=-9, y=0, z=10>
# # <x=7, y=-5, z=-3>
# problem2 = SystemState.from_positions([
# P(-3, 10, -1),
# P(-12, -10, -5),
# P(-9, 0, 10),
# P(7, -5, -3)
# ])
# # print(system_period(problem2, max_steps=100))
def gravity_1d(pos1, pos2):
    """One-axis gravity: returns the (+1/-1/0) pulls the two bodies exert
    on each other — always equal and opposite."""
    pull = (pos1 < pos2) - (pos1 > pos2)
    return pull, -pull
def simulate_step_1d(positions, velocities):
    """One step of the 1-D n-body toy model: every pair pulls one unit
    toward each other, then positions advance by the new velocities.
    Inputs are not mutated; returns (new_positions, new_velocities)."""
    updated_velocities = list(velocities)
    for a, b in combinations(range(len(positions)), 2):
        if positions[a] < positions[b]:
            updated_velocities[a] += 1
            updated_velocities[b] -= 1
        elif positions[a] > positions[b]:
            updated_velocities[a] -= 1
            updated_velocities[b] += 1
    updated_positions = [p + v for p, v in zip(positions, updated_velocities)]
    return updated_positions, updated_velocities
def find_system_period_1d(starting_positions):
    """Number of steps until the 1-D system returns to its starting
    positions with all bodies again at rest (the axis period)."""
    n = len(starting_positions)
    positions = list(starting_positions)
    velocities = [0] * n
    at_rest = [0] * n
    step = 0
    while True:
        # Apply pairwise unit gravity, then advance positions (one step).
        for a, b in combinations(range(n), 2):
            if positions[a] < positions[b]:
                velocities[a] += 1
                velocities[b] -= 1
            elif positions[a] > positions[b]:
                velocities[a] -= 1
                velocities[b] += 1
        positions = [p + v for p, v in zip(positions, velocities)]
        step += 1
        if positions == list(starting_positions) and velocities == at_rest:
            return step
def lcm(v1, v2, v3):
    """
    Least common multiple of three integers.
    :param v1:
    :param v2:
    :param v3:
    :return: lcm(v1, v2, v3)
    """
    def pair_lcm(a, b):
        return abs(a * b) // math.gcd(a, b)
    return pair_lcm(pair_lcm(v1, v2), v3)
assert lcm(1, 2, 3) == 6
assert lcm(7, 11, 13) == 7*11*13
# Each axis evolves independently, so the full-system period is the LCM of
# the three per-axis periods (puzzle input positions below).
x_period = find_system_period_1d([-3, -12, -9, 7])
y_period = find_system_period_1d([10, -10, 0, -5])
z_period = find_system_period_1d([-1, -5, 10, -3])
print(x_period)
print(y_period)
print(z_period)
print(f"Finally!: {lcm(x_period, y_period, z_period)}")
# # <x=-3, y=10, z=-1>
# # <x=-12, y=-10, z=-5>
# # <x=-9, y=0, z=10>
# # <x=7, y=-5, z=-3>
| [
"[email protected]"
]
| |
81f7ea6ba42d4ee18d39650fa7de3474ed999af4 | f656bae2f0fbfe58a980612729b7a54dba7b9873 | /4. Información_celular.py | 0f5d1b0dd232c2ebad3d7871a9446fcbd3368c91 | []
| no_license | neamedina73/Ejercicios_java | a5715ed4deb638c0e0e700f02aee8bd7a24b596e | 427cf54d241945d15a1f05bfbffcdcb4431d86de | refs/heads/main | 2023-05-31T18:24:12.873844 | 2021-07-03T00:30:34 | 2021-07-03T00:30:34 | 382,456,483 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,146 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Jun 9 09:39:41 2021
@author: Alejandro AJ
"""
class celular:
    """A simple phone model: brand, model, screen size, color and weight."""

    def __init__(self, marca, modelo, tamaño, color, peso):
        self.marca = marca      # brand
        self.modelo = modelo    # model
        self.tamaño = tamaño    # screen size
        self.color = color
        self.peso = peso        # weight (compared against 200 in precio)

    def gama(self):
        # Prints the (hard-coded) market tier message.
        print('Su celular es de gama alta.')

    def estado(self):
        # Prints the (hard-coded) condition message.
        print('Su celular se encuentra en perfecto estado')

    def precio(self):
        # Bug fix: the original printed the *global* instance `micelu.marca`,
        # coupling every object to one specific instance; use self instead.
        if self.peso > 200:
            print(f'su celular {self.marca} es pesado.')
        else:
            print(f'su celular {self.marca} es liviano.')
micelu = celular("Iphone","11 PRO", "7 pulgadas", "gris", 130) # Instantiate the celular class
print(micelu.marca) # print the "marca" (brand) attribute
print(micelu.modelo) # print the "modelo" (model) attribute
print(micelu.tamaño) # print the "tamaño" (size) attribute
print(micelu.color) # print the "color" attribute
print(micelu.peso) # print the "peso" (weight) attribute
"[email protected]"
]
| |
0972afda3f044c8377dad065401dd2eaa43d2c89 | 67977fcf332b3a05cf35ce618046f3add2b772e3 | /new_mysite/wsgi.py | a25b977e44ad23f8ba6e0b729dd82ddf3a58ba81 | []
| no_license | finafily526/web_demo_django | d164c055455217df360b973660649493574e1fa4 | 9ab0d9433410f70391363cf96a3259967fa45db7 | refs/heads/master | 2020-06-27T22:37:49.472230 | 2019-08-02T01:43:46 | 2019-08-02T01:43:46 | 200,071,124 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 398 | py | """
WSGI config for new_mysite project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before the application is built.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "new_mysite.settings")
# Module-level WSGI callable picked up by WSGI servers.
application = get_wsgi_application()
| [
"[email protected]"
]
| |
8da660f070b07b415bccb53e3e24e567578e7032 | c7dd8ff40f2df2adf10a48e3a9de1009bdce228e | /findAndCopyScript.py | 70f3e6f54c758dcdbff038fd5396170a16e0c86e | []
| no_license | KateKapranova/Scripts | 1dec6a83beba56120c22b60c8e2b49e6ea4623f1 | 567b144af9eb202af82036b6fd08e4ed6e66cba6 | refs/heads/master | 2020-05-01T02:05:31.013713 | 2019-03-24T21:21:07 | 2019-03-24T21:21:07 | 177,210,335 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 658 | py | #script to iterate over files in subdirectories to find specific file names
#files satisfying the search query are copied into the destination folder
#for Windows OS
import os
from shutil import copy
directory="C:\\Users\\Kapranova\\Documents\\KDEF_and_AKDEF\\KDEF"  # source root
dst= "C:\\Users\\Kapranova\\Documents\\KDEF_and_AKDEF\\KDEF\\Front"  # destination folder
# Walk the immediate subfolders (skipping the destination itself) and copy
# every file ending in "S.JPG" — presumably the straight/frontal KDEF
# shots; confirm against the KDEF naming scheme.
for subdir in os.listdir(directory):
    if subdir != "Front":
        for file in os.listdir(os.path.join(directory,subdir)):
            if file.endswith("S.JPG"):
                source=os.path.join(directory,subdir,file)
                copy(source,dst)
            else:
                continue
| [
"[email protected]"
]
| |
e13a56c1c1103c1f2e330d3e963d68ad05997a4f | eb0c5ccf942ab3b6b137b0e7dd531d2c96f83756 | /1_pop_randomly_connected.py | de68f467c7c7a95a8fe411b2a6e45760d103eaf0 | [
"MIT"
]
| permissive | adrian-tymorek/NEURON_simulations_2019-01 | b336f4fa1c8ca83877a6214c7f45ea6c37bbe986 | 04de515d80b519e2e6f99ac7f0b7d4ec25c68c31 | refs/heads/master | 2020-12-10T15:58:55.673150 | 2020-01-14T01:31:27 | 2020-01-14T01:31:27 | 233,639,482 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,074 | py | import numpy as np
import random
import matplotlib.pyplot as plt
# For 3D plotting of cell positions
from mpl_toolkits.mplot3d import Axes3D
# Neuron-specific
from neuron import h, gui
from neuron.units import ms, mV
h.load_file('stdrun.hoc')
from populations import RandomPopulation
# In[]
# Create a population
excitatory = RandomPopulation(N=200, N_interconnect=0.1, probability_tau=20, syn_w=0.00043/(0.1 * 200))
excitatory.add_current_random_cells(delay=50, duration=500, amp=0.04)
vvec = h.Vector().record(excitatory.cells[0].soma(0.5)._ref_v)
t = h.Vector().record(h._ref_t)
h.finitialize(-65 * mV)
h.continuerun(700 * ms)
fig = plt.figure()
plt.plot(t, vvec, ':')
plt.title('Single neuron activity')
plt.xlabel('t (ms)')
plt.ylabel('V$_m$ (mV)')
plt.show()
# In[]
# Plot activity of whole population
plt.figure("Population avtivity")
for i, cell in enumerate(excitatory.cells):
try:
plt.vlines(cell.spike_times, i - 0.45, i + 0.45)
except:
pass
plt.title('Populaton activity')
plt.xlabel('t (ms)')
plt.ylabel('neuron id')
plt.show()
| [
"[email protected]"
]
| |
1a2e459516d17e76c8e8efc32ea7d2d1db94e6e0 | 55b514a04ee65b5369e4b1ec4a7b5e14fb673e80 | /tests/project/views.py | 5b2b24761de964cf05b4dee8c22cf043ae149255 | [
"MIT"
]
| permissive | maximilianhurl/bravado-django-test-client | 04bf8ad27f89f0cddeba18a5baaa0441604b0959 | f2c0eb21403bdda5906df3b5a84bc5b8b963ee51 | refs/heads/master | 2022-06-17T04:44:19.929932 | 2021-01-21T23:10:31 | 2021-01-21T23:10:31 | 171,361,600 | 3 | 0 | MIT | 2022-04-22T21:01:54 | 2019-02-18T21:53:32 | Python | UTF-8 | Python | false | false | 1,255 | py | from uuid import uuid4
from django.http import JsonResponse
from rest_framework.decorators import api_view
from rest_framework.response import Response
def get_params_view(request):
    """Echo the query params back as JSON (test-fixture view)."""
    # NOTE(review): int(...) raises TypeError when paramInt is absent
    # (default None) — presumably the tests always send it; confirm.
    return JsonResponse({
        "paramInt": int(request.GET.get('paramInt', None)),
        "paramString": request.GET.get('paramString', None)
    })
# Static fixture data returned by the views below; cat_2 embeds cat_1 as a friend.
cat_1 = {
    "id": "1",
    "name": "Tar tar sauce",
    "color": "Brown",
    "age": 1,
    "friends": [],
}
cat_2 = {
    "id": "2",
    "name": "Minky",
    "color": "White",
    "age": 2,
    "friends": [cat_1],
}
cat_3 = {
    "id": "3",
    "name": "Minky",
    "color": "White",
    "age": 1,
    "friends": [],
}
# The dog fixture gets a fresh random UUID each time the module is imported.
dog = {
    "id": uuid4(),
    "name": "fido"
}
@api_view(['GET', 'POST'])
def cats_list(request):
    """GET: list the fixture cats; POST: echo the posted payload back (201)."""
    if request.method != 'POST':
        payload = {
            "count": None,
            "CAT_HEADER": request.META.get('CAT_HEADER', None),
            "results": [cat_1, cat_2, cat_3]
        }
        return Response(payload, status=200)
    return Response(request.data, status=201)
@api_view(['PUT'])
def cat_update(request, cat_name):
    """Echo the payload back with the cat's name forced to the URL value."""
    payload = {**request.data, "name": cat_name}
    return Response(payload, status=200)
@api_view(['GET'])
def dog_detail(request, dog_name):
    """Return the module-level dog fixture regardless of the requested name."""
    return Response(dog, status=200)
| [
"[email protected]"
]
| |
f4d1e396f6b6423541a2c069f47460728dcacd1c | d9f065b24e5387ee71bd236e1dff298dfa02f60c | /nba/features.py | d8d34041a2ced3d6e09ee0f4f589e019e1a8e3b0 | []
| no_license | laimis/gamepredictions | 40972ca04f8161d7a6769bd99c06b0d1a9c06db3 | fb4bb312fa4160602b38336f1b3b0b8c03f88f9d | refs/heads/master | 2020-04-01T06:49:18.483387 | 2019-06-09T01:30:12 | 2019-06-09T01:30:12 | 152,965,209 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,368 | py | import nba.domain as domain
from typing import List
# Per-team stat streams accumulated game by game; order matches the rows
# produced by to_stats_home / to_stats_away.
tracked_stats = ["wins", "scored", "allowed", "date", "fg%", "tp%", "ft%", "rebs", "assists", "turnovers", "streak"]
GAMES_TO_ROLL = 20  # rolling-window length for the per-stat averages
STREAK_NUMBER = 10  # how many recent results the streak looks back over
def get_label_column_names():
    """Column names identifying a game row (labels, not model features)."""
    return "year date counter away home home_win".split()
def get_feature_column_names():
    """Model feature column names, in the order produced by calc_features."""
    return ("away_pct home_pct away_diff home_diff away_tpm home_tpm "
            "away_todiff home_todiff away_rebs home_rebs away_streak home_streak").split()
def get_line_column_names():
    """Betting-line column names appended after the feature columns."""
    return "line_team line_spread spread_correct spread_covered".split()
def get_data_header():
    """CSV header line: label columns, feature columns, then line columns."""
    return ",".join(get_label_column_names() + get_feature_column_names() + get_line_column_names())
def to_stats_home(rd: domain.NBAGame):
    """Home team's per-game stat row, ordered like `tracked_stats`."""
    # Streak input: the win flag, mapped to -1 for a loss.
    for_streak = rd.home_win if rd.home_win != 0 else -1
    return [
        rd.home_win,
        rd.home_pts,
        rd.away_pts,
        rd.date,
        rd.home_fgm / rd.home_fga,
        rd.home_tpm / rd.home_tpa,
        rd.home_ftm / rd.home_fta,
        rd.home_oreb + rd.home_dreb,
        rd.home_assists,
        rd.home_turnovers,
        for_streak,
    ]
def to_stats_away(rd: "domain.NBAGame"):  # string annotation: no hard import needed at def time
    """Away team's per-game stat row, ordered like `tracked_stats`.

    Bug fixes vs. the original: three-point % used the HOME attempts as
    the denominator, and free-throw % divided made by made (always 1.0).
    """
    if rd.away_fga == 0:
        # Diagnostic for malformed rows that would divide by zero below.
        print("zero fga",rd.date,rd.away_pts,rd.home_pts)
    # Streak input: away win flag, mapped to -1 for a loss.
    for_streak = 1 - rd.home_win
    if for_streak == 0:
        for_streak = -1
    return [
        1 - rd.home_win,
        rd.away_pts,
        rd.home_pts,
        rd.date,
        rd.away_fgm / rd.away_fga,
        rd.away_tpm / rd.away_tpa,   # was rd.home_tpa
        rd.away_ftm / rd.away_fta,   # was rd.away_ftm / rd.away_ftm (== 1.0)
        rd.away_oreb + rd.away_dreb,
        rd.away_assists,
        rd.away_turnovers,
        for_streak,
    ]
def add_to_stats(stats, rd):
    """Append one game's stat rows for both teams into stats[team][stat] lists."""
    def add_to_stats_internal(stats, team, to_add):
        # Lazily create the per-team dict of empty stat streams.
        if team not in stats:
            stats[team] = dict([(x,[]) for x in tracked_stats])
        # to_add is positionally aligned with tracked_stats.
        for idx,s in enumerate(tracked_stats):
            stats[team][s].append(to_add[idx])
    add_to_stats_internal(stats, rd.home, to_stats_home(rd))
    add_to_stats_internal(stats, rd.away, to_stats_away(rd))
def calc_features(stats, game_info):
    """Feature vector for one upcoming game, ordered like get_feature_column_names()."""
    home_pct, home_pts, home_allowed, home_games, home_fg, home_tp, home_ft, home_rebs, home_assists, home_turnovers, home_streak = __calc_features__(stats, game_info.home, game_info.date)
    away_pct, away_pts, away_allowed, away_games, away_fg, away_tp, away_ft, away_rebs, away_assists, away_turnovers, away_streak = __calc_features__(stats, game_info.away, game_info.date)
    return [
        away_pct,
        home_pct,
        away_pts - away_allowed,        # away scoring differential
        home_pts - home_allowed,        # home scoring differential
        away_tpm if False else away_tp,  # NOTE(review): column named *_tpm but carries tp%
        home_tp,
        away_assists - away_turnovers,  # away assist-to-turnover difference
        home_assists - home_turnovers,  # home assist-to-turnover difference
        away_rebs,
        home_rebs,
        away_streak,
        home_streak
    ]
def number_of_games_within_date(dates, date, number_of_days):
    """Count how many of the last five recorded game dates fall within
    `number_of_days` of `date`."""
    recent = dates[-5:]
    return sum(1 for played in recent if (date - played).days <= number_of_days)
def __calc_features__(stats, team, date):
    """Per-team feature tuple, one value per entry in tracked_stats."""
    def do_calculation(team_stats, stat):
        if stat == "date":
            # Back-to-back signal: games played within 1 day of `date`.
            return number_of_games_within_date(team_stats[stat], date, 1)
        if stat == "streak":
            # Signed run length of the most recent identical results
            # (entries are +1 for wins, -1 for losses).
            streak = 0
            recent_outcomes = team_stats[stat][-STREAK_NUMBER:]
            recent_outcomes.reverse()
            current = recent_outcomes[0]
            for w in recent_outcomes:
                if w == current:
                    streak = streak + current
                else:
                    break
            return streak
        # Everything else: rolling mean over the last GAMES_TO_ROLL games.
        return sum(team_stats[stat][-GAMES_TO_ROLL:]) / len(team_stats[stat][-GAMES_TO_ROLL:])
    team_stats = stats[team]
    return tuple([do_calculation(team_stats, x) for x in tracked_stats])
"[email protected]"
]
| |
8d838ad1b17dd0480a189e316ae027e1fd5cb5b4 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /LR98GCwLGYPSv8Afb_1.py | 51545319e53c872c2a1520d669972a99be80e25f | []
| no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 77 | py |
def pluralize(lst):
    """Set of the words in `lst`, with 's' appended to any word that
    appears more than once."""
    result = set()
    for word in lst:
        result.add(word + 's' if lst.count(word) > 1 else word)
    return result
| [
"[email protected]"
]
| |
8fa3d155dfb3ad674ecdfb942b55ba95f138f59b | f1efa3f00d06a12e4bce5ac8742dbe45a82d8553 | /MarkovModel.py | 6d62ff7cd5d4793627aceef34b8fe720772324c0 | []
| no_license | kd536/HPM573S18_DESOBRY_HW10- | 655ac942f00ea221181e069ce0f4835a0472d8e2 | 9b9e643700b292e626f1b494767a50bbbe3f32b5 | refs/heads/master | 2020-03-11T11:55:17.486633 | 2018-04-18T03:46:57 | 2018-04-18T03:46:57 | 129,982,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,071 | py | import scr.SamplePathClasses as PathCls
import scr.StatisticalClasses as StatCls
import scr.RandomVariantGenerators as rndClasses
import ParameterClasses as P
import InputData as Data
# patient class simulates patient, patient monitor follows patient, cohort simulates a cohort,
# cohort outcome extracts info from simulation and returns it back
class Patient: # when you store in self then all the things in that class have access to it
    """One simulated patient stepping through the Markov health-state model."""
    def __init__(self, id, parameters):
        """ initiates a patient
        :param id: ID of the patient (also seeds the patient's RNG)
        :param parameters: parameter object
        """
        self._id = id
        # random number generator (created lazily in simulate)
        self._rng = None
        # parameters
        self._param = parameters
        # state monitor
        self._stateMonitor = PatientStateMonitor(parameters)
        # simulate time step
        self._delta_t = parameters.get_delta_t() # length of time step!
    def simulate(self, sim_length):
        """ simulate the patient over the specified simulation length """
        # random number generator for this patient
        self._rng = rndClasses.RNG(self._id) # from now on use random number generator from support library
        k = 0 # current time step
        # while the patient is alive and simulation length is not yet reached
        while self._stateMonitor.get_if_alive() and k*self._delta_t < sim_length:
            # find transition probabilities of future state
            trans_prob = self._param.get_transition_prob(self._stateMonitor.get_current_state())
            # create an empirical distribution
            empirical_dist = rndClasses.Empirical(trans_prob)
            # sample from the empirical distribution to get a new state
            # (returns an integer from {0, 1, 2, ...})
            new_state_index = empirical_dist.sample(self._rng) # pass RNG
            # update health state
            self._stateMonitor.update(k, P.HealthStats(new_state_index))
            # increment time step
            k += 1
    def get_survival_time(self):
        """ returns the patient's survival time (None while still alive)"""
        return self._stateMonitor.get_survival_time()
    def get_number_of_strokes(self):
        """ returns the number of strokes the patient experienced """
        return self._stateMonitor.get_num_of_STROKE()
class PatientStateMonitor:
    """ to update patient outcomes (years survived, cost, etc.) throughout the simulation """
    def __init__(self, parameters):
        """
        :param parameters: patient parameters
        """
        # current health state
        self._currentState = parameters.get_initial_health_state()
        self._delta_t = parameters.get_delta_t()
        self._survivalTime = 0
        self._ifDevelopedStroke = False
        self._strokecount = 0
    def update(self, k, next_state):
        """
        :param k: current time step
        :param next_state: next state
        """
        # updates state of patient
        # if the patient has died, do nothing
        if not self.get_if_alive():
            return
        # update survival time
        if next_state is P.HealthStats.DEATH:
            self._survivalTime = (k+0.5) * self._delta_t # k is number of steps its been, delta t is length of time
            # step, the 0.5 is a half cycle correction
        # NOTE(review): counts every transition that *leaves* the STROKE
        # state, so time spent in STROKE across multiple steps increments
        # the count each step — confirm this matches the model's intent.
        if self._currentState == P.HealthStats.STROKE:
            self._ifDevelopedStroke = True
            self._strokecount += 1
        self._currentState = next_state
    def get_if_alive(self):
        """True while the patient has not reached the DEATH state."""
        result = True
        if self._currentState == P.HealthStats.DEATH:
            result = False
        return result
    def get_current_state(self):
        return self._currentState
    def get_survival_time(self):
        """ returns the patient survival time """
        # return survival time only if the patient has died
        if not self.get_if_alive():
            return self._survivalTime
        else:
            return None
    def get_num_of_STROKE(self):
        """Number of stroke events counted so far (see note in update)."""
        return self._strokecount
class Cohort:
    """A fixed-size cohort of patients sharing one therapy arm."""
    def __init__(self, id, therapy):
        """ create a cohort of patients
        :param id: an integer to specify the seed of the random number generator
        :param therapy: therapy arm forwarded to ParametersFixed
        """
        self._initial_pop_size = Data.POP_SIZE
        self._patients = [] # list of patients
        # populate the cohort
        for i in range(self._initial_pop_size):
            # create a new patient (use id * pop_size + i as patient id)
            patient = Patient(id * self._initial_pop_size + i, P.ParametersFixed(therapy))
            # add the patient to the cohort
            self._patients.append(patient)
    def simulate(self):
        """ simulate the cohort of patients over the specified number of time-steps
        :returns outputs from simulating this cohort
        """
        # simulate all patients
        for patient in self._patients:
            patient.simulate(Data.SIM_LENGTH)
        # return the cohort outputs
        return CohortOutputs(self)
    def get_initial_pop_size(self):
        return self._initial_pop_size
    def get_patients(self):
        return self._patients
class CohortOutputs:
    """Aggregates survival times, stroke counts and summary stats from a simulated cohort."""
    def __init__(self, simulated_cohort):
        """ extracts outputs from a simulated cohort
        :param simulated_cohort: a cohort after being simulated
        """
        self._survivalTimes = [] # patients' survival times
        self._times_to_Stroke = [] # patients' times to stroke (NOTE(review): never populated)
        self._count_strokes = []
        # survival curve
        # NOTE(review): passes the builtin `id` as the sample-path id — confirm intended.
        self._survivalCurve = \
            PathCls.SamplePathBatchUpdate('Population size over time', id, simulated_cohort.get_initial_pop_size())
        # find patients' survival times
        for patient in simulated_cohort.get_patients():
            # get the patient survival time
            survival_time = patient.get_survival_time()
            if not (survival_time is None):
                self._survivalTimes.append(survival_time) # store the survival time of this patient
                self._survivalCurve.record(survival_time, -1) # update the survival curve
            count_strokes = patient.get_number_of_strokes()
            self._count_strokes.append(count_strokes)
        # summary statistics
        self._sumStat_survivalTime = StatCls.SummaryStat('Patient survival time', self._survivalTimes)
        # NOTE(review): label says 'Time until stroke' but the data are stroke counts.
        self._sumState_number_strokes = StatCls.SummaryStat('Time until stroke', self._count_strokes)
    def get_if_developed_stroke(self):
        """Per-patient stroke counts (despite the boolean-sounding name)."""
        return self._count_strokes
    def get_survival_times(self):
        return self._survivalTimes
    def get_sumStat_survival_times(self):
        return self._sumStat_survivalTime
    def get_survival_curve(self):
        return self._survivalCurve
    def get_sumStat_count_strokes(self):
        return self._sumState_number_strokes
| [
"[email protected]"
]
| |
419a6d2869390efda11068d061ee37ee55733dac | f9fcdc80c634d651bde314820c0d3286e5743f4f | /python/day5.py | e6f30b88af83f775f5a473b7632e9d26bad17306 | []
| no_license | TamTran72111/aoc-2020 | a5073a40f85f988f1f8d86ccbbde95f3059ad0d2 | 960889b52ef4f55f5773f1939b9c509e80928374 | refs/heads/main | 2023-02-16T21:07:08.657125 | 2020-12-25T15:50:03 | 2020-12-25T15:50:03 | 320,088,818 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 789 | py | from utilities import read_lines
# Boarding-pass letters as binary digits: F/L pick the lower half (0),
# B/R the upper half (1).
binary_values = {
    'F': '0',
    'B': '1',
    'L': '0',
    'R': '1'
}
def convert_id(boarding_pass):
    """Seat ID of a boarding pass: the first 7 characters encode the row
    in binary, the remaining characters the column; ID = row * 8 + col."""
    bits = boarding_pass.translate(str.maketrans("FBLR", "0101"))
    return int(bits[:7], 2) * 8 + int(bits[7:], 2)
def part_1(ids):
    """Highest seat ID among all boarding passes."""
    return max(ids)
def part_2(ids):
    """The single missing seat ID inside the otherwise-contiguous range."""
    ordered = sorted(ids)
    for lower, upper in zip(ordered, ordered[1:]):
        if upper != lower + 1:
            return lower + 1
    raise Exception("This is unreachable")
if __name__ == '__main__':
    print('Day 5')
    # read_lines comes from the local utilities module.
    data = read_lines('../inputs/day5.txt')
    ids = list(map(convert_id, data))
    print('\tPart 1: {}'.format(part_1(ids)))
    print('\tPart 2: {}'.format(part_2(ids)))
| [
"[email protected]"
]
| |
83abbe58f6fc2055852b647c54c5920e08777d4d | c214a3d6fbfddcb5473e4499b948e24c367f6746 | /Cesear.py | ef4797181e8b727e7ce883f4c4b52fded4e00460 | []
| no_license | kamouzougan/Cybersecurity | ff2d8df944e2f3513bd4dbd84a5e4e1612dac29e | c6b1a2c21753d1f1a07fb1a1ace3c64cbbe41205 | refs/heads/master | 2020-05-05T08:54:43.947649 | 2020-04-09T11:30:46 | 2020-04-09T11:30:46 | 179,883,232 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,497 | py | MAX_KEY_SIZE = 26
def getMode():
    """Prompt until the user picks encrypt/decrypt (full word or first letter)."""
    while True :
        print("Do you wish to encrypt or decrypt a message?")
        mode = input().lower()
        # Accept any of: "encrypt", "e", "decrypt", "d".
        if mode in 'encrypt e decrypt d'.split():
            return mode
        else :
            print('Enter either "encrypt" or "e" or "decrypt" or "d".')
data = open("plaintext.txt","r")
if data.mode == "r" :
contents = data.read()
print(contents)
return contents
def getKey():
    """Prompt until the user enters a shift key between 1 and MAX_KEY_SIZE."""
    key = 0
    while True:
        print('Enter the key number (1-%s)' % (MAX_KEY_SIZE))
        key = int(input())
        if (key >= 1 and key <= MAX_KEY_SIZE):
            return key
def getTranslatedMessage(mode, message, key):
    """Caesar-shift `message` by `key`; modes starting with 'd' shift
    backwards. Non-alphabetic characters pass through unchanged."""
    shift = -key if mode[0] == 'd' else key
    pieces = []
    for ch in message:
        if not ch.isalpha():
            pieces.append(ch)
            continue
        code = ord(ch) + shift
        # Wrap once within the matching alphabet range.
        if ch.isupper():
            if code > ord('Z'):
                code -= 26
            elif code < ord('A'):
                code += 26
        elif ch.islower():
            if code > ord('z'):
                code -= 26
            elif code < ord('a'):
                code += 26
        pieces.append(chr(code))
    return ''.join(pieces)
# Interactive entry point: choose mode, read plaintext.txt, pick a key, translate.
mode = getMode()
message = getMessage()
key = getKey()
print('Your translated text is:')
print(getTranslatedMessage(mode, message, key))
| [
"[email protected]"
]
| |
6a05561304bd78df0efc71b62b3659469610fd24 | f38193df76e7f86ad4017ec62dd7c90ce92e9b91 | /_src/om2py3w/3wex0/diary-server.py | 20b7585eed6f3dee62bf2c7213284c338396cdce | [
"MIT"
]
| permissive | isynch/OMOOC2py | dcf54f9d2012d018c3b280d28d65058e6ae1dc08 | cc7fafc106b56553306988d07f0a4ab61bc39201 | refs/heads/master | 2020-04-25T23:30:15.410512 | 2015-12-05T07:31:56 | 2015-12-05T07:31:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,220 | py | # -*- coding: utf-8 -*-
from datetime import datetime
import socket
import sys
HOST = ''   # Symbolic name meaning all available interfaces
PORT = 1234 # Arbitrary non-privileged port
# Datagram(udp) socket (note: Python 2 syntax throughout this script)
try:
    s=socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
    print '创建中...'
except socket.error, msg:
    print 'Failed to create socket. Error Code : '+str(msg[0])+' Message ' +msg[1]
    sys.exit()
# Bind socket to local host and port
try:
    s.bind((HOST,PORT))
except socket.error, msg:
    print 'Bind failed. Error Code: '+str(msg[0])+' Message '+msg[1]
    sys.exit()
print '连接中...'
# now keep talking with the client
while 1:
    # receive data from client (returns a (data, addr) pair)
    d=s.recvfrom(1024)
    data=d[0]
    addr=d[1]
    # an empty datagram ends the server loop
    if not data:
        break
    today=datetime.now()
    diary=data.strip()
    print diary
    # append the new entry, tagged with date and client port
    diaryFile = open('diary.txt','a')
    diaryFile.write('\n'+today.strftime("%y/%m/%d")+' client['+str(addr[1])+'] '+ diary)
    diaryFile.close()
    # re-read the whole diary to echo it on the server console
    diaryFile = open('diary.txt')
    diary = diaryFile.read()
    print('============日记============')
    print(diary)
    # acknowledge the client with the recorded entry
    reply='帮你记录下来啦。日记:'+data
    s.sendto(reply,addr)
s.close()
| [
"[email protected]"
]
| |
b622abf6e9bd923f0a07424838136f259defe243 | ffe56a49385b88136a9e8c3a1f90d441c93713a2 | /meiduo/meiduo/apps/orders/admin.py | 87b96ff227b5e90568f1cdaf094dcfe3dd95f2ff | []
| no_license | Jeffrry1828/Django | e51f5208531f006bf77d667db2ccffcb2ec02a03 | 995739ad6c1fba8053253087b2fd809f4d6487ec | refs/heads/master | 2022-10-09T03:40:08.076066 | 2020-06-06T14:53:17 | 2020-06-06T14:53:17 | 270,010,693 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 124 | py | from django.contrib import admin
from .models import OrderInfo
# Register your models here.
# Expose OrderInfo in the Django admin with the default ModelAdmin.
admin.site.register(OrderInfo)
| [
"[email protected]"
]
| |
223f0dbfc626e193372122515d1048c3da3e290b | 27d8ccd188636b18ea7542d9aa44e0662123793a | /crude/crude/wsgi.py | c6e230100a461ba0dd4de7bb5f94e41ecfaa94a8 | []
| no_license | KrishnaRauniyar/Django_CRUD_APP-Assignment_II- | c675e9bdf3b0abf1b37c84546a1361c1176e0554 | fda69ede41354c9380f11c0ee98ed83976554376 | refs/heads/master | 2022-11-24T11:42:17.808956 | 2020-07-24T16:37:53 | 2020-07-24T16:37:53 | 282,269,158 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | """
WSGI config for crude project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before the application is built.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'crude.settings')
# Module-level WSGI callable picked up by WSGI servers.
application = get_wsgi_application()
| [
"[email protected]"
]
| |
04e63b41a7f0e2b684daa0deadb5d48becf59923 | 8fd2e5d53d7a91d35288ccefdb0c7ef00d927a0a | /book_06_Python黑帽子/Chapter03/网口嗅探多网段版(Bug).py | a690d64efc8c84b89fe615b495c918e4ec44349e | []
| no_license | atlasmao/Python-book-code | 03501f9ca2e81bc1f47464b3227c7f9cda0d387c | 03b6848a15a7e4c2ffebdc3528c24a8b101d9f41 | refs/heads/master | 2022-01-06T23:45:21.951307 | 2019-07-11T10:32:22 | 2019-07-11T10:32:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,745 | py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
import socket
import os
import struct
import threading
import time
from netaddr import IPNetwork, IPAddress
from ctypes import *
# Host/interface to sniff on
host = '10.0.76.1'
# Target subnet template to sweep (third octet is varied by add_subnet)
subnet = '10.0.10.0/24'
subnet_list = []
host_up_num = 0
# Custom marker string we embed in UDP probes and check for in ICMP replies
magic_message = "PYTHONRULES!"
# Build the list of subnets to sweep: vary the third octet across 0-255.
def add_subnet(subnet):
    head = subnet.split(".")
    for third_octet in range(256):
        subnet_list.append(".".join(head[:2] + [str(third_octet), head[3]]))
# Spray the magic payload at every host in the subnet so closed UDP ports
# answer with ICMP messages for the sniffer to catch.
def udp_sender(subnet, magic_message):
    time.sleep(5)  # give the sniffer a moment to start listening
    sender = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        for ip in IPNetwork(subnet):
            try:
                sender.sendto(magic_message, ("{}".format(ip), 65212))
            except socket.error:
                # Unreachable/invalid targets are expected; keep spraying.
                pass
    finally:
        # Bug fix: the original closed the socket inside the loop's
        # `finally`, so every address after the first hit a closed socket
        # (and the error was silently swallowed). Close once, at the end.
        sender.close()
# ip 头定义
class IP(Structure):
    """ctypes overlay for the leading 20 bytes (IPv4 header) of a packet."""
    _fields_ = [
        ("ihl", c_ubyte, 4),
        ("version", c_ubyte, 4),
        ("tos", c_ubyte),
        ("len", c_ushort),
        ("id", c_ushort),
        ("offset", c_ushort),
        ("ttl", c_ubyte),
        ("protocol_num", c_ubyte),
        ("sum", c_ushort),
        # ("src", c_ulong),
        ("src", c_uint32),
        # ("dst", c_ulong)
        ("dst", c_uint32)
    ]

    def __new__(self, socket_buffer=None):
        # Build the structure directly from the raw packet bytes.
        return self.from_buffer_copy(socket_buffer)

    def __init__(self, socket_buffer=None):
        # Map protocol numbers to human-readable names.
        self.protocol_map = {1: "ICMP", 6: "TCP", 17: "UDP"}
        # Human-readable dotted-quad addresses ("@I" = native-order uint32,
        # matching the c_uint32 fields above).
        # self.src_address = socket.inet_ntoa(struct.pack("<L", self.src))
        self.src_address = socket.inet_ntoa(struct.pack("@I", self.src))
        # self.dst_address = socket.inet_ntoa(struct.pack("<L", self.dst))
        self.dst_address = socket.inet_ntoa(struct.pack("@I", self.dst))
        # Protocol name, falling back to the raw number if unknown.
        try:
            self.protocol = self.protocol_map[self.protocol_num]
        except:
            self.protocol = str(self.protocol_num)
class ICMP(Structure):
    """ctypes overlay for the 8-byte ICMP header following the IP header."""
    _fields_ = [
        ("type", c_ubyte),
        ("code", c_ubyte),
        ("checksum", c_ushort),
        ("unused", c_ushort),
        ("next_hop_mtu", c_ushort)
    ]

    def __new__(self, socket_buffer):
        # Build the structure directly from the raw packet bytes.
        return self.from_buffer_copy(socket_buffer)

    def __init__(self, socket_buffer):
        pass
# Create a raw socket and bind it to the public interface.
if os.name == "nt":
    socket_protocol = socket.IPPROTO_IP
else:
    socket_protocol = socket.IPPROTO_ICMP

sniffer = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket_protocol)
sniffer.bind((host, 0))
# Include the IP headers in captured packets.
sniffer.setsockopt(socket.IPPROTO_IP, socket.IP_HDRINCL, 1)
# On Windows we need to set an IOCTL to enable promiscuous mode.
if os.name == "nt":
    sniffer.ioctl(socket.SIO_RCVALL, socket.RCVALL_ON)

add_subnet(subnet)
# Bug fix: pre-build network objects for *every* scanned subnet. The
# original validated replies only against the seed ``subnet``, so hosts
# in the other 255 generated subnets were silently ignored. Parsing is
# hoisted out of the packet loop for speed.
target_networks = [IPNetwork(net) for net in subnet_list]
for new_subnet in subnet_list:
    print(new_subnet)
    # Start spraying UDP probes at this subnet in the background.
    t = threading.Thread(target=udp_sender, args=(new_subnet, magic_message))
    t.start()
try:
    while True:
        # Read a packet.
        raw_buffer = sniffer.recvfrom(65565)[0]
        # Parse the first 20 bytes as the IP header.
        ip_header = IP(raw_buffer[0:20])
        # Only ICMP replies (provoked by the UDP probes) are interesting.
        if ip_header.protocol == "ICMP":
            # Compute where the ICMP payload starts.
            offset = ip_header.ihl * 4
            buf = raw_buffer[offset:offset + sizeof(ICMP)]
            # Parse the ICMP data.
            icmp_header = ICMP(buf)
            # Type 3 / code 3 means "destination port unreachable".
            if icmp_header.code == 3 and icmp_header.type == 3:
                # Confirm the responder lies inside one of the scanned subnets.
                src = IPAddress(ip_header.src_address)
                if any(src in net for net in target_networks):
                    # Confirm the ICMP payload echoes our magic string.
                    if raw_buffer[len(raw_buffer) - len(magic_message):] == magic_message:
                        print("Host Up: {}".format(ip_header.src_address))
                        host_up_num += 1
                        print("Host Up Number: {}".format(host_up_num))
# Handle CTRL-C.
except KeyboardInterrupt:
    # If running on Windows, turn off promiscuous mode.
    if os.name == "nt":
        sniffer.ioctl(socket.SIO_RCVALL, socket.RCVALL_OFF)
| [
"[email protected]"
]
| |
c275d436f6f6dec21e35cd80e4f9de92952b5921 | 325c97b94b84f54df18c0a770bbf90cb2cd87186 | /pc/gui/image_viewer.py | a5914c19088dcd8cdd1b0c858adc35e49a7797af | []
| no_license | alexbaraker/DroneGui | c289716e1a61ec1795017529148b8a7f1d1fcedb | 0945567dd0c0d4ed0d59cf0e492f039efa733246 | refs/heads/master | 2020-06-20T12:06:24.679541 | 2019-07-23T18:11:23 | 2019-07-23T18:11:23 | 197,117,217 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,563 | py | #######################################################################################################################################
# By: SupremeDrones Team; Alex Baraker, Dean, Kelsey, Hammad
# Date: 3/06/2019
# Info: Widget for displaying loaded image
#######################################################################################################################################
from threading import Thread
import time
from PyQt4.QtGui import *
from PyQt4.QtCore import *
from gui.opencv_image import OpenCvImageWidget
class ImageViewerWidget(QWidget):
    """Container widget that hosts an OpenCvImageWidget to display the
    currently loaded image.

    The previously commented-out "Load Image" button and background
    refresh thread were removed as dead code.
    """

    def __init__(self, parent):
        # Bug fix: the original called ``super(QWidget, self)``, which
        # starts the MRO lookup *after* QWidget and therefore skips
        # QWidget.__init__; initialize through this class instead.
        super(ImageViewerWidget, self).__init__(parent)

        self.v_layout = QVBoxLayout()
        self.opencv_image = OpenCvImageWidget(self)

        self.v_layout.addWidget(self.opencv_image)
        self.setLayout(self.v_layout)

    def strName_out(self):
        # Delegate to the embedded OpenCV image widget.
        self.opencv_image.strName()
"[email protected]"
]
| |
43897fd79e93876b6bb01c316ff69f8ac715aa83 | 4de0c6d3a820d7669fcef5fd035416cf85b35f23 | /ITcoach/爬虫课件/第三章:数据解析/6.xpath解析案例-58二手房.py | d01d163b95860803cf0863b3b681c3a5e230439b | [
"AFL-3.0"
]
| permissive | ww35133634/chenxusheng | 5e1b7391a94387b73bcd7c4d12f1247b79be8016 | 666e0eb3aedde46342faf0d4030f5c72b10c9732 | refs/heads/master | 2022-11-12T03:46:47.953680 | 2020-07-02T20:50:56 | 2020-07-02T20:50:56 | 275,168,080 | 0 | 0 | AFL-3.0 | 2020-07-02T20:58:37 | 2020-06-26T13:54:48 | HTML | UTF-8 | Python | false | false | 800 | py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# Requirement: crawl the listing titles from 58.com's used-housing index.
import requests
from lxml import etree

if __name__ == "__main__":
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36'
    }
    # Fetch the page source.
    url = 'https://bj.58.com/ershoufang/'
    page_text = requests.get(url=url, headers=headers).text
    # Parse it with lxml/XPath.
    tree = etree.HTML(page_text)
    # Each <li> under the list wrapper is one housing listing.
    li_list = tree.xpath('//ul[@class="house-list-wrap"]/li')
    # Bug fix: the original opened 58.txt and never closed it; ``with``
    # guarantees the handle is flushed and closed even on errors.
    with open('58.txt', 'w', encoding='utf-8') as fp:
        for li in li_list:
            # Relative XPath: the listing title lives in div[2]/h2/a.
            title = li.xpath('./div[2]/h2/a/text()')[0]
            print(title)
            fp.write(title + '\n')
"[email protected]"
]
| |
cbda399e17220661d757ad64e8c6819db3fb5ed9 | c4bf0642b02a60b17a12c7daa97d54391ef9d752 | /drf_auth/settings.py | 7c9158ab8365930be49312cb978728007cecf85a | [
"MIT"
]
| permissive | yurttav/django_rest_authentication | ce16855a3f7999109210addb995a0123f7dfff2e | f5f31ec9193dbba6d6419d5969127754f0d1a8e8 | refs/heads/main | 2023-07-01T11:51:59.980769 | 2021-08-12T16:58:17 | 2021-08-12T16:58:17 | 395,387,683 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,546 | py | """
Django settings for drf_auth project.
Generated by 'django-admin startproject' using Django 3.2.6.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from corsheaders.defaults import default_headers
from pathlib import Path
from decouple import config
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# Loaded from the environment / .env via python-decouple so it stays out of VCS.
SECRET_KEY = config('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# NOTE(review): '*' accepts any Host header; restrict before deploying.
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # my apps
    'student_api',
    'user_api',
    # 3rd party apps
    'rest_framework',
    'corsheaders',
    'rest_framework.authtoken',
    'dj_rest_auth',
    'django.contrib.sites',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
    'dj_rest_auth.registration',
]
# Required by django.contrib.sites / allauth.
SITE_ID = 1
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    # CorsMiddleware must come before CommonMiddleware.
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
# Allow the CSRF token header on cross-origin requests.
CORS_ALLOW_HEADERS = list(default_headers) + [
    'X-CSRFTOKEN',
]
# NOTE(review): allowing all origins with credentials is permissive;
# tighten for production.
CORS_ORIGIN_ALLOW_ALL = True
CORS_ALLOW_CREDENTIALS = True
CORS_EXPOSE_HEADERS = (
    'Access-Control-Allow-Origin: *',
)
ROOT_URLCONF = 'drf_auth.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'drf_auth.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
# DRF: token- and session-based authentication; permissions left at default.
REST_FRAMEWORK = {
    # 'DEFAULT_PERMISSION_CLASSES': [
    #     'rest_framework.permissions.IsAuthenticated',
    #     # 'rest_framework.permissions.AllowAny',
    # ],
    'DEFAULT_AUTHENTICATION_CLASSES': [
        'rest_framework.authentication.TokenAuthentication',
        'rest_framework.authentication.SessionAuthentication',
    ]
    # 'DEFAULT_AUTHENTICATION_CLASSES': [
    #     'rest_framework.authentication.TokenAuthentication',
    # ]
}
# CORS_ALLOW_HEADERS = [
#     'accept',
#     'accept-encoding',
#     'authorization',
#     'content-type',
#     'dnt',
#     'origin',
#     'user-agent',
#     'x-csrftoken',
#     'x-requested-with',
# ]
# CSRF_COOKIE_NAME = "XCSRF-TOKEN"
| |
ed64e352839fee277680c8be39d3058c38d029a5 | d570fc2e36f0842605ad6e9dda3cbd4910160a07 | /src/webdav/Resource.py | 5b3121865ca3ace9d66cf08ff6f649d0b1b59b89 | [
"ZPL-2.1"
]
| permissive | zopefoundation/ZServer | 8540fc7c411a7857abf4034068f75f2f1c7ba98c | eb047c795a278c22ae77f5af4284411e4689025e | refs/heads/master | 2023-06-21T20:54:53.580461 | 2023-02-10T09:43:55 | 2023-02-10T09:43:55 | 65,092,325 | 6 | 9 | NOASSERTION | 2020-09-17T07:25:50 | 2016-08-06T16:47:48 | Python | UTF-8 | Python | false | false | 27,157 | py | ##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""WebDAV support - resource objects.
"""
import mimetypes
import sys
import re
from urllib import unquote
from AccessControl import getSecurityManager
from AccessControl import ClassSecurityInfo
from AccessControl.class_init import InitializeClass
from AccessControl.Permissions import delete_objects
from AccessControl.Permissions import manage_properties
from AccessControl.Permissions import view as View
from AccessControl.Permissions import webdav_lock_items
from AccessControl.Permissions import webdav_unlock_items
from AccessControl.Permissions import webdav_access
from Acquisition import aq_base
from Acquisition import aq_inner
from Acquisition import aq_parent
from App.Common import rfc1123_date
from ExtensionClass import Base
from OFS.event import ObjectClonedEvent
from OFS.event import ObjectWillBeMovedEvent
from OFS.interfaces import IWriteLock
from OFS.Lockable import LockableItem
from OFS.Lockable import wl_isLockable
from OFS.Lockable import wl_isLocked
from OFS.subscribers import compatibilityCall
from zExceptions import BadRequest
from zExceptions import Forbidden
from zExceptions import MethodNotAllowed
from zExceptions import NotFound
from zExceptions import Unauthorized
import ZServer.Zope2.Startup.config
from ZPublisher.HTTPRangeSupport import HTTPRangeInterface
from zope.interface import implementer
from zope.event import notify
from zope.lifecycleevent import ObjectCopiedEvent
from zope.lifecycleevent import ObjectMovedEvent
from zope.container.contained import notifyContainerModified
from webdav.common import absattr
from webdav.common import Conflict
from webdav.common import IfParser
from webdav.common import isDavCollection
from webdav.common import Locked
from webdav.common import PreconditionFailed
from webdav.common import tokenFinder
from webdav.common import urlbase
from webdav.common import urlfix
from webdav.interfaces import IDAVResource
# Matches the User-Agent sent by Microsoft's "Web Folders" WebDAV client.
ms_dav_agent = re.compile("Microsoft.*Internet Publishing.*")
@implementer(IDAVResource)
class Resource(Base, LockableItem):
    """The Resource mixin class provides basic WebDAV support for
    non-collection objects. It provides default implementations
    for most supported WebDAV HTTP methods, however certain methods
    such as PUT should be overridden to ensure correct behavior in
    the context of the object type."""

    # Marker flag consulted by the WebDAV machinery.
    __dav_resource__ = 1

    # HTTP verbs advertised in OPTIONS responses.
    __http_methods__ = ('GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'OPTIONS',
                        'TRACE', 'PROPFIND', 'PROPPATCH', 'MKCOL', 'COPY',
                        'MOVE', 'LOCK', 'UNLOCK',
                        )

    security = ClassSecurityInfo()
    security.setPermissionDefault(webdav_access, ('Authenticated', 'Manager'))

    def dav__init(self, request, response):
        """Set the HTTP 1.1 / WebDAV headers expected on every DAV response."""
        # Init expected HTTP 1.1 / WebDAV headers which are not
        # currently set by the base response object automagically.
        #
        # We sniff for a ZServer response object, because we don't
        # want to write duplicate headers (since ZS writes Date
        # and Connection itself).
        if not hasattr(response, '_server_version'):
            response.setHeader('Connection', 'close')
            response.setHeader('Date', rfc1123_date(), 1)

        # HTTP Range support: advertise byte ranges only if the object
        # implements the range interface.
        if HTTPRangeInterface.providedBy(self):
            response.setHeader('Accept-Ranges', 'bytes')
        else:
            response.setHeader('Accept-Ranges', 'none')

    def dav__validate(self, object, methodname, REQUEST):
        """Raise Unauthorized unless the current user may call
        *methodname* on *object* (looked up directly or via acquisition)."""
        msg = ('<strong>You are not authorized '
               'to access this resource.</strong>')
        method = None
        if hasattr(object, methodname):
            method = getattr(object, methodname)
        else:
            try:
                method = object.aq_acquire(methodname)
            except Exception:
                method = None

        if method is not None:
            try:
                return getSecurityManager().validate(None, object,
                                                     methodname,
                                                     method)
            except Exception:
                pass

        raise Unauthorized(msg)
    def dav__simpleifhandler(self, request, response, method='PUT',
                             col=0, url=None, refresh=0):
        """Evaluate the WebDAV ``If`` request header against our locks.

        Returns None when the resource is not lockable or no If header
        was supplied, 1 when a lock token we hold matched (refreshing it
        if *refresh* is true), and 0 when the header tagged only other
        resources. Raises Locked when locked without an If header, and
        PreconditionFailed when this resource was tagged but no token
        matched.
        """
        ifhdr = request.get_header('If', None)
        lockable = wl_isLockable(self)
        if not lockable:
            # degenerate case, we shouldnt have even called this method.
            return None
        locked = self.wl_isLocked()
        if locked and (not ifhdr):
            raise Locked('Resource is locked.')

        if not ifhdr:
            return None

        # Since we're a simple if handler, and since some clients don't
        # pass in the port information in the resource part of an If
        # header, we're only going to worry about if the paths compare
        if url is None:
            url = urlfix(request['URL'], method)
        url = urlbase(url)  # Gets just the path information

        # if 'col' is passed in, an operation is happening on a submember
        # of a collection, while the Lock may be on the parent. Lob off
        # the final part of the URL (ie '/a/b/foo.html' becomes '/a/b/')
        if col:
            url = url[:url.rfind('/') + 1]
        found = 0
        resourcetagged = 0
        taglist = IfParser(ifhdr)
        for tag in taglist:
            if not tag.resource:
                # There's no resource (url) with this tag
                tag_list = map(tokenFinder, tag.list)
                wehave = [t for t in tag_list if self.wl_hasLock(t)]
                if not wehave:
                    continue
                if tag.NOTTED:
                    continue
                if refresh:
                    for token in wehave:
                        self.wl_getLock(token).refresh()
                resourcetagged = 1
                found = 1
                break
            elif urlbase(tag.resource) == url:
                resourcetagged = 1
                tag_list = map(tokenFinder, tag.list)
                wehave = [t for t in tag_list if self.wl_hasLock(t)]
                if not wehave:
                    continue
                if tag.NOTTED:
                    continue
                if refresh:
                    for token in wehave:
                        self.wl_getLock(token).refresh()
                found = 1
                break
        if resourcetagged and (not found):
            raise PreconditionFailed('Condition failed.')
        elif resourcetagged and found:
            return 1
        else:
            return 0
# WebDAV class 1 support
    security.declareProtected(View, 'HEAD')
    def HEAD(self, REQUEST, RESPONSE):
        """Retrieve resource information without a response body."""
        self.dav__init(REQUEST, RESPONSE)
        # Determine the content type: explicit attribute, then guess from
        # the URL's final segment, then the object's default, then a
        # generic binary fallback.
        content_type = None
        if hasattr(self, 'content_type'):
            content_type = absattr(self.content_type)
        if content_type is None:
            url = urlfix(REQUEST['URL'], 'HEAD')
            # NOTE(review): upstream uses filter(None, url.split('/'))[-1]
            # (last non-empty segment); here filter() wraps the final
            # segment instead, which yields '' for trailing-slash URLs --
            # confirm against upstream.
            name = unquote(filter(None, url.split('/')[-1]))
            content_type, encoding = mimetypes.guess_type(name)
        if content_type is None:
            if hasattr(self, 'default_content_type'):
                content_type = absattr(self.default_content_type)
        if content_type is None:
            content_type = 'application/octet-stream'
        RESPONSE.setHeader('Content-Type', content_type.lower())
        # Optional entity headers, only when the object can supply them.
        if hasattr(aq_base(self), 'get_size'):
            RESPONSE.setHeader('Content-Length', absattr(self.get_size))
        if hasattr(self, '_p_mtime'):
            mtime = rfc1123_date(self._p_mtime)
            RESPONSE.setHeader('Last-Modified', mtime)
        if hasattr(aq_base(self), 'http__etag'):
            etag = self.http__etag(readonly=1)
            if etag:
                RESPONSE.setHeader('Etag', etag)
        RESPONSE.setStatus(200)
        return RESPONSE

    def PUT(self, REQUEST, RESPONSE):
        """Replace the GET response entity of an existing resource.
        Because this is often object-dependent, objects which handle
        PUT should override the default PUT implementation with an
        object-specific implementation. By default, PUT requests
        fail with a 405 (Method Not Allowed)."""
        self.dav__init(REQUEST, RESPONSE)
        raise MethodNotAllowed('Method not supported for this resource.')

    security.declarePublic('OPTIONS')
    def OPTIONS(self, REQUEST, RESPONSE):
        """Retrieve communication options."""
        self.dav__init(REQUEST, RESPONSE)
        RESPONSE.setHeader('Allow', ', '.join(self.__http_methods__))
        RESPONSE.setHeader('Content-Length', 0)
        RESPONSE.setHeader('DAV', '1,2', 1)

        # Microsoft Web Folders compatibility, only enabled if
        # User-Agent matches.
        if ms_dav_agent.match(REQUEST.get_header('User-Agent', '')):
            if ZServer.Zope2.Startup.config.ZSERVER_ENABLE_MS_PUBLIC_HEADER:
                RESPONSE.setHeader('Public', ', '.join(self.__http_methods__))
        RESPONSE.setStatus(200)
        return RESPONSE

    security.declarePublic('TRACE')
    def TRACE(self, REQUEST, RESPONSE):
        """Return the HTTP message received back to the client as the
        entity-body of a 200 (OK) response. This will often usually
        be intercepted by the web server in use. If not, the TRACE
        request will fail with a 405 (Method Not Allowed), since it
        is not often possible to reproduce the HTTP request verbatim
        from within the Zope environment."""
        self.dav__init(REQUEST, RESPONSE)
        raise MethodNotAllowed('Method not supported for this resource.')
    security.declareProtected(delete_objects, 'DELETE')
    def DELETE(self, REQUEST, RESPONSE):
        """Delete a resource. For non-collection resources, DELETE may
        return either 200 or 204 (No Content) to indicate success."""
        self.dav__init(REQUEST, RESPONSE)
        ifhdr = REQUEST.get_header('If', '')
        url = urlfix(REQUEST['URL'], 'DELETE')
        # NOTE(review): as in HEAD, filter() wraps the final segment rather
        # than the split list -- confirm against upstream.
        name = unquote(filter(None, url.split('/')[-1]))
        parent = aq_parent(aq_inner(self))
        # Lock checking: both this resource's lock and the parent
        # collection's lock can forbid removal.
        if wl_isLocked(self):
            if ifhdr:
                self.dav__simpleifhandler(REQUEST, RESPONSE, 'DELETE')
            else:
                # We're locked, and no if header was passed in, so
                # the client doesn't own a lock.
                raise Locked('Resource is locked.')
        elif IWriteLock.providedBy(parent) and parent.wl_isLocked():
            if ifhdr:
                parent.dav__simpleifhandler(REQUEST, RESPONSE, 'DELETE', col=1)
            else:
                # Our parent is locked, and no If header was passed in.
                # When a parent is locked, members cannot be removed
                raise PreconditionFailed(
                    'Resource is locked, and no condition was passed in.')
        # Either we're not locked, or a succesful lock token was submitted
        # so we can delete the lock now.
        # ajung: Fix for Collector # 2196
        if parent.manage_delObjects([name], REQUEST=None) is None:
            RESPONSE.setStatus(204)
        else:
            RESPONSE.setStatus(403)

        return RESPONSE

    security.declareProtected(webdav_access, 'PROPFIND')
    def PROPFIND(self, REQUEST, RESPONSE):
        """Retrieve properties defined on the resource."""
        from webdav.davcmds import PropFind
        self.dav__init(REQUEST, RESPONSE)
        cmd = PropFind(REQUEST)
        result = cmd.apply(self)
        # work around MSIE DAV bug for creation and modified date
        if (REQUEST.get_header('User-Agent') ==
                'Microsoft Data Access Internet Publishing Provider DAV 1.1'):
            result = result.replace('<n:getlastmodified xmlns:n="DAV:">',
                                    '<n:getlastmodified xmlns:n="DAV:" xmlns:b="urn:uuid:c2f41010-65b3-11d1-a29f-00aa00c14882/" b:dt="dateTime.rfc1123">')  # NOQA
            result = result.replace('<n:creationdate xmlns:n="DAV:">',
                                    '<n:creationdate xmlns:n="DAV:" xmlns:b="urn:uuid:c2f41010-65b3-11d1-a29f-00aa00c14882/" b:dt="dateTime.tz">')  # NOQA
        RESPONSE.setStatus(207)
        RESPONSE.setHeader('Content-Type', 'text/xml; charset="utf-8"')
        RESPONSE.setBody(result)
        return RESPONSE

    security.declareProtected(manage_properties, 'PROPPATCH')
    def PROPPATCH(self, REQUEST, RESPONSE):
        """Set and/or remove properties defined on the resource."""
        from webdav.davcmds import PropPatch
        self.dav__init(REQUEST, RESPONSE)
        if not hasattr(aq_base(self), 'propertysheets'):
            raise MethodNotAllowed(
                'Method not supported for this resource.')
        # Lock checking: a locked resource requires a matching If header.
        ifhdr = REQUEST.get_header('If', '')
        if wl_isLocked(self):
            if ifhdr:
                self.dav__simpleifhandler(REQUEST, RESPONSE, 'PROPPATCH')
            else:
                raise Locked('Resource is locked.')

        cmd = PropPatch(REQUEST)
        result = cmd.apply(self)
        RESPONSE.setStatus(207)
        RESPONSE.setHeader('Content-Type', 'text/xml; charset="utf-8"')
        RESPONSE.setBody(result)
        return RESPONSE

    def MKCOL(self, REQUEST, RESPONSE):
        """Create a new collection resource. If called on an existing
        resource, MKCOL must fail with 405 (Method Not Allowed)."""
        self.dav__init(REQUEST, RESPONSE)
        raise MethodNotAllowed('The resource already exists.')
    security.declarePublic('COPY')
    def COPY(self, REQUEST, RESPONSE):
        """Create a duplicate of the source resource whose state
        and behavior match that of the source resource as closely
        as possible. Though we may later try to make a copy appear
        seamless across namespaces (e.g. from Zope to Apache), COPY
        is currently only supported within the Zope namespace."""
        self.dav__init(REQUEST, RESPONSE)
        if not hasattr(aq_base(self), 'cb_isCopyable') or \
           not self.cb_isCopyable():
            raise MethodNotAllowed('This object may not be copied.')

        depth = REQUEST.get_header('Depth', 'infinity')
        if depth not in ('0', 'infinity'):
            raise BadRequest('Invalid Depth header.')

        # Resolve the Destination header into a parent path and new id.
        dest = REQUEST.get_header('Destination', '')
        while dest and dest[-1] == '/':
            dest = dest[:-1]
        if not dest:
            raise BadRequest('Invalid Destination header.')

        try:
            path = REQUEST.physicalPathFromURL(dest)
        except ValueError:
            raise BadRequest('Invalid Destination header')

        name = path.pop()

        oflag = REQUEST.get_header('Overwrite', 'F').upper()
        if oflag not in ('T', 'F'):
            raise BadRequest('Invalid Overwrite header.')

        try:
            parent = self.restrictedTraverse(path)
        except ValueError:
            raise Conflict('Attempt to copy to an unknown namespace.')
        except NotFound:
            raise Conflict('Object ancestors must already exist.')
        except Exception:
            raise

        if hasattr(parent, '__null_resource__'):
            raise Conflict('Object ancestors must already exist.')
        existing = hasattr(aq_base(parent), name)
        if existing and oflag == 'F':
            raise PreconditionFailed('Destination resource exists.')
        try:
            parent._checkId(name, allow_dup=1)
        except Exception:
            raise Forbidden(sys.exc_info()[1])
        try:
            parent._verifyObjectPaste(self)
        except Unauthorized:
            raise
        except Exception:
            raise Forbidden(sys.exc_info()[1])

        # Now check locks. The If header on a copy only cares about the
        # lock on the destination, so we need to check out the destinations
        # lock status.
        ifhdr = REQUEST.get_header('If', '')
        if existing:
            # The destination itself exists, so we need to check its locks
            destob = aq_base(parent)._getOb(name)
            if IWriteLock.providedBy(destob) and destob.wl_isLocked():
                if ifhdr:
                    itrue = destob.dav__simpleifhandler(
                        REQUEST, RESPONSE, 'COPY', refresh=1)
                    if not itrue:
                        raise PreconditionFailed()
                else:
                    raise Locked('Destination is locked.')
        elif IWriteLock.providedBy(parent) and parent.wl_isLocked():
            if ifhdr:
                parent.dav__simpleifhandler(REQUEST, RESPONSE, 'COPY',
                                            refresh=1)
            else:
                raise Locked('Destination is locked.')

        # Perform the copy: clone, optionally shallow (Depth: 0), fire
        # the copy/clone events, and replace any existing destination.
        self._notifyOfCopyTo(parent, op=0)
        ob = self._getCopy(parent)
        ob._setId(name)

        if depth == '0' and isDavCollection(ob):
            for id in ob.objectIds():
                ob._delObject(id)

        notify(ObjectCopiedEvent(ob, self))

        if existing:
            object = getattr(parent, name)
            self.dav__validate(object, 'DELETE', REQUEST)
            parent._delObject(name)

        parent._setObject(name, ob)
        ob = parent._getOb(name)
        ob._postCopy(parent, op=0)

        compatibilityCall('manage_afterClone', ob, ob)

        notify(ObjectClonedEvent(ob))

        # We remove any locks from the copied object because webdav clients
        # don't track the lock status and the lock token for copied resources
        ob.wl_clearLocks()
        RESPONSE.setStatus(existing and 204 or 201)
        if not existing:
            RESPONSE.setHeader('Location', dest)
        RESPONSE.setBody('')
        return RESPONSE
security.declarePublic('MOVE')
def MOVE(self, REQUEST, RESPONSE):
"""Move a resource to a new location. Though we may later try to
make a move appear seamless across namespaces (e.g. from Zope
to Apache), MOVE is currently only supported within the Zope
namespace."""
self.dav__init(REQUEST, RESPONSE)
self.dav__validate(self, 'DELETE', REQUEST)
if not hasattr(aq_base(self), 'cb_isMoveable') or \
not self.cb_isMoveable():
raise MethodNotAllowed('This object may not be moved.')
dest = REQUEST.get_header('Destination', '')
try:
path = REQUEST.physicalPathFromURL(dest)
except ValueError:
raise BadRequest('No destination given')
flag = REQUEST.get_header('Overwrite', 'F')
flag = flag.upper()
name = path.pop()
parent_path = '/'.join(path)
try:
parent = self.restrictedTraverse(path)
except ValueError:
raise Conflict('Attempt to move to an unknown namespace.')
except 'Not Found':
raise Conflict('The resource %s must exist.' % parent_path)
except Exception:
raise
if hasattr(parent, '__null_resource__'):
raise Conflict('The resource %s must exist.' % parent_path)
existing = hasattr(aq_base(parent), name)
if existing and flag == 'F':
raise PreconditionFailed('Resource %s exists.' % dest)
try:
parent._checkId(name, allow_dup=1)
except Exception:
raise Forbidden(sys.exc_info()[1])
try:
parent._verifyObjectPaste(self)
except Unauthorized:
raise
except Exception:
raise Forbidden(sys.exc_info()[1])
# Now check locks. Since we're affecting the resource that we're
# moving as well as the destination, we have to check both.
ifhdr = REQUEST.get_header('If', '')
if existing:
# The destination itself exists, so we need to check its locks
destob = aq_base(parent)._getOb(name)
if IWriteLock.providedBy(destob) and destob.wl_isLocked():
if ifhdr:
itrue = destob.dav__simpleifhandler(
REQUEST, RESPONSE, 'MOVE', url=dest, refresh=1)
if not itrue:
raise PreconditionFailed
else:
raise Locked('Destination is locked.')
elif IWriteLock.providedBy(parent) and parent.wl_isLocked():
# There's no existing object in the destination folder, so
# we need to check the folders locks since we're changing its
# member list
if ifhdr:
itrue = parent.dav__simpleifhandler(REQUEST, RESPONSE, 'MOVE',
col=1, url=dest, refresh=1)
if not itrue:
raise PreconditionFailed('Condition failed.')
else:
raise Locked('Destination is locked.')
if wl_isLocked(self):
# Lastly, we check ourselves
if ifhdr:
itrue = self.dav__simpleifhandler(REQUEST, RESPONSE, 'MOVE',
refresh=1)
if not itrue:
raise PreconditionFailed('Condition failed.')
else:
raise Locked('Source is locked and no condition was passed in')
orig_container = aq_parent(aq_inner(self))
orig_id = self.getId()
self._notifyOfCopyTo(parent, op=1)
notify(ObjectWillBeMovedEvent(self, orig_container, orig_id,
parent, name))
# try to make ownership explicit so that it gets carried
# along to the new location if needed.
self.manage_changeOwnershipType(explicit=1)
ob = self._getCopy(parent)
ob._setId(name)
orig_container._delObject(orig_id, suppress_events=True)
if existing:
object = getattr(parent, name)
self.dav__validate(object, 'DELETE', REQUEST)
parent._delObject(name)
parent._setObject(name, ob, set_owner=0, suppress_events=True)
ob = parent._getOb(name)
notify(ObjectMovedEvent(ob, orig_container, orig_id, parent, name))
notifyContainerModified(orig_container)
if aq_base(orig_container) is not aq_base(parent):
notifyContainerModified(parent)
ob._postCopy(parent, op=1)
# try to make ownership implicit if possible
ob.manage_changeOwnershipType(explicit=0)
RESPONSE.setStatus(existing and 204 or 201)
if not existing:
RESPONSE.setHeader('Location', dest)
RESPONSE.setBody('')
return RESPONSE
    # WebDAV Class 2, Lock and Unlock

    security.declareProtected(webdav_lock_items, 'LOCK')
    def LOCK(self, REQUEST, RESPONSE):
        """Lock a resource"""
        from webdav.davcmds import Lock
        self.dav__init(REQUEST, RESPONSE)
        security = getSecurityManager()
        creator = security.getUser()
        body = REQUEST.get('BODY', '')
        ifhdr = REQUEST.get_header('If', None)
        depth = REQUEST.get_header('Depth', 'infinity')
        alreadylocked = wl_isLocked(self)

        if body and alreadylocked:
            # This is a full LOCK request, and the Resource is
            # already locked, so we need to raise the alreadylocked
            # exception.
            RESPONSE.setStatus(423)
        elif body:
            # This is a normal lock request with an XML payload
            cmd = Lock(REQUEST)
            token, result = cmd.apply(self, creator, depth=depth)
            if result:
                # Return the multistatus result (there were multiple
                # errors. Note that davcmds.Lock.apply aborted the
                # transaction already.
                RESPONSE.setStatus(207)
                RESPONSE.setHeader('Content-Type', 'text/xml; charset="utf-8"')
                RESPONSE.setBody(result)
            else:
                # Success
                lock = self.wl_getLock(token)
                RESPONSE.setStatus(200)
                RESPONSE.setHeader('Content-Type', 'text/xml; charset="utf-8"')
                RESPONSE.setHeader('Lock-Token', 'opaquelocktoken:' + token)
                RESPONSE.setBody(lock.asXML())
        else:
            # There's no body, so this likely to be a refresh request
            if not ifhdr:
                raise PreconditionFailed('If Header Missing')
            taglist = IfParser(ifhdr)
            found = 0
            for tag in taglist:
                for listitem in tag.list:
                    token = tokenFinder(listitem)
                    if token and self.wl_hasLock(token):
                        lock = self.wl_getLock(token)
                        timeout = REQUEST.get_header('Timeout', 'Infinite')
                        lock.setTimeout(timeout)  # automatically refreshes
                        found = 1

                        RESPONSE.setStatus(200)
                        RESPONSE.setHeader('Content-Type',
                                           'text/xml; charset="utf-8"')
                        RESPONSE.setBody(lock.asXML())
                        break
                if found:
                    break
            if not found:
                RESPONSE.setStatus(412)  # Precondition failed

        return RESPONSE

    security.declareProtected(webdav_unlock_items, 'UNLOCK')
    def UNLOCK(self, REQUEST, RESPONSE):
        """Remove an existing lock on a resource."""
        from webdav.davcmds import Unlock
        self.dav__init(REQUEST, RESPONSE)
        token = REQUEST.get_header('Lock-Token', '')
        url = REQUEST['URL']
        token = tokenFinder(token)

        cmd = Unlock()
        result = cmd.apply(self, token, url)

        if result:
            # Partial failure: report per-resource status.
            RESPONSE.setStatus(207)
            RESPONSE.setHeader('Content-Type', 'text/xml; charset="utf-8"')
            RESPONSE.setBody(result)
        else:
            RESPONSE.setStatus(204)  # No Content response code
        return RESPONSE
security.declareProtected(webdav_access, 'manage_DAVget')
def manage_DAVget(self):
"""Gets the document source"""
# The default implementation calls manage_FTPget
return self.manage_FTPget()
security.declareProtected(webdav_access, 'listDAVObjects')
def listDAVObjects(self):
    """Return the child objects exposed over WebDAV (none for a plain resource)."""
    return list()
InitializeClass(Resource)
| [
"[email protected]"
]
| |
49f7dbbdfffd887a721bcc1a2ee1ced7e8de18d3 | 26bd175ffb3bd204db5bcb70eec2e3dfd55fbe9f | /exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/network/netvisor/pn_cpu_class.py | fadbed03e41b7d154a3530d1d8ce9f13d78ed446 | [
"MIT",
"GPL-3.0-only",
"GPL-3.0-or-later",
"CC0-1.0",
"GPL-1.0-or-later"
]
| permissive | tr3ck3r/linklight | 37814ed19173d893cdff161355d70a1cf538239b | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | refs/heads/master | 2021-04-11T04:33:02.727318 | 2020-03-25T17:38:41 | 2020-03-25T17:38:41 | 248,992,437 | 0 | 0 | MIT | 2020-03-21T14:26:25 | 2020-03-21T14:26:25 | null | UTF-8 | Python | false | false | 5,894 | py | #!/usr/bin/python
# Copyright: (c) 2018, Pluribus Networks
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: pn_cpu_class
author: "Pluribus Networks (@rajaspachipulusu17)"
short_description: CLI command to create/modify/delete cpu-class
description:
- This module can be used to create, modify and delete CPU class information.
options:
pn_cliswitch:
description:
- Target switch to run the CLI on.
required: False
type: str
state:
description:
- State the action to perform. Use C(present) to create cpu-class and
C(absent) to delete cpu-class C(update) to modify the cpu-class.
required: True
type: str
choices: ['present', 'absent', 'update']
pn_scope:
description:
- scope for CPU class.
required: false
choices: ['local', 'fabric']
pn_hog_protect:
description:
- enable host-based hog protection.
required: False
type: str
choices: ['disable', 'enable', 'enable-and-drop']
pn_rate_limit:
description:
- rate-limit for CPU class.
required: False
type: str
pn_name:
description:
- name for the CPU class.
required: False
type: str
'''
EXAMPLES = """
- name: create cpu class
pn_cpu_class:
pn_cliswitch: 'sw01'
state: 'present'
pn_name: 'icmp'
pn_rate_limit: '1000'
pn_scope: 'local'
- name: delete cpu class
pn_cpu_class:
pn_cliswitch: 'sw01'
state: 'absent'
pn_name: 'icmp'
- name: modify cpu class
pn_cpu_class:
pn_cliswitch: 'sw01'
state: 'update'
pn_name: 'icmp'
pn_rate_limit: '2000'
"""
RETURN = """
command:
description: the CLI command run on the target node.
returned: always
type: str
stdout:
description: set of responses from the cpu-class command.
returned: always
type: list
stderr:
description: set of error responses from the cpu-class command.
returned: on error
type: list
changed:
description: indicates whether the CLI caused changes on the target.
returned: always
type: bool
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils.network.netvisor.pn_nvos import pn_cli, run_cli
from ansible_collections.community.general.plugins.module_utils.network.netvisor.netvisor import run_commands
def check_cli(module, cli):
    """
    Check for idempotency using the cpu-class-show command.

    Fails the module if the CPU class feature is disabled on the switch.

    :param module: The Ansible module to fetch input parameters
    :param cli: The CLI string
    :return: True if a cpu-class named ``pn_name`` exists, else False.
    """
    name = module.params['pn_name']
    clicopy = cli
    cli += ' system-settings-show format cpu-class-enable no-show-headers'
    out = run_commands(module, cli)[1]
    # CPU class support must be switched on before any create/delete.
    if 'on' not in out.split():
        module.fail_json(
            failed=True,
            msg='Enable CPU class before creating or deleting'
        )
    cli = clicopy
    cli += ' cpu-class-show format name no-show-headers'
    out = run_commands(module, cli)[1]
    # Bug fix: an empty CLI response previously fell through without an
    # explicit return, yielding None, which broke the strict
    # `NAME_EXISTS is False` checks in main().
    if not out:
        return False
    return name in out.split()
def main():
    """Entry point: parse module arguments and run the requested cpu-class command."""
    state_map = dict(
        present='cpu-class-create',
        absent='cpu-class-delete',
        update='cpu-class-modify'
    )
    module = AnsibleModule(
        argument_spec=dict(
            pn_cliswitch=dict(required=False, type='str'),
            state=dict(required=True, type='str',
                       choices=state_map.keys()),
            pn_scope=dict(required=False, type='str',
                          choices=['local', 'fabric']),
            pn_hog_protect=dict(required=False, type='str',
                                choices=['disable', 'enable',
                                         'enable-and-drop']),
            pn_rate_limit=dict(required=False, type='str'),
            pn_name=dict(required=False, type='str'),
        ),
        required_if=(
            ['state', 'present', ['pn_name', 'pn_scope', 'pn_rate_limit']],
            ['state', 'absent', ['pn_name']],
            ['state', 'update', ['pn_name']],
        )
    )

    # Pull the validated arguments out of the module.
    params = module.params
    cliswitch = params['pn_cliswitch']
    state = params['state']
    scope = params['pn_scope']
    hog_protect = params['pn_hog_protect']
    rate_limit = params['pn_rate_limit']
    name = params['pn_name']
    command = state_map[state]

    # Build the CLI command string and check whether the class already exists.
    cli = pn_cli(module, cliswitch)
    name_exists = check_cli(module, cli)
    cli += ' %s name %s ' % (command, name)

    if command == 'cpu-class-modify':
        if name_exists is False:
            module.fail_json(
                failed=True,
                msg='cpu class with name %s does not exist' % name
            )
    if command == 'cpu-class-delete':
        if name_exists is False:
            module.exit_json(
                skipped=True,
                msg='cpu class with name %s does not exist' % name
            )
    if command == 'cpu-class-create':
        if name_exists is True:
            module.exit_json(
                skipped=True,
                msg='cpu class with name %s already exists' % name
            )

    if scope:
        cli += ' scope %s ' % scope
    # hog-protect / rate-limit only apply to create and modify.
    if command != 'cpu-class-delete':
        if hog_protect:
            cli += ' hog-protect %s ' % hog_protect
        if rate_limit:
            cli += ' rate-limit %s ' % rate_limit

    run_cli(module, cli, state_map)
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
da3610c3294ce12848a64eefb746c8bed8a6b374 | 5b9fe71c2efe0139205020b038f7d31b6a5ede87 | /lux/utils/debug_utils.py | 14ffa3a4d570fe5723c5243d4bc0143a0f6285ba | [
"Apache-2.0"
]
| permissive | lux-org/lux | 7a7c8534eec5d2b2114b1a95e64497cf9b52871a | 972e5ec24991483370dda67de6bb1e354bcf8ca6 | refs/heads/master | 2023-08-21T04:05:51.279103 | 2023-07-04T23:34:35 | 2023-07-04T23:34:35 | 232,480,726 | 4,811 | 377 | Apache-2.0 | 2023-07-12T17:45:37 | 2020-01-08T04:53:29 | Python | UTF-8 | Python | false | false | 6,852 | py | import json
from pathlib import Path
import re
import subprocess
import typing as tp
import re
import subprocess
from typing import Optional
def _lux_version_tuple(version: str) -> tuple:
    """Parse the leading numeric components of a version string.

    Stops at the first non-numeric component so strings such as '0.3.0rc1'
    still compare sensibly; returns a tuple of ints for ordered comparison.
    """
    parts = []
    for piece in version.split("."):
        digits = ""
        for ch in piece:
            if ch.isdigit():
                digits += ch
            else:
                break
        if not digits:
            break
        parts.append(int(digits))
    return tuple(parts)


def show_versions(return_string: bool = False) -> Optional[str]:
    """
    Prints the versions of the principal packages used by Lux for debugging purposes.

    Parameters
    ----------
    return_string: Whether to return the versions as a string or print them.

    Returns
    -------
    If return_string is True, returns a string with the versions else the versions
    are printed and None is returned.
    """
    import platform

    import altair
    import lux
    import luxwidget
    import matplotlib
    import pandas as pd

    header = "Package Versions\n----------------\n"
    jupyter_versions_str = subprocess.check_output(["jupyter", "--version"])
    jupyter_versions = re.findall(r"(\S+)\s+: (.+)\S*", jupyter_versions_str.decode("utf-8"))

    str_lux_error = ""
    str_lux_error += "lux-api library is not installed. You may need to run the following code in your command line:\n"
    str_lux_error += " pip install lux-api"

    # Check if correct lux library is installed
    try:
        import lux
    except ModuleNotFoundError:
        print(str_lux_error)

    lux_version = lux.__version__
    str_upgrade = f"The current version of lux is {lux_version}. We recommend upgrading the lux to version 0.3 and above."
    str_upgrade += "To upgrade, please run the following code in your command line:\n"
    str_upgrade += " pip install --upgrade lux-api"

    # Bug fix: the previous check compared version strings lexicographically
    # (str(lux_version) < "0.3"), which wrongly flags e.g. "0.10" as older
    # than "0.3". Compare numeric component tuples instead.
    if _lux_version_tuple(str(lux_version)) < (0, 3):
        print(str_upgrade)

    df = pd.DataFrame(
        [
            ("python", platform.python_version()),
            ("lux", lux.__version__),
            ("pandas", pd.__version__),
            ("luxwidget", luxwidget.__version__),
            ("matplotlib", matplotlib.__version__),
            ("altair", altair.__version__),
        ]
        + jupyter_versions,
        columns=["", "Version"],
    )
    str_rep = header + df.to_string(index=False, justify="left")
    if return_string:
        return str_rep
    else:
        print(str_rep)
def debug_info(return_string: bool = False) -> Optional[str]:
    """
    Prints all the information that could be useful for debugging purposes.

    Currently, this includes:
    * The versions of the packages used by Lux
    * Info about the current state of luxwidget

    Parameters
    ----------
    return_string: Whether to return the versions as a string or print them.

    Returns
    -------
    If return_string is True, returns a string with the debug info else the
    string will be printed and None is returned.
    """
    versions = show_versions(return_string=True)
    widget_msg = check_luxwidget_enabled(return_string=True)
    assert versions is not None
    assert widget_msg is not None
    # Append the widget-status section beneath the version table.
    report = versions + "\n\n" + "Widget Setup\n-------------\n" + widget_msg + "\n"
    if return_string:
        return report
    else:
        print(report)
def notebook_enabled() -> tp.Tuple[bool, str]:
    """Check whether the luxwidget nbextension is installed and enabled.

    Returns a (success, message) pair; message is non-empty on failure.
    """
    status, nbextension_list = subprocess.getstatusoutput("jupyter nbextension list")
    if status != 0:
        return False, "❌ Failed to run 'jupyter nbextension list'\n"

    dirs = re.findall(r"config dir:(.*)\n", nbextension_list)
    if not dirs:
        return False, "❌ No 'config dir' found in 'jupyter nbextension list'\n"
    config_dir = dirs[0].strip()

    notebook_json = Path(config_dir) / "notebook.json"
    if not notebook_json.exists():
        return False, f"'{notebook_json}' does not exist\n"

    extensions = json.loads(notebook_json.read_text())
    if "load_extensions" not in extensions:
        return False, "❌ 'load_extensions' not in 'notebook.json'\n"
    loaded = extensions["load_extensions"]
    if "luxwidget/extension" not in loaded:
        return False, "❌ 'luxwidget/extension' not in 'load_extensions'\n"
    if not loaded["luxwidget/extension"]:
        return False, "❌ luxwidget is installed but not enabled\n"
    return True, ""
def lab_enabled() -> tp.Tuple[bool, str]:
    """Check whether the luxwidget labextension is installed, enabled and healthy.

    Returns a (success, message) pair; message is non-empty on failure.
    """
    rc, lab_list = subprocess.getstatusoutput("jupyter labextension list")
    if rc != 0:
        return (
            False,
            "❌ Failed to run 'jupyter labextension list'. Do you have Jupyter Lab installed in this environment?",
        )
    entries = re.findall(r"luxwidget (\S+) (\S+) (\S+)", lab_list)
    if not entries:
        return False, "❌ 'luxwidget' not found in 'jupyter labextension list'\n"
    version_str, enabled_str, status_str = (_strip_ansi(s) for s in entries[0])
    if enabled_str != "enabled":
        enabled_str = re.sub(r"\033\[(\d|;)+?m", "", enabled_str)
        return False, f"❌ luxwidget is installed but currently '{enabled_str}'\n"
    if status_str != "OK":
        return False, f"❌ luxwidget is installed but currently '{status_str}'\n"
    return True, ""
def is_lab_notebook():
    """Heuristic: True when the parent process command line mentions jupyter-lab."""
    import re

    import psutil

    for arg in psutil.Process().parent().cmdline():
        if re.search("jupyter-lab", arg):
            return True
    return False
def check_luxwidget_enabled(return_string: bool = False) -> Optional[str]:
    """Report whether the luxwidget front-end extension is installed and enabled.

    Parameters
    ----------
    return_string: Whether to return the status message or print it.

    Fixes
    -----
    * Previously the status message was built but never printed when
      ``return_string`` was False (the function silently returned None);
      it now prints, consistent with show_versions/debug_info.
    * Corrected the "shell note available" typo in the no-IPython message.
    """
    # get the ipython shell
    import IPython

    ip = IPython.get_ipython()
    # Lux only works inside a notebook front end; bail out with an
    # explanatory message when no IPython shell is present.
    if ip is None:
        return "❌ IPython shell not available.\nPlease note that Lux must be used within a notebook interface (e.g., Jupyter notebook, Jupyter Lab, JupyterHub, or VSCode)\n"

    is_lab = is_lab_notebook()
    if is_lab:
        msg = "✅ Jupyter Lab Running\n"
        enabled, emsg = lab_enabled()
        msg = msg + emsg
        if not enabled:
            msg += "❌ WARNING: luxwidget is not enabled in Jupyter Lab."
            msg += "You may need to run the following code in your command line:\n"
            msg += " jupyter labextension install @jupyter-widgets/jupyterlab-manager\n"
            msg += " jupyter labextension install luxwidget"
        else:
            msg += "✅ luxwidget is enabled"
    else:
        msg = "✅ Jupyter Notebook Running\n"
        enabled, emsg = notebook_enabled()
        msg = msg + emsg
        if not enabled:
            msg += "❌ WARNING: luxwidget is not enabled in Jupyter Notebook.\n"
            msg += "You may need to run the following code in your command line:\n"
            msg += " jupyter nbextension install --py luxwidget\n"
            msg += " jupyter nbextension enable --py luxwidget"
        else:
            msg += "✅ luxwidget is enabled"

    if return_string:
        return msg
    print(msg)
    return None
def _strip_ansi(source):
return re.sub(r"\033\[(\d|;)+?m", "", source)
if __name__ == "__main__":
check_luxwidget_enabled()
| [
"[email protected]"
]
| |
72111d9069d5463f365998e1f2428329f7f7f195 | a79ab025913ba5a96b11bd506d9915f4533f4029 | /golfProj/golf_app/templatetags/golf_extras.py | 0dd0f7aa73defb8cb95847a41ca1632adbae8a5b | []
| no_license | jflynn87/golf_game | 2533548b1b8313661216446ddfa7927b63717118 | a24f710fbc39d25cc93b5b4c5c4d6575ef38c6bb | refs/heads/master | 2022-07-11T00:27:46.765936 | 2019-04-17T03:07:45 | 2019-04-17T03:07:45 | 174,344,305 | 0 | 0 | null | 2022-07-06T20:01:36 | 2019-03-07T12:57:45 | Python | UTF-8 | Python | false | false | 1,733 | py | from django import template
from golf_app.models import Picks, mpScores, Field, Tournament, Group
from django.db.models import Count
register = template.Library()
@register.filter
def model_name(obj):
    """Template filter: return the human-readable model name of *obj*."""
    meta = obj._meta
    return meta.verbose_name
@register.filter
def currency(dollars):
    """Template filter: format a numeric amount as a whole-dollar string."""
    return f"${int(dollars)}"
@register.filter
def line_break(count):
    """Template filter: True when a row break should be inserted at *count*.

    Breaks after every block of distinct users picking in the current tournament.
    """
    user_cnt = Picks.objects.filter(playerName__tournament__current=True).values('playerName__tournament').annotate(Count('user', distinct=True))
    users_per_row = user_cnt[0].get('user__count')
    return (count - 1) % users_per_row == 0 or count == 0
@register.filter
def first_round(pick):
    """Template filter: match-play record for *pick* over rounds 1-3 as 'W-L-T'."""
    entry = Field.objects.get(tournament__pga_tournament_num='470', playerName=pick)
    wins = mpScores.objects.filter(player=entry, round__lt=4, result="Yes").count()
    # "AS" (all square) rows are ties, not losses, so exclude them here.
    losses = mpScores.objects.filter(player=entry, round__lt=4, result="No").exclude(score="AS").count()
    ties = mpScores.objects.filter(player=entry, round__lt=4, score="AS").count()
    return "{}-{}-{}".format(wins, losses, ties)
@register.filter
def leader(group):
    """Template filter: names of the golfer(s) leading *group* on points.

    Points = wins + 0.5 * ties from the first_round 'W-L-T' record.

    Fixes
    -----
    * Previously single characters of the record string were indexed
      ([0] for wins, [4] for ties), which is wrong as soon as any count
      reaches two digits; the record is now split on '-'.
    * max() was recomputed inside the comprehension for every golfer
      (O(n^2)); it is now computed once.
    * An empty field now returns [] instead of relying on the comprehension
      never evaluating max().
    """
    tournament = Tournament.objects.get(pga_tournament_num="470")
    grp = Group.objects.get(tournament=tournament, number=group)
    field = Field.objects.filter(tournament=tournament, group=grp)
    points = {}
    for golfer in field:
        wins, losses, ties = first_round(golfer.playerName).split('-')
        points[golfer.playerName] = int(wins) + 0.5 * int(ties)
    if not points:
        return []
    best = max(points.values())
    winner = [name for name, pts in points.items() if pts == best]
    return winner
| [
"[email protected]"
]
| |
d0b34b6a3131818d130e8b94fd6733dcbb2ddd3d | 95d9a8ed3ffba64c5e00cf0cc0eebf97c78c8f12 | /06.py | 5699632f559f42905c6467d08182b9ffc50f9009 | []
| no_license | SE6lab/SE6_lab | 0a50dfce3de4992f031ff9b75f35ac6db82a24c7 | 2c206fadefbc64e6fe42152cbd5f15f28d114898 | refs/heads/main | 2022-12-29T20:12:30.223653 | 2020-10-14T23:38:04 | 2020-10-14T23:38:04 | 304,121,984 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,828 | py | Python 3.7.1 (v3.7.1:260ec2c36a, Oct 20 2018, 14:05:16) [MSC v.1915 32 bit (Intel)] on win32
Type "help", "copyright", "credits" or "license()" for more information.
>>>
def add(x, y):
    """Return the sum of x and y."""
    total = x + y
    return total
def subtract(x, y):
    """Return x minus y."""
    difference = x - y
    return difference
def multiply(x, y):
    """Return the product of x and y."""
    product = x * y
    return product
def divide(x, y):
    """Return x divided by y (true division; ZeroDivisionError propagates)."""
    quotient = x / y
    return quotient
def fact(N):
    """Return N! computed iteratively; fact(0) == 1."""
    product = 1
    for factor in range(2, N + 1):
        product *= factor
    return product
def sqrt(x):
    """Return the square root of x.

    Fix: the module never imported `math`, so calling this function raised
    NameError; the import is now done locally so the function is
    self-contained.
    """
    import math
    return math.sqrt(x)
# --- Interactive calculator menu -------------------------------------------
# Keeps prompting until a valid choice (1-6) is entered, performs the
# operation once, then exits the loop.
print("Select operation.")
print("1.add")
print("2.subtract one number from another")
print("3.multiply")
print("4.divide")
print("5.sqrt")
print("6.factorial")

while True:
    choice = input("Enter choice(1/2/3/4/5/6): ")
    if choice in ('1', '2', '3', '4', '5', '6'):
        if choice == '1':
            num1 = float(input("enter first number: "))
            num2 = float(input("enter second number: "))
            print(num1, "+", num2, "=", add(num1, num2))
        elif choice == '2':
            num1 = float(input("Enter first number: "))
            num2 = float(input("Enter second number: "))
            print(num1, "-", num2, "=", subtract(num1, num2))
        elif choice == '3':
            num1 = float(input("Enter first number: "))
            num2 = float(input("Enter second number: "))
            print(num1, "*", num2, "=", multiply(num1, num2))
        elif choice == '4':
            num1 = float(input("Enter first number: "))
            num2 = float(input("Enter second number: "))
            print(num1, "/", num2, "=", divide(num1, num2))
        elif choice == '5':
            num1 = int(input("Enter first number: "))
            # Bug fix: previously called undefined square(); helper is sqrt().
            print("Square root of ", num1, "is ", sqrt(num1))
        elif choice == '6':
            num1 = int(input("Enter first number: "))
            # Bug fix: previously called undefined factorial(); helper is fact().
            print("Factorial ", num1, "is ", fact(num1))
        break
    else:
        print("Error")
| [
"[email protected]"
]
| |
56de2d510c239c7166bacb067023ccf1350afd5c | 6437af616b0752b24e1b62bc98d302b2e04a7c85 | /pagnition/supplier/direct.asda.com/pagination.py | bfbd6256239a5235efa713fa850d5363ea0ad07a | []
| no_license | kangqiwang/imageWebCrawler | 4c7ebc0c93fd52b27f08a0f79302885d95f53a6e | 76fe21802a5a03638e324e6d18fe5698a69aba70 | refs/heads/master | 2022-05-31T00:51:39.649907 | 2019-08-28T15:06:37 | 2019-08-28T15:06:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 391 | py | import pandas as pd
import numpy as np
import requests
def pagnition():
    """Read ASDA category URLs, append paging query params, and save the result.

    Only URLs containing ',default,sc.html' are paginated; the output CSV
    starts with a literal 'url' header row.
    """
    df = pd.read_csv("pagnition/input/direct_asda.csv", usecols=['url'])
    paged_urls = ['url']
    for link in df['url']:
        if ',default,sc.html' in link:
            paged_urls.append(link + '?start=0&sz=500')
    pd.Series(paged_urls).to_csv("pagnition/output/direct_asda_now.csv", index=False)
pagnition() | [
"[email protected]"
]
| |
69c2c24ee9deb0be24c47ae319d6c142d87078be | 33e8dd262bd52681eae5f79b9424a09733d1591c | /sandeep/Tutorial-1-Examples/Test-lint.py | 54c41561b4254ab4f571bf8aef73609e60579d34 | []
| no_license | pradeep122/LearnPython | 50e2dd1b5b4c300a0d8b60b8cdea53bc3426375f | 2cd5c4b0845bfa5f8daee778d69cb01ca077eee9 | refs/heads/master | 2020-03-19T00:07:07.973077 | 2019-05-18T17:51:47 | 2019-05-18T17:51:47 | 135,454,354 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26 | py | print ("Hello World")
2 + | [
"[email protected]"
]
| |
f0fca8f69e4603da09b68522086637b899373e33 | f1a0f58220081ddc592ae98d6482f19030489495 | /hello.py | 8dd33ddccf910b7bc99e01a41a00475ba7f8060e | []
| no_license | evanmei87/hello_python | b4042ce6942fbe3e43888395b3fe1632530675b9 | 855ba20f5b55b407af0b7f130e0ab8565b3e87fd | refs/heads/master | 2020-03-23T21:18:43.320537 | 2018-07-27T02:10:46 | 2018-07-27T02:10:46 | 142,097,032 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52 | py | msg = "Hello World"
print(msg)
msg += " i am a test" | [
"[email protected]"
]
| |
e014ea574d48dbb9d13b76aa38053a9f2b22e8cd | 970cbdf412d89f97e0b9ddde096aac1b90b3b5f0 | /src/clustering/clustering/visualization.py | ed945cf9e918ef4efad69fc3a9d2b12f7c64a185 | [
"MIT"
]
| permissive | stymy/pipelines | 8aa2e3df6dd3b8d201b2e681c82deaf708221db8 | 1aafbb2fdab118b7750199396402cffbbe5af763 | refs/heads/master | 2021-01-16T19:29:09.527595 | 2014-05-04T18:29:41 | 2014-05-04T18:29:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,667 | py | import surfer
from surfer import Brain
import numpy as np
import nibabel as nb
import nipype.interfaces.freesurfer as fs
#from variables import freesurferdir, resultsdir
import os
import glob
import sys
def annotation():
    """Overlay the Destrieux (aparc.a2009s) parcellation on the global brain view."""
    brain.add_annotation('aparc.a2009s', alpha=0.2)
def roi():
    """Highlight the prefrontal label as a semi-transparent ROI on the global brain."""
    brain.add_label('prefrontal', alpha=0.4)
def save(filename):
    """Save a horizontal four-view montage (med/lat/ros/vent) as <filename>.png."""
    views = ['med', 'lat', 'ros', 'vent']
    brain.save_montage(filename + '.png', views, orientation='h')
# Walk *dir* and render/save a montage for every NIfTI (.nii) cluster map found.
def all_brains(dir):
for root, dirs, filenames in os.walk(dir):
for f in filenames:
# First two characters of the filename are taken as the hemisphere code.
hemi = f[:2]
# Files named 'De...'/'te...' are treated as left-hemisphere maps.
if hemi == 'De' or hemi == 'te':
hemi = 'lh'
# NOTE(review): endswith('nii') also misses compressed '.nii.gz' files -- confirm intended.
if f.endswith('nii'):
print f, hemi
clustermap = nb.load(os.path.join(root,f)).get_data()
# NOTE(review): add_cluster() is declared with a third parameter
# `fsaverage`; this two-argument call fails unless a default is
# supplied -- confirm the intended subject template.
add_cluster(clustermap, hemi)
save(os.path.join(root,f))
def add_cluster(clustermap, hemi, fsaverage='fsaverage4'):
    """Display *clustermap* on a fresh PySurfer Brain for hemisphere *hemi*.

    Fix: `fsaverage` now defaults to 'fsaverage4' (the value used in the
    __main__ block), so the two-argument call in all_brains() no longer
    raises TypeError; passing an explicit subject id keeps the old behavior.
    """
    # NOTE(review): relies on the module-global `surface`, which is only
    # bound inside the __main__ block -- confirm before importing elsewhere.
    brain = Brain(fsaverage, hemi, surface, config_opts=dict(background="lightslategray", cortex="high_contrast"))
    brain.add_data(clustermap, colormap='spectral', alpha=.8)
    # Cluster labels start at 1 (-1 marks masked regions), hence max()+1 bins.
    brain.data['colorbar'].number_of_colors = int(clustermap.max()) + 1
    brain.data['colorbar'].number_of_labels = int(clustermap.max()) + 1
if __name__ == '__main__' :
#fs.FSCommand.set_default_subjects_dir('SCR/data/Final_High')#(freesurferdir)
#pysurfer visualization
# Default view parameters: low-res average template, left hemisphere, pial surface.
fsaverage = 'fsaverage4'
hemi = 'lh'
surface = 'pial'
# Create the module-global Brain used by annotation()/roi()/save().
brain = Brain(fsaverage, hemi, surface, config_opts=dict(background="lightslategray", cortex="high_contrast"))
print('FORMAT: add_cluster(clustermap,hemisphere,fsaverage)')
| [
"[email protected]"
]
| |
f6fb7103e473fba78d2bc7c0011a6cf60f1cc0aa | 2283d7ae2d8c6d2a13b5dbf9e13a395600fb7951 | /pointergen/decode.py | 0bbb7829b61e8fce9c0c38284c6fb72813298c86 | []
| no_license | nishanth01/summary_final | eebb3df6de939a2af955cb904b489d3d5e6a10e1 | b8d9413db87de1a0ce46085f1beae94cb976fa57 | refs/heads/master | 2020-03-25T23:25:00.052768 | 2018-08-11T09:47:53 | 2018-08-11T09:47:53 | 144,273,733 | 12 | 8 | null | null | null | null | UTF-8 | Python | false | false | 8,181 | py | import os
import time
import tensorflow as tf
import beam_search
import data
import json
import logging
import numpy as np
import util
from rouge import Rouge
SECS_UNTIL_NEW_CKPT = 60
def make_html_safe(s):
    """Return *s* with angle brackets escaped so the text renders literally in HTML.

    Fix: str.replace() returns a new string (strings are immutable); the
    previous code discarded both results and returned the input unchanged.
    """
    return s.replace("<", "&lt;").replace(">", "&gt;")
def print_results(article, abstract, decoded_output):
    """Log the source article, reference summary and generated summary."""
    divider = "---------------------------------------------------------------------------"
    print(divider)
    tf.logging.info('ARTICLE: %s', article)
    tf.logging.info('REFERENCE SUMMARY: %s', abstract)
    tf.logging.info('GENERATED SUMMARY: %s', decoded_output)
    print(divider)
class BeamSearchDecoder(object):
# Set up the decoder: stash collaborators, restore the latest checkpoint,
# and create the output directory tree.
def __init__(self, model, session, vocab, hps, saver):
self._model = model
self._vocab = vocab
self._saver = saver
self._session = session
self._hps = hps
# Accumulates {'hyp': ..., 'ref': ...} pairs for the final ROUGE evaluation.
self.rouge_data = []
# Restore model weights from the configured checkpoint directory.
ckpt_path = util.load_ckpt(self._saver, self._session,self._hps,self._hps.cp_dir)
self.setup_dir()
def setup_dir(self):
    """Ensure the decode/, reference/ and generated/ output directories exist."""
    decode_dir = os.path.join(self._hps.log_root, "decode")
    ref_dir = os.path.join(decode_dir, "reference")
    gen_dir = os.path.join(decode_dir, "generated")
    for directory in (decode_dir, ref_dir, gen_dir):
        if not os.path.exists(directory):
            os.mkdir(directory)
    self._decode_dir = decode_dir
    self._rouge_ref_dir = ref_dir
    self._rouge_dec_dir = gen_dir
# Run beam-search decoding over up to 10000 batches, writing each
# reference/hypothesis pair for ROUGE, then print the aggregate scores.
def decode(self,batches):
counter = 0
for batch in batches:
if(counter < 10000):
original_article = batch.original_articles[0]
original_abstract = batch.original_abstracts[0]
original_abstract_sents = batch.original_abstracts_sents[0]
# Human-readable views with OOV tokens marked.
article_withunks = data.show_art_oovs(original_article, self._vocab)
abstract_withunks = data.show_abs_oovs(original_abstract,
self._vocab,
batch.art_oovs[0])
best_hypothesis = beam_search.run_beam_search(self._session,
self._model,
self._vocab,
batch,
self._hps)
# Drop the leading START token, then map ids back to words
# (including article-specific OOVs).
output_ids = [int(t) for t in best_hypothesis.tokens[1:]]
decoded_words = data.outputids2words(output_ids, self._vocab,batch.art_oovs[0])
try:
fst_stop_idx = decoded_words.index(data.STOP_DECODING) # index of the (first) [STOP] symbol
decoded_words = decoded_words[:fst_stop_idx]
except ValueError:
decoded_words = decoded_words
decoded_output = ' '.join(decoded_words) # single string
self.write_for_rouge(original_abstract_sents, decoded_words, counter,original_article)
counter += 1
else:
break
self.rouge_eval()
# Decode a single batch, print the result, and dump attention-visualization
# data alongside the ROUGE files.
def decodeOneSample(self,batches):
batch = batches[0]
original_article = batch.original_articles[0]
original_abstract = batch.original_abstracts[0]
original_abstract_sents = batch.original_abstracts_sents[0]
# Human-readable views with OOV tokens marked.
article_withunks = data.show_art_oovs(original_article, self._vocab)
abstract_withunks = data.show_abs_oovs(original_abstract,
self._vocab,
batch.art_oovs[0])
best_hypothesis = beam_search.run_beam_search(self._session,
self._model,
self._vocab,
batch,
self._hps)
# Drop the leading START token, then map ids back to words.
output_ids = [int(t) for t in best_hypothesis.tokens[1:]]
decoded_words = data.outputids2words(output_ids, self._vocab,batch.art_oovs[0])
try:
fst_stop_idx = decoded_words.index(data.STOP_DECODING) # index of the (first) [STOP] symbol
decoded_words = decoded_words[:fst_stop_idx]
except ValueError:
decoded_words = decoded_words
decoded_output = ' '.join(decoded_words) # single string
self.write_for_rouge(original_abstract_sents, decoded_words, 0,original_article)
self.rouge_eval()
print_results(article_withunks, abstract_withunks, decoded_output)
self.write_for_attnvis(article_withunks, abstract_withunks,
decoded_words, best_hypothesis.attn_dists,
best_hypothesis.p_gens)
def rouge_eval(self):
    """Compute average ROUGE scores over all collected hyp/ref pairs and print them."""
    pairs = [[entry['hyp'], entry['ref']] for entry in self.rouge_data]
    hyps, refs = map(list, zip(*pairs))
    scores = Rouge().get_scores(hyps, refs, avg=True)
    banner = '======================================================================'
    print(banner)
    print(banner)
    print('ROUGE SCORES')
    print(scores)
    print(banner)
    print(banner)
def write_for_attnvis(self, article, abstract, decoded_words, attn_dists, p_gens):
    """Dump attention distributions and generation probabilities as JSON
    for the attention-visualization tool."""
    payload = {
        'article_lst': [make_html_safe(tok) for tok in article.split()],
        'decoded_lst': [make_html_safe(tok) for tok in decoded_words],
        'abstract_str': make_html_safe(abstract),
        'attn_dists': attn_dists,
        'p_gens': p_gens
    }
    output_fname = os.path.join(self._decode_dir, 'attn_vis_data.json')
    with open(output_fname, 'w') as output_file:
        json.dump(payload, output_file)
    tf.logging.info('Wrote visualization data to %s', output_fname)
# Split the decoded words into sentences (on '.'), write article/reference/
# hypothesis files for example *ex_index*, and stash the pair for rouge_eval().
def write_for_rouge(self, reference_sents, decoded_words, ex_index,original_article):
decoded_sents = []
data = {}
while len(decoded_words) > 0:
try:
fst_period_idx = decoded_words.index(".")
except ValueError: # there is text remaining that doesn't end in "."
fst_period_idx = len(decoded_words)
sent = decoded_words[:fst_period_idx+1] # sentence up to and including the period
decoded_words = decoded_words[fst_period_idx+1:] # everything else
decoded_sents.append(' '.join(sent))
# Escape angle brackets so the text is safe to embed in HTML reports.
decoded_sents = [make_html_safe(w) for w in decoded_sents]
reference_sents = [make_html_safe(w) for w in reference_sents]
orig_article = original_article#' '.join(original_article)
rouge_hyp = ' '.join(decoded_sents)
rouge_ref = ' '.join(reference_sents)
# Write to file
article_file = os.path.join(self._rouge_ref_dir, "%06d_article.txt" % ex_index)
ref_file = os.path.join(self._rouge_ref_dir, "%06d_reference.txt" % ex_index)
decoded_file = os.path.join(self._rouge_dec_dir, "%06d_generated.txt" % ex_index)
with open(article_file, "w") as f:
f.write(orig_article)
with open(ref_file, "w") as f:
f.write(rouge_ref)
with open(decoded_file, "w") as f:
f.write(rouge_hyp)
# Keep the pair in memory for the aggregate ROUGE computation.
data['hyp'] = rouge_hyp
data['ref'] = rouge_ref
self.rouge_data.append(data)
tf.logging.info("Added %i to file" % ex_index)
| [
"[email protected] config --global user.name nisnair"
]
| [email protected] config --global user.name nisnair |
ac499025b4a3c16875c1014efa0d58f38731cd89 | 00b4275632eec48ced3f6aa10d74b3d0ed859bff | /seal_server.py | 139fd3aeb1a09147c5b8b5f2898321a9e5bb3b6f | []
| no_license | adityachivu/pyseal-project | 391393bd71d15464e68b550737c945c681fc0407 | 9e1d51ddffa899062e4828b4abe4d77a41d824d5 | refs/heads/master | 2020-03-21T03:24:18.015369 | 2018-07-18T20:46:23 | 2018-07-18T20:46:23 | 138,051,997 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 991 | py | import ahem
import ahem.cloud as cloud
import os
def main():
    """Run the PySEAL server loop, dispatching client requests until 'close'."""
    print("--------------------------PySEAL SERVER------------------------")
    ahem.PORT = int(input("Enter port:"))
    serversock = ahem.setup_server(ahem.PORT)
    # Map request keywords to their cloud-side handlers.
    handlers = {
        "multiply": cloud.process_multiply_request,
        "add": cloud.process_add_request,
        "subtract": cloud.process_subtract_request,
        "classify": cloud.process_classify_request,
    }
    close_server = False
    while not close_server:
        clientsocket, _addr = serversock.accept()
        option = clientsocket.recv(16).decode()
        clientsocket.close()
        if option == "close":
            close_server = True
        elif option in handlers:
            handlers[option](serversock)
        print("Processed")
    serversock.close()
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
16d63d669283023944571a8eb1c81cf668615f6a | 42e81d00c7348b3cc5988f3176b80aa2289a780d | /BrightHorizon/BrightHorizon/wsgi.py | cdc98414ed4a95e3b040bc13fec02460581381f9 | []
| no_license | sysadmin777/BrightHorizon | 79e2d090889256a9dcace14dd113a65542290130 | aa8579ea73696eb43d3c836d0a2e00d8126810fd | refs/heads/main | 2023-05-25T12:15:07.677108 | 2023-05-19T14:36:43 | 2023-05-19T14:36:43 | 306,961,599 | 1 | 0 | null | 2020-10-27T15:34:22 | 2020-10-24T20:00:35 | CSS | UTF-8 | Python | false | false | 403 | py | """
WSGI config for BrightHorizon project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'BrightHorizon.settings')
application = get_wsgi_application()
| [
"[email protected]"
]
| |
3e73dfff7ea5a0792fd116a2aae63f1c1c8cf31a | 1499bf8983e04c0001289192d4a035278fe8a24d | /second_problem.py | cc417dea6ba8de8dc881805eb0da7d8030378d24 | []
| no_license | prachatos/e0226-assign1 | be4e19fe345de13f8c0106d5217ae407e38d4354 | aee3b625ffb13e650dc5e13717aab04fdbf81322 | refs/heads/master | 2020-03-27T19:25:56.362073 | 2018-09-24T17:18:57 | 2018-09-24T17:18:57 | 146,988,511 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,490 | py | import sys
import time
def write_list_to_file(f, l):
    """Write each item of *l* to file object *f*, newline-separated, with no
    trailing newline after the last item."""
    last = len(l) - 1
    for idx, item in enumerate(l):
        f.write(item)
        if idx != last:
            f.write('\n')
# Gauss-Jordan eliminate the m x n augmented matrix *aug* in place.
# Returns (aug, singular_flag, steps) where *steps* records the row
# operations performed (SWITCH / MULTIPLY / MULTIPLY&ADD) and the flag is
# True when a zero row/column indicates no inverse exists.
# NOTE(review): relies on the module-global `eps`, which is only bound in
# the __main__ block -- confirm before importing this module elsewhere.
def gauss_jordan(aug, m, n):
steps = []
for i in range(m):
pivot = i, i
found_piv = False
# Look for a usable pivot in column i, rows i..m-1.
for j in range(i, m):
pivot = j, i
if abs(aug[j][i]) > eps:
found_piv = True
break
if not found_piv:
# Fall back to scanning row i for a nonzero entry.
for j in range(i, n):
pivot = i, j
if abs(aug[i][j])> eps:
found_piv = True
break
if not found_piv:
continue
# Swap the pivot row into position i, recording the operation.
if pivot[0] != i:
steps.append('SWITCH ' + str(i + 1) + ' ' + str(pivot[0] + 1))
for j in range(n):
aug[i][j], aug[pivot[0]][j] = aug[pivot[0]][j], aug[i][j]
# ok now we process
piv_val = aug[pivot[0]][pivot[0]]
if abs(piv_val) < eps:
continue
# Normalize row i so the pivot becomes 1.
if piv_val != 1:
steps.append('MULTIPLY '+ str(round(1.0/piv_val, 6)) + ' ' + str(i + 1))
for j in range(n):
aug[i][j] = aug[i][j] / piv_val
# Eliminate column i from every other row.
for j in range(m):
if j == i:
continue
ratio = aug[j][i]
if ratio > eps:
steps.append('MULTIPLY&ADD ' + str(round(ratio, 6)) + ' ' + str(i + 1)+ ' ' + str(j + 1))
for k in range(n):
aug[j][k] = aug[j][k] - ratio * aug[i][k]
# Detect an all-zero row in the right-hand block (no inverse).
ac_rc_zero = False
for i in range(m):
rc_zero = True
for j in range(m, n):
if aug[i][j] > eps:
rc_zero = False
aug[i][j] = round(aug[i][j] * 100, 6) / 100
if rc_zero:
ac_rc_zero = True
# Detect an all-zero column in the right-hand block.
rc_zero = False
for i in range(m, n):
rc_zero = True
for j in range(m):
if aug[j][i] > eps:
rc_zero = False
break
if rc_zero:
break
return aug, rc_zero or ac_rc_zero, steps
def matmult(a, b):
    """Return the matrix product of nested-list matrices a and b."""
    b_columns = list(zip(*b))
    product = []
    for row in a:
        product.append([sum(x * y for x, y in zip(row, col)) for col in b_columns])
    return product
def get_aug(mat_a, mat_b, row):
    """Append the first len(mat_a) entries of mat_b[row] onto mat_a (in place)
    and return the mutated list — builds one row of an augmented matrix."""
    width = len(mat_a)  # captured before appending, so the loop bound is fixed
    for col in range(width):
        mat_a.append(mat_b[row][col])
    return mat_a
# Script entry: read a matrix A from the file named on the command line,
# form the augmented system [A*A | A], run Gauss-Jordan, and write the
# result plus the recorded row operations to output_problem2.txt.
if __name__ == '__main__':
# Tolerance used by gauss_jordan() via the module-global name `eps`.
eps = 1e-7
try:
file = sys.argv[1]
except IndexError:
sys.exit(0)
b = []
A = []
X = []
cur_line = 0
# First line of the input file is the dimension n; the rest are matrix rows.
with open(file) as f:
for line in f:
if cur_line == 0:
n = int(line)
cur_line = cur_line + 1
else:
# NOTE(review): map() returns a lazy iterator on Python 3, so the
# later index-based access would fail there -- presumably Python 2
# code; confirm, or wrap in list(...).
A.append(map(float, line.split()))
cur_line = cur_line + 1
store_A = A
start_time = time.time()
X = matmult(A, A)
aug = []
# Augment each row of A*A with the matching row of A.
for row in range(len(X)):
aug.append(get_aug(X[row], A, row))
start_time = time.time()
A, notexists, steps = gauss_jordan(X, len(A), 2*len(A))
ans_list = []
if notexists:
ans_list.append("ALAS! DIDN'T FIND ONE!")
else:
ans_list.append("YAAY! FOUND ONE!")
# Format the right-hand block, printing near-integers without decimals.
for x in A:
inv = ''
for y in range(len(A), 2*len(A)):
if abs(x[y] - int(x[y])) < eps:
inv += str(int(x[y])) + ' '
else:
inv += str(x[y]) + ' '
ans_list.append(inv.rstrip())
ans_list += steps
write_list_to_file(open('output_problem2.txt', 'w'), ans_list)
| [
"[email protected]"
]
| |
cbd6add2f6254216315796e6424530a64d91520a | b1599f517e62c6651c930c28c430ac3ff7d52bb9 | /src/apps/competitions/migrations/0011_competition_competition_type.py | f2df235246d0498858215cdf50ce59fc6d625f57 | [
"Apache-2.0"
]
| permissive | HunDeMingMingBaiBai/competitions-v2 | 4d9ef93f14a3bc43c582c67b62904c6bcf0c19fb | 745b56274ada40b78cda6e91dd762f2d547cd841 | refs/heads/develop | 2023-08-11T06:02:49.780503 | 2021-09-09T04:39:42 | 2021-09-09T04:39:42 | 377,348,778 | 0 | 0 | Apache-2.0 | 2021-06-16T02:32:30 | 2021-06-16T02:32:29 | null | UTF-8 | Python | false | false | 500 | py | # Generated by Django 2.2.10 on 2020-07-03 03:53
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add ``Competition.competition_type``: a CharField restricted to
    'competition' or 'benchmark', defaulting to 'competition'."""

    dependencies = [
        ('competitions', '0010_merge_20200217_2316'),
    ]
    operations = [
        migrations.AddField(
            model_name='competition',
            name='competition_type',
            field=models.CharField(choices=[('competition', 'competition'), ('benchmark', 'benchmark')], default='competition', max_length=128),
        ),
    ]
| [
"[email protected]"
]
| |
fbfbfee07add932f8ce722cef5cedc1a405cbe26 | 70d06fe2cc96b8e9a70ad93cdcd16506ecf4495c | /openleveldb/backend/connectorclient.py | e2de8157b0243c80a8f168c5ea6b522b49643289 | [
"BSD-3-Clause"
]
| permissive | lucmos/openleveldb | 79a79e6b8e49c20c7ba2bff0b85227bf1dde2223 | b0061362e2b74aba47790f6164689b8d2fa87a5d | refs/heads/master | 2023-05-10T21:11:03.743619 | 2021-04-15T15:08:50 | 2021-04-15T16:27:22 | 234,050,110 | 6 | 0 | BSD-3-Clause | 2023-05-02T22:48:00 | 2020-01-15T09:55:28 | Python | UTF-8 | Python | false | false | 5,183 | py | """
Class that communicates with the server module to support dict-like behaviour,
with automatic serialization to bytes and deserialization from bytes provided
by the serialization module
"""
import pickle
from pathlib import Path
from typing import Any, Callable, Iterable, Optional, Union
import requests
from flask import Response
from openleveldb.backend.serializer import DecodeType, decode, encode, normalize_strings
class LevelDBClient:
    """Dict-like HTTP client for a remote LevelDB server.

    Values are converted with the serializer module's ``encode``/``decode``
    on the way in and out; keys are strings, optionally preceded by string
    prefixes (e.g. ``db["prefix", "key"]``).
    """

    # Cache of already-created clients, keyed by (server_address, db_path).
    _instances = {}

    @staticmethod
    def get_instance(
        db_path: Union[str, Path], server_address: str,
    ) -> "LevelDBClient":
        """Return the cached client for (server_address, db_path), creating it once.

        Bug fix: the membership test previously looked up ``db_path`` alone,
        while entries are stored under ``(server_address, db_path)`` — so the
        cache never hit and a fresh client was created on every call.
        """
        db_path = Path(db_path)
        key = (server_address, db_path)
        if key not in LevelDBClient._instances:
            LevelDBClient._instances[key] = LevelDBClient(
                server_address, db_path
            )
        return LevelDBClient._instances[key]

    def __init__(
        self,
        server_address: str,
        db_path: Union[str, Path],
        value_encoder: Callable[[Any], bytes] = encode,
        value_decoder: Callable[[bytes], Any] = decode,
    ) -> None:
        """Store the endpoint, database path and the value codec pair."""
        self.value_encoder, self.value_decoder = value_encoder, value_decoder
        self.server_address = server_address
        self.db_path = db_path

    def _prefixed_db(self, prefixes: Iterable[str]) -> str:
        """
        Apply all the prefixes (last one included) to obtain the desired prefixed database

        :param prefixes: the prefix or the iterable of prefixes to apply
        :returns: the prefixed database
        """
        res = requests.get(
            url=self.server_address + "/get_prefixed_db_path",
            params={"prefixes": prefixes, "dbpath": self.db_path},
        )
        return res.text

    def prefixed_iter(
        self,
        prefixes: Optional[Union[str, Iterable[str]]] = None,
        starting_by: Optional[str] = None,
        include_key=True,
        include_value=True,
    ) -> Iterable:
        """Yield keys and/or values from the server's pickled iterator stream.

        Depending on the include_* flags each item is a decoded key, a decoded
        value, or a (key, value) pair; malformed pairs yield None.
        """
        res = requests.get(
            url=self.server_address + "/iterator",
            params={
                "dbpath": self.db_path,
                "include_key": include_key,
                "include_value": include_value,
                "starting_by": starting_by,
                "prefixes": prefixes,
            },
        )
        for x in pickle.loads(res.content):
            if isinstance(x, bytes):
                # Single stream: bytes are either a key or a value depending
                # on which side was requested.
                if include_key:
                    yield DecodeType.STR.pure_decode_fun(x)
                else:
                    yield self.value_decoder(x)
            else:
                try:
                    key, value = x
                    yield DecodeType.STR.pure_decode_fun(key), self.value_decoder(value)
                except TypeError:
                    yield None

    def __iter__(self) -> Iterable:
        """Iterate over (key, value) pairs of the whole database."""
        return self.prefixed_iter([], None, True, True)

    def prefixed_len(
        self,
        prefixes: Optional[Union[str, Iterable[str]]] = None,
        starting_by: Optional[str] = None,
    ) -> int:
        """Return the number of entries under the given prefixes."""
        res = requests.get(
            url=self.server_address + "/dblen",
            params={
                "dbpath": self.db_path,
                "prefixes": prefixes,
                "starting_by": starting_by,
            },
        )
        return decode(res.content)

    def __len__(self) -> int:
        """Return the total number of entries in the database."""
        res = requests.get(
            url=self.server_address + "/dblen",
            params={"dbpath": self.db_path, "prefixes": None, "starting_by": None},
        )
        return decode(res.content)

    def __setitem__(self, key: Union[str, Iterable[str]], value: Any) -> Response:
        """Encode *value* and store it under *key* (last element of the
        normalized string sequence; everything before it is a prefix)."""
        *prefixes, key = normalize_strings(lambda x: x, key)
        if key is Ellipsis:
            raise TypeError(f"str prefix or key expected, got {type(key).__name__}")
        res = requests.post(
            url=self.server_address + "/setitem",
            data=encode(value),
            headers={"Content-Type": "application/octet-stream"},
            params={"dbpath": self.db_path, "key": key, "prefixes": prefixes},
        )
        return res

    def __getitem__(
        self, key: Union[str, Iterable[Union[str, Ellipsis.__class__]]]
    ) -> Any:
        """Fetch and decode the value stored under *key*; None when missing."""
        *prefixes, key = normalize_strings(lambda x: x, key)
        if key is Ellipsis:
            # Iterating a whole prefix via db[prefix, ...] is not supported here.
            raise NotImplementedError
        res = requests.get(
            url=self.server_address + "/getitem",
            params={"dbpath": self.db_path, "key": key, "prefixes": prefixes},
        )
        return self.value_decoder(res.content) if res.content else None

    def __delitem__(self, key: Union[str, Iterable[str]]) -> Response:
        """Delete the entry stored under *key*."""
        *prefixes, key = normalize_strings(lambda x: x, key)
        res = requests.delete(
            url=self.server_address + "/delitem",
            params={"dbpath": self.db_path, "key": key, "prefixes": prefixes},
        )
        return res

    def __repr__(self) -> str:
        """Ask the server for a printable representation of the database."""
        res = requests.get(
            url=self.server_address + "/repr",
            params={"dbpath": self.db_path, "classname": self.__class__.__name__},
        )
        return res.text

    def close(self) -> None:
        """No-op: the HTTP client holds no persistent connection to release."""
        pass
if __name__ == "__main__":
    # Placeholder entry point: this module is only useful as an imported library.
    pass
| [
"[email protected]"
]
| |
edbdbbde6bd4ce846a29e086c5f9a3ae36597398 | eca4fae18c827272104a68ed9c25ab9f112f4f81 | /Data in More Complex Formats/Problem set/Processing All/process.py | 148ded280c9f3753ca3ee1e4c74029e137bb5149 | []
| no_license | gbengaoti/Data-Wrangling-with-MongoDB | a331f41344ce87a1693f07401aa63c8f332f5dfa | 932d86597bd18e1550a84200a995872e14199001 | refs/heads/master | 2021-05-13T14:19:14.892124 | 2018-01-09T15:41:46 | 2018-01-09T15:41:46 | 116,736,076 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,797 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Let's assume that you combined the code from the previous 2 exercises with code
from the lesson on how to build requests, and downloaded all the data locally.
The files are in a directory "data", named after the carrier and airport:
"{}-{}.html".format(carrier, airport), for example "FL-ATL.html".
The table with flight info has a table class="dataTDRight". Your task is to
use 'process_file()' to extract the flight data from that table as a list of
dictionaries, each dictionary containing relevant data from the file and table
row. This is an example of the data structure you should return:
data = [{"courier": "FL",
"airport": "ATL",
"year": 2012,
"month": 12,
"flights": {"domestic": 100,
"international": 100}
},
{"courier": "..."}
]
Note - year, month, and the flight data should be integers.
You should skip the rows that contain the TOTAL data for a year.
There are couple of helper functions to deal with the data files.
Please do not change them for grading purposes.
All your changes should be in the 'process_file()' function.
The 'data/FL-ATL.html' file in the tab above is only a part of the full data,
covering data through 2003. The test() code will be run on the full table, but
the given file should provide an example of what you will get.
"""
from bs4 import BeautifulSoup
from zipfile import ZipFile
import os
datadir = "data"
def open_zip(datadir):
    """Extract "<datadir>.zip" into the current working directory."""
    archive = "%s.zip" % datadir
    with ZipFile(archive, "r") as zf:
        zf.extractall()
def process_all(datadir):
    """Return the names of the files found directly inside *datadir*."""
    return os.listdir(datadir)
def process_file(f):
    """
    This function extracts data from the file given as the function argument in
    a list of dictionaries. This is example of the data structure you should
    return:
    data = [{"courier": "FL",
             "airport": "ATL",
             "year": 2012,
             "month": 12,
             "flights": {"domestic": 100,
                         "international": 100}
            },
            {"courier": "..."}
    ]
    Note - year, month, and the flight data should be integers.
    You should skip the rows that contain the TOTAL data for a year.
    """
    data = []
    info = {}
    # Filename encodes the record: "XX-YYY.html" -> courier "XX", airport "YYY".
    info["courier"], info["airport"] = f[:6].split("-")
    # Note: create a new dictionary for each entry in the output data list.
    # If you use the info dictionary defined here each element in the list
    # will be a reference to the same info dictionary.
    with open("{}/{}".format(datadir, f), "r") as html:
        soup = BeautifulSoup(html, "lxml")
        # Flight data lives in tables carrying the class "dataTDRight".
        flight_tables = soup.find_all("table", { "class" : "dataTDRight" })
        for flight_table in flight_tables:
            for flight_row in flight_table.find_all("tr"):
                cells = flight_row.find_all('td')
                values = []
                # skip header and total row
                # NOTE(review): assumes each row has at least 2 cells and data
                # rows have >= 4 numeric cells — verify against the source HTML.
                if cells[1].text.strip() == "TOTAL" or cells[1].text.strip()=="Month":
                    continue
                else:
                    # initialize dict
                    process_dict = {"courier": "",
                                    "airport": "",
                                    "year": 0,
                                    "month": 0,
                                    "flights": {"domestic": 0,
                                                "international":0}
                                    }
                    # Strip thousands separators before converting to int.
                    for e in cells:
                        values.append(int(e.text.replace(',','')))
                    print(values)
                    process_dict["courier"] = info["courier"]
                    process_dict["airport"] = info["airport"]
                    process_dict["year"] = values[0]
                    process_dict["month"] = values[1]
                    process_dict["flights"]["domestic"] = values[2]
                    process_dict["flights"]["international"] = values[3]
                    data.append(process_dict)
    print (len(data))
    return data
def test():
    """Smoke test: unzip the fixtures, parse every data file and validate
    row counts, field types and a few known values.

    Fix: the bare Python-2 ``print "..."`` statements were SyntaxErrors under
    Python 3 and inconsistent with the ``print(...)`` calls used elsewhere in
    this module; single-argument ``print(...)`` behaves the same on both.
    """
    print("Running a simple test...")
    open_zip(datadir)
    files = process_all(datadir)
    data = []
    # Test will loop over three data files.
    for f in files:
        data += process_file(f)
    print(len(data))
    assert len(data) == 399  # Total number of rows
    for entry in data[:3]:
        assert type(entry["year"]) == int
        assert type(entry["month"]) == int
        assert type(entry["flights"]["domestic"]) == int
        assert len(entry["airport"]) == 3
        assert len(entry["courier"]) == 2
    assert data[0]["courier"] == 'FL'
    assert data[0]["month"] == 10
    assert data[-1]["airport"] == "ATL"
    assert data[-1]["flights"] == {'international': 108289, 'domestic': 701425}
    print("... success!")
if __name__ == "__main__":
test() | [
"[email protected]"
]
| |
dfeaef8960d9c3c78351dc377c9805836cc90639 | 69cfe57220f789eb1d1966ed22c6823f0beeb8ce | /covid_venv/lib/python3.7/site-packages/dash_html_components/Pre.py | 46214932ac801cfab78d16fc03ee6f01f0cbd582 | [
"MIT"
]
| permissive | paulsavala/Covid19-model | 664e31780ee1c8e4ef2115af2f41b27e832e5e50 | 41aa96d7c9abc117550f904af11815f507f0f0a0 | refs/heads/master | 2022-07-15T17:39:05.842619 | 2020-11-16T20:42:22 | 2020-11-16T20:42:22 | 252,545,888 | 2 | 0 | MIT | 2022-06-22T01:37:35 | 2020-04-02T19:19:25 | Python | UTF-8 | Python | false | false | 4,767 | py | # AUTO GENERATED FILE - DO NOT EDIT
from dash.development.base_component import Component, _explicitize_args
class Pre(Component):
"""A Pre component.
Pre is a wrapper for the <pre> HTML5 element.
For detailed attribute info see:
https://developer.mozilla.org/en-US/docs/Web/HTML/Element/pre
Keyword arguments:
- children (a list of or a singular dash component, string or number; optional): The children of this component
- id (string; optional): The ID of this component, used to identify dash components
in callbacks. The ID needs to be unique across all of the
components in an app.
- n_clicks (number; default 0): An integer that represents the number of times
that this element has been clicked on.
- n_clicks_timestamp (number; default -1): An integer that represents the time (in ms since 1970)
at which n_clicks changed. This can be used to tell
which button was changed most recently.
- key (string; optional): A unique identifier for the component, used to improve
performance by React.js while rendering components
See https://reactjs.org/docs/lists-and-keys.html for more info
- role (string; optional): The ARIA role attribute
- data-* (string; optional): A wildcard data attribute
- aria-* (string; optional): A wildcard aria attribute
- accessKey (string; optional): Keyboard shortcut to activate or add focus to the element.
- className (string; optional): Often used with CSS to style elements with common properties.
- contentEditable (string; optional): Indicates whether the element's content is editable.
- contextMenu (string; optional): Defines the ID of a <menu> element which will serve as the element's context menu.
- dir (string; optional): Defines the text direction. Allowed values are ltr (Left-To-Right) or rtl (Right-To-Left)
- draggable (string; optional): Defines whether the element can be dragged.
- hidden (a value equal to: 'hidden', 'HIDDEN' | boolean; optional): Prevents rendering of given element, while keeping child elements, e.g. script elements, active.
- lang (string; optional): Defines the language used in the element.
- spellCheck (string; optional): Indicates whether spell checking is allowed for the element.
- style (dict; optional): Defines CSS styles which will override styles previously set.
- tabIndex (string; optional): Overrides the browser's default tab order and follows the one specified instead.
- title (string; optional): Text to be displayed in a tooltip when hovering over the element.
- loading_state (dict; optional): Object that holds the loading state object coming from dash-renderer. loading_state has the following type: dict containing keys 'is_loading', 'prop_name', 'component_name'.
Those keys have the following types:
- is_loading (boolean; optional): Determines if the component is loading or not
- prop_name (string; optional): Holds which property is loading
- component_name (string; optional): Holds the name of the component that is loading"""
@_explicitize_args
def __init__(self, children=None, id=Component.UNDEFINED, n_clicks=Component.UNDEFINED, n_clicks_timestamp=Component.UNDEFINED, key=Component.UNDEFINED, role=Component.UNDEFINED, accessKey=Component.UNDEFINED, className=Component.UNDEFINED, contentEditable=Component.UNDEFINED, contextMenu=Component.UNDEFINED, dir=Component.UNDEFINED, draggable=Component.UNDEFINED, hidden=Component.UNDEFINED, lang=Component.UNDEFINED, spellCheck=Component.UNDEFINED, style=Component.UNDEFINED, tabIndex=Component.UNDEFINED, title=Component.UNDEFINED, loading_state=Component.UNDEFINED, **kwargs):
self._prop_names = ['children', 'id', 'n_clicks', 'n_clicks_timestamp', 'key', 'role', 'data-*', 'aria-*', 'accessKey', 'className', 'contentEditable', 'contextMenu', 'dir', 'draggable', 'hidden', 'lang', 'spellCheck', 'style', 'tabIndex', 'title', 'loading_state']
self._type = 'Pre'
self._namespace = 'dash_html_components'
self._valid_wildcard_attributes = ['data-', 'aria-']
self.available_properties = ['children', 'id', 'n_clicks', 'n_clicks_timestamp', 'key', 'role', 'data-*', 'aria-*', 'accessKey', 'className', 'contentEditable', 'contextMenu', 'dir', 'draggable', 'hidden', 'lang', 'spellCheck', 'style', 'tabIndex', 'title', 'loading_state']
self.available_wildcard_properties = ['data-', 'aria-']
_explicit_args = kwargs.pop('_explicit_args')
_locals = locals()
_locals.update(kwargs) # For wildcard attrs
args = {k: _locals[k] for k in _explicit_args if k != 'children'}
for k in []:
if k not in args:
raise TypeError(
'Required argument `' + k + '` was not specified.')
super(Pre, self).__init__(children=children, **args)
| [
"[email protected]"
]
| |
30b09a780f82aae722e4f721ba42f220306bb21e | 3a95e89ce8ecb7434b1f00233ac468cd6d1d07e4 | /simulator.py | 2bfb65f001a423007fd06d60a48ebe91e4b25163 | []
| no_license | yinghuaman/simulator | cd24d5a38f1c096001a7aba4f51f4334a7776611 | 0cf0d7bf316f4bd99a29d3c0070b5a85428d0bae | refs/heads/master | 2020-03-27T01:31:01.870653 | 2018-08-22T14:12:55 | 2018-08-22T14:12:55 | 145,718,314 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,229 | py | import io
from io import StringIO
from tkinter import *
import time
import datetime
import pandas as pd
from tkinter import messagebox
import psycopg2
from sqlalchemy import create_engine
class my_GUI():
def __init__(self,master):
self.master = master
#GUI版面设计
def set_master(self):
self.master.title("数据上传模拟器")
self.master.geometry("800x400")
self.master.resizable(0,0)
self.var_IP = StringVar()
self.var_IP.set("")
Entry(self.master,textvariable = self.var_IP,width=20,font = ("Verdana",15) ).place(x=130,y=30)
Label(self.master,text = "IP:".encode("utf-8"),width = 10,font = ("Arial",15)).place(x=15,y=30)
Label(self.master,text = "*".encode("utf-8"),fg="red",font=10).place(x=87,y=30)
self.var_port = StringVar()
self.var_port.set("")
Entry(self.master, textvariable=self.var_port, width=20, font=("Verdana", 15)).place(x=525, y=30)
Label(self.master, text="port:".encode("utf-8"), width=10, font=("Arial", 15)).place(x=415, y=30)
Label(self.master, text="*".encode("utf-8"), fg="red", font=10).place(x=493, y=30)
self.var_db = StringVar()
self.var_db.set("")
Entry(self.master, textvariable=self.var_db, width=20, font=("Verdana", 15)).place(x=130, y=130)
Label(self.master, text="database:".encode("utf-8"), width=10, font=("Arial", 15)).place(x=15, y=130)
Label(self.master, text="*".encode("utf-8"), fg="red", font=10).place(x=117, y=130)
self.var_user = StringVar()
self.var_user.set("")
Entry(self.master, textvariable=self.var_user, width=20, font=("Verdana", 15)).place(x=525, y=130)
Label(self.master, text="user:".encode("utf-8"), width=10, font=("Arial", 15)).place(x=415, y=130)
Label(self.master, text="*".encode("utf-8"), fg="red", font=10).place(x=493, y=130)
self.var_password = StringVar()
self.var_password.set("")
Entry(self.master, textvariable=self.var_password, width=20, font=("Verdana", 15)).place(x=130, y=230)
Label(self.master, text="password:".encode("utf-8"), width=10, font=("Arial", 15)).place(x=15, y=230)
Label(self.master, text="*".encode("utf-8"), fg="red", font=10).place(x=117, y=230)
self.var_time = StringVar()
self.var_time.set("")
Entry(self.master, textvariable=self.var_time, width=20, font=("Verdana", 15)).place(x=525, y=230)
Label(self.master, text="time:".encode("utf-8"), width=10, font=("Arial", 15)).place(x=415, y=230)
b1 = Button(self.master,text="取消",width=10,font = ("宋体",10),command = self.cancel)
b1.bind("<Return>",self.cancel)
b1.bind("<Button-1>",self.cancel)
b1.place(x=270,y=350)
b2 = Button(self.master, text="上传", width=10, font=("宋体", 10), command=self.upload)
b2.bind("<Return>", self.upload)
b2.bind("<Button-1>", self.upload)
b2.place(x=420, y=350)
Label(self.master,text="*为必填项",width=20,fg="red",font=("Arial", 10)).place(x=10,y=270)
#读取本地文件
def Loaddata(self,filename):
data = pd.read_csv(filename,sep="\t")
return data
#判断是否链接成功
def is_connected(self):
user = self.var_user.get()
ip = self.var_IP.get()
password = self.var_password.get()
database = self.var_db.get()
port = self.var_port.get()
flag = 1
try:
messagebox.showinfo("开始链接数据库")
conn = psycopg2.connect(database = database,user=user,password=password,host=ip,port=port)
return flag
except:
flag=0
messagebox.showinfo("链接数据库失败")
return flag
def write_to_sql(self,flag,tablename):
if flag == 1:
messagebox.showinfo("数据库连接成功")
user = self.var_user.get()
ip = self.var_IP.get()
password = self.var_password.get()
db = self.var_db.get()
port = self.var_port.get()
engine = create_engine("postgresql+psycopg2://"+user+":"+password+"@"+ip+":"+str(port)+"/"+db)
for name in tablename:
df = self.Loaddata("data/%s.txt"%name)
output = StringIO()
df.to_csv(output,sep="\t",index=False,header=False)
output.getvalue()
output.seek(0)
conn = engine.raw_connection()
cur = conn.cursor()
cur.copy_from(output,name,null='')
conn.commit()
cur.close()
#定义上传函数
def upload(self,event):
flag = self.is_connected()
self.write_to_sql(flag)
def cancel(self,event):
self.var_port.set("")
self.var_db.set("")
self.var_password.set("")
self.var_IP.set("")
self.var_user.set("")
def gui_start():
root = Tk()
myApp = my_GUI(root)
myApp.set_master()
root.mainloop()
gui_start()
| [
"[email protected]"
]
| |
4842aac3c5c0f9ea83aff2d2aca4cf85d8a9c248 | 50a0e5b000fd348523b6268ef491cf93cdcf338c | /app/__init__.py | 6b9385c2cd94db0203cf2b7b995380fa9bcff22d | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | caltechlibrary/andor | 40aa391f946915cb4f395a187a123d35105db283 | d58a3e4ccd7cb5ccef939286655bf1b2e091b227 | refs/heads/master | 2020-06-18T22:16:21.836200 | 2019-11-20T21:53:39 | 2019-11-20T21:53:39 | 196,470,116 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py |
from flask import Flask
from app import config
app = Flask(__name__)
app.config.from_object(config.Config)
cfg = config.Config()
# Flask-login expects app.secret_key instead of config object
app.secret_key = cfg.SECRET_KEY
from flask_login import LoginManager
login_manager = LoginManager(app)
# Iniatialize the Flask login manager.
login_manager.init_app(app)
from app import routes
| [
"[email protected]"
]
| |
81ffcc3453da33bcbaf9a58c38f84f2fbe306c8c | 030678c7ab9a410b1319527faf0f194c6eca2810 | /setup.py | 54752f842241a873247b6fb7bb46c30e62ec4061 | []
| no_license | r56jar/ChomikDownloader | 73bb7841fb796687b9c2299818b40b3a4e656123 | 2156f79100e7ff8b126b52a624be2b13d4ffb42d | refs/heads/master | 2021-08-15T21:46:24.721253 | 2017-11-18T11:11:59 | 2017-11-18T11:11:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 944 | py | from distutils.core import setup
import sys
import os
if sys.platform.startswith('win'):
import py2exe
setup(name='chomikDownloader',
version='0.1.2',
author='adam_gr',
author_email='adam_gr [at] gazeta.pl',
description='Downloads files from your account chomikuj.pl',
package_dir = {'chomikdownloader' : 'src'},
packages = ['chomikdownloader'],
options = {"py2exe" : {
"compressed" : True,
"ignores" : ["email.Iterators", "email.Generator"],
"bundle_files" : 1
},
"sdist" : {
'formats': 'zip'
}
},
scripts = ['chomik_downloader'],
console = ['chomik_downloader'],
zipfile = None
)
| [
"[email protected]"
]
| |
daea7e74c85ec4d9426d39be68feb3c88fd5bd3c | ffe754438079fc6c45dfdc2eba04d479c20651d5 | /py_matrix_controller/matrix_controller/matrix_controller/udp_connection.py | 27d93a74c5e8398d6973af1df2a2a93c1c1560f5 | []
| no_license | kant/esp32-big-led-matrix-dmx-controller | d24a05f0ebe57e5465dfa40fa8005a62d85ea101 | 1d7d8ae120a5e4b4ca5ff7e1c397d1991e5252ca | refs/heads/master | 2022-12-21T05:27:10.070273 | 2020-09-28T17:49:53 | 2020-09-28T17:49:53 | 299,659,115 | 3 | 0 | null | 2020-09-29T15:28:52 | 2020-09-29T15:28:52 | null | UTF-8 | Python | false | false | 728 | py | import socket
class UdpConnection(object):
    """Thin wrapper around a broadcast-capable UDP socket."""

    udp_socket: socket.socket = None

    def open(self) -> None:
        """Create the UDP socket, enable broadcast and set a 5 s timeout."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, True)
        sock.settimeout(5)
        self.udp_socket = sock

    def send_packet(self, endpoint: dict, packet: bytearray) -> None:
        """Send one datagram to endpoint's 'ip_address'/'port'."""
        target = (endpoint['ip_address'], endpoint['port'])
        self.udp_socket.sendto(packet, target)

    def send_packets(self, packets: list) -> None:
        """Send every {'endpoint', 'packet_data'} entry in *packets*."""
        for item in packets:
            self.send_packet(item['endpoint'], item['packet_data'])

    def close(self) -> None:
        """Close the socket and drop the reference."""
        self.udp_socket.close()
        self.udp_socket = None
| [
"[email protected]"
]
| |
089677bfcacaaf22eb438374c2a2c6465a2c88d4 | 28860c57c878da20b17e5991385bfa366f82c523 | /src/weight_calculation_module.py | d1d44644d86e318825911939c2fa54dda61fd6ab | []
| no_license | bloodteller123/Simple-Search-Engine | 92ac8d2c6fcd844b383a834d35f3babcb6b25083 | dcf29ade709b4a237af9cd0a9a84219bfce1b0a0 | refs/heads/master | 2021-08-28T10:13:45.264009 | 2021-08-24T17:21:02 | 2021-08-24T17:21:02 | 235,720,809 | 0 | 0 | null | 2020-04-09T19:03:33 | 2020-01-23T04:12:39 | HTML | UTF-8 | Python | false | false | 2,111 | py | import importlib
import os
import json
import math
import inverted_index_construction_module as iic
indexPath = '../output/index.json'
storagePath = '../output/storage.json'
reuterIndexPath = '../output/reuterIndex.json'
reuterStorage = '../output/reuterStorage.json'
termPath = '../output/terms.json'
UOtermPath = '../output/UOterms.json'
weightedindexPath = '../output/weightedindex.json'
reuterWindexPath = '../output/reuter_weightedindex.json'
def getinvertedindex():
    # Delegate to the construction module; returns its in-memory inverted index.
    return iic.getIndex()
#set up list of terms with corresponding docId and tf_idf
def tf_idf(iPath,tPath,sPath):
    """Build {term: [[docId, idf, tf, numTerms], ...]} from the JSON files.

    iPath: inverted index JSON, {term: [[docId, freq], ...]}
    tPath: per-document term lists (used by findNumOfterms to size documents)
    sPath: document storage JSON; its length is the corpus size N for the idf
    """
    tfidf = {}
    docId = 0
    tfreq = 0
    with open(iPath, 'r') as inde, open(tPath, 'r') as ter , open(sPath, 'r') as storag:
        indexf = json.load(inde)
        termf = json.load(ter)
        storagef = json.load(storag)
        n = len(storagef)
        for q in indexf:
            lis = []
            # Inverse document frequency: log10(N / document frequency).
            idf = math.log10(n / len(indexf[q]))
            for t in indexf[q]:
                # Each posting is a [docId, frequency] pair.
                docId = t[0]
                num = findNumOfterms(docId,termf)
                #comput term frequency
                #tfreq = t[1]/findMaxfrequency(f,docId)
                #tfreq = t[1]/num
                tfreq = t[1]
                #tfreq = 1 + math.log10(t[1])
                lis.append([docId,idf,tfreq,num])
            tfidf[q] = lis
    return tfidf
#def findMaxfrequency(file, docId):
# maxfreq = 0
# for f in file:
# for t in file[f]:
# if docId == t[0] and maxfreq <= t[1]:
# maxfreq = t[1]
# return maxfreq
def findNumOfterms(docId, termf):
    """Return len(termf[k][docId - 1]) for the last key k of *termf*
    (dict insertion order); 0 when *termf* is empty.

    NOTE: every iteration overwrites the previous value, so only the final
    key matters — this quirk is preserved exactly from the original.
    """
    count = 0
    for per_doc_terms in termf.values():
        count = len(per_doc_terms[docId - 1])
    return count
def getweightedindex():
    # Compute the tf-idf weighted index for both corpora and persist each as
    # pretty-printed JSON alongside the other ../output artifacts.
    weightedindex = tf_idf(indexPath,UOtermPath,storagePath)
    with open(weightedindexPath,'w') as f:
        json.dump(weightedindex, f, sort_keys=True, indent=4,ensure_ascii=False)
    reuterWindex = tf_idf(reuterIndexPath,termPath,reuterStorage)
    with open(reuterWindexPath,'w') as f:
        json.dump(reuterWindex, f, sort_keys=True, indent=4,ensure_ascii=False)
#getweightedindex()
| [
"[email protected]"
]
| |
2b1d904b895cf46c158f71c1ef2c251adeb0ff84 | 670aefce9619eb112d54b48d57cf0972f4290d0e | /hello2/a.py | 930e790445a678df2c74bb2d16806c60d798dd2a | []
| no_license | TKNRK/cython_examples | 323ab79a3a146d6fe08588070963803fa764d0f6 | 7fcd260c344361d77dba185d36b68dfd764e14e5 | refs/heads/master | 2020-12-02T19:41:35.965733 | 2017-07-06T02:19:06 | 2017-07-06T02:19:06 | 96,376,592 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 109 | py | import pyximport; pyximport.install()
import hello
if __name__ == "__main__":
hello.say_hello_to("Reona")
| [
"[email protected]"
]
| |
97f91e9b062419f031a0bff0689df72f71c43dfe | ce8dc0f97e7431504e7e8cde94420dc0046dedde | /session6/intro3.py | 1e67036b74556e46b65bc39d96e6b198f7c58be2 | []
| no_license | zwwebob/zweebob | 7fa99a9296541ce3a96c0e6c4c99f00e59fa0c28 | c7ca9c6df9e3d19457ed137fa1a3ababdc92a07c | refs/heads/master | 2020-06-12T10:07:24.668589 | 2019-07-19T08:57:10 | 2019-07-19T08:57:10 | 194,267,087 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 106 | py | loop_count = 0
while loop_count<3:
print("Hi")
loop_count += 1
if loop_count>=3:
break | [
"[email protected]"
]
| |
1c796999a547a72525bde4cbd37ec65d07970463 | 8272778a156da3c4ecca549aa6953031abeed11a | /exercises/exercise_10.py | adf2ef6521b3ab8eacaace2cd0e84bd070ba4b54 | []
| no_license | igorjm/python-playground | d8c4e58b69fdf5b0ca6065d765d4fbc23b9b67a1 | 04d273fba395085337931251beb623fadf1c1aee | refs/heads/master | 2023-03-26T17:29:31.998312 | 2019-10-21T19:47:56 | 2019-10-21T19:47:56 | 119,454,373 | 1 | 0 | null | 2021-03-19T23:26:39 | 2018-01-29T23:16:47 | HTML | UTF-8 | Python | false | false | 322 | py | #Please modify the code of the previous exercise so that instead of printing out the lines in the terminal,
#it prints out the length of each line.
#
#Expected output:
#
#4
#5
#6
#8
#10
#11

# Fix: the original answer was pasted straight from an interactive session
# with its ">>>" prompts included, which made this file a SyntaxError when
# executed. Rewritten as a plain script with identical behavior.
file = open("fruits.txt", "r")
content = file.readlines()
file.close()
for i in content:
    print(len(i) - 1)  # -1 drops the trailing newline kept by readlines()
"[email protected]"
]
| |
95e81629ec5b165d02943f34a71fc1f1080bcef5 | cfb4e8721137a096a23d151f2ff27240b218c34c | /mypower/matpower_ported/mp-opt-model/lib/@opt_model/solve.py | 4ebc7aba4174ad01cb668380102ca885eae9fb2c | [
"Apache-2.0"
]
| permissive | suryo12/mypower | eaebe1d13f94c0b947a3c022a98bab936a23f5d3 | ee79dfffc057118d25f30ef85a45370dfdbab7d5 | refs/heads/master | 2022-11-25T16:30:02.643830 | 2020-08-02T13:16:20 | 2020-08-02T13:16:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 146 | py | def solve(*args,nout=5,oc=None):
if oc == None:
from .....oc_matpower import oc_matpower
oc = oc_matpower()
return oc.solve(*args,nout=nout)
| [
"[email protected]"
]
| |
622a976491583f0150dd9abf3670e13ce26d68f9 | 3c4450ccd471f7720ef32cce3b5f5221981547ec | /openapi_client/__init__.py | 7e87107c5afac574c7c101ec4b4a759e4572590a | []
| no_license | ContatoGrupoOptimus/python-client | eb21d88a5725294609d589474e09463ab659d45b | f84ee64f741c096aadefc1088d1da88e97663fb1 | refs/heads/master | 2022-12-14T10:57:45.041040 | 2020-09-10T21:28:19 | 2020-09-10T21:28:19 | 294,526,557 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,928 | py | # coding: utf-8
# flake8: noqa
"""
Chat API SDK
The SDK allows you to receive and send messages through your WhatsApp account. [Sign up now](https://app.chat-api.com/) The Chat API is based on the WhatsApp WEB protocol and excludes the ban both when using libraries from mgp25 and the like. Despite this, your account can be banned by anti-spam system WhatsApp after several clicking the \"block\" button. # noqa: E501
The version of the OpenAPI document: 1.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
__version__ = "1.0.0"
# import apis into sdk package
from openapi_client.api.1_instance_api import 1InstanceApi
from openapi_client.api.2_messages_api import 2MessagesApi
from openapi_client.api.3_chats_api import 3ChatsApi
from openapi_client.api.4_webhooks_api import 4WebhooksApi
from openapi_client.api.5_queues_api import 5QueuesApi
from openapi_client.api.6_ban_api import 6BanApi
from openapi_client.api.7_testing_api import 7TestingApi
# import ApiClient
from openapi_client.api_client import ApiClient
from openapi_client.configuration import Configuration
from openapi_client.exceptions import OpenApiException
from openapi_client.exceptions import ApiTypeError
from openapi_client.exceptions import ApiValueError
from openapi_client.exceptions import ApiKeyError
from openapi_client.exceptions import ApiException
# import models into sdk package
from openapi_client.models.ack import Ack
from openapi_client.models.ban_settings import BanSettings
from openapi_client.models.ban_test_action import BanTestAction
from openapi_client.models.ban_test_status import BanTestStatus
from openapi_client.models.chat import Chat
from openapi_client.models.chat_id_prop import ChatIdProp
from openapi_client.models.chat_update import ChatUpdate
from openapi_client.models.chats import Chats
from openapi_client.models.clear_actions_queue_status import ClearActionsQueueStatus
from openapi_client.models.clear_messages_queue_status import ClearMessagesQueueStatus
from openapi_client.models.create_group_action import CreateGroupAction
from openapi_client.models.create_group_status import CreateGroupStatus
from openapi_client.models.forward_message_request import ForwardMessageRequest
from openapi_client.models.group_participant_action import GroupParticipantAction
from openapi_client.models.group_participant_status import GroupParticipantStatus
from openapi_client.models.inline_response200 import InlineResponse200
from openapi_client.models.inline_response2001 import InlineResponse2001
from openapi_client.models.inline_response2002 import InlineResponse2002
from openapi_client.models.inline_response2003 import InlineResponse2003
from openapi_client.models.inline_response2004 import InlineResponse2004
from openapi_client.models.inline_response2005 import InlineResponse2005
from openapi_client.models.inline_response2005_update import InlineResponse2005Update
from openapi_client.models.inline_response401 import InlineResponse401
from openapi_client.models.instance_status import InstanceStatus
from openapi_client.models.instance_status_action import InstanceStatusAction
from openapi_client.models.instance_status_link import InstanceStatusLink
from openapi_client.models.instance_status_status_data import InstanceStatusStatusData
from openapi_client.models.instance_status_status_data_actions import InstanceStatusStatusDataActions
from openapi_client.models.message import Message
from openapi_client.models.messages import Messages
from openapi_client.models.outbound_action import OutboundAction
from openapi_client.models.outbound_actions import OutboundActions
from openapi_client.models.outbound_message import OutboundMessage
from openapi_client.models.outbound_messages import OutboundMessages
from openapi_client.models.phone_prop import PhoneProp
from openapi_client.models.read_chat_action import ReadChatAction
from openapi_client.models.read_chat_status import ReadChatStatus
from openapi_client.models.send_contact_request import SendContactRequest
from openapi_client.models.send_file_request import SendFileRequest
from openapi_client.models.send_link_request import SendLinkRequest
from openapi_client.models.send_location_request import SendLocationRequest
from openapi_client.models.send_message_request import SendMessageRequest
from openapi_client.models.send_message_status import SendMessageStatus
from openapi_client.models.send_ptt_request import SendPTTRequest
from openapi_client.models.send_v_card_request import SendVCardRequest
from openapi_client.models.set_webhook_status import SetWebhookStatus
from openapi_client.models.settings import Settings
from openapi_client.models.status import Status
from openapi_client.models.statuses import Statuses
from openapi_client.models.webhook_status import WebhookStatus
from openapi_client.models.webhook_url import WebhookUrl
| [
"[email protected]"
]
| |
60b60078954e230b5136cb098beb191fd321e8b0 | 53a7a00f76861a32e957e0345aced81be97c197d | /glavapp/apps.py | d8502b166523bf1139ae66a6e44cacec5877dab5 | []
| no_license | Dimas4/Question-answer-api | 970d540b4d5844161420d4c05bd3f21f64c3e7f0 | 4e9b6074d553e99f68d97aa354ba1cdeb2ec0bab | refs/heads/master | 2021-04-12T04:46:10.427189 | 2018-06-26T15:55:14 | 2018-06-26T15:55:14 | 125,857,009 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89 | py | from django.apps import AppConfig
class GlavappConfig(AppConfig):
    """Django application configuration for the ``glavapp`` app."""
    # Label under which Django registers this application (must match the
    # package name referenced in INSTALLED_APPS).
    name = 'glavapp'
| [
"[email protected]"
]
| |
57dc9c658162ec54440e50d2e719cd887235fece | b7c87a88487cd7e232f3065a8a9d301503ab398c | /montyHall.py | 8277b45f5c547e6eee8719f26ab4eb5c99ff15a3 | []
| no_license | solean/monty-hall-simulation | 4cdd65918368cefe20a29625a17586ac8229e308 | f8f056a28b3b0d7edfe32ecec2fe1ba5b87bc8c6 | refs/heads/master | 2021-01-01T17:39:51.860585 | 2017-07-23T21:34:48 | 2017-07-23T21:34:48 | 98,126,665 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 792 | py | from __future__ import division
import random
def montyHall(toSwitch):
doors = [0, 0, 1]
random.shuffle(doors)
firstDoorIndex = random.randint(0, 2)
behindFirstDoor = doors[firstDoorIndex]
del doors[firstDoorIndex]
if doors[0] == 0:
del doors[0]
elif doors[1] == 0:
del doors[1]
if toSwitch:
return doors[0]
else:
return behindFirstDoor
def simulate(toSwitch):
    """Play 100000 rounds with the given strategy and return the win count."""
    return sum(1 for _ in range(100000) if montyHall(toSwitch) == 1)
# Run 100000 trials of each strategy and report the empirical win rates.
# Classic result: staying wins ~33%, switching wins ~67%.
# NOTE: Python 2 script (statement-form print); `from __future__ import
# division` above makes the percentage arithmetic use true division.
numStay = simulate(False)
numSwitch = simulate(True)
stayPercentage = (numStay / 100000) * 100
print 'Stay with first door: ' + str(stayPercentage) + '%'
switchPercentage = (numSwitch / 100000) * 100
print 'Switch to other door: ' + str(switchPercentage) + '%'
| [
"[email protected]"
]
| |
8b61d31c3cec5c1f20d98b0a2442ff8c95374f96 | 5b90a078ec29a836555050835d40249272654085 | /Hazard/Fishing/Fisherman.py | d1a14f6543608ec8a9bd498f8394bfa47e1cc27b | [
"Unlicense"
]
| permissive | ygtripps/Archives | a165224daca9e06471d9b6e6943260d0677484d7 | 8ca460eff5d60c2b3e61ee4c434c0bfcd6d53673 | refs/heads/master | 2021-06-08T13:46:15.544730 | 2016-12-05T21:16:31 | 2016-12-05T21:16:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,549 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Python3
def main(args):
global sendersFile
global receiversFile
sendersFile = "senders" # username:password type file
receiversFile = "receivers"
mailServer = "mail.company.com"
logfiles = "./logs/massfisher.log"
maxtime = 3 #In hours
sf = open(sendersFile, "r")
rf = open(receiversFile, "r")
sendersList = {}
sendersAuth = {}
receiversList = {}
with rf as fin:
for line in fin:
receiversList[len(receiversList)+1] = str(line)[0:len(str(line))-1]
with sf as fin:
for line in fin:
sendersList[len(sendersList)+1] = str(line)[0:len(str(line))-1].split(":")[0]
sendersAuth[len(sendersAuth)+1] = str(line)[0:len(str(line))-1].split(":")[1]
maxsleep = (maxtime * 60 * 60) / len(receiversList)
minsleep = int((75 * maxtime) / 100)
messages = os.listdir("Templates")
for i in receiversList:
tmp = messages[random.randint(0, len(messages)-1)]
while not os.path.isfile("./Templates/"+tmp):
tmp = messages[random.randint(0, len(messages)-1)]
rc = random.sample(list(sendersList),1)
time.sleep(random.randint(minsleep, maxsleep))
os.system(str("sendemail -f "+sendersList[rc[0]]+" -t "+receiversList[i]+" -xu "+sendersList[i]+" -xp "+sendersAuth[i]+" -s "+mailServer+" -l "+logfiles+"."+str(i)+" -v -o message-content-type=html -o message-file=" + "\"./Templates/"+ tmp + "\" -u \"" + tmp + "\""))
print("Time to go home and eat those fishes!")
if __name__ == '__main__':
import sys
import random
import time
import os
sys.exit(main(sys.argv))
| [
"[email protected]"
]
| |
ce5e898f4e48bb7cd55a5dc57f8a86be77727e90 | 42a833f190b3352eaa89604381d3db500c80f538 | /pentestui/pentest_api/attacks/kerberos/modules/crypto.py | 30dd11fb3dd40ca8b292d740d0939747b96f97b5 | [
"Apache-2.0"
]
| permissive | mustgundogdu/PentestUI | 95c037022c5aad25cf2e1a4b7ad58eedd6df6ed8 | 92263ea73bd2eaa2081fb277c76aa229103a1d54 | refs/heads/main | 2023-08-29T23:12:28.027452 | 2021-11-18T17:53:03 | 2021-11-18T17:53:03 | 389,436,912 | 31 | 4 | null | null | null | null | UTF-8 | Python | false | false | 12,836 | py | from __future__ import division
from __future__ import print_function
from Cryptodome.Cipher import DES, AES
from struct import pack, unpack
from pentestui.pentest_api.attacks.kerberos.modules.structure import Structure
import hmac, hashlib
from six import b
def Generate_Subkey(K):
    """Derive the CMAC subkeys K1 and K2 from a 128-bit AES key.

    Implements "Generate_Subkey" from RFC 4493 / NIST SP 800-38B
    (MSB = most significant bit, const_Rb = 0x...87):

      Step 1. L  := AES-128(K, 0^128)
      Step 2. K1 := L << 1;  XOR const_Rb if MSB(L) is set
      Step 3. K2 := K1 << 1; XOR const_Rb if MSB(K1) is set

    Returns the pair (K1, K2) as 16-byte bytearrays.
    """
    AES_128 = AES.new(K, AES.MODE_ECB)
    L = AES_128.encrypt(bytes(bytearray(16)))
    # Split L into two big-endian 64-bit halves so the 128-bit left shift can
    # be done with plain integer operations.
    LHigh = unpack('>Q',L[:8])[0]
    LLow = unpack('>Q',L[8:])[0]
    # K1 = L << 1 (carry the top bit of the low half into the high half).
    K1High = ((LHigh << 1) | ( LLow >> 63 )) & 0xFFFFFFFFFFFFFFFF
    K1Low = (LLow << 1) & 0xFFFFFFFFFFFFFFFF
    # If MSB(L) was set, fold in the Rb constant (0x87) per the spec.
    if (LHigh >> 63):
        K1Low ^= 0x87
    # K2 = K1 << 1, with the same conditional Rb fold-in.
    K2High = ((K1High << 1) | (K1Low >> 63)) & 0xFFFFFFFFFFFFFFFF
    K2Low = ((K1Low << 1)) & 0xFFFFFFFFFFFFFFFF
    if (K1High >> 63):
        K2Low ^= 0x87
    K1 = bytearray(pack('>QQ', K1High, K1Low))
    K2 = bytearray(pack('>QQ', K2High, K2Low))
    return K1, K2
def XOR_128(N1, N2):
    """Return the byte-wise XOR of two equal-length blocks as a bytearray.

    The result has len(N1) bytes; N2 must be at least as long as N1.
    """
    out = bytearray(len(N1))
    for idx in range(len(N1)):
        out[idx] = N1[idx] ^ N2[idx]
    return out
def PAD(N):
    """Pad a partial CMAC block (fewer than 16 bytes) out to 16 bytes.

    Per RFC 4493: append a single 0x80 byte, then zero bytes to fill.
    """
    return N + b'\x80' + b'\x00' * (15 - len(N))
def AES_CMAC(K, M, length):
    """Compute the AES-CMAC of the first `length` octets of M under key K.

    Implements the AES-CMAC algorithm from RFC 4493 / NIST SP 800-38B:

      1. (K1, K2) := Generate_Subkey(K)
      2. n := ceil(length / 16); a zero-length message still uses one block
      3. The final block M_n is XORed with K1 if it is a complete 16-byte
         block, otherwise it is padded (0x80 then zeros) and XORed with K2
      4. CBC-like chaining: X := AES-128(K, X XOR M_i) over blocks 1..n-1,
         then T := AES-128(K, M_last XOR X)

    Returns the 16-byte tag T as bytes.
    """
    const_Bsize = 16
    const_Zero = bytearray(16)
    AES_128= AES.new(K, AES.MODE_ECB)
    M = bytearray(M[:length])
    K1, K2 = Generate_Subkey(K)
    n = len(M)//const_Bsize
    if n == 0:
        # Empty message: treated as a single incomplete (padded) block.
        n = 1
        flag = False
    else:
        # flag marks whether the final block is exactly 16 bytes.
        if (length % const_Bsize) == 0:
            flag = True
        else:
            n += 1
            flag = False
    M_n = M[(n-1)*const_Bsize:]
    if flag is True:
        M_last = XOR_128(M_n,K1)
    else:
        M_last = XOR_128(PAD(M_n),K2)
    # CBC-MAC chain over all blocks except the (already-masked) last one.
    X = const_Zero
    for i in range(n-1):
        M_i = M[(i)*const_Bsize:][:16]
        Y = XOR_128(X, M_i)
        X = bytearray(AES_128.encrypt(bytes(Y)))
    Y = XOR_128(M_last, X)
    T = AES_128.encrypt(bytes(Y))
    return T
def AES_CMAC_PRF_128(VK, M, VKlen, Mlen):
    """AES-CMAC-PRF-128 (RFC 4615): a PRF built on AES-CMAC that accepts a
    variable-length key.

    If the key VK is exactly 16 octets it is used directly; otherwise it is
    first compressed to 16 octets with K := AES-CMAC(0^128, VK, VKlen).
    Returns the 128-bit pseudo-random value AES-CMAC(K, M, Mlen).
    """
    if VKlen == 16:
        K = VK
    else:
        # Derive a fixed-size key from the variable-length one (RFC 4615 §3).
        K = AES_CMAC(bytes(bytearray(16)), VK, VKlen)
    PRV = AES_CMAC(K, M, Mlen)
    return PRV
def KDF_CounterMode(KI, Label, Context, L):
# Implements NIST SP 800-108 Section 5.1, with PRF HMAC-SHA256
# https://tools.ietf.org/html/draft-irtf-cfrg-kdf-uses-00#ref-SP800-108
# Fixed values:
# 1. h - The length of the output of the PRF in bits, and
# 2. r - The length of the binary representation of the counter i.
# Input: KI, Label, Context, and L.
# Process:
# 1. n := [L/h]
# 2. If n > 2r-1, then indicate an error and stop.
# 3. result(0):= empty .
# 4. For i = 1 to n, do
# a. K(i) := PRF (KI, [i]2 || Label || 0x00 || Context || [L]2)
# b. result(i) := result(i-1) || K(i).
# 5. Return: KO := the leftmost L bits of result(n).
h = 256
r = 32
n = L // h
if n == 0:
n = 1
if n > (pow(2,r)-1):
raise Exception("Error computing KDF_CounterMode")
result = b''
K = b''
for i in range(1,n+1):
input = pack('>L', i) + Label + b'\x00' + Context + pack('>L',L)
K = hmac.new(KI, input, hashlib.sha256).digest()
result = result + K
return result[:(L//8)]
# [MS-LSAD] Section 5.1.2 / 5.1.3
class LSA_SECRET_XP(Structure):
    """Wire layout of a decrypted LSA secret blob ([MS-LSAD] 5.1.2/5.1.3).

    Fields: a 4-byte little-endian Length, a 4-byte Version, then `Length`
    bytes of secret material (the `Secret` field).
    """
    structure = (
        ('Length','<L=0'),
        ('Version','<L=0'),
        ('_Secret','_-Secret', 'self["Length"]'),
        ('Secret', ':'),
    )
def transformKey(InputKey):
    """Expand a 7-byte key into an 8-byte DES key ([MS-LSAD] 5.1.3).

    The 56 key bits are redistributed 7 bits per output byte, and every
    output byte is shifted left by one so its low (parity) bit is clear.

    Rewritten over the original: operates on a bytearray directly instead of
    the chr/ord round-trip plus the third-party `six.b` helper; output bytes
    are identical on both Python 2 and 3.

    :param InputKey: at least 7 bytes of key material (extra bytes ignored)
    :return: an 8-byte DES key as bytes
    """
    key = bytearray(InputKey[:7])
    out = bytearray(8)
    out[0] = key[0] >> 1
    out[1] = ((key[0] & 0x01) << 6) | (key[1] >> 2)
    out[2] = ((key[1] & 0x03) << 5) | (key[2] >> 3)
    out[3] = ((key[2] & 0x07) << 4) | (key[3] >> 4)
    out[4] = ((key[3] & 0x0F) << 3) | (key[4] >> 5)
    out[5] = ((key[4] & 0x1F) << 2) | (key[5] >> 6)
    out[6] = ((key[5] & 0x3F) << 1) | (key[6] >> 7)
    out[7] = key[6] & 0x7F
    # Shift each byte left one position, clearing the DES parity bit slot.
    for i in range(8):
        out[i] = (out[i] << 1) & 0xFE
    return bytes(out)
def decryptSecret(key, value):
    """Decrypt an LSA secret blob ([MS-LSAD] 5.1.2).

    The ciphertext is processed in 8-byte DES-ECB blocks.  The DES key for
    each block comes from a 7-byte window sliding along `key`; when fewer
    than 7 bytes remain the window wraps back toward the start of `key`.
    The decrypted buffer is an LSA_SECRET_XP structure whose `Secret` field
    is returned.
    """
    # [MS-LSAD] Section 5.1.2
    plainText = b''
    key0 = key
    for i in range(0, len(value), 8):
        cipherText = value[:8]
        tmpStrKey = key0[:7]
        tmpKey = transformKey(tmpStrKey)
        Crypt1 = DES.new(tmpKey, DES.MODE_ECB)
        plainText += Crypt1.decrypt(cipherText)
        # Consume 7 bytes of key material and 8 bytes of ciphertext.
        key0 = key0[7:]
        value = value[8:]
        # AdvanceKey: wrap the key window when it runs short.
        if len(key0) < 7:
            key0 = key[len(key0):]
    secret = LSA_SECRET_XP(plainText)
    return (secret['Secret'])
def encryptSecret(key, value):
    """Encrypt an LSA secret blob ([MS-LSAD] 5.1.2).

    Prepends the LSA_SECRET_XP header (little-endian length + version 1) to
    `value`, then encrypts the result in 8-byte DES-ECB blocks.  The DES key
    for each block comes from a 7-byte window sliding along `key`, wrapping
    back toward the start when fewer than 7 bytes remain; the final partial
    plaintext block is zero-padded.

    Fix over the original: removed two debug print() calls inside the loop
    that leaked the per-block DES key material to stdout.
    """
    cipherText = b''
    key0 = key
    value0 = pack('<LL', len(value), 1) + value
    for i in range(0, len(value0), 8):
        # Zero-pad the trailing partial block to a full 8 bytes.
        if len(value0) < 8:
            value0 = value0 + b'\x00'*(8-len(value0))
        plainText = value0[:8]
        tmpStrKey = key0[:7]
        tmpKey = transformKey(tmpStrKey)
        Crypt1 = DES.new(tmpKey, DES.MODE_ECB)
        cipherText += Crypt1.encrypt(plainText)
        # Consume 7 bytes of key material and 8 bytes of plaintext.
        key0 = key0[7:]
        value0 = value0[8:]
        # AdvanceKey: wrap the key window when it runs short.
        if len(key0) < 7:
            key0 = key[len(key0):]
    return cipherText
def SamDecryptNTLMHash(encryptedHash, key):
    """Decrypt a 16-byte NTLM hash ([MS-SAMR] 2.2.11.1.1).

    Each 8-byte half of the hash is DES-ECB decrypted with a key expanded
    (via transformKey) from the corresponding 7-byte half of `key`.
    """
    halves = (encryptedHash[:8], encryptedHash[8:])
    desKeys = (transformKey(key[:7]), transformKey(key[7:14]))
    plain = b''
    for block, desKey in zip(halves, desKeys):
        plain += DES.new(desKey, DES.MODE_ECB).decrypt(block)
    return plain
def SamEncryptNTLMHash(encryptedHash, key):
    """Encrypt a 16-byte NTLM hash ([MS-SAMR] 2.2.11.1.1).

    Each 8-byte half of the hash is DES-ECB encrypted with a key expanded
    (via transformKey) from the corresponding 7-byte half of `key`.
    """
    halves = (encryptedHash[:8], encryptedHash[8:])
    desKeys = (transformKey(key[:7]), transformKey(key[7:14]))
    cipher = b''
    for block, desKey in zip(halves, desKeys):
        cipher += DES.new(desKey, DES.MODE_ECB).encrypt(block)
    return cipher
| [
"[email protected]"
]
| |
3e5877789ec26a238d9d774c1aafe05850e3ff29 | e1d00589e31e3c067db4bb1006d7a6fad454d374 | /53.py | 9dbb0282d3dd4813ec7cc75014bc4da7681bbcf4 | []
| no_license | bjornars/project-euler | e8944bfb897eea1dc523daf98091c8b3060f1452 | 5b664a610fecbb195698aff4304bb67fd3fb6d08 | refs/heads/master | 2021-01-18T15:16:47.939325 | 2012-09-24T15:08:15 | 2012-09-24T15:08:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | import lib
# Project Euler problem 53: count the values of C(n, k) for 1 <= n <= 100
# (and 1 <= k <= n) that exceed one million.
# NOTE: Python 2 script; `lib.choose` is the project-local binomial helper.
coeffs = (lib.choose(n, k) for n in range(1,101) for k in range(1, n+1))
print sum(1 for n in coeffs if n >= 1000000)
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.